From 0896c29c61b382169392d7aca3368e019f394e48 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 2 May 2015 11:55:50 +1000 Subject: [PATCH 0001/2793] [backport] Follow HTTP redirects when downloading bootstrap binaries After a recent change to the repository that hosts these JARs, we now get an HTTP redirect to the new destination. We need to explicitly instruct curl to follow this. (cherry picked from commit c75547f342e7795e9cd7d23d5d6c4c44c179d21b) --- tools/binary-repo-lib.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh index 92ef3a032906..2f5d481e39ed 100755 --- a/tools/binary-repo-lib.sh +++ b/tools/binary-repo-lib.sh @@ -55,7 +55,7 @@ curlDownload() { if [[ "$OSTYPE" == *Cygwin* || "$OSTYPE" == *cygwin* ]]; then jar=$(cygpath -m $1) fi - http_code=$(curl --write-out '%{http_code}' --silent --fail --output "$jar" "$url") + http_code=$(curl --write-out '%{http_code}' --silent --fail -L --output "$jar" "$url") if (( $? != 0 )); then echo "Error downloading $jar: response code: $http_code" echo "$url" From 7d1b1292db82f33905f9a9ca214cf22f0a16591f Mon Sep 17 00:00:00 2001 From: Rex Kerr Date: Sat, 30 May 2015 11:35:25 -0700 Subject: [PATCH 0002/2793] Clean implementation of sorts for scala.util.Sorting. Removed code based on Sun JDK sorts and implemented new (basic) sorts from scratch. Deferred to Java Arrays.sort whenever practical. Behavior of `scala.util.Sorting` should be unchanged, but changed documentation to specify when the Java methods are being used (as they're typically very fast). A JUnit test is provided. Performance is important for sorts. Everything is better with this patch, though it could be better yet, as described below. Below are sort times (in microseconds, SEM < 5%) for various 1024-element arrays of small case classes that compare on an int field (quickSort), or int arrays that use custom ordering (stableSort). Note: "degenerate" means there are only 16 values possible, so there are lots of ties. Times are all with fresh data (no re-using cache from run to run). Results: ``` random sorted reverse degenerate big:64k tiny:16 Old Sorting.quickSort 234 181 178 103 25,700 1.4 New Sorting.quickSort 170 27 115 74 18,600 0.8 Old Sorting.stableSort 321 234 236 282 32,600 2.1 New Sorting.stableSort 239 16 194 194 25,100 1.2 java.util.Arrays.sort 124 4 8 105 13,500 0.8 java.util.Arrays.sort|Box 126 15 13 112 13,200 0.9 ``` The new versions are uniformly faster than the old ones, but uniformly slower than Java sorting. scala.util.Sorting has use cases that don't map easily into Java unless everything is pre-boxed, but the overhead of pre-boxing is minimal compared to the sort. A snapshot of some of my benchmarking code is below. (Yes, lots of repeating myself--it's dangerous not to when trying to get somewhat accurate benchmarks.)
``` import java.util.Arrays import java.util.Comparator import math.Ordering import util.Sorting import reflect.ClassTag val th = ichi.bench.Thyme.warmed() case class N(i: Int, j: Int) {} val a = Array.fill(1024)( Array.tabulate(1024)(i => N(util.Random.nextInt, i)) ) var ai = 0 val b = Array.fill(1024)( Array.tabulate(1024)(i => N(i, i)) ) var bi = 0 val c = Array.fill(1024)( Array.tabulate(1024)(i => N(1024-i, i)) ) var ci = 0 val d = Array.fill(1024)( Array.tabulate(1024)(i => N(util.Random.nextInt(16), i)) ) var di = 0 val e = Array.fill(16)( Array.tabulate(65536)(i => N(util.Random.nextInt, i)) ) var ei = 0 val f = Array.fill(65535)( Array.tabulate(16)(i => N(util.Random.nextInt, i)) ) var fi = 0 val o = new Ordering[N]{ def compare(a: N, b: N) = if (a.i < b.i) -1 else if (a.i > b.i) 1 else 0 } for (s <- Seq("one", "two", "three")) { println(s) th.pbench{ val x = a(ai).clone; ai = (ai+1)%a.length; Sorting.quickSort(x)(o); x(x.length/3) } th.pbench{ val x = b(bi).clone; bi = (bi+1)%b.length; Sorting.quickSort(x)(o); x(x.length/3) } th.pbench{ val x = c(ci).clone; ci = (ci+1)%c.length; Sorting.quickSort(x)(o); x(x.length/3) } th.pbench{ val x = d(di).clone; di = (di+1)%d.length; Sorting.quickSort(x)(o); x(x.length/3) } th.pbench{ val x = e(ei).clone; ei = (ei+1)%e.length; Sorting.quickSort(x)(o); x(x.length/3) } th.pbench{ val x = f(fi).clone; fi = (fi+1)%f.length; Sorting.quickSort(x)(o); x(x.length/3) } } def ix(ns: Array[N]) = { val is = new Array[Int](ns.length) var i = 0 while (i < ns.length) { is(i) = ns(i).i i += 1 } is } val p = new Ordering[Int]{ def compare(a: Int, b: Int) = if (a > b) 1 else if (a < b) -1 else 0 } for (s <- Seq("one", "two", "three")) { println(s) val tag: ClassTag[Int] = implicitly[ClassTag[Int]] th.pbench{ val x = ix(a(ai)); ai = (ai+1)%a.length; Sorting.stableSort(x)(tag, p); x(x.length/3) } th.pbench{ val x = ix(b(bi)); bi = (bi+1)%b.length; Sorting.stableSort(x)(tag, p); x(x.length/3) } th.pbench{ val x = ix(c(ci)); ci = (ci+1)%c.length; Sorting.stableSort(x)(tag, p); x(x.length/3) } th.pbench{ val x = ix(d(di)); di = (di+1)%d.length; Sorting.stableSort(x)(tag, p); x(x.length/3) } th.pbench{ val x = ix(e(ei)); ei = (ei+1)%e.length; Sorting.stableSort(x)(tag, p); x(x.length/3) } th.pbench{ val x = ix(f(fi)); fi = (fi+1)%f.length; Sorting.stableSort(x)(tag, p); x(x.length/3) } } for (s <- Seq("one", "two", "three")) { println(s) th.pbench{ val x = a(ai).clone; ai = (ai+1)%a.length; Arrays.sort(x, o); x(x.length/3) } th.pbench{ val x = b(bi).clone; bi = (bi+1)%b.length; Arrays.sort(x, o); x(x.length/3) } th.pbench{ val x = c(ci).clone; ci = (ci+1)%c.length; Arrays.sort(x, o); x(x.length/3) } th.pbench{ val x = d(di).clone; di = (di+1)%d.length; Arrays.sort(x, o); x(x.length/3) } th.pbench{ val x = e(ei).clone; ei = (ei+1)%e.length; Arrays.sort(x, o); x(x.length/3) } th.pbench{ val x = f(fi).clone; fi = (fi+1)%f.length; Arrays.sort(x, o); x(x.length/3) } } def bx(is: Array[Int]): Array[java.lang.Integer] = { val Is = new Array[java.lang.Integer](is.length) var i = 0 while (i < is.length) { Is(i) = java.lang.Integer.valueOf(is(i)) i += 1 } Is } def xb(Is: Array[java.lang.Integer]): Array[Int] = { val is = new Array[Int](Is.length) var i = 0 while (i < is.length) { is(i) = Is(i).intValue i += 1 } is } val q = new Comparator[java.lang.Integer]{ def compare(a: java.lang.Integer, b: java.lang.Integer) = o.compare(a.intValue, b.intValue) } for (s <- Seq("one", "two", "three")) { println(s) val tag: ClassTag[Int] = implicitly[ClassTag[Int]] th.pbench{ val x = 
bx(ix(a(ai))); ai = (ai+1)%a.length; Arrays.sort(x, q); xb(x)(x.length/3) } th.pbench{ val x = bx(ix(b(bi))); bi = (bi+1)%b.length; Arrays.sort(x, q); xb(x)(x.length/3) } th.pbench{ val x = bx(ix(c(ci))); ci = (ci+1)%c.length; Arrays.sort(x, q); xb(x)(x.length/3) } th.pbench{ val x = bx(ix(d(di))); di = (di+1)%d.length; Arrays.sort(x, q); xb(x)(x.length/3) } th.pbench{ val x = bx(ix(e(ei))); ei = (ei+1)%e.length; Arrays.sort(x, q); xb(x)(x.length/3) } th.pbench{ val x = bx(ix(f(fi))); fi = (fi+1)%f.length; Arrays.sort(x, q); xb(x)(x.length/3) } } ``` --- bincompat-forward.whitelist.conf | 52 ++ src/library/scala/util/Sorting.scala | 712 ++++++++---------------- test/junit/scala/util/SortingTest.scala | 69 +++ 3 files changed, 356 insertions(+), 477 deletions(-) create mode 100644 test/junit/scala/util/SortingTest.scala diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf index 1c532889c23e..b81929c9f851 100644 --- a/bincompat-forward.whitelist.conf +++ b/bincompat-forward.whitelist.conf @@ -195,6 +195,58 @@ filter { { matchName="scala.xml.pull.ExceptionEvent$" problemName=MissingClassProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$default$5" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mBc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mFc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mJc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mCc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mSc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$insertionSort" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mZc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mDc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSort$mIc$sp" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$mergeSorted" + problemName=MissingMethodProblem + }, + { + matchName="scala.util.Sorting.scala$util$Sorting$$booleanSort" + problemName=MissingMethodProblem } ] } diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala index 276e157f554d..ee2bdbc4a764 100644 --- a/src/library/scala/util/Sorting.scala +++ b/src/library/scala/util/Sorting.scala @@ -1,6 +1,6 @@ /* __ *\ ** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2009, Ross Judson ** +** / __/ __// _ | / / / _ | (c) 2006-2015, LAMP/EPFL ** ** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** ** /____/\___/_/ |_/____/_/ | | ** ** |/ ** @@ -9,518 +9,276 @@ package scala package util -import scala.reflect.{ ClassTag, classTag } -import scala.math.{ Ordering, max, min } +import scala.reflect.ClassTag +import scala.math.Ordering -/** The Sorting object provides functions that can sort various kinds of - * objects. You can provide a comparison function, or you can request a sort - * of items that are viewable as [[scala.math.Ordered]]. 
Some sorts that - * operate directly on a subset of value types are also provided. These - * implementations are derived from those in the Sun JDK. +/** The `Sorting` object provides convenience wrappers for `java.util.Arrays.sort`. + * Methods that defer to `java.util.Arrays.sort` say that they do or under what + * conditions that they do. * - * Note that stability doesn't matter for value types, so use the `quickSort` - * variants for those. `stableSort` is intended to be used with - * objects when the prior ordering should be preserved, where possible. + * `Sorting` also implements a general-purpose quicksort and stable (merge) sort + * for those cases where `java.util.Arrays.sort` could only be used at the cost + * of a large memory penalty. If performance rather than memory usage is the + * primary concern, one may wish to find alternate strategies to use + * `java.util.Arrays.sort` directly e.g. by boxing primitives to use + * a custom ordering on them. + * + * `Sorting` provides methods where you can provide a comparison function, or + * can request a sort of items that are [[scala.math.Ordered]] or that + * otherwise have an implicit or explicit [[scala.math.Ordering]]. + * + * Note also that high-performance non-default sorts for numeric types + * are not provided. If this is required, it is advisable to investigate + * other libraries that cover this use case. * * @author Ross Judson - * @version 1.0 + * @author Adriaan Moors + * @author Rex Kerr + * @version 1.1 */ object Sorting { - /** Quickly sort an array of Doubles. */ - def quickSort(a: Array[Double]) { sort1(a, 0, a.length) } - - /** Quickly sort an array of items with an implicit Ordering. */ - def quickSort[K: Ordering](a: Array[K]) { sort1(a, 0, a.length) } - - /** Quickly sort an array of Ints. */ - def quickSort(a: Array[Int]) { sort1(a, 0, a.length) } - - /** Quickly sort an array of Floats. */ - def quickSort(a: Array[Float]) { sort1(a, 0, a.length) } - - /** Sort an array of K where K is Ordered, preserving the existing order - * where the values are equal. */ - def stableSort[K: ClassTag: Ordering](a: Array[K]) { - stableSort(a, 0, a.length-1, new Array[K](a.length), Ordering[K].lt _) - } + /** Sort an array of Doubles using `java.util.Arrays.sort`. */ + def quickSort(a: Array[Double]): Unit = java.util.Arrays.sort(a) - /** Sorts an array of `K` given an ordering function `f`. - * `f` should return `true` iff its first parameter is strictly less than its second parameter. - */ - def stableSort[K: ClassTag](a: Array[K], f: (K, K) => Boolean) { - stableSort(a, 0, a.length-1, new Array[K](a.length), f) - } + /** Sort an array of Ints using `java.util.Arrays.sort`. */ + def quickSort(a: Array[Int]): Unit = java.util.Arrays.sort(a) - /** Sorts an arbitrary sequence into an array, given a comparison function - * that should return `true` iff parameter one is strictly less than parameter two. - * - * @param a the sequence to be sorted. - * @param f the comparison function. - * @return the sorted sequence of items. - */ - def stableSort[K: ClassTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = { - val ret = a.toArray - stableSort(ret, f) - ret - } + /** Sort an array of Floats using `java.util.Arrays.sort`. */ + def quickSort(a: Array[Float]): Unit = java.util.Arrays.sort(a) + + private final val qsortThreshold = 16 - /** Sorts an arbitrary sequence of items that are viewable as ordered. 
*/ - def stableSort[K: ClassTag: Ordering](a: Seq[K]): Array[K] = - stableSort(a, Ordering[K].lt _) - - /** Stably sorts a sequence of items given an extraction function that will - * return an ordered key from an item. - * - * @param a the sequence to be sorted. - * @param f the comparison function. - * @return the sorted sequence of items. - */ - def stableSort[K: ClassTag, M: Ordering](a: Seq[K], f: K => M): Array[K] = - stableSort(a)(implicitly[ClassTag[K]], Ordering[M] on f) - - private def sort1[K: Ordering](x: Array[K], off: Int, len: Int) { - val ord = Ordering[K] - import ord._ - - def swap(a: Int, b: Int) { - val t = x(a) - x(a) = x(b) - x(b) = t - } - def vecswap(_a: Int, _b: Int, n: Int) { - var a = _a - var b = _b - var i = 0 - while (i < n) { - swap(a, b) - i += 1 - a += 1 - b += 1 - } - } - def med3(a: Int, b: Int, c: Int) = { - if (x(a) < x(b)) { - if (x(b) < x(c)) b else if (x(a) < x(c)) c else a - } else { - if (x(b) > x(c)) b else if (x(a) > x(c)) c else a - } - } - def sort2(off: Int, len: Int) { - // Insertion sort on smallest arrays - if (len < 7) { - var i = off - while (i < len + off) { - var j = i - while (j > off && x(j-1) > x(j)) { - swap(j, j-1) - j -= 1 + /** Sort array `a` with quicksort, using the Ordering on its elements. + * This algorithm sorts in place, so no additional memory is used aside from + * what might be required to box individual elements during comparison. + */ + def quickSort[K: Ordering](a: Array[K]): Unit = { + // Must have iN >= i0 or math will fail. Also, i0 >= 0. + def inner(a: Array[K], i0: Int, iN: Int, ord: Ordering[K]): Unit = { + if (iN - i0 < qsortThreshold) insertionSort(a, i0, iN, ord) + else { + var iK = (i0 + iN) >>> 1 // Unsigned div by 2 + // Find index of median of first, central, and last elements + var pL = + if (ord.compare(a(i0), a(iN - 1)) <= 0) + if (ord.compare(a(i0), a(iK)) < 0) + if (ord.compare(a(iN - 1), a(iK)) < 0) iN - 1 else iK + else i0 + else + if (ord.compare(a(i0), a(iK)) < 0) i0 + else + if (ord.compare(a(iN - 1), a(iK)) <= 0) iN - 1 + else iK + val pivot = a(pL) + // pL is the start of the pivot block; move it into the middle if needed + if (pL != iK) { a(pL) = a(iK); a(iK) = pivot; pL = iK } + // Elements equal to the pivot will be in range pL until pR + var pR = pL + 1 + // Items known to be less than pivot are below iA (range i0 until iA) + var iA = i0 + // Items known to be greater than pivot are at or above iB (range iB until iN) + var iB = iN + // Scan through everything in the buffer before the pivot(s) + while (pL - iA > 0) { + val current = a(iA) + ord.compare(current, pivot) match { + case 0 => + // Swap current out with pivot block + a(iA) = a(pL - 1) + a(pL - 1) = current + pL -= 1 + case x if x < 0 => + // Already in place. Just update indicies. + iA += 1 + case _ if iB > pR => + // Wrong side. There's room on the other side, so swap + a(iA) = a(iB - 1) + a(iB - 1) = current + iB -= 1 + case _ => + // Wrong side and there is no room. Swap by rotating pivot block. 
+ a(iA) = a(pL - 1) + a(pL - 1) = a(pR - 1) + a(pR - 1) = current + pL -= 1 + pR -= 1 + iB -= 1 } - i += 1 } - } else { - // Choose a partition element, v - var m = off + (len >> 1) // Small arrays, middle element - if (len > 7) { - var l = off - var n = off + len - 1 - if (len > 40) { // Big arrays, pseudomedian of 9 - val s = len / 8 - l = med3(l, l+s, l+2*s) - m = med3(m-s, m, m+s) - n = med3(n-2*s, n-s, n) + // Get anything remaining in buffer after the pivot(s) + while (iB - pR > 0) { + val current = a(iB - 1) + ord.compare(current, pivot) match { + case 0 => + // Swap current out with pivot block + a(iB - 1) = a(pR) + a(pR) = current + pR += 1 + case x if x > 0 => + // Already in place. Just update indices. + iB -= 1 + case _ => + // Wrong side and we already know there is no room. Swap by rotating pivot block. + a(iB - 1) = a(pR) + a(pR) = a(pL) + a(pL) = current + iA += 1 + pL += 1 + pR += 1 } - m = med3(l, m, n) // Mid-size, med of 3 } - val v = x(m) - - // Establish Invariant: v* (v)* v* - var a = off - var b = a - var c = off + len - 1 - var d = c - var done = false - while (!done) { - while (b <= c && x(b) <= v) { - if (x(b) == v) { - swap(a, b) - a += 1 - } - b += 1 - } - while (c >= b && x(c) >= v) { - if (x(c) == v) { - swap(c, d) - d -= 1 - } - c -= 1 - } - if (b > c) { - done = true - } else { - swap(b, c) - c -= 1 - b += 1 - } + // Use tail recursion on large half (Sedgewick's method) so we don't blow up the stack if pivots are poorly chosen + if (iA - i0 < iN - iB) { + inner(a, i0, iA, ord) // True recursion + inner(a, iB, iN, ord) // Should be tail recursion + } + else { + inner(a, iB, iN, ord) // True recursion + inner(a, i0, iA, ord) // Should be tail recursion } - - // Swap partition elements back to middle - val n = off + len - var s = math.min(a-off, b-a) - vecswap(off, b-s, s) - s = math.min(d-c, n-d-1) - vecswap(b, n-s, s) - - // Recursively sort non-partition-elements - s = b - a - if (s > 1) - sort2(off, s) - s = d - c - if (s > 1) - sort2(n-s, s) } } - sort2(off, len) + inner(a, 0, a.length, implicitly[Ordering[K]]) } - - private def sort1(x: Array[Int], off: Int, len: Int) { - def swap(a: Int, b: Int) { - val t = x(a) - x(a) = x(b) - x(b) = t + + private final val mergeThreshold = 32 + + // Ordering[T] might be slow especially for boxed primitives, so use binary search variant of insertion sort + // Caller must pass iN >= i0 or math will fail. Also, i0 >= 0. 
+ private def insertionSort[@specialized T](a: Array[T], i0: Int, iN: Int, ord: Ordering[T]): Unit = { + val n = iN - i0 + if (n < 2) return + if (ord.compare(a(i0), a(i0+1)) > 0) { + val temp = a(i0) + a(i0) = a(i0+1) + a(i0+1) = temp } - def vecswap(_a: Int, _b: Int, n: Int) { - var a = _a - var b = _b - var i = 0 - while (i < n) { - swap(a, b) - i += 1 - a += 1 - b += 1 - } - } - def med3(a: Int, b: Int, c: Int) = { - if (x(a) < x(b)) { - if (x(b) < x(c)) b else if (x(a) < x(c)) c else a - } else { - if (x(b) > x(c)) b else if (x(a) > x(c)) c else a - } - } - def sort2(off: Int, len: Int) { - // Insertion sort on smallest arrays - if (len < 7) { - var i = off - while (i < len + off) { - var j = i - while (j>off && x(j-1) > x(j)) { - swap(j, j-1) - j -= 1 - } - i += 1 + var m = 2 + while (m < n) { + // Speed up already-sorted case by checking last element first + val next = a(i0 + m) + if (ord.compare(next, a(i0+m-1)) < 0) { + var iA = i0 + var iB = i0 + m - 1 + while (iB - iA > 1) { + val ix = (iA + iB) >>> 1 // Use bit shift to get unsigned div by 2 + if (ord.compare(next, a(ix)) < 0) iB = ix + else iA = ix } - } else { - // Choose a partition element, v - var m = off + (len >> 1) // Small arrays, middle element - if (len > 7) { - var l = off - var n = off + len - 1 - if (len > 40) { // Big arrays, pseudomedian of 9 - val s = len / 8 - l = med3(l, l+s, l+2*s) - m = med3(m-s, m, m+s) - n = med3(n-2*s, n-s, n) - } - m = med3(l, m, n) // Mid-size, med of 3 - } - val v = x(m) - - // Establish Invariant: v* (v)* v* - var a = off - var b = a - var c = off + len - 1 - var d = c - var done = false - while (!done) { - while (b <= c && x(b) <= v) { - if (x(b) == v) { - swap(a, b) - a += 1 - } - b += 1 - } - while (c >= b && x(c) >= v) { - if (x(c) == v) { - swap(c, d) - d -= 1 - } - c -= 1 - } - if (b > c) { - done = true - } else { - swap(b, c) - c -= 1 - b += 1 - } + val ix = iA + (if (ord.compare(next, a(iA)) < 0) 0 else 1) + var i = i0 + m + while (i > ix) { + a(i) = a(i-1) + i -= 1 } - - // Swap partition elements back to middle - val n = off + len - var s = math.min(a-off, b-a) - vecswap(off, b-s, s) - s = math.min(d-c, n-d-1) - vecswap(b, n-s, s) - - // Recursively sort non-partition-elements - s = b - a - if (s > 1) - sort2(off, s) - s = d - c - if (s > 1) - sort2(n-s, s) + a(ix) = next } + m += 1 } - sort2(off, len) } - - private def sort1(x: Array[Double], off: Int, len: Int) { - def swap(a: Int, b: Int) { - val t = x(a) - x(a) = x(b) - x(b) = t + + // Caller is required to pass iN >= i0, else math will fail. Also, i0 >= 0. 
+ private def mergeSort[@specialized T: ClassTag](a: Array[T], i0: Int, iN: Int, ord: Ordering[T], scratch: Array[T] = null): Unit = { + if (iN - i0 < mergeThreshold) insertionSort(a, i0, iN, ord) + else { + val iK = (i0 + iN) >>> 1 // Bit shift equivalent to unsigned math, no overflow + val sc = if (scratch eq null) new Array[T](iK - i0) else scratch + mergeSort(a, i0, iK, ord, sc) + mergeSort(a, iK, iN, ord, sc) + mergeSorted(a, i0, iK, iN, ord, sc) } - def vecswap(_a: Int, _b: Int, n: Int) { - var a = _a - var b = _b - var i = 0 - while (i < n) { - swap(a, b) + } + + // Must have 0 <= i0 < iK < iN + private def mergeSorted[@specialized T](a: Array[T], i0: Int, iK: Int, iN: Int, ord: Ordering[T], scratch: Array[T]): Unit = { + // Check to make sure we're not already in order + if (ord.compare(a(iK-1), a(iK)) > 0) { + var i = i0 + val jN = iK - i0 + var j = 0 + while (i < iK) { + scratch (j) = a(i) i += 1 - a += 1 - b += 1 - } - } - def med3(a: Int, b: Int, c: Int) = { - val ab = x(a) compare x(b) - val bc = x(b) compare x(c) - val ac = x(a) compare x(c) - if (ab < 0) { - if (bc < 0) b else if (ac < 0) c else a - } else { - if (bc > 0) b else if (ac > 0) c else a + j += 1 } - } - def sort2(off: Int, len: Int) { - // Insertion sort on smallest arrays - if (len < 7) { - var i = off - while (i < len + off) { - var j = i - while (j > off && (x(j-1) compare x(j)) > 0) { - swap(j, j-1) - j -= 1 - } - i += 1 - } - } else { - // Choose a partition element, v - var m = off + (len >> 1) // Small arrays, middle element - if (len > 7) { - var l = off - var n = off + len - 1 - if (len > 40) { // Big arrays, pseudomedian of 9 - val s = len / 8 - l = med3(l, l+s, l+2*s) - m = med3(m-s, m, m+s) - n = med3(n-2*s, n-s, n) - } - m = med3(l, m, n) // Mid-size, med of 3 - } - val v = x(m) - - // Establish Invariant: v* (v)* v* - var a = off - var b = a - var c = off + len - 1 - var d = c - var done = false - while (!done) { - var bv = x(b) compare v - while (b <= c && bv <= 0) { - if (bv == 0) { - swap(a, b) - a += 1 - } - b += 1 - if (b <= c) bv = x(b) compare v - } - var cv = x(c) compare v - while (c >= b && cv >= 0) { - if (cv == 0) { - swap(c, d) - d -= 1 - } - c -= 1 - if (c >= b) cv = x(c) compare v - } - if (b > c) { - done = true - } else { - swap(b, c) - c -= 1 - b += 1 - } - } - - // Swap partition elements back to middle - val n = off + len - var s = math.min(a-off, b-a) - vecswap(off, b-s, s) - s = math.min(d-c, n-d-1) - vecswap(b, n-s, s) - - // Recursively sort non-partition-elements - s = b - a - if (s > 1) - sort2(off, s) - s = d - c - if (s > 1) - sort2(n-s, s) + var k = i0 + j = 0 + while (i < iN && j < jN) { + if (ord.compare(a(i), scratch(j)) < 0) { a(k) = a(i); i += 1 } + else { a(k) = scratch(j); j += 1 } + k += 1 } + while (j < jN) { a(k) = scratch(j); j += 1; k += 1 } + // Don't need to finish a(i) because it's already in place, k = i } - sort2(off, len) } - - private def sort1(x: Array[Float], off: Int, len: Int) { - def swap(a: Int, b: Int) { - val t = x(a) - x(a) = x(b) - x(b) = t + + // Why would you even do this? 
+ private def booleanSort(a: Array[Boolean]): Unit = { + var i = 0 + var n = 0 + while (i < a.length) { + if (!a(i)) n += 1 + i += 1 } - def vecswap(_a: Int, _b: Int, n: Int) { - var a = _a - var b = _b - var i = 0 - while (i < n) { - swap(a, b) - i += 1 - a += 1 - b += 1 - } + i = 0 + while (i < n) { + a(i) = false + i += 1 } - def med3(a: Int, b: Int, c: Int) = { - val ab = x(a) compare x(b) - val bc = x(b) compare x(c) - val ac = x(a) compare x(c) - if (ab < 0) { - if (bc < 0) b else if (ac < 0) c else a - } else { - if (bc > 0) b else if (ac > 0) c else a - } + while (i < a.length) { + a(i) = true + i += 1 } - def sort2(off: Int, len: Int) { - // Insertion sort on smallest arrays - if (len < 7) { - var i = off - while (i < len + off) { - var j = i - while (j > off && (x(j-1) compare x(j)) > 0) { - swap(j, j-1) - j -= 1 - } - i += 1 - } - } else { - // Choose a partition element, v - var m = off + (len >> 1) // Small arrays, middle element - if (len > 7) { - var l = off - var n = off + len - 1 - if (len > 40) { // Big arrays, pseudomedian of 9 - val s = len / 8 - l = med3(l, l+s, l+2*s) - m = med3(m-s, m, m+s) - n = med3(n-2*s, n-s, n) - } - m = med3(l, m, n) // Mid-size, med of 3 - } - val v = x(m) + } - // Establish Invariant: v* (v)* v* - var a = off - var b = a - var c = off + len - 1 - var d = c - var done = false - while (!done) { - var bv = x(b) compare v - while (b <= c && bv <= 0) { - if (bv == 0) { - swap(a, b) - a += 1 - } - b += 1 - if (b <= c) bv = x(b) compare v - } - var cv = x(c) compare v - while (c >= b && cv >= 0) { - if (cv == 0) { - swap(c, d) - d -= 1 - } - c -= 1 - if (c >= b) cv = x(c) compare v - } - if (b > c) { - done = true - } else { - swap(b, c) - c -= 1 - b += 1 - } - } + // TODO: add upper bound: T <: AnyRef, propagate to callers below (not binary compatible) + // Maybe also rename all these methods to `sort`. + @inline private def sort[T](a: Array[T], ord: Ordering[T]): Unit = a match { + case _: Array[AnyRef] => + // Note that runtime matches are covariant, so could actually be any Array[T] s.t. T is not primitive (even boxed value classes) + if (a.length > 1 && (ord eq null)) throw new NullPointerException("Ordering") + java.util.Arrays.sort(a, ord) + case a: Array[Int] => if (ord eq Ordering.Int) java.util.Arrays.sort(a) else mergeSort[Int](a, 0, a.length, ord) + case a: Array[Double] => mergeSort[Double](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful! + case a: Array[Long] => if (ord eq Ordering.Long) java.util.Arrays.sort(a) else mergeSort[Long](a, 0, a.length, ord) + case a: Array[Float] => mergeSort[Float](a, 0, a.length, ord) // Because not all NaNs are identical, stability is meaningful! + case a: Array[Char] => if (ord eq Ordering.Char) java.util.Arrays.sort(a) else mergeSort[Char](a, 0, a.length, ord) + case a: Array[Byte] => if (ord eq Ordering.Byte) java.util.Arrays.sort(a) else mergeSort[Byte](a, 0, a.length, ord) + case a: Array[Short] => if (ord eq Ordering.Short) java.util.Arrays.sort(a) else mergeSort[Short](a, 0, a.length, ord) + case a: Array[Boolean] => if (ord eq Ordering.Boolean) booleanSort(a) else mergeSort[Boolean](a, 0, a.length, ord) + // Array[Unit] is matched as an Array[AnyRef] due to covariance in runtime matching. Not worth catching it as a special case. 
+ case null => throw new NullPointerException + } - // Swap partition elements back to middle - val n = off + len - var s = math.min(a-off, b-a) - vecswap(off, b-s, s) - s = math.min(d-c, n-d-1) - vecswap(b, n-s, s) + // TODO: remove unnecessary ClassTag (not binary compatible) + /** Sort array `a` using the Ordering on its elements, preserving the original ordering where possible. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */ + def stableSort[K: ClassTag: Ordering](a: Array[K]): Unit = sort(a, Ordering[K]) - // Recursively sort non-partition-elements - s = b - a - if (s > 1) - sort2(off, s) - s = d - c - if (s > 1) - sort2(n-s, s) - } - } - sort2(off, len) + // TODO: Remove unnecessary ClassTag (not binary compatible) + // TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering) + /** Sort array `a` using function `f` that computes the less-than relation for each element. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */ + def stableSort[K: ClassTag](a: Array[K], f: (K, K) => Boolean): Unit = sort(a, Ordering fromLessThan f) + + /** A sorted Array, using the Ordering for the elements in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */ + def stableSort[K: ClassTag: Ordering](a: Seq[K]): Array[K] = { + val ret = a.toArray + sort(ret, Ordering[K]) + ret } - private def stableSort[K : ClassTag](a: Array[K], lo: Int, hi: Int, scratch: Array[K], f: (K,K) => Boolean) { - if (lo < hi) { - val mid = (lo+hi) / 2 - stableSort(a, lo, mid, scratch, f) - stableSort(a, mid+1, hi, scratch, f) - var k, t_lo = lo - var t_hi = mid + 1 - while (k <= hi) { - if ((t_lo <= mid) && ((t_hi > hi) || (!f(a(t_hi), a(t_lo))))) { - scratch(k) = a(t_lo) - t_lo += 1 - } else { - scratch(k) = a(t_hi) - t_hi += 1 - } - k += 1 - } - k = lo - while (k <= hi) { - a(k) = scratch(k) - k += 1 - } - } + // TODO: make this fast for primitive K (could be specialized if it didn't go through Ordering) + /** A sorted Array, given a function `f` that computes the less-than relation for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. */ + def stableSort[K: ClassTag](a: Seq[K], f: (K, K) => Boolean): Array[K] = { + val ret = a.toArray + sort(ret, Ordering fromLessThan f) + ret + } + + /** A sorted Array, given an extraction function `f` that returns an ordered key for each item in the sequence `a`. Uses `java.util.Arrays.sort` unless `K` is a primitive type. 
*/ + def stableSort[K: ClassTag, M: Ordering](a: Seq[K], f: K => M): Array[K] = { + val ret = a.toArray + sort(ret, Ordering[M] on f) + ret } } diff --git a/test/junit/scala/util/SortingTest.scala b/test/junit/scala/util/SortingTest.scala new file mode 100644 index 000000000000..15a00c89032c --- /dev/null +++ b/test/junit/scala/util/SortingTest.scala @@ -0,0 +1,69 @@ +package scala.util + +import org.junit.Test +import org.junit.Assert._ +import scala.math.{ Ordered, Ordering } +import scala.reflect.ClassTag + +class SortingTest { + case class N(i: Int, j: Int) extends Ordered[N] { def compare(n: N) = if (i < n.i) -1 else if (i > n.i) 1 else 0 } + + def mkA(n: Int, max: Int) = Array.tabulate(n)(i => N(util.Random.nextInt(max), i)) + + def isStable(a: Array[N]): Boolean = { var i = 1; while (i < a.length) { if (a(i).i < a(i-1).i || (a(i).i == a(i-1).i && a(i).j < a(i-1).j)) return false; i += 1 }; true } + + def isAntistable(a: Array[N]): Boolean = + { var i = 1; while (i < a.length) { if (a(i).i > a(i-1).i || (a(i).i == a(i-1).i && a(i).j < a(i-1).j)) return false; i += 1 }; true } + + def isSorted(a: Array[N]): Boolean = { var i = 1; while (i < a.length) { if (a(i).i < a(i-1).i) return false; i += 1 }; true } + + def isAntisorted(a: Array[N]): Boolean = { var i = 1; while (i < a.length) { if (a(i).i > a(i-1).i) return false; i += 1 }; true } + + val sizes = Seq.range(0, 65) ++ Seq(256, 1024, 9121, 65539) + val variety = Seq(1, 2, 10, 100, 1000, Int.MaxValue) + val workLimit = 1e6 + val rng = new util.Random(198571) + + val backwardsN = Ordering by ((n: N) => -n.i) + + def runOneTest(size: Int, variety: Int): Unit = { + val xs = Array.tabulate(size)(i => N(rng.nextInt(variety), i)) + val ys = Array.range(0, xs.length) + val zs = { val temp = xs.clone; java.util.Arrays.sort(temp, new java.util.Comparator[N] { def compare(a: N, b: N) = a.compare(b) }); temp } + val qxs = { val temp = xs.clone; Sorting.quickSort(temp); temp } + val pxs = { val temp = xs.clone; Sorting.quickSort(temp)(backwardsN); temp } + val sxs = { val temp = xs.clone; Sorting.stableSort(temp); temp } + val rxs = { val temp = xs.clone; Sorting.stableSort(temp)(implicitly[ClassTag[N]], backwardsN); temp } + val sys = Sorting.stableSort(ys.clone: Seq[Int], (i: Int) => xs(i)) + + assertTrue("Quicksort should be in order", isSorted(qxs)) + assertTrue("Quicksort should be in reverse order", isAntisorted(pxs)) + assertTrue("Stable sort should be sorted and stable", isStable(sxs)) + assertTrue("Stable sort should be reverse sorted but stable", isAntistable(rxs)) + assertTrue("Stable sorting by proxy should produce sorted stable list", isStable(sys.map(i => xs(i)))) + assertTrue("Quicksort should produce canonical ordering", (qxs zip zs).forall{ case (a,b) => a.i == b.i }) + assertTrue("Reverse quicksort should produce canonical ordering", (pxs.reverse zip zs).forall{ case (a,b) => a.i == b.i }) + assertTrue("Stable sort should produce exact ordering", (sxs zip zs).forall{ case (a,b) => a == b }) + assertTrue("Reverse stable sort should produce canonical ordering", (rxs.reverse zip zs).forall{ case (a,b) => a.i == b.i }) + assertTrue("Proxy sort and direct sort should produce exactly the same thing", (sxs zip sys.map(i => xs(i))).forall{ case (a,b) => a == b }) + } + + @Test def testSortConsistency: Unit = { + for { + size <- sizes + v <- variety + i <- 0 until math.min(100, math.max(math.min(math.floor(math.pow(v, size)/2), math.ceil(workLimit / (math.log(math.max(2,size))/math.log(2) * size))), 1).toInt) + } runOneTest(size, v) + 
+ for (size <- sizes) { + val b = Array.fill(size)(rng.nextBoolean) + val bfwd = Sorting.stableSort(b.clone: Seq[Boolean]) + val bbkw = Sorting.stableSort(b.clone: Seq[Boolean], (x: Boolean, y: Boolean) => x && !y) + assertTrue("All falses should be first", bfwd.dropWhile(_ == false).forall(_ == true)) + assertTrue("All falses should be last when sorted backwards", bbkw.dropWhile(_ == true).forall(_ == false)) + assertTrue("Sorting booleans should preserve the number of trues", b.count(_ == true) == bfwd.count(_ == true)) + assertTrue("Backwards sorting booleans should preserve the number of trues", b.count(_ == true) == bbkw.count(_ == true)) + assertTrue("Sorting should not change the sizes of arrays", b.length == bfwd.length && b.length == bbkw.length) + } + } +} From bda53196ebbeb1369c70f3d1ec066796c06a6409 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 18 Sep 2015 10:50:36 +0200 Subject: [PATCH 0003/2793] Bump version number one last time? So that osgi version is set correctly by build. --- build.number | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.number b/build.number index de2c2fb824d6..ddb01678e5e4 100644 --- a/build.number +++ b/build.number @@ -1,7 +1,7 @@ #Tue Sep 11 19:21:09 CEST 2007 version.major=2 version.minor=10 -version.patch=5 +version.patch=6 # This is the -N part of a version. if it's 0, it's dropped from maven versions. version.bnum=0 From cb5daf3a5d9ab8195746e21801c3300d78efb52c Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 20 Feb 2016 22:51:12 -0800 Subject: [PATCH 0004/2793] SI-9665 Backquoted vbar in extractor pattern Allow an infix extractor named `|`, when backquoted. --- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 4 ++-- test/files/pos/t9665.scala | 7 +++++++ 2 files changed, 9 insertions(+), 2 deletions(-) create mode 100644 test/files/pos/t9665.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 831a0412cdaa..53cd2f6d59ed 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1966,8 +1966,8 @@ self => case _ => EmptyTree } def loop(top: Tree): Tree = reducePatternStack(base, top) match { - case next if isIdentExcept(raw.BAR) => pushOpInfo(next) ; loop(simplePattern(badPattern3)) - case next => next + case next if isIdent && !isRawBar => pushOpInfo(next) ; loop(simplePattern(badPattern3)) + case next => next } checkWildStar orElse stripParens(loop(top)) } diff --git a/test/files/pos/t9665.scala b/test/files/pos/t9665.scala new file mode 100644 index 000000000000..1aa7a5d459f1 --- /dev/null +++ b/test/files/pos/t9665.scala @@ -0,0 +1,7 @@ + +object | { def unapply(x: (Any, Any)) = Some(x) } + +trait Test { + def f() = (1,2) match { case 1 `|` 2 => } + def g() = 2 match { case 1 | 2 => } +} From 5fd8483d53521f17c1a697fdf419d35f1cacfac6 Mon Sep 17 00:00:00 2001 From: Taras Boiko Date: Sun, 20 Mar 2016 15:34:33 +0200 Subject: [PATCH 0005/2793] Added applyOrElse to MapLike This overrides default implementation from PartialFunction which used both contains(x) and get(x) with getOrElse. 
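For context on this change (an illustrative sketch, not part of the patch): `PartialFunction`'s default `applyOrElse` amounts to `if (isDefinedAt(x)) apply(x) else default(x)`, which for a map means a `contains(x)` lookup followed by a `get(x)`/`apply(x)` lookup when the key is present; the override routes the call through a single `getOrElse` lookup instead. The object and value names below are made up for the example.

```scala
object ApplyOrElseSketch {
  def main(args: Array[String]): Unit = {
    val m = Map("a" -> 1, "b" -> 2)

    // PartialFunction's default applyOrElse is roughly:
    //   if (m.isDefinedAt(k)) m(k) else default(k)
    // i.e. two hash lookups on a hit. The MapLike override above
    // delegates to a single getOrElse lookup instead.
    val hit  = m.applyOrElse("a", (k: String) => -1)  // 1
    val miss = m.applyOrElse("z", (k: String) => -1)  // -1, default invoked
    println((hit, miss))
  }
}
```

On a hit this is one lookup instead of two, which is the whole point of the override.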
--- src/library/scala/collection/MapLike.scala | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala index 4ac87b29a90e..d4d85c43ec66 100644 --- a/src/library/scala/collection/MapLike.scala +++ b/src/library/scala/collection/MapLike.scala @@ -158,6 +158,10 @@ self => */ def isDefinedAt(key: A) = contains(key) + override /*PartialFunction*/ + def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = + getOrElse(x, default(x)) + /** Collects all keys of this map in a set. * @return a set containing all keys of this map. */ From 9ec6278e05f6acd471473342067e922cb85293d6 Mon Sep 17 00:00:00 2001 From: Miles Sabin Date: Tue, 19 Apr 2016 11:35:40 +0100 Subject: [PATCH 0006/2793] SI-9760 Fix for higher-kinded GADT refinement --- .../scala/tools/nsc/typechecker/Infer.scala | 1 - test/files/pos/hkgadt.scala | 18 ++++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/hkgadt.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index dc91d23011c9..7112edd75d0e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1257,7 +1257,6 @@ trait Infer extends Checkable { def isFreeTypeParamOfTerm(sym: Symbol) = ( sym.isAbstractType && sym.owner.isTerm - && !sym.info.bounds.exists(_.typeParams.nonEmpty) ) // Intentionally *not* using `Type#typeSymbol` here, which would normalize `tp` diff --git a/test/files/pos/hkgadt.scala b/test/files/pos/hkgadt.scala new file mode 100644 index 000000000000..efd7d3df21a4 --- /dev/null +++ b/test/files/pos/hkgadt.scala @@ -0,0 +1,18 @@ +package test + +object HKGADT { + sealed trait Foo[F[_]] + final case class Bar() extends Foo[List] + + def frob[F[_]](foo: Foo[F]): F[Int] = + foo match { + case Bar() => + List(1) + } + + sealed trait Foo1[F] + final case class Bar1() extends Foo1[Int] + def frob1[A](foo: Foo1[A]) = foo match { + case Bar1() => 1 + } +} From fa65623cd8d4e60368126fc958c7185ca5706a6b Mon Sep 17 00:00:00 2001 From: Miles Sabin Date: Wed, 20 Apr 2016 10:37:38 +0100 Subject: [PATCH 0007/2793] Added missing result type to test. --- test/files/pos/hkgadt.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/files/pos/hkgadt.scala b/test/files/pos/hkgadt.scala index efd7d3df21a4..0f3739f4d400 100644 --- a/test/files/pos/hkgadt.scala +++ b/test/files/pos/hkgadt.scala @@ -12,7 +12,7 @@ object HKGADT { sealed trait Foo1[F] final case class Bar1() extends Foo1[Int] - def frob1[A](foo: Foo1[A]) = foo match { + def frob1[A](foo: Foo1[A]): A = foo match { case Bar1() => 1 } } From 770d6e9b62adc97137f26ebf8696cfdd4ad2920b Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 27 Apr 2016 11:54:19 +1000 Subject: [PATCH 0008/2793] SD-129 Mark JFunctionN as serializable Before this change, if these were used as the target type of a lambda in Java source code, the lambda would not be serializable. This is somewhat surprising when contrasted with the way that Scala lambdas work in Scala source. Since we copied these classes over from scala-java8-compat, that project has opted to add Serializable as a parent to solve this issue. This commit brings our copy of these interfaces into line with that change.
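As an aside (illustrative only, not part of the patch), the contrast the message draws can be made concrete with a small round-trip helper: a lambda written in Scala source survives Java serialization out of the box, while a Java-source lambda whose target type is one of these interfaces only does so once the interface extends `java.io.Serializable`. The sketch below assumes nothing beyond the standard library.

```scala
import java.io._

object LambdaSerializationSketch {
  // Round-trips a value through Java serialization; throws NotSerializableException
  // if the value's runtime class does not implement java.io.Serializable.
  def roundTrip[T](value: T): T = {
    val buf = new ByteArrayOutputStream()
    val out = new ObjectOutputStream(buf)
    try out.writeObject(value) finally out.close()
    val in = new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray))
    try in.readObject().asInstanceOf[T] finally in.close()
  }

  def main(args: Array[String]): Unit = {
    val f: Int => Int = _ + 1   // a lambda written in Scala source
    println(roundTrip(f)(41))   // 42: it survives the round trip
  }
}
```

The Java-side counterpart needs the target interface itself to be serializable, which is exactly what the one-line change to each JFunctionN provides.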
--- src/library/scala/runtime/java8/JFunction0.java | 2 +- src/library/scala/runtime/java8/JFunction1.java | 2 +- src/library/scala/runtime/java8/JFunction10.java | 2 +- src/library/scala/runtime/java8/JFunction11.java | 2 +- src/library/scala/runtime/java8/JFunction12.java | 2 +- src/library/scala/runtime/java8/JFunction13.java | 2 +- src/library/scala/runtime/java8/JFunction14.java | 2 +- src/library/scala/runtime/java8/JFunction15.java | 2 +- src/library/scala/runtime/java8/JFunction16.java | 2 +- src/library/scala/runtime/java8/JFunction17.java | 2 +- src/library/scala/runtime/java8/JFunction18.java | 2 +- src/library/scala/runtime/java8/JFunction19.java | 2 +- src/library/scala/runtime/java8/JFunction2.java | 2 +- src/library/scala/runtime/java8/JFunction20.java | 2 +- src/library/scala/runtime/java8/JFunction21.java | 2 +- src/library/scala/runtime/java8/JFunction22.java | 2 +- src/library/scala/runtime/java8/JFunction3.java | 2 +- src/library/scala/runtime/java8/JFunction4.java | 2 +- src/library/scala/runtime/java8/JFunction5.java | 2 +- src/library/scala/runtime/java8/JFunction6.java | 2 +- src/library/scala/runtime/java8/JFunction7.java | 2 +- src/library/scala/runtime/java8/JFunction8.java | 2 +- src/library/scala/runtime/java8/JFunction9.java | 2 +- 23 files changed, 23 insertions(+), 23 deletions(-) diff --git a/src/library/scala/runtime/java8/JFunction0.java b/src/library/scala/runtime/java8/JFunction0.java index bdeb7d5f8e15..13426cc8af61 100644 --- a/src/library/scala/runtime/java8/JFunction0.java +++ b/src/library/scala/runtime/java8/JFunction0.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction0 extends scala.Function0 { +public interface JFunction0 extends scala.Function0, java.io.Serializable { default void $init$() { }; default void apply$mcV$sp() { diff --git a/src/library/scala/runtime/java8/JFunction1.java b/src/library/scala/runtime/java8/JFunction1.java index 2b8580271a82..e1f886dad748 100644 --- a/src/library/scala/runtime/java8/JFunction1.java +++ b/src/library/scala/runtime/java8/JFunction1.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1 extends scala.Function1 { +public interface JFunction1 extends scala.Function1, java.io.Serializable { default void apply$mcVI$sp(int v1) { apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1)); } diff --git a/src/library/scala/runtime/java8/JFunction10.java b/src/library/scala/runtime/java8/JFunction10.java index 9b9ab4a6c58c..f7a25c0df2ae 100644 --- a/src/library/scala/runtime/java8/JFunction10.java +++ b/src/library/scala/runtime/java8/JFunction10.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction10 extends scala.Function10 { +public interface JFunction10 extends scala.Function10, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction11.java b/src/library/scala/runtime/java8/JFunction11.java index b24c9a1ed09e..9a548b8fc990 100644 --- a/src/library/scala/runtime/java8/JFunction11.java +++ b/src/library/scala/runtime/java8/JFunction11.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction11 extends scala.Function11 { +public interface JFunction11 extends scala.Function11, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction12.java b/src/library/scala/runtime/java8/JFunction12.java index 09c90cb7e780..12fb73faafba 100644 --- a/src/library/scala/runtime/java8/JFunction12.java +++ 
b/src/library/scala/runtime/java8/JFunction12.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction12 extends scala.Function12 { +public interface JFunction12 extends scala.Function12, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction13.java b/src/library/scala/runtime/java8/JFunction13.java index e8cc2b53e61b..c85c63448ab5 100644 --- a/src/library/scala/runtime/java8/JFunction13.java +++ b/src/library/scala/runtime/java8/JFunction13.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction13 extends scala.Function13 { +public interface JFunction13 extends scala.Function13, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction14.java b/src/library/scala/runtime/java8/JFunction14.java index 327e442b4ca0..9a578833aaad 100644 --- a/src/library/scala/runtime/java8/JFunction14.java +++ b/src/library/scala/runtime/java8/JFunction14.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction14 extends scala.Function14 { +public interface JFunction14 extends scala.Function14, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction15.java b/src/library/scala/runtime/java8/JFunction15.java index bd2e3c00da5d..e993643953ce 100644 --- a/src/library/scala/runtime/java8/JFunction15.java +++ b/src/library/scala/runtime/java8/JFunction15.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction15 extends scala.Function15 { +public interface JFunction15 extends scala.Function15, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction16.java b/src/library/scala/runtime/java8/JFunction16.java index fb961e60ec94..a252cb5303bd 100644 --- a/src/library/scala/runtime/java8/JFunction16.java +++ b/src/library/scala/runtime/java8/JFunction16.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction16 extends scala.Function16 { +public interface JFunction16 extends scala.Function16, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction17.java b/src/library/scala/runtime/java8/JFunction17.java index 90a0b1d44160..045aa7196fa9 100644 --- a/src/library/scala/runtime/java8/JFunction17.java +++ b/src/library/scala/runtime/java8/JFunction17.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction17 extends scala.Function17 { +public interface JFunction17 extends scala.Function17, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction18.java b/src/library/scala/runtime/java8/JFunction18.java index cac24309e206..ba2bf31206c1 100644 --- a/src/library/scala/runtime/java8/JFunction18.java +++ b/src/library/scala/runtime/java8/JFunction18.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction18 extends scala.Function18 { +public interface JFunction18 extends scala.Function18, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction19.java b/src/library/scala/runtime/java8/JFunction19.java index bbfceac8c317..dde48242930f 100644 --- a/src/library/scala/runtime/java8/JFunction19.java +++ b/src/library/scala/runtime/java8/JFunction19.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction19 extends scala.Function19 { +public interface JFunction19 extends scala.Function19, java.io.Serializable { } diff --git 
a/src/library/scala/runtime/java8/JFunction2.java b/src/library/scala/runtime/java8/JFunction2.java index 1e0293a7e8e4..548ff60cf618 100644 --- a/src/library/scala/runtime/java8/JFunction2.java +++ b/src/library/scala/runtime/java8/JFunction2.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2 extends scala.Function2 { +public interface JFunction2 extends scala.Function2, java.io.Serializable { default void apply$mcVII$sp(int v1, int v2) { apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2)); } diff --git a/src/library/scala/runtime/java8/JFunction20.java b/src/library/scala/runtime/java8/JFunction20.java index 543e657ea7a1..5505743c20e4 100644 --- a/src/library/scala/runtime/java8/JFunction20.java +++ b/src/library/scala/runtime/java8/JFunction20.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction20 extends scala.Function20 { +public interface JFunction20 extends scala.Function20, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction21.java b/src/library/scala/runtime/java8/JFunction21.java index ecb0d8d28794..80e96d37150f 100644 --- a/src/library/scala/runtime/java8/JFunction21.java +++ b/src/library/scala/runtime/java8/JFunction21.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction21 extends scala.Function21 { +public interface JFunction21 extends scala.Function21, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction22.java b/src/library/scala/runtime/java8/JFunction22.java index 4945cd9db3c3..45e689458b5a 100644 --- a/src/library/scala/runtime/java8/JFunction22.java +++ b/src/library/scala/runtime/java8/JFunction22.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction22 extends scala.Function22 { +public interface JFunction22 extends scala.Function22, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction3.java b/src/library/scala/runtime/java8/JFunction3.java index ff657dbfd31c..6d81bb3a189f 100644 --- a/src/library/scala/runtime/java8/JFunction3.java +++ b/src/library/scala/runtime/java8/JFunction3.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction3 extends scala.Function3 { +public interface JFunction3 extends scala.Function3, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction4.java b/src/library/scala/runtime/java8/JFunction4.java index 246c0d5c7214..6c5cd3b61d3c 100644 --- a/src/library/scala/runtime/java8/JFunction4.java +++ b/src/library/scala/runtime/java8/JFunction4.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction4 extends scala.Function4 { +public interface JFunction4 extends scala.Function4, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction5.java b/src/library/scala/runtime/java8/JFunction5.java index 1d85c2989e99..eca1a406a63f 100644 --- a/src/library/scala/runtime/java8/JFunction5.java +++ b/src/library/scala/runtime/java8/JFunction5.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction5 extends scala.Function5 { +public interface JFunction5 extends scala.Function5, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction6.java b/src/library/scala/runtime/java8/JFunction6.java index 0699c90830a1..1c9daed5aa05 100644 --- 
a/src/library/scala/runtime/java8/JFunction6.java +++ b/src/library/scala/runtime/java8/JFunction6.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction6 extends scala.Function6 { +public interface JFunction6 extends scala.Function6, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction7.java b/src/library/scala/runtime/java8/JFunction7.java index 57bc16a0662e..c1aa130ba1d1 100644 --- a/src/library/scala/runtime/java8/JFunction7.java +++ b/src/library/scala/runtime/java8/JFunction7.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction7 extends scala.Function7 { +public interface JFunction7 extends scala.Function7, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction8.java b/src/library/scala/runtime/java8/JFunction8.java index af22b888a3f9..425e694df8cc 100644 --- a/src/library/scala/runtime/java8/JFunction8.java +++ b/src/library/scala/runtime/java8/JFunction8.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction8 extends scala.Function8 { +public interface JFunction8 extends scala.Function8, java.io.Serializable { } diff --git a/src/library/scala/runtime/java8/JFunction9.java b/src/library/scala/runtime/java8/JFunction9.java index d3c6b2676971..21c3c8c6e30c 100644 --- a/src/library/scala/runtime/java8/JFunction9.java +++ b/src/library/scala/runtime/java8/JFunction9.java @@ -6,5 +6,5 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction9 extends scala.Function9 { +public interface JFunction9 extends scala.Function9, java.io.Serializable { } From 86ae2f95ce33ef22f9c9ad40d6a966fbef7d352f Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 7 Apr 2016 12:32:40 -0700 Subject: [PATCH 0009/2793] SI-9740 Repl import fix -Yrepl-class-based Under `-Yrepl-class-based`, templating must follow the same scoping as under traditional object-based. The new test shows a typical case where two values of the same simple name must be imported in different scopes. 
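The scenario the new test exercises looks roughly like the following outside the REPL (an illustrative sketch; the class and value names mirror the test, not the generated wrapper code): an earlier line's import has to stay visible, yet a later definition of the same simple name `k` must shadow it rather than collide with it.

```scala
object ReplScopingSketch {
  case class K(s: String)
  class C { implicit val k: K = K("OK?") }

  def main(args: Array[String]): Unit = {
    val c = new C
    import c.k               // an earlier REPL line: imports the implicit value k
    println(implicitly[K])   // K(OK?)

    {
      val k = 42             // a later line: the new k must shadow the imported one
      println(k)             // 42, not K(OK?)
    }
  }
}
```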
--- .../scala/tools/nsc/interpreter/Imports.scala | 35 ++++++----- .../scala/tools/nsc/interpreter/package.scala | 9 +-- test/files/jvm/interpreter.check | 2 +- test/files/run/repl-classbased.check | 23 +++++++ test/files/run/repl-classbased.scala | 22 +++++++ test/files/run/repl-javap-app.check | 60 ------------------- test/files/run/repl-javap-app.scala | 15 ++--- test/files/run/t7319.check | 6 +- 8 files changed, 80 insertions(+), 92 deletions(-) create mode 100644 test/files/run/repl-classbased.check create mode 100644 test/files/run/repl-classbased.scala diff --git a/src/repl/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala index f04e2e808ce7..71a5e9f00a3f 100644 --- a/src/repl/scala/tools/nsc/interpreter/Imports.scala +++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala @@ -127,7 +127,11 @@ trait Imports { case rh :: rest if !keepHandler(rh.handler) => select(rest, wanted) case rh :: rest => import rh.handler._ - val newWanted = wanted ++ referencedNames -- definedNames -- importedNames + val augment = rh match { + case ReqAndHandler(_, _: ImportHandler) => referencedNames // for "import a.b", add "a" to names to be resolved + case _ => Nil + } + val newWanted = wanted ++ augment -- definedNames -- importedNames rh :: select(rest, newWanted) } } @@ -161,6 +165,8 @@ trait Imports { val tempValLines = mutable.Set[Int]() for (ReqAndHandler(req, handler) <- reqsToUse) { val objName = req.lineRep.readPathInstance + if (isReplTrace) + code.append(ss"// $objName definedNames ${handler.definedNames}, curImps $currentImps\n") handler match { case h: ImportHandler if checkHeader(h) => header.clear() @@ -175,21 +181,20 @@ trait Imports { currentImps ++= x.importedNames case x if isClassBased => - for (imv <- x.definedNames) { - if (!currentImps.contains(imv)) { - x match { - case _: ClassHandler => - code.append("import " + objName + req.accessPath + ".`" + imv + "`\n") - case _ => - val valName = req.lineRep.packageName + req.lineRep.readName - if (!tempValLines.contains(req.lineRep.lineId)) { - code.append(s"val $valName: ${objName}.type = $objName\n") - tempValLines += req.lineRep.lineId - } - code.append(s"import $valName${req.accessPath}.`$imv`;\n") - } - currentImps += imv + for (sym <- x.definedSymbols) { + maybeWrap(sym.name) + x match { + case _: ClassHandler => + code.append(s"import ${objName}${req.accessPath}.`${sym.name}`\n") + case _ => + val valName = s"${req.lineRep.packageName}${req.lineRep.readName}" + if (!tempValLines.contains(req.lineRep.lineId)) { + code.append(s"val $valName: ${objName}.type = $objName\n") + tempValLines += req.lineRep.lineId + } + code.append(s"import ${valName}${req.accessPath}.`${sym.name}`\n") } + currentImps += sym.name } // For other requests, import each defined name. // import them explicitly instead of with _, so that diff --git a/src/repl/scala/tools/nsc/interpreter/package.scala b/src/repl/scala/tools/nsc/interpreter/package.scala index 56f1e6537673..7934d819b486 100644 --- a/src/repl/scala/tools/nsc/interpreter/package.scala +++ b/src/repl/scala/tools/nsc/interpreter/package.scala @@ -198,13 +198,14 @@ package object interpreter extends ReplConfig with ReplStrings { } } - /* debug assist + /* An s-interpolator that uses `stringOf(arg)` instead of `String.valueOf(arg)`. 
*/ private[nsc] implicit class `smart stringifier`(val sc: StringContext) extends AnyVal { import StringContext._, runtime.ScalaRunTime.stringOf def ss(args: Any*): String = sc.standardInterpolator(treatEscapes, args map stringOf) - } debug assist */ + } + /* Try (body) lastly (more) */ private[nsc] implicit class `try lastly`[A](val t: Try[A]) extends AnyVal { - private def effect[X](last: =>Unit)(a: X): Try[A] = { last; t } - def lastly(last: =>Unit): Try[A] = t transform (effect(last) _, effect(last) _) + private def effect[X](last: => Unit)(a: X): Try[A] = { last; t } + def lastly(last: => Unit): Try[A] = t transform (effect(last) _, effect(last) _) } } diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check index ce3c8062d721..9a2162a906d9 100644 --- a/test/files/jvm/interpreter.check +++ b/test/files/jvm/interpreter.check @@ -353,7 +353,7 @@ defined class Term scala> def f(e: Exp) = e match { // non-exhaustive warning here case _:Fact => 3 } -:22: warning: match may not be exhaustive. +:18: warning: match may not be exhaustive. It would fail on the following inputs: Exp(), Term() def f(e: Exp) = e match { // non-exhaustive warning here ^ diff --git a/test/files/run/repl-classbased.check b/test/files/run/repl-classbased.check new file mode 100644 index 000000000000..e11fc170e5b8 --- /dev/null +++ b/test/files/run/repl-classbased.check @@ -0,0 +1,23 @@ + +scala> case class K(s: String) +defined class K + +scala> class C { implicit val k: K = K("OK?"); override def toString = s"C($k)" } +defined class C + +scala> val c = new C +c: C = C(K(OK?)) + +scala> import c.k +import c.k + +scala> implicitly[K] +res0: K = K(OK?) + +scala> val k = 42 +k: Int = 42 + +scala> k // was K(OK?) +res1: Int = 42 + +scala> :quit diff --git a/test/files/run/repl-classbased.scala b/test/files/run/repl-classbased.scala new file mode 100644 index 000000000000..595e123159e8 --- /dev/null +++ b/test/files/run/repl-classbased.scala @@ -0,0 +1,22 @@ + +import scala.tools.partest.ReplTest +import scala.tools.nsc.Settings + +//SI-9740 +object Test extends ReplTest { + override def transformSettings(s: Settings): Settings = { + s.Yreplclassbased.value = true + s + } + + def code = + """ +case class K(s: String) +class C { implicit val k: K = K("OK?"); override def toString = s"C($k)" } +val c = new C +import c.k +implicitly[K] +val k = 42 +k // was K(OK?) + """ +} diff --git a/test/files/run/repl-javap-app.check b/test/files/run/repl-javap-app.check index bace9534dae7..e69de29bb2d1 100644 --- a/test/files/run/repl-javap-app.check +++ b/test/files/run/repl-javap-app.check @@ -1,60 +0,0 @@ -#partest java6 -Welcome to Scala -Type in expressions for evaluation. Or try :help. - -scala> :javap -app MyApp$ -public final void delayedEndpoint$MyApp$1(); - Code: - Stack=2, Locals=1, Args_size=1 - 0: getstatic #XX; //Field scala/Console$.MODULE$:Lscala/Console$; - 3: ldc #XX; //String Hello, delayed world. - 5: invokevirtual #XX; //Method scala/Console$.println:(Ljava/lang/Object;)V - 8: return - LocalVariableTable: - Start Length Slot Name Signature - 0 9 0 this LMyApp$; - -scala> :quit -#partest java7 -Welcome to Scala -Type in expressions for evaluation. Or try :help. - -scala> :javap -app MyApp$ - public final void delayedEndpoint$MyApp$1(); - flags: ACC_PUBLIC, ACC_FINAL - Code: - stack=2, locals=1, args_size=1 - 0: getstatic #XX // Field scala/Console$.MODULE$:Lscala/Console$; - 3: ldc #XX // String Hello, delayed world. 
- 5: invokevirtual #XX // Method scala/Console$.println:(Ljava/lang/Object;)V - 8: return - LocalVariableTable: - Start Length Slot Name Signature - 0 9 0 this LMyApp$; - LineNumberTable: - line 5: 0 -} - -scala> :quit -#partest java8 -Welcome to Scala -Type in expressions for evaluation. Or try :help. - -scala> :javap -app MyApp$ - public final void delayedEndpoint$MyApp$1(); - descriptor: ()V - flags: ACC_PUBLIC, ACC_FINAL - Code: - stack=2, locals=1, args_size=1 - 0: getstatic #XX // Field scala/Console$.MODULE$:Lscala/Console$; - 3: ldc #XX // String Hello, delayed world. - 5: invokevirtual #XX // Method scala/Console$.println:(Ljava/lang/Object;)V - 8: return - LocalVariableTable: - Start Length Slot Name Signature - 0 9 0 this LMyApp$; - LineNumberTable: - line 5: 0 -} - -scala> :quit diff --git a/test/files/run/repl-javap-app.scala b/test/files/run/repl-javap-app.scala index ad6076c2d590..f7e3baa2a100 100644 --- a/test/files/run/repl-javap-app.scala +++ b/test/files/run/repl-javap-app.scala @@ -8,14 +8,11 @@ object MyApp extends App { object Test extends ReplTest { def code = ":javap -app MyApp$" - override def welcoming = true - - // The constant pool indices are not the same for GenASM / GenBCode, so - // replacing the exact numbers by XX. - lazy val hasConstantPoolRef = """(.*)(#\d\d)(.*)""".r - - override def normalize(s: String) = s match { - case hasConstantPoolRef(start, ref, end) => start + "#XX" + end - case _ => super.normalize(s) + override def show() = { + val coded = "Code:" + val strung = "String Hello, delayed world." + val lines = eval().toList + assert (lines.count(s => s.endsWith(coded)) == 1) + assert (lines.count(s => s.endsWith(strung)) == 1) } } diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check index 4d8429e8f204..31923e71193c 100644 --- a/test/files/run/t7319.check +++ b/test/files/run/t7319.check @@ -15,21 +15,21 @@ warning: there was one feature warning; re-run with -feature for details convert: [F[X <: F[X]]](builder: F[_ <: F[_]])Int scala> convert(Some[Int](0)) -:16: error: no type parameters for method convert: (builder: F[_ <: F[_]])Int exist so that it can be applied to arguments (Some[Int]) +:15: error: no type parameters for method convert: (builder: F[_ <: F[_]])Int exist so that it can be applied to arguments (Some[Int]) --- because --- argument expression's type is not compatible with formal parameter type; found : Some[Int] required: ?F[_$1] forSome { type _$1 <: ?F[_$2] forSome { type _$2 } } convert(Some[Int](0)) ^ -:16: error: type mismatch; +:15: error: type mismatch; found : Some[Int] required: F[_ <: F[_]] convert(Some[Int](0)) ^ scala> Range(1,2).toArray: Seq[_] -:15: error: polymorphic expression cannot be instantiated to expected type; +:14: error: polymorphic expression cannot be instantiated to expected type; found : [B >: Int]Array[B] required: Seq[_] Range(1,2).toArray: Seq[_] From 56c5d92236daf8a8094429072ec70cf830fd10ac Mon Sep 17 00:00:00 2001 From: Performant Data LLC Date: Mon, 21 Mar 2016 13:21:19 -0700 Subject: [PATCH 0010/2793] Add JMH to the benchmark framework. Add an example benchmark for OpenHashMap. 
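
For orientation, a JMH benchmark under the sbt-jmh plugin is just an annotated class whose `@Benchmark`
methods the harness runs repeatedly. The sketch below is illustrative only (the class, method and
parameter values are made up for this message); the real benchmark added by this patch is
`OpenHashMapBenchmark.scala`.

```
import java.util.concurrent.TimeUnit
import org.openjdk.jmh.annotations._
import scala.collection.mutable.OpenHashMap

@BenchmarkMode(Array(Mode.AverageTime))
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@State(Scope.Benchmark)
class ExampleBenchmark {
  @Param(Array("100", "1000"))  // JMH runs the benchmark once per parameter value
  var size: Int = _

  @Benchmark
  def fillMap(): OpenHashMap[Int, Int] = {
    val map = new OpenHashMap[Int, Int]
    var i = 0
    while (i < size) { map.put(i, i); i += 1 }
    map // returning the result keeps the loop from being optimized away
  }
}
```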
--- test/benchmarks/.gitignore | 6 ++ test/benchmarks/README.md | 65 ++++++++++++++++ test/benchmarks/build.sbt | 8 ++ test/benchmarks/project/plugins.sbt | 1 + .../mutable/OpenHashMapBenchmark.scala | 76 +++++++++++++++++++ 5 files changed, 156 insertions(+) create mode 100644 test/benchmarks/.gitignore create mode 100644 test/benchmarks/README.md create mode 100644 test/benchmarks/build.sbt create mode 100644 test/benchmarks/project/plugins.sbt create mode 100644 test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala diff --git a/test/benchmarks/.gitignore b/test/benchmarks/.gitignore new file mode 100644 index 000000000000..6e3ddad6d22c --- /dev/null +++ b/test/benchmarks/.gitignore @@ -0,0 +1,6 @@ +/project/project/ +/project/target/ +/target/ + +# what appears to be a Scala IDE-generated file +.cache-main diff --git a/test/benchmarks/README.md b/test/benchmarks/README.md new file mode 100644 index 000000000000..99b358dd99ef --- /dev/null +++ b/test/benchmarks/README.md @@ -0,0 +1,65 @@ +# Scala library benchmarks + +This directory is a standalone SBT project +that makes use of the [SBT plugin for JMH](https://github.com/ktoso/sbt-jmh), +with the usual directory structure: +source code for the benchmarks, which utilize [JMH](http://openjdk.java.net/projects/code-tools/jmh/), +should be placed in `src/main/scala`. + +The benchmarks require first building Scala into `../../build/pack`. +They can then be (built and) run from `sbt` with "`jmh:run`". +"`jmh:run -h`" displays the usual JMH options available. + +## some useful HotSpot options +Adding these to the `jmh:run` command line may help if you're using the HotSpot (Oracle, OpenJDK) compiler. +They require prefixing with `-jvmArgs`. +See [the Java documentation](http://docs.oracle.com/javase/8/docs/technotes/tools/unix/java.html) for more options. + +### viewing JIT compilation events +Adding `-XX:+PrintCompilation` shows when Java methods are being compiled or deoptimized. +At the most basic level, +these messages will tell you whether the code that you're measuring is still being tuned, +so that you know whether you're running enough warm-up iterations. +See [Kris Mok's notes](https://gist.github.com/rednaxelafx/1165804#file-notes-md) to interpret the output in detail. + +### consider GC events +If you're not explicitly performing `System.gc()` calls outside of your benchmarking code, +you should add the JVM option `-verbose:gc` to understand the effect that GCs may be having on your tests. + +### "diagnostic" options +These require the `-XX:+UnlockDiagnosticVMOptions` JVM option. + +#### viewing inlining events +Add `-XX:+PrintInlining`. + +#### viewing the disassembled code +To show the assembly code corresponding to the code generated by the JIT compiler for specific methods, +add `-XX:CompileCommand=print,scala.collection.mutable.OpenHashMap::*`, +for example, to show all of the methods in the `scala.collection.mutable.OpenHashMap` class. +If you're running OpenJDK, you may need to install the disassembler library (`hsdis-amd64.so` for the `amd64` architecture). +In Debian, this is available in the `libhsdis0-fcml` package. + +To show it for _all_ methods, add `-XX:+PrintAssembly`. +(This is usually excessive.) 
+ +## useful reading +* [OpenJDK advice on microbenchmarks](https://wiki.openjdk.java.net/display/HotSpot/MicroBenchmarks) +* "[Measuring performance](http://docs.scala-lang.org/overviews/parallel-collections/performance.html)" of Scala parallel collections +* Brian Goetz's "Java theory and practice" articles: + * "[Dynamic compilation and performance measurement](http://www.ibm.com/developerworks/java/library/j-jtp12214/)" + * "[Anatomy of a flawed benchmark](http://www.ibm.com/developerworks/java/library/j-jtp02225/)" + +## legacy frameworks + +An older version of the benchmarking framework is still present in this directory, in the following locations: + +
+* `bench`: A script to run the old benchmarks.
+* `source.list`: A temporary file used by `bench`.
+* `src/scala/`: The older benchmarks, including the previous framework.
+ +Another, older set of benchmarks is present in `../benchmarking/`. diff --git a/test/benchmarks/build.sbt b/test/benchmarks/build.sbt new file mode 100644 index 000000000000..92a5fce177d2 --- /dev/null +++ b/test/benchmarks/build.sbt @@ -0,0 +1,8 @@ +scalaHome := Some(file("../../build/pack")) + +lazy val root = (project in file(".")). + enablePlugins(JmhPlugin). + settings( + name := "test-benchmarks", + version := "0.0.1" + ) diff --git a/test/benchmarks/project/plugins.sbt b/test/benchmarks/project/plugins.sbt new file mode 100644 index 000000000000..f5319fb18747 --- /dev/null +++ b/test/benchmarks/project/plugins.sbt @@ -0,0 +1 @@ +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.6") diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala new file mode 100644 index 000000000000..eeea8f6508df --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala @@ -0,0 +1,76 @@ +package scala.collection.mutable; + +import java.util.concurrent.TimeUnit +import org.openjdk.jmh.annotations._ + +private object OpenHashMapBenchmark { + /** State container for the `put()` bulk calling tests. + * + * Provides a thread-scoped map, so that allocation for the hash table will be done + * in the first warm-up iteration, not during measurement. + * + * Performs a GC after every invocation, so that only the GCs caused by the invocation + * contribute to the measurement. + */ + @State(Scope.Thread) + class BulkPutState { + val map = new OpenHashMap[Int,Int].empty + + @TearDown(Level.Invocation) + def teardown { map.clear(); System.gc() } + } +} + +/** Benchmark for the library's [[OpenHashMap]]. + * + * The `put()` calls are tested by looping to the size desired for the map; + * instead of using the JMH harness, which iterates for a fixed length of time. + */ +@BenchmarkMode(Array(Mode.AverageTime)) +@Threads(1) +@Fork(1) +@Warmup(iterations = 20) +@Measurement(iterations = 20) +@OutputTimeUnit(TimeUnit.MICROSECONDS) +@State(Scope.Benchmark) +class OpenHashMapBenchmark { + import OpenHashMapBenchmark._ + + @Param(Array("100", "250", "1000", "2500", "10000", "25000", "100000", "250000", "1000000", "2500000", + "5000000", "7500000", "10000000", "25000000")) + var size: Int = _ + + /** Put elements into the given map. */ + private[this] def put_Int(map: OpenHashMap[Int,Int], from: Int, to: Int) { + var i = from + while (i <= to) { // using a `for` expression instead adds significant overhead + map.put(i, i) + i += 1 + } + } + + /** Test putting elements to a map of `Int` to `Int`. */ + @Benchmark + def put_Int(state: BulkPutState) { put_Int(state.map, 1, size) } + + /** Test putting and removing elements to a growing map of `Int` to `Int`. */ + @Benchmark + def put_remove_Int(state: BulkPutState) { + val blocks = 50 // should be a factor of `size` + val totalPuts = 2 * size // add twice as many, because we remove half of them + val blockSize: Int = totalPuts / blocks + var base = 0 + while (base < totalPuts) { + put_Int(state.map, base + 1, base + blockSize) + + // remove every other entry + var i = base + 1 + while (i <= base + blockSize) { + state.map.remove(i) + i += 2 + } + + base += blockSize + } + } +} From 303130b81599528db35d9612fff42cf7e570e15a Mon Sep 17 00:00:00 2001 From: Performant Data LLC Date: Tue, 22 Mar 2016 23:12:31 -0700 Subject: [PATCH 0011/2793] Add a reference to Doug Lea's benchmarks. 
--- test/benchmarks/README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/benchmarks/README.md b/test/benchmarks/README.md index 99b358dd99ef..aea72e90edf9 100644 --- a/test/benchmarks/README.md +++ b/test/benchmarks/README.md @@ -44,10 +44,11 @@ To show it for _all_ methods, add `-XX:+PrintAssembly`. ## useful reading * [OpenJDK advice on microbenchmarks](https://wiki.openjdk.java.net/display/HotSpot/MicroBenchmarks) -* "[Measuring performance](http://docs.scala-lang.org/overviews/parallel-collections/performance.html)" of Scala parallel collections * Brian Goetz's "Java theory and practice" articles: * "[Dynamic compilation and performance measurement](http://www.ibm.com/developerworks/java/library/j-jtp12214/)" * "[Anatomy of a flawed benchmark](http://www.ibm.com/developerworks/java/library/j-jtp02225/)" +* [Doug Lea's JSR 166 benchmarks](http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/test/loops/) +* "[Measuring performance](http://docs.scala-lang.org/overviews/parallel-collections/performance.html)" of Scala parallel collections ## legacy frameworks From e1b58ccafc598c06b8011e3e0f411f6e91b99353 Mon Sep 17 00:00:00 2001 From: Performant Data LLC Date: Wed, 23 Mar 2016 12:07:00 -0700 Subject: [PATCH 0012/2793] Add get() tests to OpenHashMap, reduce timing artifacts. In order to get a better exploration of the variance of tests in a limited time, I've reduced the number of measurement iterations and increased the number of forks. By sight, the measurement iterations seemed pretty consistent within a trial, whereas they would vary widely on occasional forks. I extended testing down to 50-entry maps, to explore the rise in service times that I was seeing at small scale. This is probably a timing artifact, from too-short invocations, since I'm using @Level.Invocation in the put() tests. To fix that, I enlarged the unit of testing, by creating multiple, sometimes thousands, of maps for the invocation to fill. This has also changed the test from filling a previously-filled map, to filling a new, but sufficiently sized map. The put()/remove() test now performs much worse (on a more realistic scenario). This also adds a couple tests for calling get() against a map that's been filled only with put()s, or with a mix of put() and remove(). --- .../mutable/OpenHashMapBenchmark.scala | 179 ++++++++++++++---- 1 file changed, 144 insertions(+), 35 deletions(-) diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala index eeea8f6508df..73ab5e40d0ce 100644 --- a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala @@ -2,46 +2,104 @@ package scala.collection.mutable; import java.util.concurrent.TimeUnit import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole +import org.openjdk.jmh.infra.BenchmarkParams +/** Utilities for the [[OpenHashMapBenchmark]]. + * + * The method calls are tested by looping to the size desired for the map; + * instead of using the JMH harness, which iterates for a fixed length of time. + */ private object OpenHashMapBenchmark { /** State container for the `put()` bulk calling tests. * - * Provides a thread-scoped map, so that allocation for the hash table will be done - * in the first warm-up iteration, not during measurement. 
+ * Provides an array of adequately-sized, empty maps to each invocation, + * so that hash table allocation won't be done during measurement. * - * Performs a GC after every invocation, so that only the GCs caused by the invocation - * contribute to the measurement. + * Empties the map and performs a GC after every invocation, + * so that only the GCs caused by the invocation contribute to the measurement. */ @State(Scope.Thread) + @AuxCounters class BulkPutState { + /** A lower-bound estimate of the number of nanoseconds per `put()` call */ + private[this] val nanosPerPut: Double = 5 + + /** Minimum number of nanoseconds per invocation, so as to avoid timing artifacts. */ + private[this] val minNanosPerInvocation = 1000000 // one millisecond + + /** The minimum number of `put()` calls to make per invocation, so as to avoid timing artifacts. */ + private[this] val minPutsPerInvocation = minNanosPerInvocation / nanosPerPut + + /** Size of the maps created in this trial. */ + private[this] var size: Int = _ + + /** Number of maps created in each invocation; the size of `maps`. */ + private[this] var n: Int = _ + + /** Number of operations performed in the current invocation. */ + var operations: Int = _ + + var maps: Array[OpenHashMap[Int,Int]] = null + + @Setup + def threadSetup(params: BenchmarkParams) { + size = params.getParam("size").toInt + n = math.ceil(minPutsPerInvocation / size).toInt + maps = new Array(n) + } + + @Setup(Level.Iteration) + def iterationSetup { + operations = 0 + } + + @Setup(Level.Invocation) + def setup { + for (i <- 0 until n) maps(i) = new OpenHashMap[Int,Int](size) + operations += size * n + System.gc() // clean up after last invocation + } + } + + /** State container for the `get()` bulk calling tests. + * + * Provides a thread-scoped map of the expected size. + * Performs a GC after loading the map. + */ + @State(Scope.Thread) + class BulkGetState { val map = new OpenHashMap[Int,Int].empty - - @TearDown(Level.Invocation) - def teardown { map.clear(); System.gc() } + + /** Load the map with keys from `1` to `size`. */ + @Setup + def setup(params: BenchmarkParams) { + val size = params.getParam("size").toInt + put_Int(map, 1, size) + System.gc() + } } -} -/** Benchmark for the library's [[OpenHashMap]]. - * - * The `put()` calls are tested by looping to the size desired for the map; - * instead of using the JMH harness, which iterates for a fixed length of time. - */ -@BenchmarkMode(Array(Mode.AverageTime)) -@Threads(1) -@Fork(1) -@Warmup(iterations = 20) -@Measurement(iterations = 20) -@OutputTimeUnit(TimeUnit.MICROSECONDS) -@State(Scope.Benchmark) -class OpenHashMapBenchmark { - import OpenHashMapBenchmark._ + /** State container for the `get()` bulk calling tests with deleted entries. + * + * Provides a thread-scoped map of the expected size, from which entries have been removed. + * Performs a GC after loading the map. + */ + @State(Scope.Thread) + class BulkRemovedGetState { + val map = new OpenHashMap[Int,Int].empty - @Param(Array("100", "250", "1000", "2500", "10000", "25000", "100000", "250000", "1000000", "2500000", - "5000000", "7500000", "10000000", "25000000")) - var size: Int = _ + /** Load the map with keys from `1` to `size`, removing half of them. */ + @Setup + def setup(params: BenchmarkParams) { + val size = params.getParam("size").toInt + put_remove_Int(map, size) + System.gc() + } + } /** Put elements into the given map. 
*/ - private[this] def put_Int(map: OpenHashMap[Int,Int], from: Int, to: Int) { + private def put_Int(map: OpenHashMap[Int,Int], from: Int, to: Int) { var i = from while (i <= to) { // using a `for` expression instead adds significant overhead map.put(i, i) @@ -49,28 +107,79 @@ class OpenHashMapBenchmark { } } - /** Test putting elements to a map of `Int` to `Int`. */ - @Benchmark - def put_Int(state: BulkPutState) { put_Int(state.map, 1, size) } - - /** Test putting and removing elements to a growing map of `Int` to `Int`. */ - @Benchmark - def put_remove_Int(state: BulkPutState) { + /** Put elements into the given map, removing half of them as they're added. + * + * @param size number of entries to leave in the map on return + */ + def put_remove_Int(map: OpenHashMap[Int,Int], size: Int) { val blocks = 50 // should be a factor of `size` val totalPuts = 2 * size // add twice as many, because we remove half of them val blockSize: Int = totalPuts / blocks var base = 0 while (base < totalPuts) { - put_Int(state.map, base + 1, base + blockSize) + put_Int(map, base + 1, base + blockSize) // remove every other entry var i = base + 1 while (i <= base + blockSize) { - state.map.remove(i) + map.remove(i) i += 2 } base += blockSize } } + + /** Get elements from the given map. */ + def get_Int(map: OpenHashMap[Int,Int], size: Int, bh: Blackhole) { + var i = 1 + while (i <= size) { + bh.consume(map.get(i).getOrElse(0)) + i += 1 + } + } +} + +/** Benchmark for the library's [[OpenHashMap]]. */ +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(10) +@Threads(1) +@Warmup(iterations = 20) +@Measurement(iterations = 6) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class OpenHashMapBenchmark { + import OpenHashMapBenchmark._ + + @Param(Array("50", "100", "250", "1000", "2500", "10000", "25000", "100000", "250000", "1000000", "2500000", + "5000000", "7500000", "10000000", "25000000")) + var size: Int = _ + + /** Test putting elements to a map of `Int` to `Int`. */ + @Benchmark + def put_Int(state: BulkPutState) { + var i = 0 + while (i < state.maps.length) { + OpenHashMapBenchmark.put_Int(state.maps(i), 1, size) + i += 1 + } + } + + /** Test putting and removing elements to a growing map of `Int` to `Int`. */ + @Benchmark + def put_remove_Int(state: BulkPutState) { + var i = 0 + while (i < state.maps.length) { + OpenHashMapBenchmark.put_remove_Int(state.maps(i), size) + i += 1 + } + } + + /** Test getting elements from a map of `Int` to `Int`. */ + @Benchmark + def put_get_Int(state: BulkGetState, bh: Blackhole) = OpenHashMapBenchmark.get_Int(state.map, size, bh) + + /** Test getting elements from a map of `Int` to `Int` from which elements have been removed. */ + @Benchmark + def put_remove_get_Int(state: BulkRemovedGetState, bh: Blackhole) = OpenHashMapBenchmark.get_Int(state.map, size, bh) } From b88933eb84f1f1f5215b0feb43f4ecfc12c8847d Mon Sep 17 00:00:00 2001 From: Performant Data LLC Date: Fri, 25 Mar 2016 21:44:50 -0700 Subject: [PATCH 0013/2793] Benchmark the OpenHashMap memory usage. Also add sbteclipse to the benchmark project. 
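
The memory figures come from JOL (Java Object Layout): after an invocation has filled its maps, the
benchmark walks their object graph and records the retained size in an `@AuxCounters` field, so JMH
reports it alongside the timings. Roughly (a sketch, not the code added here; the object and method
names are made up):

```
import org.openjdk.jol.info.GraphLayout
import scala.collection.mutable.OpenHashMap

object MemorySketch {
  /** Bytes retained by the object graph reachable from `map`. */
  def retainedBytes(map: OpenHashMap[Int, Int]): Long =
    GraphLayout.parseInstance(map).totalSize()
}
```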
--- test/benchmarks/.gitignore | 8 +++++ test/benchmarks/build.sbt | 5 ++- test/benchmarks/project/plugins.sbt | 1 + .../mutable/OpenHashMapBenchmark.scala | 32 +++++++++++++------ 4 files changed, 35 insertions(+), 11 deletions(-) diff --git a/test/benchmarks/.gitignore b/test/benchmarks/.gitignore index 6e3ddad6d22c..ce4d893417db 100644 --- a/test/benchmarks/.gitignore +++ b/test/benchmarks/.gitignore @@ -4,3 +4,11 @@ # what appears to be a Scala IDE-generated file .cache-main + +# standard Eclipse output directory +/bin/ + +# sbteclipse-generated Eclipse files +/.classpath +/.project +/.settings/ diff --git a/test/benchmarks/build.sbt b/test/benchmarks/build.sbt index 92a5fce177d2..2959e4986ad2 100644 --- a/test/benchmarks/build.sbt +++ b/test/benchmarks/build.sbt @@ -1,8 +1,11 @@ scalaHome := Some(file("../../build/pack")) +scalaVersion := "2.11.8" +scalacOptions += "-feature" lazy val root = (project in file(".")). enablePlugins(JmhPlugin). settings( name := "test-benchmarks", - version := "0.0.1" + version := "0.0.1", + libraryDependencies += "org.openjdk.jol" % "jol-core" % "0.4" ) diff --git a/test/benchmarks/project/plugins.sbt b/test/benchmarks/project/plugins.sbt index f5319fb18747..e11aa29f3bfc 100644 --- a/test/benchmarks/project/plugins.sbt +++ b/test/benchmarks/project/plugins.sbt @@ -1 +1,2 @@ +addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "4.0.0") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.6") diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala index 73ab5e40d0ce..13be9e6206f2 100644 --- a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala @@ -4,6 +4,9 @@ import java.util.concurrent.TimeUnit import org.openjdk.jmh.annotations._ import org.openjdk.jmh.infra.Blackhole import org.openjdk.jmh.infra.BenchmarkParams +import org.openjdk.jol.info.GraphLayout +import org.openjdk.jol.info.GraphWalker +import org.openjdk.jol.info.GraphVisitor /** Utilities for the [[OpenHashMapBenchmark]]. * @@ -15,9 +18,11 @@ private object OpenHashMapBenchmark { * * Provides an array of adequately-sized, empty maps to each invocation, * so that hash table allocation won't be done during measurement. - * - * Empties the map and performs a GC after every invocation, + * Provides enough maps to make each invocation long enough to avoid timing artifacts. + * Performs a GC after re-creating the empty maps before every invocation, * so that only the GCs caused by the invocation contribute to the measurement. + * + * Records the memory used by all the maps in the last invocation of each iteration. */ @State(Scope.Thread) @AuxCounters @@ -28,24 +33,25 @@ private object OpenHashMapBenchmark { /** Minimum number of nanoseconds per invocation, so as to avoid timing artifacts. */ private[this] val minNanosPerInvocation = 1000000 // one millisecond - /** The minimum number of `put()` calls to make per invocation, so as to avoid timing artifacts. */ - private[this] val minPutsPerInvocation = minNanosPerInvocation / nanosPerPut - /** Size of the maps created in this trial. */ private[this] var size: Int = _ - /** Number of maps created in each invocation; the size of `maps`. */ - private[this] var n: Int = _ + /** Total number of entries in all of the `maps` combined. 
*/ + var mapEntries: Int = _ /** Number of operations performed in the current invocation. */ var operations: Int = _ + /** Bytes of memory used in the object graphs of all the maps. */ + var memory: Long = _ + var maps: Array[OpenHashMap[Int,Int]] = null @Setup def threadSetup(params: BenchmarkParams) { size = params.getParam("size").toInt - n = math.ceil(minPutsPerInvocation / size).toInt + val n = math.ceil(minNanosPerInvocation / (nanosPerPut * size)).toInt + mapEntries = size * n maps = new Array(n) } @@ -56,10 +62,16 @@ private object OpenHashMapBenchmark { @Setup(Level.Invocation) def setup { - for (i <- 0 until n) maps(i) = new OpenHashMap[Int,Int](size) - operations += size * n + for (i <- 0 until maps.length) maps(i) = new OpenHashMap[Int,Int](size) + operations += mapEntries System.gc() // clean up after last invocation } + + @TearDown(Level.Iteration) + def iterationTeardown { + // limit to smaller cases to avoid OOM + memory = if (mapEntries <= 1000000) GraphLayout.parseInstance(maps(0), maps.tail).totalSize else 0 + } } /** State container for the `get()` bulk calling tests. From 2e8fb12f6d92e6021131461285b9c28909584d04 Mon Sep 17 00:00:00 2001 From: Performant Data LLC Date: Sun, 27 Mar 2016 11:59:13 -0700 Subject: [PATCH 0014/2793] Add a JMH runner class to the library benchmark framework. --- test/benchmarks/README.md | 49 ++++++-- .../src/main/scala/benchmark/JmhRunner.scala | 16 +++ .../mutable/OpenHashMapRunner.scala | 111 ++++++++++++++++++ 3 files changed, 167 insertions(+), 9 deletions(-) create mode 100644 test/benchmarks/src/main/scala/benchmark/JmhRunner.scala create mode 100644 test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala diff --git a/test/benchmarks/README.md b/test/benchmarks/README.md index aea72e90edf9..07e72f09a162 100644 --- a/test/benchmarks/README.md +++ b/test/benchmarks/README.md @@ -1,17 +1,48 @@ # Scala library benchmarks -This directory is a standalone SBT project -that makes use of the [SBT plugin for JMH](https://github.com/ktoso/sbt-jmh), -with the usual directory structure: -source code for the benchmarks, which utilize [JMH](http://openjdk.java.net/projects/code-tools/jmh/), -should be placed in `src/main/scala`. +This directory is a standalone SBT project, within the Scala project, +that makes use of the [SBT plugin](https://github.com/ktoso/sbt-jmh) for [JMH](http://openjdk.java.net/projects/code-tools/jmh/). -The benchmarks require first building Scala into `../../build/pack`. -They can then be (built and) run from `sbt` with "`jmh:run`". -"`jmh:run -h`" displays the usual JMH options available. +## running a benchmark + +The benchmarks require first building Scala into `../../build/pack`, using Ant. + +You'll then need to know the fully-qualified name of the benchmark runner class. +The benchmarking classes are organized under `src/main/scala`, +in the same package hierarchy as the classes that they test. +Assuming that we're benchmarking `scala.collection.mutable.OpenHashMap`, +the benchmark runner would likely be named `scala.collection.mutable.OpenHashMapRunner`. +Using this example, one would simply run + + jmh:runMain scala.collection.mutable.OpenHashMapRunner + +in SBT. +SBT should be run _from this directory_. + +The JMH results can be found under `target/jmh-results/`. +`target` gets deleted on an SBT `clean`, +so you should copy these files out of `target` if you wish to preserve them. 
+ +## creating a benchmark and runner + +The benchmarking classes use the same package hierarchy as the classes that they test +in order to make it easy to expose, in package scope, members of the class under test, +should that be necessary for benchmarking. + +There are two types of classes in the source directory: +those suffixed "`Benchmark`" and those suffixed "`Runner`". +The former are benchmarks that can be run directly using `jmh:run`; +however, they are normally run from a corresponding class of the latter type, +which is run using `jmh:runMain` (as described above). +This …`Runner` class is useful for setting appropriate JMH command options, +and for processing the JMH results into files that can be read by other tools, such as Gnuplot. + +The `benchmark.JmhRunner` trait should be woven into any runner class, for the standard behavior that it provides. +This includes creating output files in a subdirectory of `target/jmh-results` +derived from the fully-qualified package name of the `Runner` class. ## some useful HotSpot options -Adding these to the `jmh:run` command line may help if you're using the HotSpot (Oracle, OpenJDK) compiler. +Adding these to the `jmh:run` or `jmh:runMain` command line may help if you're using the HotSpot (Oracle, OpenJDK) compiler. They require prefixing with `-jvmArgs`. See [the Java documentation](http://docs.oracle.com/javase/8/docs/technotes/tools/unix/java.html) for more options. diff --git a/test/benchmarks/src/main/scala/benchmark/JmhRunner.scala b/test/benchmarks/src/main/scala/benchmark/JmhRunner.scala new file mode 100644 index 000000000000..cc75be529da2 --- /dev/null +++ b/test/benchmarks/src/main/scala/benchmark/JmhRunner.scala @@ -0,0 +1,16 @@ +package benchmark + +import java.io.File + +/** Common code for JMH runner objects. */ +trait JmhRunner { + private[this] val parentDirectory = new File("target", "jmh-results") + + /** Return the output directory for this class, creating the directory if necessary. */ + protected def outputDirectory: File = { + val subdir = getClass.getPackage.getName.replace('.', File.separatorChar) + val dir = new File(parentDirectory, subdir) + if (!dir.isDirectory) dir.mkdirs() + dir + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala new file mode 100644 index 000000000000..c139c55933b4 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala @@ -0,0 +1,111 @@ +package scala.collection.mutable + +import java.io.BufferedWriter +import java.io.File +import java.io.FileOutputStream +import java.io.OutputStreamWriter +import java.io.PrintWriter +import scala.collection.JavaConversions +import scala.language.existentials +import org.openjdk.jmh.results.RunResult +import org.openjdk.jmh.runner.Runner +import org.openjdk.jmh.runner.options.CommandLineOptions +import org.openjdk.jmh.runner.options.Options +import benchmark.JmhRunner +import org.openjdk.jmh.runner.options.OptionsBuilder +import org.openjdk.jmh.runner.options.VerboseMode + +/** Replacement JMH application that runs the [[OpenHashMap]] benchmark. + * + * Outputs the results in a form consumable by a Gnuplot script. + */ +object OpenHashMapRunner extends JmhRunner { + /** File that will be created for the output data set. */ + private[this] val outputFile = new File(outputDirectory, "OpenHashMap.dat") + + /** Qualifier to add to the name of a memory usage data set. 
*/ + private[this] val memoryDatasetQualifer = " memory" + + /** Name of the JMH parameter for the number of map entries per invocation. */ + private[this] val sizeParamName = "size" + + /** Name of the JMH auxiliary counter that collects operation counts. */ + private[this] val operationsAuxCounterName = "operations" + + /** Name of the JMH auxiliary counter that collects memory usage. */ + private[this] val memoryAuxCounterName = "memory" + + /** Name of the JMH auxiliary counter that collects the number of map entries. */ + private[this] val entriesAuxCounterName = "mapEntries" + + def main(args: Array[String]) { + import scala.collection.JavaConversions._ + import scala.language.existentials + + val opts = new CommandLineOptions(args: _*) + var builder = new OptionsBuilder().parent(opts) + .jvmArgsPrepend("-Xmx6000m") + if (!opts.verbosity.hasValue) builder = builder.verbosity(VerboseMode.SILENT) + + val results = new Runner(builder.build).run() + + // Sort the results + + /** Map from data set name to data set. */ + val datasetByName = Map.empty[String, Set[RunResult]] + + /** Ordering for the results within a data set. Orders by increasing number of map entries. */ + val ordering = Ordering.by[RunResult, Int](_.getParams.getParam(sizeParamName).toInt) + + def addToDataset(result: RunResult, key: String): Unit = + datasetByName.get(key) + .getOrElse({ val d = SortedSet.empty(ordering); datasetByName.put(key, d); d }) += result + + results.foreach { result: RunResult ⇒ + addToDataset(result, result.getPrimaryResult.getLabel) + + // Create another data set for trials that track memory usage + if (result.getSecondaryResults.containsKey(memoryAuxCounterName)) + addToDataset(result, result.getPrimaryResult.getLabel + memoryDatasetQualifer) + } + + //TODO Write out test parameters + // val jvm = params.getJvm + // val jvmArgs = params.getJvmArgs.mkString(" ") + + val f = new PrintWriter(outputFile, "UTF-8") + try { + datasetByName.foreach(_ match { case (label: String, dataset: Iterable[RunResult]) ⇒ { + f.format("# [%s]\n", label) + + val isMemoryUsageDataset = label.contains(memoryDatasetQualifer) + dataset.foreach { result ⇒ + val size = result.getParams.getParam(sizeParamName) + val secondaryResults = result.getSecondaryResults + if (isMemoryUsageDataset) { + val memoryResult = secondaryResults.get(memoryAuxCounterName) + val entriesResult = secondaryResults.get(entriesAuxCounterName) + f.format("%s %f %f %f %f\n", size, + Double.box(entriesResult.getScore), Double.box(entriesResult.getStatistics.getStandardDeviation), + Double.box(memoryResult.getScore), Double.box(memoryResult.getStatistics.getStandardDeviation)) + } + else { + if (secondaryResults.containsKey(operationsAuxCounterName)) { + val operationsResult = secondaryResults.get(operationsAuxCounterName) + f.format("%s %f %f\n", size, + Double.box(operationsResult.getScore), Double.box(operationsResult.getStatistics.getStandardDeviation)) + } else { + val primary = result.getPrimaryResult + f.format("%s %f %f\n", size, + Double.box(primary.getScore), Double.box(primary.getStatistics.getStandardDeviation)) + } + } + } + + f.println(); f.println() // data set separator + }}) + } finally { + f.close() + } + } +} From cd7be12a35fd2cf7d0448d59b6f43e4165f43db4 Mon Sep 17 00:00:00 2001 From: Performant Data LLC Date: Mon, 28 Mar 2016 13:54:43 -0700 Subject: [PATCH 0015/2793] Improve the OpenHashMapBenchmark run times. For the warm-up invocations, suppress setup and teardown that is only needed for the measurement iterations. 
Reduce the number of forks. --- .../mutable/OpenHashMapBenchmark.scala | 21 ++++++++++++------- 1 file changed, 14 insertions(+), 7 deletions(-) diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala index 13be9e6206f2..78e160a713c7 100644 --- a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala @@ -7,6 +7,8 @@ import org.openjdk.jmh.infra.BenchmarkParams import org.openjdk.jol.info.GraphLayout import org.openjdk.jol.info.GraphWalker import org.openjdk.jol.info.GraphVisitor +import org.openjdk.jmh.infra.IterationParams +import org.openjdk.jmh.runner.IterationType /** Utilities for the [[OpenHashMapBenchmark]]. * @@ -61,16 +63,21 @@ private object OpenHashMapBenchmark { } @Setup(Level.Invocation) - def setup { + def setup(params: IterationParams) { for (i <- 0 until maps.length) maps(i) = new OpenHashMap[Int,Int](size) - operations += mapEntries - System.gc() // clean up after last invocation + + if (params.getType == IterationType.MEASUREMENT) { + operations += mapEntries + System.gc() // clean up after last invocation + } } @TearDown(Level.Iteration) - def iterationTeardown { - // limit to smaller cases to avoid OOM - memory = if (mapEntries <= 1000000) GraphLayout.parseInstance(maps(0), maps.tail).totalSize else 0 + def iterationTeardown(params: IterationParams) { + if (params.getType == IterationType.MEASUREMENT) { + // limit to smaller cases to avoid OOM + memory = if (mapEntries <= 1000000) GraphLayout.parseInstance(maps(0), maps.tail).totalSize else 0 + } } } @@ -154,7 +161,7 @@ private object OpenHashMapBenchmark { /** Benchmark for the library's [[OpenHashMap]]. */ @BenchmarkMode(Array(Mode.AverageTime)) -@Fork(10) +@Fork(6) @Threads(1) @Warmup(iterations = 20) @Measurement(iterations = 6) From 00cbba19710b23f856f6c4a29e40a82a4ee364a9 Mon Sep 17 00:00:00 2001 From: Performant Data LLC Date: Mon, 2 May 2016 22:47:58 -0700 Subject: [PATCH 0016/2793] Address JMH benchmark reviewer's issues. Besides tweaks to the documentation, this tests smaller (25-element) maps, and rewrites OpenHashMapRunner in more idiomatic Scala. --- test/benchmarks/README.md | 36 ++++---- .../mutable/OpenHashMapBenchmark.scala | 2 +- .../mutable/OpenHashMapRunner.scala | 82 +++++++++---------- 3 files changed, 61 insertions(+), 59 deletions(-) diff --git a/test/benchmarks/README.md b/test/benchmarks/README.md index 07e72f09a162..370d610bc4ab 100644 --- a/test/benchmarks/README.md +++ b/test/benchmarks/README.md @@ -3,9 +3,11 @@ This directory is a standalone SBT project, within the Scala project, that makes use of the [SBT plugin](https://github.com/ktoso/sbt-jmh) for [JMH](http://openjdk.java.net/projects/code-tools/jmh/). -## running a benchmark +## Running a benchmark -The benchmarks require first building Scala into `../../build/pack`, using Ant. +The benchmarks require first building Scala into `../../build/pack` with `ant`. +If you want to build with `sbt dist/mkPack` instead, +you'll need to change `scalaHome` in this project. You'll then need to know the fully-qualified name of the benchmark runner class. The benchmarking classes are organized under `src/main/scala`, @@ -23,14 +25,14 @@ The JMH results can be found under `target/jmh-results/`. 
`target` gets deleted on an SBT `clean`, so you should copy these files out of `target` if you wish to preserve them. -## creating a benchmark and runner +## Creating a benchmark and runner The benchmarking classes use the same package hierarchy as the classes that they test in order to make it easy to expose, in package scope, members of the class under test, should that be necessary for benchmarking. There are two types of classes in the source directory: -those suffixed "`Benchmark`" and those suffixed "`Runner`". +those suffixed `Benchmark` and those suffixed `Runner`. The former are benchmarks that can be run directly using `jmh:run`; however, they are normally run from a corresponding class of the latter type, which is run using `jmh:runMain` (as described above). @@ -41,39 +43,45 @@ The `benchmark.JmhRunner` trait should be woven into any runner class, for the s This includes creating output files in a subdirectory of `target/jmh-results` derived from the fully-qualified package name of the `Runner` class. -## some useful HotSpot options +## Some useful HotSpot options Adding these to the `jmh:run` or `jmh:runMain` command line may help if you're using the HotSpot (Oracle, OpenJDK) compiler. They require prefixing with `-jvmArgs`. See [the Java documentation](http://docs.oracle.com/javase/8/docs/technotes/tools/unix/java.html) for more options. -### viewing JIT compilation events +### Viewing JIT compilation events Adding `-XX:+PrintCompilation` shows when Java methods are being compiled or deoptimized. At the most basic level, these messages will tell you whether the code that you're measuring is still being tuned, so that you know whether you're running enough warm-up iterations. See [Kris Mok's notes](https://gist.github.com/rednaxelafx/1165804#file-notes-md) to interpret the output in detail. -### consider GC events +### Consider GC events If you're not explicitly performing `System.gc()` calls outside of your benchmarking code, you should add the JVM option `-verbose:gc` to understand the effect that GCs may be having on your tests. -### "diagnostic" options +### "Diagnostic" options These require the `-XX:+UnlockDiagnosticVMOptions` JVM option. -#### viewing inlining events +#### Viewing inlining events Add `-XX:+PrintInlining`. -#### viewing the disassembled code +#### Viewing the disassembled code +If you're running OpenJDK or Oracle JVM, +you may need to install the disassembler library (`hsdis-amd64.so` for the `amd64` architecture). +In Debian, this is available in +the `libhsdis0-fcml` package. +For an Oracle (or other compatible) JVM not set up by your distribution, +you may also need to copy or link the disassembler library +to the `jre/lib/`_`architecture`_ directory inside your JVM installation directory. + To show the assembly code corresponding to the code generated by the JIT compiler for specific methods, add `-XX:CompileCommand=print,scala.collection.mutable.OpenHashMap::*`, for example, to show all of the methods in the `scala.collection.mutable.OpenHashMap` class. -If you're running OpenJDK, you may need to install the disassembler library (`hsdis-amd64.so` for the `amd64` architecture). -In Debian, this is available in the `libhsdis0-fcml` package. To show it for _all_ methods, add `-XX:+PrintAssembly`. (This is usually excessive.) 
-## useful reading +## Useful reading * [OpenJDK advice on microbenchmarks](https://wiki.openjdk.java.net/display/HotSpot/MicroBenchmarks) * Brian Goetz's "Java theory and practice" articles: * "[Dynamic compilation and performance measurement](http://www.ibm.com/developerworks/java/library/j-jtp12214/)" @@ -81,7 +89,7 @@ To show it for _all_ methods, add `-XX:+PrintAssembly`. * [Doug Lea's JSR 166 benchmarks](http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/test/loops/) * "[Measuring performance](http://docs.scala-lang.org/overviews/parallel-collections/performance.html)" of Scala parallel collections -## legacy frameworks +## Legacy frameworks An older version of the benchmarking framework is still present in this directory, in the following locations: diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala index 78e160a713c7..26e26b30654a 100644 --- a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala @@ -170,7 +170,7 @@ private object OpenHashMapBenchmark { class OpenHashMapBenchmark { import OpenHashMapBenchmark._ - @Param(Array("50", "100", "250", "1000", "2500", "10000", "25000", "100000", "250000", "1000000", "2500000", + @Param(Array("25", "50", "100", "250", "1000", "2500", "10000", "25000", "100000", "250000", "1000000", "2500000", "5000000", "7500000", "10000000", "25000000")) var size: Int = _ diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala index c139c55933b4..1a58b18ee979 100644 --- a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala @@ -14,6 +14,7 @@ import org.openjdk.jmh.runner.options.Options import benchmark.JmhRunner import org.openjdk.jmh.runner.options.OptionsBuilder import org.openjdk.jmh.runner.options.VerboseMode +import org.openjdk.jmh.results.Result /** Replacement JMH application that runs the [[OpenHashMap]] benchmark. * @@ -24,27 +25,35 @@ object OpenHashMapRunner extends JmhRunner { private[this] val outputFile = new File(outputDirectory, "OpenHashMap.dat") /** Qualifier to add to the name of a memory usage data set. */ - private[this] val memoryDatasetQualifer = " memory" + private[this] val memoryDatasetQualifier = "-memory" - /** Name of the JMH parameter for the number of map entries per invocation. */ - private[this] val sizeParamName = "size" + private[this] implicit class MyRunResult(r: RunResult) { + /** Return the dataset label. */ + def label = r.getPrimaryResult.getLabel - /** Name of the JMH auxiliary counter that collects operation counts. */ - private[this] val operationsAuxCounterName = "operations" + /** Return the value of the JMH parameter for the number of map entries per invocation. */ + def size: String = r.getParams.getParam("size") - /** Name of the JMH auxiliary counter that collects memory usage. */ - private[this] val memoryAuxCounterName = "memory" + /** Return the operation counts. */ + def operations = Option(r.getSecondaryResults.get("operations")) + + /** Return the number of map entries. */ + def entries = r.getSecondaryResults.get("mapEntries") + + /** Return the memory usage. 
*/ + def memory = Option(r.getSecondaryResults.get("memory")) + } + + /** Return the statistics of the given result as a string. */ + private[this] def stats(r: Result[_]) = r.getScore + " " + r.getStatistics.getStandardDeviation - /** Name of the JMH auxiliary counter that collects the number of map entries. */ - private[this] val entriesAuxCounterName = "mapEntries" def main(args: Array[String]) { import scala.collection.JavaConversions._ import scala.language.existentials val opts = new CommandLineOptions(args: _*) - var builder = new OptionsBuilder().parent(opts) - .jvmArgsPrepend("-Xmx6000m") + var builder = new OptionsBuilder().parent(opts).jvmArgsPrepend("-Xmx6000m") if (!opts.verbosity.hasValue) builder = builder.verbosity(VerboseMode.SILENT) val results = new Runner(builder.build).run() @@ -55,18 +64,17 @@ object OpenHashMapRunner extends JmhRunner { val datasetByName = Map.empty[String, Set[RunResult]] /** Ordering for the results within a data set. Orders by increasing number of map entries. */ - val ordering = Ordering.by[RunResult, Int](_.getParams.getParam(sizeParamName).toInt) + val ordering = Ordering.by[RunResult, Int](_.size.toInt) - def addToDataset(result: RunResult, key: String): Unit = - datasetByName.get(key) - .getOrElse({ val d = SortedSet.empty(ordering); datasetByName.put(key, d); d }) += result + def addToDataset(key: String, result: RunResult): Unit = + datasetByName.getOrElseUpdate(key, SortedSet.empty(ordering)) += result - results.foreach { result: RunResult ⇒ - addToDataset(result, result.getPrimaryResult.getLabel) + results.foreach { result => + addToDataset(result.label, result) // Create another data set for trials that track memory usage - if (result.getSecondaryResults.containsKey(memoryAuxCounterName)) - addToDataset(result, result.getPrimaryResult.getLabel + memoryDatasetQualifer) + if (result.memory.isDefined) + addToDataset(result.label + memoryDatasetQualifier, result) } //TODO Write out test parameters @@ -75,31 +83,17 @@ object OpenHashMapRunner extends JmhRunner { val f = new PrintWriter(outputFile, "UTF-8") try { - datasetByName.foreach(_ match { case (label: String, dataset: Iterable[RunResult]) ⇒ { - f.format("# [%s]\n", label) - - val isMemoryUsageDataset = label.contains(memoryDatasetQualifer) - dataset.foreach { result ⇒ - val size = result.getParams.getParam(sizeParamName) - val secondaryResults = result.getSecondaryResults - if (isMemoryUsageDataset) { - val memoryResult = secondaryResults.get(memoryAuxCounterName) - val entriesResult = secondaryResults.get(entriesAuxCounterName) - f.format("%s %f %f %f %f\n", size, - Double.box(entriesResult.getScore), Double.box(entriesResult.getStatistics.getStandardDeviation), - Double.box(memoryResult.getScore), Double.box(memoryResult.getStatistics.getStandardDeviation)) - } - else { - if (secondaryResults.containsKey(operationsAuxCounterName)) { - val operationsResult = secondaryResults.get(operationsAuxCounterName) - f.format("%s %f %f\n", size, - Double.box(operationsResult.getScore), Double.box(operationsResult.getStatistics.getStandardDeviation)) - } else { - val primary = result.getPrimaryResult - f.format("%s %f %f\n", size, - Double.box(primary.getScore), Double.box(primary.getStatistics.getStandardDeviation)) - } - } + datasetByName.foreach(_ match { case (label: String, dataset: Iterable[RunResult]) => { + f.println(s"# [$label]") + + val isMemoryUsageDataset = label.endsWith(memoryDatasetQualifier) + dataset.foreach { r => + f.println(r.size + " " + ( + if (isMemoryUsageDataset) + 
stats(r.entries) + " " + stats(r.memory.get) + else + stats(r.operations getOrElse r.getPrimaryResult) + )) } f.println(); f.println() // data set separator From f952a812340db7bc11f45b45f46e4b8ce7d6fb49 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 4 May 2016 11:34:54 -0700 Subject: [PATCH 0017/2793] SI-9045 Refactor to abuse of match Collapse conditionals into match for legible. Yes, guards have scary eval order. --- .../scala/tools/nsc/typechecker/Typers.scala | 64 ++++++++----------- 1 file changed, 28 insertions(+), 36 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 8f5c4b9f6db4..65c6e09fd3ef 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2992,43 +2992,35 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def includesTargetPos(tree: Tree) = tree.pos.isRange && context.unit.exists && (tree.pos includes context.unit.targetPos) val localTarget = stats exists includesTargetPos - def typedStat(stat: Tree): Tree = { - if (context.owner.isRefinementClass && !treeInfo.isDeclarationOrTypeDef(stat)) - OnlyDeclarationsError(stat) - else - stat match { - case imp @ Import(_, _) => - imp.symbol.initialize - if (!imp.symbol.isError) { - context = context.make(imp) - typedImport(imp) - } else EmptyTree - case _ => - if (localTarget && !includesTargetPos(stat)) { - // skip typechecking of statements in a sequence where some other statement includes - // the targetposition - stat - } else { - val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) { - this - } else newTyper(context.make(stat, exprOwner)) - // XXX this creates a spurious dead code warning if an exception is thrown - // in a constructor, even if it is the only thing in the constructor. - val result = checkDead(localTyper.typedByValueExpr(stat)) - - if (treeInfo.isSelfOrSuperConstrCall(result)) { - context.inConstructorSuffix = true - if (treeInfo.isSelfConstrCall(result) && result.symbol.pos.pointOrElse(0) >= exprOwner.enclMethod.pos.pointOrElse(0)) - ConstructorsOrderError(stat) - } - - if (!isPastTyper && treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos, - "a pure expression does nothing in statement position; " + - "you may be omitting necessary parentheses" - ) - result - } + def typedStat(stat: Tree): Tree = stat match { + case s if context.owner.isRefinementClass && !treeInfo.isDeclarationOrTypeDef(s) => OnlyDeclarationsError(s) + case imp @ Import(_, _) => + imp.symbol.initialize + if (!imp.symbol.isError) { + context = context.make(imp) + typedImport(imp) + } else EmptyTree + // skip typechecking of statements in a sequence where some other statement includes the targetposition + case s if localTarget && !includesTargetPos(s) => s + case _ => + val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) { + this + } else newTyper(context.make(stat, exprOwner)) + // XXX this creates a spurious dead code warning if an exception is thrown + // in a constructor, even if it is the only thing in the constructor. 
+ val result = checkDead(localTyper.typedByValueExpr(stat)) + + if (treeInfo.isSelfOrSuperConstrCall(result)) { + context.inConstructorSuffix = true + if (treeInfo.isSelfConstrCall(result) && result.symbol.pos.pointOrElse(0) >= exprOwner.enclMethod.pos.pointOrElse(0)) + ConstructorsOrderError(stat) } + + if (!isPastTyper && treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos, + "a pure expression does nothing in statement position; " + + "you may be omitting necessary parentheses" + ) + result } /* 'accessor' and 'accessed' are so similar it becomes very difficult to From 6379b70d952cf0eea96d205e14a291b441f9cd45 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 4 May 2016 12:44:40 -0700 Subject: [PATCH 0018/2793] SI-9045 Error on recursive ctor If the constructor invokes itself, say so. --- .../tools/nsc/typechecker/ContextErrors.scala | 5 +++++ .../scala/tools/nsc/typechecker/Typers.scala | 17 +++++++++-------- test/files/neg/constrs.check | 2 +- test/files/neg/t4460a.check | 2 +- test/files/neg/t4460b.check | 2 +- test/files/neg/t9045.check | 7 +++++++ test/files/neg/t9045.scala | 8 ++++++++ 7 files changed, 32 insertions(+), 11 deletions(-) create mode 100644 test/files/neg/t9045.check create mode 100644 test/files/neg/t9045.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index ccdff5c9a1ef..e190b57017b7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -469,6 +469,11 @@ trait ContextErrors { setError(tree) } + def ConstructorRecursesError(tree: Tree) = { + issueNormalTypeError(tree, "constructor invokes itself") + setError(tree) + } + def OnlyDeclarationsError(tree: Tree) = { issueNormalTypeError(tree, "only declarations allowed here") setError(tree) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 65c6e09fd3ef..329ce8c23b46 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3003,22 +3003,23 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // skip typechecking of statements in a sequence where some other statement includes the targetposition case s if localTarget && !includesTargetPos(s) => s case _ => - val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) { - this - } else newTyper(context.make(stat, exprOwner)) + val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) this + else newTyper(context.make(stat, exprOwner)) // XXX this creates a spurious dead code warning if an exception is thrown // in a constructor, even if it is the only thing in the constructor. 
val result = checkDead(localTyper.typedByValueExpr(stat)) if (treeInfo.isSelfOrSuperConstrCall(result)) { context.inConstructorSuffix = true - if (treeInfo.isSelfConstrCall(result) && result.symbol.pos.pointOrElse(0) >= exprOwner.enclMethod.pos.pointOrElse(0)) - ConstructorsOrderError(stat) + if (treeInfo.isSelfConstrCall(result)) { + if (result.symbol == exprOwner.enclMethod) + ConstructorRecursesError(stat) + else if (result.symbol.pos.pointOrElse(0) >= exprOwner.enclMethod.pos.pointOrElse(0)) + ConstructorsOrderError(stat) + } } - if (!isPastTyper && treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos, - "a pure expression does nothing in statement position; " + - "you may be omitting necessary parentheses" + "a pure expression does nothing in statement position; you may be omitting necessary parentheses" ) result } diff --git a/test/files/neg/constrs.check b/test/files/neg/constrs.check index 4f4a12bc13ed..8a5bd97ae3ab 100644 --- a/test/files/neg/constrs.check +++ b/test/files/neg/constrs.check @@ -7,7 +7,7 @@ constrs.scala:6: error: value u is not a member of object test constrs.scala:10: error: called constructor's definition must precede calling constructor's definition def this() = this("abc") ^ -constrs.scala:12: error: called constructor's definition must precede calling constructor's definition +constrs.scala:12: error: constructor invokes itself def this(x: Boolean) = this(x) ^ constrs.scala:16: error: type mismatch; diff --git a/test/files/neg/t4460a.check b/test/files/neg/t4460a.check index b711e7acb184..7a7618a11403 100644 --- a/test/files/neg/t4460a.check +++ b/test/files/neg/t4460a.check @@ -1,4 +1,4 @@ -t4460a.scala:6: error: called constructor's definition must precede calling constructor's definition +t4460a.scala:6: error: constructor invokes itself def this() = this() // was binding to Predef. !! ^ one error found diff --git a/test/files/neg/t4460b.check b/test/files/neg/t4460b.check index f0e703fd104b..9a621dbd5cda 100644 --- a/test/files/neg/t4460b.check +++ b/test/files/neg/t4460b.check @@ -1,4 +1,4 @@ -t4460b.scala:7: error: called constructor's definition must precede calling constructor's definition +t4460b.scala:7: error: constructor invokes itself def this() = this() // was binding to Predef. !! ^ one error found diff --git a/test/files/neg/t9045.check b/test/files/neg/t9045.check new file mode 100644 index 000000000000..07d0e2dd7464 --- /dev/null +++ b/test/files/neg/t9045.check @@ -0,0 +1,7 @@ +t9045.scala:3: error: constructor invokes itself + def this(axes: Array[Int]) = this(axes) + ^ +t9045.scala:6: error: called constructor's definition must precede calling constructor's definition + def this(d: Double) = this(d.toLong) + ^ +two errors found diff --git a/test/files/neg/t9045.scala b/test/files/neg/t9045.scala new file mode 100644 index 000000000000..e6710ab32479 --- /dev/null +++ b/test/files/neg/t9045.scala @@ -0,0 +1,8 @@ + +case class AffineImageShape(axes: Seq[Int]) { + def this(axes: Array[Int]) = this(axes) +} +class X(i: Int) { + def this(d: Double) = this(d.toLong) + def this(n: Long) = this(n.toInt) +} From 486821b845ccaa0d02dd402fb3532d6d82055015 Mon Sep 17 00:00:00 2001 From: Performant Data LLC Date: Tue, 3 May 2016 10:52:28 -0700 Subject: [PATCH 0019/2793] Enable full compiler optimizations in JMH benchmarking. 
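
This turns on inlining across the whole classpath for the benchmark build. The relevant `build.sbt`
settings end up roughly as below (an excerpt assembled from the diffs in this series, shown here only
for convenience):

```
scalaHome     := Some(file("../../build/pack"))
scalaVersion  := "2.11.8"
// -Yopt:l:classpath enables the full optimizer, including inlining of code from the classpath
scalacOptions ++= Seq("-feature", "-Yopt:l:classpath")
```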
--- test/benchmarks/build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/benchmarks/build.sbt b/test/benchmarks/build.sbt index 2959e4986ad2..4806ecdde80c 100644 --- a/test/benchmarks/build.sbt +++ b/test/benchmarks/build.sbt @@ -1,6 +1,6 @@ scalaHome := Some(file("../../build/pack")) scalaVersion := "2.11.8" -scalacOptions += "-feature" +scalacOptions ++= Seq("-feature", "-Yopt:l:classpath") lazy val root = (project in file(".")). enablePlugins(JmhPlugin). From 5faad77ccebb41e9674b103d499de927c0dba662 Mon Sep 17 00:00:00 2001 From: Miles Sabin Date: Fri, 6 May 2016 12:39:13 +0100 Subject: [PATCH 0020/2793] Added pos test with multiple cases; added neg tests. --- test/files/neg/hkgadt.check | 31 +++++++++++++++++++++++++++++++ test/files/neg/hkgadt.scala | 35 +++++++++++++++++++++++++++++++++++ test/files/pos/hkgadt.scala | 25 +++++++++++++++++++++---- 3 files changed, 87 insertions(+), 4 deletions(-) create mode 100644 test/files/neg/hkgadt.check create mode 100644 test/files/neg/hkgadt.scala diff --git a/test/files/neg/hkgadt.check b/test/files/neg/hkgadt.check new file mode 100644 index 000000000000..ef302a9abf38 --- /dev/null +++ b/test/files/neg/hkgadt.check @@ -0,0 +1,31 @@ +hkgadt.scala:7: error: type mismatch; + found : scala.collection.immutable.Set[Int] + required: F[Int] + case Bar() => Set(1) + ^ +hkgadt.scala:13: error: type mismatch; + found : Boolean(true) + required: A + case Bar1() => true + ^ +hkgadt.scala:24: error: type mismatch; + found : scala.collection.immutable.Set[Int] + required: F[Int] + case Bar() => Set(1) + ^ +hkgadt.scala:25: error: type mismatch; + found : List[Int] + required: F[Int] + case Baz() => List(1) + ^ +hkgadt.scala:32: error: type mismatch; + found : Boolean(true) + required: A + case Bar1() => true + ^ +hkgadt.scala:33: error: type mismatch; + found : Int(1) + required: A + case Baz1() => 1 + ^ +6 errors found diff --git a/test/files/neg/hkgadt.scala b/test/files/neg/hkgadt.scala new file mode 100644 index 000000000000..0107d2bdde31 --- /dev/null +++ b/test/files/neg/hkgadt.scala @@ -0,0 +1,35 @@ +object HKGADT { + sealed trait Foo[F[_]] + final case class Bar() extends Foo[List] + + def frob[F[_]](foo: Foo[F]): F[Int] = + foo match { + case Bar() => Set(1) + } + + sealed trait Foo1[F] + final case class Bar1() extends Foo1[Int] + def frob1[A](foo: Foo1[A]): A = foo match { + case Bar1() => true + } +} + +object HKGADT2 { + sealed trait Foo[F[_]] + final case class Bar() extends Foo[List] + final case class Baz() extends Foo[Set] + + def frob[F[_]](foo: Foo[F]): F[Int] = + foo match { + case Bar() => Set(1) + case Baz() => List(1) + } + + sealed trait Foo1[F] + final case class Bar1() extends Foo1[Int] + final case class Baz1() extends Foo1[Boolean] + def frob1[A](foo: Foo1[A]): A = foo match { + case Bar1() => true + case Baz1() => 1 + } +} diff --git a/test/files/pos/hkgadt.scala b/test/files/pos/hkgadt.scala index 0f3739f4d400..5719c752cdef 100644 --- a/test/files/pos/hkgadt.scala +++ b/test/files/pos/hkgadt.scala @@ -1,18 +1,35 @@ -package test - object HKGADT { sealed trait Foo[F[_]] final case class Bar() extends Foo[List] def frob[F[_]](foo: Foo[F]): F[Int] = foo match { - case Bar() => - List(1) + case Bar() => List(1) + } + + sealed trait Foo1[F] + final case class Bar1() extends Foo1[Int] + def frob1[A](foo: Foo1[A]): A = foo match { + case Bar1() => 1 + } +} + +object HKGADT2 { + sealed trait Foo[F[_]] + final case class Bar() extends Foo[List] + final case class Baz() extends Foo[Set] + + def 
frob[F[_]](foo: Foo[F]): F[Int] = + foo match { + case Bar() => List(1) + case Baz() => Set(1) } sealed trait Foo1[F] final case class Bar1() extends Foo1[Int] + final case class Baz1() extends Foo1[Boolean] def frob1[A](foo: Foo1[A]): A = foo match { case Bar1() => 1 + case Baz1() => true } } From b58634e9f95625a44c427d352d580ed99b3f28da Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Tue, 5 Apr 2016 11:45:53 +0200 Subject: [PATCH 0021/2793] Improves the test cases for the scala-concurrent-tck --- test/files/jvm/scala-concurrent-tck.check | 2 +- test/files/jvm/scala-concurrent-tck.scala | 31 +++++++++++++++++++++++ 2 files changed, 32 insertions(+), 1 deletion(-) diff --git a/test/files/jvm/scala-concurrent-tck.check b/test/files/jvm/scala-concurrent-tck.check index 9aef07d1e54a..8aec46e5d636 100644 --- a/test/files/jvm/scala-concurrent-tck.check +++ b/test/files/jvm/scala-concurrent-tck.check @@ -1 +1 @@ -warning: there were 73 deprecation warnings; re-run with -deprecation for details +warning: there were 75 deprecation warnings; re-run with -deprecation for details diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala index 8069028cf51f..7197c1d85394 100644 --- a/test/files/jvm/scala-concurrent-tck.scala +++ b/test/files/jvm/scala-concurrent-tck.scala @@ -90,6 +90,25 @@ trait FutureCallbacks extends TestBase { promise.success(-1) } + def stressTestNumberofCallbacks(): Unit = once { + done => + val promise = Promise[Unit] + val otherPromise = Promise[Unit] + def attachMeaninglessCallbacksTo(f: Future[Any]): Unit = (1 to 1000).foreach(_ => f.onComplete(_ => ())) + attachMeaninglessCallbacksTo(promise.future) + val future = promise.future.flatMap { _ => + attachMeaninglessCallbacksTo(otherPromise.future) + otherPromise.future + } + val numbers = new java.util.concurrent.ConcurrentHashMap[Int, Unit]() + (0 to 10000) foreach { x => numbers.put(x, ()) } + Future.sequence((0 to 10000) map { x => future.andThen({ case _ => numbers.remove(x) }) }) onComplete { + _ => done(numbers.isEmpty) + } + promise.success(()) + otherPromise.success(()) + } + testOnSuccess() testOnSuccessWhenCompleted() testOnSuccessWhenFailed() @@ -100,6 +119,7 @@ trait FutureCallbacks extends TestBase { //testOnFailureWhenSpecialThrowable(7, new InterruptedException) testThatNestedCallbacksDoNotYieldStackOverflow() testOnFailureWhenTimeoutException() + stressTestNumberofCallbacks() } @@ -283,6 +303,16 @@ def testTransformFailure(): Unit = once { g onFailure { case t => done(t.getMessage() == "expected") } } + def testFlatMapDelayed(): Unit = once { + done => + val f = Future { 5 } + val p = Promise[Int] + val g = f flatMap { _ => p.future } + g onSuccess { case x => done(x == 10) } + g onFailure { case _ => done(false) } + p.success(10) + } + def testFilterSuccess(): Unit = once { done => val f = Future { 4 } @@ -458,6 +488,7 @@ def testTransformFailure(): Unit = once { testMapFailure() testFlatMapSuccess() testFlatMapFailure() + testFlatMapDelayed() testFilterSuccess() testFilterFailure() testCollectSuccess() From 9e30bee0c9363f6cf36a7b65ddbaaa225b57d6a9 Mon Sep 17 00:00:00 2001 From: Felix Mulder Date: Fri, 13 May 2016 15:35:42 +0200 Subject: [PATCH 0022/2793] Add summary reporting to Scaladoc (#5063) --- src/scaladoc/scala/tools/ant/Scaladoc.scala | 4 +- src/scaladoc/scala/tools/nsc/ScalaDoc.scala | 51 ++++++++++++++++--- .../scala/tools/nsc/doc/DocFactory.scala | 16 +++++- .../scala/tools/nsc/doc/html/Doclet.scala | 13 +++-- .../tools/nsc/doc/html/HtmlFactory.scala | 5 +- 
.../scala/tools/nsc/doc/html/HtmlPage.scala | 3 +- .../tools/nsc/doc/html/page/Entity.scala | 12 +++-- 7 files changed, 81 insertions(+), 23 deletions(-) diff --git a/src/scaladoc/scala/tools/ant/Scaladoc.scala b/src/scaladoc/scala/tools/ant/Scaladoc.scala index b38aadd3283a..63d3b4ce2793 100644 --- a/src/scaladoc/scala/tools/ant/Scaladoc.scala +++ b/src/scaladoc/scala/tools/ant/Scaladoc.scala @@ -14,8 +14,8 @@ import org.apache.tools.ant.Project import org.apache.tools.ant.types.{Path, Reference} import org.apache.tools.ant.util.{FileUtils, GlobPatternMapper} +import scala.tools.nsc.ScalaDocReporter import scala.tools.nsc.doc.Settings -import scala.tools.nsc.reporters.ConsoleReporter /** An Ant task to document Scala code. * @@ -666,7 +666,7 @@ class Scaladoc extends ScalaMatchingTask { /** Performs the compilation. */ override def execute() = { val (docSettings, sourceFiles) = initialize - val reporter = new ConsoleReporter(docSettings) + val reporter = new ScalaDocReporter(docSettings) try { val docProcessor = new scala.tools.nsc.doc.DocFactory(reporter, docSettings) docProcessor.document(sourceFiles.map (_.toString)) diff --git a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala index bd00c27f7bb4..e266f7beea03 100644 --- a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala +++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala @@ -8,7 +8,8 @@ package scala.tools.nsc import scala.tools.nsc.doc.DocFactory import scala.tools.nsc.reporters.ConsoleReporter -import scala.reflect.internal.util.FakePos +import scala.reflect.internal.Reporter +import scala.reflect.internal.util.{ FakePos, NoPosition, Position } /** The main class for scaladoc, a front-end for the Scala compiler * that generates documentation from source files. @@ -38,23 +39,43 @@ class ScalaDoc { reporter.echo(command.usageMsg) else try { new DocFactory(reporter, docSettings) document command.files } - catch { - case ex @ FatalError(msg) => - if (docSettings.debug.value) ex.printStackTrace() - reporter.error(null, "fatal error: " + msg) - } - finally reporter.printSummary() + catch { + case ex @ FatalError(msg) => + if (docSettings.debug.value) ex.printStackTrace() + reporter.error(null, "fatal error: " + msg) + } + finally reporter.printSummary() !reporter.reallyHasErrors } } +/** The Scaladoc reporter adds summary messages to the `ConsoleReporter` + * + * Use the `summaryX` methods to add unique summarizing message to the end of + * the run. 
+ */ class ScalaDocReporter(settings: Settings) extends ConsoleReporter(settings) { + import scala.collection.mutable.LinkedHashMap // need to do sometimes lie so that the Global instance doesn't // trash all the symbols just because there was an error override def hasErrors = false def reallyHasErrors = super.hasErrors + + private[this] val delayedMessages: LinkedHashMap[(Position, String), () => Unit] = + LinkedHashMap.empty + + /** Eliminates messages if both `pos` and `msg` are equal to existing element */ + def addDelayedMessage(pos: Position, msg: String, print: () => Unit): Unit = + delayedMessages += ((pos, msg) -> print) + + def printDelayedMessages(): Unit = delayedMessages.values.foreach(_.apply()) + + override def printSummary(): Unit = { + printDelayedMessages() + super.printSummary() + } } object ScalaDoc extends ScalaDoc { @@ -70,4 +91,20 @@ object ScalaDoc extends ScalaDoc { def main(args: Array[String]): Unit = sys exit { if (process(args)) 0 else 1 } + + implicit class SummaryReporter(val rep: Reporter) extends AnyVal { + /** Adds print lambda to ScalaDocReporter, executes it on other reporter */ + private[this] def summaryMessage(pos: Position, msg: String, print: () => Unit): Unit = rep match { + case r: ScalaDocReporter => r.addDelayedMessage(pos, msg, print) + case _ => print() + } + + def summaryEcho(pos: Position, msg: String): Unit = summaryMessage(pos, msg, () => rep.echo(pos, msg)) + def summaryError(pos: Position, msg: String): Unit = summaryMessage(pos, msg, () => rep.error(pos, msg)) + def summaryWarning(pos: Position, msg: String): Unit = summaryMessage(pos, msg, () => rep.warning(pos, msg)) + + def summaryEcho(msg: String): Unit = summaryEcho(NoPosition, msg) + def summaryError(msg: String): Unit = summaryError(NoPosition, msg) + def summaryWarning(msg: String): Unit = summaryWarning(NoPosition, msg) + } } diff --git a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala index fb6c39d7e3c8..8c646be9c676 100644 --- a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala @@ -6,8 +6,8 @@ package scala.tools.nsc package doc -import scala.util.control.ControlThrowable import reporters.Reporter +import scala.util.control.ControlThrowable import scala.reflect.internal.util.BatchSourceFile /** A documentation processor controls the process of generating Scala @@ -105,7 +105,19 @@ class DocFactory(val reporter: Reporter, val settings: doc.Settings) { processor def generate() = { import doclet._ val docletClass = Class.forName(settings.docgenerator.value) // default is html.Doclet - val docletInstance = docletClass.newInstance().asInstanceOf[Generator] + val docletInstance = + docletClass + .getConstructors + .find { constr => + constr.getParameterTypes.length == 1 && + constr.getParameterTypes.apply(0) == classOf[scala.reflect.internal.Reporter] + } + .map(_.newInstance(reporter)) + .getOrElse{ + reporter.warning(null, "Doclets should be created with the Reporter constructor, otherwise logging reporters will not be shared by the creating parent") + docletClass.newInstance() + } + .asInstanceOf[Generator] docletInstance match { case universer: Universer => diff --git a/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala index 541266e4ccd2..73a854e99503 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala @@ -7,14 +7,19 @@ package scala.tools.nsc 
package doc package html +import scala.reflect.internal.Reporter import doclet._ /** The default doclet used by the scaladoc command line tool * when no user-provided doclet is provided. */ -class Doclet extends Generator with Universer { +class Doclet(reporter: Reporter) extends Generator with Universer { - def generateImpl() { - new html.HtmlFactory(universe, new ScalaDocReporter(universe.settings)).generate() - } + @deprecated("Doclets should be created with the Reporter constructor. Otherwise logging reporters will not be shared by the creating parent", "2.12.0") + def this() = this(null) + def generateImpl() = + new html.HtmlFactory( + universe, + if (reporter != null) reporter else new ScalaDocReporter(universe.settings) + ).generate() } diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala index 88b84be65e70..62620057cb80 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala @@ -12,12 +12,13 @@ import java.io.{ File => JFile } import io.{ Streamable, Directory } import scala.collection._ import page.diagram._ +import scala.reflect.internal.Reporter /** A class that can generate Scaladoc sites to some fixed root folder. * @author David Bernard * @author Gilles Dubochet */ -class HtmlFactory(val universe: doc.Universe, val reporter: ScalaDocReporter) { - import page.IndexScript +class HtmlFactory(val universe: doc.Universe, val reporter: Reporter) { + import page.{IndexScript, EntityPage} /** The character encoding to be used for generated Scaladoc sites. * This value is currently always UTF-8. */ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala index 0f37f86b3ea1..6ad51f4f7e58 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala @@ -13,6 +13,7 @@ import base._ import base.comment._ import model._ +import scala.reflect.internal.Reporter import scala.xml.NodeSeq import scala.xml.Elem import scala.xml.dtd.DocType @@ -27,7 +28,7 @@ abstract class HtmlPage extends Page { thisPage => protected def title: String /** ScalaDoc reporter for error handling */ - protected def reporter: ScalaDocReporter + protected def docletReporter: Reporter /** The page description */ protected def description: String = diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala index 836d1b4b7def..9dd2c2184d54 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala @@ -13,6 +13,7 @@ package page import base._ import base.comment._ +import scala.reflect.internal.Reporter import scala.collection.mutable import scala.xml.{NodeSeq, Text, UnprefixedAttribute} import scala.language.postfixOps @@ -22,10 +23,12 @@ import model.diagram._ import diagram._ trait EntityPage extends HtmlPage { + import ScalaDoc.SummaryReporter + def universe: doc.Universe def generator: DiagramGenerator def tpl: DocTemplateEntity - def reporter: ScalaDocReporter + def docletReporter: Reporter override val path = templateToPath(tpl) @@ -158,8 +161,7 @@ trait EntityPage extends HtmlPage { val version = universe.settings.docversion.value if (version.length > "XX.XX.XX-XXX".length) { - reporter.warning(null, - s"doc-version ($version) is too long to be displayed in the webview") + 
docletReporter.summaryWarning(s"doc-version ($version) was too long to be displayed in the webview, and will be left out. The max length is: XX.XX.XX-XXX") "" } else version } @@ -1124,12 +1126,12 @@ object EntityPage { uni: doc.Universe, gen: DiagramGenerator, docTpl: DocTemplateEntity, - rep: ScalaDocReporter + rep: Reporter ): EntityPage = new EntityPage { def universe = uni def generator = gen def tpl = docTpl - def reporter = rep + def docletReporter = rep } /* Vlad: Lesson learned the hard way: don't put any stateful code that references the model here, From a6d5eb507bbeac2055a224a15fd76e7f9425520b Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 4 May 2016 15:52:01 -0700 Subject: [PATCH 0023/2793] SI-8667 Improve too-many-args message Use removeNames to help diagnose the application. Supplement the error message with how many extra args and any other residual assignments that the user might have thought was a properly named arg. The error message is gradual: succinct for short arg lists, more verbose for longer applications. Very long arg lists are probably generated, so that message is the least colloquial. --- .../tools/nsc/typechecker/ContextErrors.scala | 31 +++++++- .../scala/tools/nsc/typechecker/Typers.scala | 10 ++- test/files/neg/eta-expand-star.check | 2 +- .../neg/macro-invalidusage-badargs.check | 2 +- test/files/neg/multi-array.check | 2 +- test/files/neg/protected-constructors.check | 2 +- test/files/neg/t1112.check | 2 +- test/files/neg/t1523.check | 2 +- test/files/neg/t6920.check | 2 +- test/files/neg/t7157.check | 24 +++--- test/files/neg/t8006.check | 2 +- test/files/neg/t8035-no-adapted-args.check | 2 +- test/files/neg/t8667.check | 79 +++++++++++++++++++ test/files/neg/t8667.scala | 35 ++++++++ test/files/neg/t876.check | 2 +- 15 files changed, 172 insertions(+), 27 deletions(-) create mode 100644 test/files/neg/t8667.check create mode 100644 test/files/neg/t8667.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index e190b57017b7..e1055144f8c7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -538,8 +538,33 @@ trait ContextErrors { def NamedAndDefaultArgumentsNotSupportedForMacros(tree: Tree, fun: Tree) = NormalTypeError(tree, "macro applications do not support named and/or default arguments") - def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree) = - NormalTypeError(tree, "too many arguments for "+treeSymTypeMsg(fun)) + def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree, expected: Int, supplied: Int, unknowns: List[Name]) = { + val msg = { + val badappl = { + val excess = supplied - expected + val target = treeSymTypeMsg(fun) + + if (expected == 0) s"no arguments allowed for nullary $target" + else if (excess < 3 && expected <= 5) s"too many arguments ($supplied) for $target" + else if (expected > 10) s"$supplied arguments but expected $expected for $target" + else { + val oneOf = + if (excess == 1) "one more argument" + else if (excess > 0) s"$excess more arguments" + else "too many arguments" + s"$oneOf than can be applied to $target" + } + } + val suppl = + unknowns.size match { + case 0 => "" + case 1 => s"\nNote that '${unknowns.head}' is not a parameter name of the invoked method." 
+ case _ => unknowns.mkString("\nNote that '", "', '", "' are not parameter names of the invoked method.") + } + s"${badappl}${suppl}" + } + NormalTypeError(tree, msg) + } // can it still happen? see test case neg/overloaded-unapply.scala def OverloadedUnapplyError(tree: Tree) = @@ -551,7 +576,7 @@ trait ContextErrors { def MultipleVarargError(tree: Tree) = NormalTypeError(tree, "when using named arguments, the vararg parameter has to be specified exactly once") - def ModuleUsingCompanionClassDefaultArgsErrror(tree: Tree) = + def ModuleUsingCompanionClassDefaultArgsError(tree: Tree) = NormalTypeError(tree, "module extending its companion class cannot use default constructor arguments") def NotEnoughArgsError(tree: Tree, fun: Tree, missing: List[Symbol]) = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 329ce8c23b46..1d24d8c232c0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3330,7 +3330,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // #2064 duplErrorTree(WrongNumberOfArgsError(tree, fun)) } else if (lencmp > 0) { - tryTupleApply orElse duplErrorTree(TooManyArgsNamesDefaultsError(tree, fun)) + tryTupleApply orElse duplErrorTree { + val (namelessArgs, _) = removeNames(Typer.this)(args, params) + val wrongs = (namelessArgs zip args) collect { + case (_: Assign, AssignOrNamedArg(Ident(name), _)) => name + } + TooManyArgsNamesDefaultsError(tree, fun, expected = formals.size, supplied = args.size, wrongs) + } } else if (lencmp == 0) { // we don't need defaults. names were used, so this application is transformed // into a block (@see transformNamedApplication in NamesDefaults) @@ -3394,7 +3400,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val lencmp2 = compareLengths(allArgs, formals) if (!sameLength(allArgs, args) && callToCompanionConstr(context, funSym)) { - duplErrorTree(ModuleUsingCompanionClassDefaultArgsErrror(tree)) + duplErrorTree(ModuleUsingCompanionClassDefaultArgsError(tree)) } else if (lencmp2 > 0) { removeNames(Typer.this)(allArgs, params) // #3818 duplErrTree diff --git a/test/files/neg/eta-expand-star.check b/test/files/neg/eta-expand-star.check index 6765d504fc55..f25e0a41ffad 100644 --- a/test/files/neg/eta-expand-star.check +++ b/test/files/neg/eta-expand-star.check @@ -1,4 +1,4 @@ -eta-expand-star.scala:6: error: too many arguments for method apply: (v1: Seq[T])Unit in trait Function1 +eta-expand-star.scala:6: error: too many arguments (2) for method apply: (v1: Seq[T])Unit in trait Function1 g(1, 2) ^ one error found diff --git a/test/files/neg/macro-invalidusage-badargs.check b/test/files/neg/macro-invalidusage-badargs.check index 19ac6528d37d..2f5f6d106423 100644 --- a/test/files/neg/macro-invalidusage-badargs.check +++ b/test/files/neg/macro-invalidusage-badargs.check @@ -13,7 +13,7 @@ Macros_Test_2.scala:8: error: not enough arguments for macro method foo: (x: Int Unspecified value parameter x. 
foo() ^ -Macros_Test_2.scala:9: error: too many arguments for macro method foo: (x: Int)Int +Macros_Test_2.scala:9: error: too many arguments (2) for macro method foo: (x: Int)Int foo(4, 2) ^ 5 errors found diff --git a/test/files/neg/multi-array.check b/test/files/neg/multi-array.check index 511caa126fc0..a71f9b864af4 100644 --- a/test/files/neg/multi-array.check +++ b/test/files/neg/multi-array.check @@ -1,4 +1,4 @@ -multi-array.scala:7: error: too many arguments for constructor Array: (_length: Int)Array[T] +multi-array.scala:7: error: too many arguments (2) for constructor Array: (_length: Int)Array[T] val a: Array[Int] = new Array(10, 10) ^ one error found diff --git a/test/files/neg/protected-constructors.check b/test/files/neg/protected-constructors.check index f44d7db9b982..5c880cb67206 100644 --- a/test/files/neg/protected-constructors.check +++ b/test/files/neg/protected-constructors.check @@ -1,4 +1,4 @@ -protected-constructors.scala:17: error: too many arguments for constructor Foo1: ()dingus.Foo1 +protected-constructors.scala:17: error: no arguments allowed for nullary constructor Foo1: ()dingus.Foo1 val foo1 = new Foo1("abc") ^ protected-constructors.scala:18: error: constructor Foo2 in class Foo2 cannot be accessed in object P diff --git a/test/files/neg/t1112.check b/test/files/neg/t1112.check index 5e3821b1535f..9c1254d17655 100644 --- a/test/files/neg/t1112.check +++ b/test/files/neg/t1112.check @@ -1,4 +1,4 @@ -t1112.scala:12: error: too many arguments for method call: (p: Int)(f: => Test.this.Type1)Unit +t1112.scala:12: error: too many arguments (2) for method call: (p: Int)(f: => Test.this.Type1)Unit call(0,() => System.out.println("here we are")) ^ one error found diff --git a/test/files/neg/t1523.check b/test/files/neg/t1523.check index d2489f2602a6..656845a4578d 100644 --- a/test/files/neg/t1523.check +++ b/test/files/neg/t1523.check @@ -1,4 +1,4 @@ -t1523.scala:4: error: too many arguments for method bug: (x: Any)Any +t1523.scala:4: error: 25 more arguments than can be applied to method bug: (x: Any)Any def go() = bug("a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a") ^ one error found diff --git a/test/files/neg/t6920.check b/test/files/neg/t6920.check index ee4eafb83ec3..4e33aca3bc9a 100644 --- a/test/files/neg/t6920.check +++ b/test/files/neg/t6920.check @@ -1,4 +1,4 @@ -t6920.scala:9: error: too many arguments for method applyDynamicNamed: (values: Seq[(String, Any)])String +t6920.scala:9: error: too many arguments (2) for method applyDynamicNamed: (values: Seq[(String, Any)])String error after rewriting to CompilerError.this.test.applyDynamicNamed("crushTheCompiler")(scala.Tuple2("a", 1), scala.Tuple2("b", 2)) possible cause: maybe a wrong Dynamic method signature? 
test.crushTheCompiler(a = 1, b = 2) diff --git a/test/files/neg/t7157.check b/test/files/neg/t7157.check index 3988460d4bb5..a043c5d40370 100644 --- a/test/files/neg/t7157.check +++ b/test/files/neg/t7157.check @@ -1,20 +1,20 @@ -Test_2.scala:5: error: too many arguments for macro method m1_0_0: ()Unit +Test_2.scala:5: error: no arguments allowed for nullary macro method m1_0_0: ()Unit m1_0_0(1) ^ -Test_2.scala:6: error: too many arguments for macro method m1_0_0: ()Unit +Test_2.scala:6: error: no arguments allowed for nullary macro method m1_0_0: ()Unit m1_0_0(1, 2) ^ -Test_2.scala:7: error: too many arguments for macro method m1_0_0: ()Unit +Test_2.scala:7: error: no arguments allowed for nullary macro method m1_0_0: ()Unit m1_0_0(1, 2, 3) ^ Test_2.scala:9: error: not enough arguments for macro method m1_1_1: (x: Int)Unit. Unspecified value parameter x. m1_1_1() ^ -Test_2.scala:11: error: too many arguments for macro method m1_1_1: (x: Int)Unit +Test_2.scala:11: error: too many arguments (2) for macro method m1_1_1: (x: Int)Unit m1_1_1(1, 2) ^ -Test_2.scala:12: error: too many arguments for macro method m1_1_1: (x: Int)Unit +Test_2.scala:12: error: too many arguments (3) for macro method m1_1_1: (x: Int)Unit m1_1_1(1, 2, 3) ^ Test_2.scala:14: error: not enough arguments for macro method m1_2_2: (x: Int, y: Int)Unit. @@ -25,7 +25,7 @@ Test_2.scala:15: error: not enough arguments for macro method m1_2_2: (x: Int, y Unspecified value parameter y. m1_2_2(1) ^ -Test_2.scala:17: error: too many arguments for macro method m1_2_2: (x: Int, y: Int)Unit +Test_2.scala:17: error: too many arguments (3) for macro method m1_2_2: (x: Int, y: Int)Unit m1_2_2(1, 2, 3) ^ Test_2.scala:24: error: not enough arguments for macro method m1_1_inf: (x: Int, y: Int*)Unit. @@ -40,23 +40,23 @@ Test_2.scala:30: error: not enough arguments for macro method m1_2_inf: (x: Int, Unspecified value parameters y, z. m1_2_inf(1) ^ -Test_2.scala:35: error: too many arguments for macro method m2_0_0: ()Unit +Test_2.scala:35: error: no arguments allowed for nullary macro method m2_0_0: ()Unit m2_0_0()(1) ^ -Test_2.scala:36: error: too many arguments for macro method m2_0_0: ()Unit +Test_2.scala:36: error: no arguments allowed for nullary macro method m2_0_0: ()Unit m2_0_0()(1, 2) ^ -Test_2.scala:37: error: too many arguments for macro method m2_0_0: ()Unit +Test_2.scala:37: error: no arguments allowed for nullary macro method m2_0_0: ()Unit m2_0_0()(1, 2, 3) ^ Test_2.scala:39: error: not enough arguments for macro method m2_1_1: (x: Int)Unit. Unspecified value parameter x. m2_1_1()() ^ -Test_2.scala:41: error: too many arguments for macro method m2_1_1: (x: Int)Unit +Test_2.scala:41: error: too many arguments (2) for macro method m2_1_1: (x: Int)Unit m2_1_1()(1, 2) ^ -Test_2.scala:42: error: too many arguments for macro method m2_1_1: (x: Int)Unit +Test_2.scala:42: error: too many arguments (3) for macro method m2_1_1: (x: Int)Unit m2_1_1()(1, 2, 3) ^ Test_2.scala:44: error: not enough arguments for macro method m2_2_2: (x: Int, y: Int)Unit. @@ -67,7 +67,7 @@ Test_2.scala:45: error: not enough arguments for macro method m2_2_2: (x: Int, y Unspecified value parameter y. m2_2_2()(1) ^ -Test_2.scala:47: error: too many arguments for macro method m2_2_2: (x: Int, y: Int)Unit +Test_2.scala:47: error: too many arguments (3) for macro method m2_2_2: (x: Int, y: Int)Unit m2_2_2()(1, 2, 3) ^ Test_2.scala:54: error: not enough arguments for macro method m2_1_inf: (x: Int, y: Int*)Unit. 
diff --git a/test/files/neg/t8006.check b/test/files/neg/t8006.check index fbac26e3ad9c..4e130670e4d4 100644 --- a/test/files/neg/t8006.check +++ b/test/files/neg/t8006.check @@ -1,4 +1,4 @@ -t8006.scala:3: error: too many arguments for method applyDynamicNamed: (value: (String, Any))String +t8006.scala:3: error: too many arguments (2) for method applyDynamicNamed: (value: (String, Any))String error after rewriting to X.this.d.applyDynamicNamed("meth")(scala.Tuple2("value1", 10), scala.Tuple2("value2", 100)) possible cause: maybe a wrong Dynamic method signature? d.meth(value1 = 10, value2 = 100) // two arguments here, but only one is allowed diff --git a/test/files/neg/t8035-no-adapted-args.check b/test/files/neg/t8035-no-adapted-args.check index 43637b2c1fc2..4b3cb39c769f 100644 --- a/test/files/neg/t8035-no-adapted-args.check +++ b/test/files/neg/t8035-no-adapted-args.check @@ -4,7 +4,7 @@ t8035-no-adapted-args.scala:4: warning: No automatic adaptation here: use explic after adaptation: Test.f((1, 2, 3): (Int, Int, Int)) f(1, 2, 3) ^ -t8035-no-adapted-args.scala:4: error: too many arguments for method f: (x: (Int, Int, Int))Int +t8035-no-adapted-args.scala:4: error: too many arguments (3) for method f: (x: (Int, Int, Int))Int f(1, 2, 3) ^ t8035-no-adapted-args.scala:5: warning: No automatic adaptation here: use explicit parentheses. diff --git a/test/files/neg/t8667.check b/test/files/neg/t8667.check new file mode 100644 index 000000000000..4c3bcb78c829 --- /dev/null +++ b/test/files/neg/t8667.check @@ -0,0 +1,79 @@ +t8667.scala:6: error: too many arguments (3) for constructor C: (a: Int, b: Int)C +Note that 'c' is not a parameter name of the invoked method. + def c2 = new C(a = 42, b = 17, c = 5) + ^ +t8667.scala:7: error: unknown parameter name: c + def c3 = new C(b = 42, a = 17, c = 5) + ^ +t8667.scala:7: error: too many arguments (3) for constructor C: (a: Int, b: Int)C + def c3 = new C(b = 42, a = 17, c = 5) + ^ +t8667.scala:8: error: positional after named argument. + def c4 = new C(b = 42, a = 17, 5) + ^ +t8667.scala:8: error: too many arguments (3) for constructor C: (a: Int, b: Int)C + def c4 = new C(b = 42, a = 17, 5) + ^ +t8667.scala:9: error: not found: value c + def c5 = new C(a = 42, c = 17) + ^ +t8667.scala:10: error: parameter 'b' is already specified at parameter position 2 +Note that 'c' is not a parameter name of the invoked method. + def c6 = new C(a = 42, c = 17, b = 5) + ^ +t8667.scala:10: error: too many arguments (3) for constructor C: (a: Int, b: Int)C +Note that 'c' is not a parameter name of the invoked method. + def c6 = new C(a = 42, c = 17, b = 5) + ^ +t8667.scala:11: error: too many arguments (3) for constructor C: (a: Int, b: Int)C +Note that 'c' is not a parameter name of the invoked method. + def c7 = new C(42, 17, c = 5) + ^ +t8667.scala:12: error: parameter 'b' is already specified at parameter position 2 + def c8 = new C(42, 17, b = 5) + ^ +t8667.scala:12: error: too many arguments (3) for constructor C: (a: Int, b: Int)C + def c8 = new C(42, 17, b = 5) + ^ +t8667.scala:13: error: parameter 'b' is already specified at parameter position 2 +Note that 'c' is not a parameter name of the invoked method. + def c9 = new C(a = 42, c = 17, d = 3, b = 5) + ^ +t8667.scala:13: error: too many arguments (4) for constructor C: (a: Int, b: Int)C +Note that 'c', 'd' are not parameter names of the invoked method. 
+ def c9 = new C(a = 42, c = 17, d = 3, b = 5) + ^ +t8667.scala:14: error: too many arguments (4) for constructor C: (a: Int, b: Int)C +Note that 'd', 'c' are not parameter names of the invoked method. + def c0 = new C(42, 17, d = 3, c = 5) + ^ +t8667.scala:24: error: no arguments allowed for nullary method f0: ()Int + f0(1) + ^ +t8667.scala:25: error: too many arguments (2) for method f1: (i: Int)Int + f1(1, 2) + ^ +t8667.scala:26: error: too many arguments (3) for method f1: (i: Int)Int + f1(1, 2, 3) + ^ +t8667.scala:27: error: 3 more arguments than can be applied to method f1: (i: Int)Int + f1(1, 2, 3, 4) + ^ +t8667.scala:28: error: 3 more arguments than can be applied to method f1: (i: Int)Int +Note that 'j' is not a parameter name of the invoked method. + f1(1, j = 2, 3, 4) + ^ +t8667.scala:29: error: 3 more arguments than can be applied to method f1: (i: Int)Int +Note that 'j', 'k' are not parameter names of the invoked method. + f1(1, j = 2, k = 3, 4) + ^ +t8667.scala:30: error: one more argument than can be applied to method f6: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int)Int + f6(1, 2, 3, 4, 5, 6, 7) + ^ +t8667.scala:31: error: 2 more arguments than can be applied to method f6: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int)Int + f6(1, 2, 3, 4, 5, 6, 7, 8) + ^ +t8667.scala:32: error: 15 arguments but expected 12 for method f12: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int, o: Int, p: Int, q: Int, r: Int, s: Int, t: Int)Int + f12(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15) + ^ +23 errors found diff --git a/test/files/neg/t8667.scala b/test/files/neg/t8667.scala new file mode 100644 index 000000000000..fe17eac84f6c --- /dev/null +++ b/test/files/neg/t8667.scala @@ -0,0 +1,35 @@ + +class C(a: Int, b: Int) + +trait T { + def c1 = new C(a = 42, b = 17) + def c2 = new C(a = 42, b = 17, c = 5) + def c3 = new C(b = 42, a = 17, c = 5) + def c4 = new C(b = 42, a = 17, 5) + def c5 = new C(a = 42, c = 17) + def c6 = new C(a = 42, c = 17, b = 5) + def c7 = new C(42, 17, c = 5) + def c8 = new C(42, 17, b = 5) + def c9 = new C(a = 42, c = 17, d = 3, b = 5) + def c0 = new C(42, 17, d = 3, c = 5) +} + +trait X { + def f0() = 42 + def f1(i: Int) = 42 + def f6(i: Int, j: Int, k: Int, l: Int, m: Int, n: Int) = 42 + def f12(i: Int, j: Int, k: Int, l: Int, m: Int, n: Int, o: Int, p: Int, q: Int, r: Int, s: Int, t: Int) = 42 + + def g() = { + f0(1) + f1(1, 2) + f1(1, 2, 3) + f1(1, 2, 3, 4) + f1(1, j = 2, 3, 4) + f1(1, j = 2, k = 3, 4) + f6(1, 2, 3, 4, 5, 6, 7) + f6(1, 2, 3, 4, 5, 6, 7, 8) + f12(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15) + () + } +} diff --git a/test/files/neg/t876.check b/test/files/neg/t876.check index 04c5c8f22e97..91dcbfd288e2 100644 --- a/test/files/neg/t876.check +++ b/test/files/neg/t876.check @@ -1,4 +1,4 @@ -t876.scala:25: error: too many arguments for method apply: (key: AssertionError.A)manager.B in class HashMap +t876.scala:25: error: too many arguments (2) for method apply: (key: AssertionError.A)manager.B in class HashMap assert(manager.map(A2) == List(manager.map(A2, A1))) ^ one error found From 40b42ae71779fb333259674b05f28de45219939d Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 11 May 2016 14:32:50 -0700 Subject: [PATCH 0024/2793] SI-8667 Caret at bad arg Pick the first excessive positional arg for the caret. Note that erroring on named args doesn't do the obvious thing in this regard. 
If `k` was removed from the signature, then `f(k=1, i=2, j=3)` doesn't tell us much about the wrong arg, because naming takes the `k=1` as an assignment, `i` as duplicate naming. No arg is deemed extra, though further inspection of the conflicting args might get there. Since assignment syntax in parens is more|less deprecated (?), no more effort is done here. --- .../tools/nsc/typechecker/ContextErrors.scala | 18 ++++-- .../scala/tools/nsc/typechecker/Typers.scala | 7 +-- test/files/neg/eta-expand-star.check | 2 +- .../neg/macro-invalidusage-badargs.check | 2 +- test/files/neg/multi-array.check | 2 +- test/files/neg/protected-constructors.check | 7 ++- test/files/neg/t1112.check | 2 +- test/files/neg/t1523.check | 2 +- test/files/neg/t6920.check | 2 +- test/files/neg/t7157.check | 24 ++++---- test/files/neg/t8006.check | 2 +- test/files/neg/t8035-no-adapted-args.check | 2 +- test/files/neg/t8667.check | 56 +++++++++++-------- test/files/neg/t8667.scala | 4 +- test/files/neg/t876.check | 2 +- 15 files changed, 79 insertions(+), 55 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index e1055144f8c7..6b30d3834c1f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -538,7 +538,14 @@ trait ContextErrors { def NamedAndDefaultArgumentsNotSupportedForMacros(tree: Tree, fun: Tree) = NormalTypeError(tree, "macro applications do not support named and/or default arguments") - def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree, expected: Int, supplied: Int, unknowns: List[Name]) = { + def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree, formals: List[Type], args: List[Tree], namelessArgs: List[Tree], argPos: Array[Int]) = { + val expected = formals.size + val supplied = args.size + // pick a caret. For f(k=1,i=2,j=3), argPos[0,-1,1] b/c `k=1` taken as arg0 + val excessive = { + val i = argPos.indexWhere(_ >= expected) + if (i < 0) tree else args(i min (supplied - 1)) + } val msg = { val badappl = { val excess = supplied - expected @@ -548,13 +555,16 @@ trait ContextErrors { else if (excess < 3 && expected <= 5) s"too many arguments ($supplied) for $target" else if (expected > 10) s"$supplied arguments but expected $expected for $target" else { - val oneOf = + val more = if (excess == 1) "one more argument" else if (excess > 0) s"$excess more arguments" else "too many arguments" - s"$oneOf than can be applied to $target" + s"$more than can be applied to $target" } } + val unknowns = (namelessArgs zip args) collect { + case (_: Assign, AssignOrNamedArg(Ident(name), _)) => name + } val suppl = unknowns.size match { case 0 => "" @@ -563,7 +573,7 @@ trait ContextErrors { } s"${badappl}${suppl}" } - NormalTypeError(tree, msg) + NormalTypeError(excessive, msg) } // can it still happen? 
see test case neg/overloaded-unapply.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 1d24d8c232c0..286de993dbcc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3331,11 +3331,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper duplErrorTree(WrongNumberOfArgsError(tree, fun)) } else if (lencmp > 0) { tryTupleApply orElse duplErrorTree { - val (namelessArgs, _) = removeNames(Typer.this)(args, params) - val wrongs = (namelessArgs zip args) collect { - case (_: Assign, AssignOrNamedArg(Ident(name), _)) => name - } - TooManyArgsNamesDefaultsError(tree, fun, expected = formals.size, supplied = args.size, wrongs) + val (namelessArgs, argPos) = removeNames(Typer.this)(args, params) + TooManyArgsNamesDefaultsError(tree, fun, formals, args, namelessArgs, argPos) } } else if (lencmp == 0) { // we don't need defaults. names were used, so this application is transformed diff --git a/test/files/neg/eta-expand-star.check b/test/files/neg/eta-expand-star.check index f25e0a41ffad..eba17210148e 100644 --- a/test/files/neg/eta-expand-star.check +++ b/test/files/neg/eta-expand-star.check @@ -1,4 +1,4 @@ eta-expand-star.scala:6: error: too many arguments (2) for method apply: (v1: Seq[T])Unit in trait Function1 g(1, 2) - ^ + ^ one error found diff --git a/test/files/neg/macro-invalidusage-badargs.check b/test/files/neg/macro-invalidusage-badargs.check index 2f5f6d106423..ee549c45cbeb 100644 --- a/test/files/neg/macro-invalidusage-badargs.check +++ b/test/files/neg/macro-invalidusage-badargs.check @@ -15,5 +15,5 @@ Unspecified value parameter x. ^ Macros_Test_2.scala:9: error: too many arguments (2) for macro method foo: (x: Int)Int foo(4, 2) - ^ + ^ 5 errors found diff --git a/test/files/neg/multi-array.check b/test/files/neg/multi-array.check index a71f9b864af4..06ffdc9fbc33 100644 --- a/test/files/neg/multi-array.check +++ b/test/files/neg/multi-array.check @@ -1,4 +1,4 @@ multi-array.scala:7: error: too many arguments (2) for constructor Array: (_length: Int)Array[T] val a: Array[Int] = new Array(10, 10) - ^ + ^ one error found diff --git a/test/files/neg/protected-constructors.check b/test/files/neg/protected-constructors.check index 5c880cb67206..0279f5815d2e 100644 --- a/test/files/neg/protected-constructors.check +++ b/test/files/neg/protected-constructors.check @@ -1,6 +1,6 @@ protected-constructors.scala:17: error: no arguments allowed for nullary constructor Foo1: ()dingus.Foo1 val foo1 = new Foo1("abc") - ^ + ^ protected-constructors.scala:18: error: constructor Foo2 in class Foo2 cannot be accessed in object P Access to protected constructor Foo2 not permitted because enclosing object P in package hungus is not a subclass of @@ -19,4 +19,7 @@ protected-constructors.scala:15: error: class Foo3 in object Ding cannot be acce object Ding in package dingus where target is defined class Bar3 extends Ding.Foo3("abc") ^ -four errors found +protected-constructors.scala:15: error: no arguments allowed for nullary constructor Object: ()Object + class Bar3 extends Ding.Foo3("abc") + ^ +5 errors found diff --git a/test/files/neg/t1112.check b/test/files/neg/t1112.check index 9c1254d17655..e6058bf176f8 100644 --- a/test/files/neg/t1112.check +++ b/test/files/neg/t1112.check @@ -1,4 +1,4 @@ t1112.scala:12: error: too many arguments (2) for method call: (p: Int)(f: => Test.this.Type1)Unit call(0,() => 
System.out.println("here we are")) - ^ + ^ one error found diff --git a/test/files/neg/t1523.check b/test/files/neg/t1523.check index 656845a4578d..273d0f8cf77a 100644 --- a/test/files/neg/t1523.check +++ b/test/files/neg/t1523.check @@ -1,4 +1,4 @@ t1523.scala:4: error: 25 more arguments than can be applied to method bug: (x: Any)Any def go() = bug("a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a") - ^ + ^ one error found diff --git a/test/files/neg/t6920.check b/test/files/neg/t6920.check index 4e33aca3bc9a..d10abff03cc0 100644 --- a/test/files/neg/t6920.check +++ b/test/files/neg/t6920.check @@ -2,5 +2,5 @@ t6920.scala:9: error: too many arguments (2) for method applyDynamicNamed: (valu error after rewriting to CompilerError.this.test.applyDynamicNamed("crushTheCompiler")(scala.Tuple2("a", 1), scala.Tuple2("b", 2)) possible cause: maybe a wrong Dynamic method signature? test.crushTheCompiler(a = 1, b = 2) - ^ + ^ one error found diff --git a/test/files/neg/t7157.check b/test/files/neg/t7157.check index a043c5d40370..0b813949469b 100644 --- a/test/files/neg/t7157.check +++ b/test/files/neg/t7157.check @@ -1,22 +1,22 @@ Test_2.scala:5: error: no arguments allowed for nullary macro method m1_0_0: ()Unit m1_0_0(1) - ^ + ^ Test_2.scala:6: error: no arguments allowed for nullary macro method m1_0_0: ()Unit m1_0_0(1, 2) - ^ + ^ Test_2.scala:7: error: no arguments allowed for nullary macro method m1_0_0: ()Unit m1_0_0(1, 2, 3) - ^ + ^ Test_2.scala:9: error: not enough arguments for macro method m1_1_1: (x: Int)Unit. Unspecified value parameter x. m1_1_1() ^ Test_2.scala:11: error: too many arguments (2) for macro method m1_1_1: (x: Int)Unit m1_1_1(1, 2) - ^ + ^ Test_2.scala:12: error: too many arguments (3) for macro method m1_1_1: (x: Int)Unit m1_1_1(1, 2, 3) - ^ + ^ Test_2.scala:14: error: not enough arguments for macro method m1_2_2: (x: Int, y: Int)Unit. Unspecified value parameters x, y. m1_2_2() @@ -27,7 +27,7 @@ Unspecified value parameter y. ^ Test_2.scala:17: error: too many arguments (3) for macro method m1_2_2: (x: Int, y: Int)Unit m1_2_2(1, 2, 3) - ^ + ^ Test_2.scala:24: error: not enough arguments for macro method m1_1_inf: (x: Int, y: Int*)Unit. Unspecified value parameters x, y. m1_1_inf() @@ -42,23 +42,23 @@ Unspecified value parameters y, z. ^ Test_2.scala:35: error: no arguments allowed for nullary macro method m2_0_0: ()Unit m2_0_0()(1) - ^ + ^ Test_2.scala:36: error: no arguments allowed for nullary macro method m2_0_0: ()Unit m2_0_0()(1, 2) - ^ + ^ Test_2.scala:37: error: no arguments allowed for nullary macro method m2_0_0: ()Unit m2_0_0()(1, 2, 3) - ^ + ^ Test_2.scala:39: error: not enough arguments for macro method m2_1_1: (x: Int)Unit. Unspecified value parameter x. m2_1_1()() ^ Test_2.scala:41: error: too many arguments (2) for macro method m2_1_1: (x: Int)Unit m2_1_1()(1, 2) - ^ + ^ Test_2.scala:42: error: too many arguments (3) for macro method m2_1_1: (x: Int)Unit m2_1_1()(1, 2, 3) - ^ + ^ Test_2.scala:44: error: not enough arguments for macro method m2_2_2: (x: Int, y: Int)Unit. Unspecified value parameters x, y. m2_2_2()() @@ -69,7 +69,7 @@ Unspecified value parameter y. ^ Test_2.scala:47: error: too many arguments (3) for macro method m2_2_2: (x: Int, y: Int)Unit m2_2_2()(1, 2, 3) - ^ + ^ Test_2.scala:54: error: not enough arguments for macro method m2_1_inf: (x: Int, y: Int*)Unit. Unspecified value parameters x, y. 
m2_1_inf()() diff --git a/test/files/neg/t8006.check b/test/files/neg/t8006.check index 4e130670e4d4..6152d0fba364 100644 --- a/test/files/neg/t8006.check +++ b/test/files/neg/t8006.check @@ -2,5 +2,5 @@ t8006.scala:3: error: too many arguments (2) for method applyDynamicNamed: (valu error after rewriting to X.this.d.applyDynamicNamed("meth")(scala.Tuple2("value1", 10), scala.Tuple2("value2", 100)) possible cause: maybe a wrong Dynamic method signature? d.meth(value1 = 10, value2 = 100) // two arguments here, but only one is allowed - ^ + ^ one error found diff --git a/test/files/neg/t8035-no-adapted-args.check b/test/files/neg/t8035-no-adapted-args.check index 4b3cb39c769f..0115dddc91a3 100644 --- a/test/files/neg/t8035-no-adapted-args.check +++ b/test/files/neg/t8035-no-adapted-args.check @@ -6,7 +6,7 @@ t8035-no-adapted-args.scala:4: warning: No automatic adaptation here: use explic ^ t8035-no-adapted-args.scala:4: error: too many arguments (3) for method f: (x: (Int, Int, Int))Int f(1, 2, 3) - ^ + ^ t8035-no-adapted-args.scala:5: warning: No automatic adaptation here: use explicit parentheses. signature: Test.f[T](x: T): Int given arguments: diff --git a/test/files/neg/t8667.check b/test/files/neg/t8667.check index 4c3bcb78c829..82451ee5d6ad 100644 --- a/test/files/neg/t8667.check +++ b/test/files/neg/t8667.check @@ -1,7 +1,7 @@ t8667.scala:6: error: too many arguments (3) for constructor C: (a: Int, b: Int)C Note that 'c' is not a parameter name of the invoked method. def c2 = new C(a = 42, b = 17, c = 5) - ^ + ^ t8667.scala:7: error: unknown parameter name: c def c3 = new C(b = 42, a = 17, c = 5) ^ @@ -25,9 +25,13 @@ t8667.scala:10: error: too many arguments (3) for constructor C: (a: Int, b: Int Note that 'c' is not a parameter name of the invoked method. def c6 = new C(a = 42, c = 17, b = 5) ^ +t8667.scala:11: error: parameter 'a' is already specified at parameter position 1 +Note that 'c' is not a parameter name of the invoked method. + def c7 = new C(c = 42, a = 17, b = 5) + ^ t8667.scala:11: error: too many arguments (3) for constructor C: (a: Int, b: Int)C Note that 'c' is not a parameter name of the invoked method. - def c7 = new C(42, 17, c = 5) + def c7 = new C(c = 42, a = 17, b = 5) ^ t8667.scala:12: error: parameter 'b' is already specified at parameter position 2 def c8 = new C(42, 17, b = 5) @@ -42,38 +46,46 @@ Note that 'c' is not a parameter name of the invoked method. t8667.scala:13: error: too many arguments (4) for constructor C: (a: Int, b: Int)C Note that 'c', 'd' are not parameter names of the invoked method. def c9 = new C(a = 42, c = 17, d = 3, b = 5) - ^ + ^ t8667.scala:14: error: too many arguments (4) for constructor C: (a: Int, b: Int)C Note that 'd', 'c' are not parameter names of the invoked method. 
def c0 = new C(42, 17, d = 3, c = 5) - ^ -t8667.scala:24: error: no arguments allowed for nullary method f0: ()Int + ^ +t8667.scala:25: error: no arguments allowed for nullary method f0: ()Int f0(1) - ^ -t8667.scala:25: error: too many arguments (2) for method f1: (i: Int)Int + ^ +t8667.scala:26: error: too many arguments (2) for method f1: (i: Int)Int f1(1, 2) - ^ -t8667.scala:26: error: too many arguments (3) for method f1: (i: Int)Int + ^ +t8667.scala:27: error: too many arguments (3) for method f1: (i: Int)Int f1(1, 2, 3) - ^ -t8667.scala:27: error: 3 more arguments than can be applied to method f1: (i: Int)Int - f1(1, 2, 3, 4) - ^ + ^ t8667.scala:28: error: 3 more arguments than can be applied to method f1: (i: Int)Int + f1(1, 2, 3, 4) + ^ +t8667.scala:29: error: 3 more arguments than can be applied to method f1: (i: Int)Int Note that 'j' is not a parameter name of the invoked method. f1(1, j = 2, 3, 4) - ^ -t8667.scala:29: error: 3 more arguments than can be applied to method f1: (i: Int)Int + ^ +t8667.scala:30: error: 3 more arguments than can be applied to method f1: (i: Int)Int Note that 'j', 'k' are not parameter names of the invoked method. f1(1, j = 2, k = 3, 4) + ^ +t8667.scala:31: error: parameter 'i' is already specified at parameter position 1 +Note that 'k' is not a parameter name of the invoked method. + f2(k = 1, i = 2, j = 3) + ^ +t8667.scala:31: error: too many arguments (3) for method f2: (i: Int, j: Int)Int +Note that 'k' is not a parameter name of the invoked method. + f2(k = 1, i = 2, j = 3) ^ -t8667.scala:30: error: one more argument than can be applied to method f6: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int)Int +t8667.scala:32: error: one more argument than can be applied to method f6: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int)Int f6(1, 2, 3, 4, 5, 6, 7) - ^ -t8667.scala:31: error: 2 more arguments than can be applied to method f6: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int)Int + ^ +t8667.scala:33: error: 2 more arguments than can be applied to method f6: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int)Int f6(1, 2, 3, 4, 5, 6, 7, 8) - ^ -t8667.scala:32: error: 15 arguments but expected 12 for method f12: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int, o: Int, p: Int, q: Int, r: Int, s: Int, t: Int)Int + ^ +t8667.scala:34: error: 15 arguments but expected 12 for method f12: (i: Int, j: Int, k: Int, l: Int, m: Int, n: Int, o: Int, p: Int, q: Int, r: Int, s: Int, t: Int)Int f12(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15) - ^ -23 errors found + ^ +26 errors found diff --git a/test/files/neg/t8667.scala b/test/files/neg/t8667.scala index fe17eac84f6c..d55582ca6b8f 100644 --- a/test/files/neg/t8667.scala +++ b/test/files/neg/t8667.scala @@ -8,7 +8,7 @@ trait T { def c4 = new C(b = 42, a = 17, 5) def c5 = new C(a = 42, c = 17) def c6 = new C(a = 42, c = 17, b = 5) - def c7 = new C(42, 17, c = 5) + def c7 = new C(c = 42, a = 17, b = 5) def c8 = new C(42, 17, b = 5) def c9 = new C(a = 42, c = 17, d = 3, b = 5) def c0 = new C(42, 17, d = 3, c = 5) @@ -17,6 +17,7 @@ trait T { trait X { def f0() = 42 def f1(i: Int) = 42 + def f2(i: Int, j: Int) = 42 def f6(i: Int, j: Int, k: Int, l: Int, m: Int, n: Int) = 42 def f12(i: Int, j: Int, k: Int, l: Int, m: Int, n: Int, o: Int, p: Int, q: Int, r: Int, s: Int, t: Int) = 42 @@ -27,6 +28,7 @@ trait X { f1(1, 2, 3, 4) f1(1, j = 2, 3, 4) f1(1, j = 2, k = 3, 4) + f2(k = 1, i = 2, j = 3) f6(1, 2, 3, 4, 5, 6, 7) f6(1, 2, 3, 4, 5, 6, 7, 8) f12(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15) diff --git 
a/test/files/neg/t876.check b/test/files/neg/t876.check index 91dcbfd288e2..7df2e126a6f0 100644 --- a/test/files/neg/t876.check +++ b/test/files/neg/t876.check @@ -1,4 +1,4 @@ t876.scala:25: error: too many arguments (2) for method apply: (key: AssertionError.A)manager.B in class HashMap assert(manager.map(A2) == List(manager.map(A2, A1))) - ^ + ^ one error found From 45e607d3879623d5163f47c17c717904f6867ef6 Mon Sep 17 00:00:00 2001 From: Miles Sabin Date: Sat, 14 May 2016 14:12:43 +0100 Subject: [PATCH 0025/2793] SI-9361 fixed assert allowing display of improved error message. --- .../scala/tools/nsc/typechecker/ContextErrors.scala | 5 +++-- test/files/neg/t9361.check | 11 +++++++++++ test/files/neg/t9361.scala | 5 +++++ 3 files changed, 19 insertions(+), 2 deletions(-) create mode 100644 test/files/neg/t9361.check create mode 100644 test/files/neg/t9361.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index e190b57017b7..90ccaefe432a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -198,7 +198,7 @@ trait ContextErrors { val foundType: Type = req.dealiasWiden match { case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass => val retyped = typed (tree.duplicate.clearType()) - val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic) + val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic && !sym.isErroneous) if (foundDecls.isEmpty || (found.typeSymbol eq NoSymbol)) found else { // The members arrive marked private, presumably because there was no @@ -212,7 +212,8 @@ trait ContextErrors { case _ => found } - assert(!foundType.isErroneous && !req.isErroneous, (foundType, req)) + assert(!foundType.isErroneous, s"AdaptTypeError - foundType is Erroneous: $foundType") + assert(!req.isErroneous, s"AdaptTypeError - req is Erroneous: $req") issueNormalTypeError(callee, withAddendum(callee.pos)(typeErrorMsg(foundType, req))) infer.explainTypes(foundType, req) diff --git a/test/files/neg/t9361.check b/test/files/neg/t9361.check new file mode 100644 index 000000000000..847d137f7d6c --- /dev/null +++ b/test/files/neg/t9361.check @@ -0,0 +1,11 @@ +t9361.scala:4: error: type mismatch; + found : Tc[_$2] where type _$2 + required: Nothing[] + new Foo { def tc = null.asInstanceOf[Tc[_]] } + ^ +t9361.scala:4: error: type mismatch; + found : Foo[Nothing] + required: Foo[Tc]{type T = Nothing} + new Foo { def tc = null.asInstanceOf[Tc[_]] } + ^ +two errors found diff --git a/test/files/neg/t9361.scala b/test/files/neg/t9361.scala new file mode 100644 index 000000000000..b689461e4d56 --- /dev/null +++ b/test/files/neg/t9361.scala @@ -0,0 +1,5 @@ +abstract class Foo[Tc[_]] { def tc: Tc[_] } +object Foo { + def foo[Tc[_]](): Foo[Tc] { type T = Nothing } = + new Foo { def tc = null.asInstanceOf[Tc[_]] } +} From eac1af364e99a6712c5e54e257216027b2ab127e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 16 May 2016 16:04:17 +1000 Subject: [PATCH 0026/2793] Reduce boilerplate in compiler JUnit tests (#5158) Many JUnit tests share a compiler instance between all test cases in a class to reduce overhead. This commit refactors the mechanism to reduce the boilerplate. In the new scheme: - Using the `@ClassRule` hook in JUnit, we create a per-class map for each test class. 
- Per-class values are registered from the test class itself by calling `cached("someKey", () => mkExpensiveThing)` - At the end of the test, the entries in this map are `close()`-ed (if they implement `Closable`), and are released for garbage collection.) --- test/junit/scala/issues/BytecodeTest.scala | 8 +--- .../scala/issues/OptimizedBytecodeTest.scala | 15 ++---- test/junit/scala/issues/RunTest.scala | 11 ++--- .../tools/nsc/backend/jvm/BTypesTest.scala | 15 ++---- .../nsc/backend/jvm/DefaultMethodTest.scala | 8 +--- .../nsc/backend/jvm/DirectCompileTest.scala | 9 +--- .../nsc/backend/jvm/IndyLambdaTest.scala | 11 +---- .../tools/nsc/backend/jvm/IndySammyTest.scala | 14 ++---- .../nsc/backend/jvm/StringConcatTest.scala | 8 +--- .../jvm/analysis/NullnessAnalyzerTest.scala | 11 +---- .../jvm/analysis/ProdConsAnalyzerTest.scala | 11 +---- .../nsc/backend/jvm/opt/AnalyzerTest.scala | 8 +--- .../nsc/backend/jvm/opt/CallGraphTest.scala | 26 ++++------ .../jvm/opt/ClosureOptimizerTest.scala | 9 +--- .../jvm/opt/EmptyExceptionHandlersTest.scala | 14 +----- .../nsc/backend/jvm/opt/InlineInfoTest.scala | 22 ++++----- .../backend/jvm/opt/InlineWarningTest.scala | 20 +++----- .../jvm/opt/InlinerIllegalAccessTest.scala | 9 +--- .../nsc/backend/jvm/opt/InlinerTest.scala | 32 +++++-------- .../backend/jvm/opt/MethodLevelOptsTest.scala | 9 +--- .../backend/jvm/opt/ScalaInlineInfoTest.scala | 8 +--- .../backend/jvm/opt/UnreachableCodeTest.scala | 24 ++-------- .../jvm/opt/UnusedLocalVariablesTest.scala | 9 +--- .../transform/patmat/PatmatBytecodeTest.scala | 12 +---- .../scala/tools/testing/ClearAfterClass.java | 47 ++++++++++++++++--- 25 files changed, 112 insertions(+), 258 deletions(-) diff --git a/test/junit/scala/issues/BytecodeTest.scala b/test/junit/scala/issues/BytecodeTest.scala index cf5c7f9ec3ef..a720f2071816 100644 --- a/test/junit/scala/issues/BytecodeTest.scala +++ b/test/junit/scala/issues/BytecodeTest.scala @@ -15,15 +15,9 @@ import scala.tools.asm.tree.ClassNode import scala.tools.partest.ASMConverters._ import scala.tools.testing.ClearAfterClass -object BytecodeTest extends ClearAfterClass.Clearable { - var compiler = newCompiler() - def clear(): Unit = { compiler = null } -} - @RunWith(classOf[JUnit4]) class BytecodeTest extends ClearAfterClass { - ClearAfterClass.stateToClear = BytecodeTest - val compiler = BytecodeTest.compiler + val compiler = cached("compiler", () => newCompiler()) @Test def t8731(): Unit = { diff --git a/test/junit/scala/issues/OptimizedBytecodeTest.scala b/test/junit/scala/issues/OptimizedBytecodeTest.scala index 1555e8945a3c..c69229ae2295 100644 --- a/test/junit/scala/issues/OptimizedBytecodeTest.scala +++ b/test/junit/scala/issues/OptimizedBytecodeTest.scala @@ -15,17 +15,10 @@ import AsmUtils._ import scala.tools.testing.ClearAfterClass -object OptimizedBytecodeTest extends ClearAfterClass.Clearable { - val args = "-Yopt:l:classpath -Yopt-warnings" - var compiler = newCompiler(extraArgs = args) - def clear(): Unit = { compiler = null } -} - @RunWith(classOf[JUnit4]) class OptimizedBytecodeTest extends ClearAfterClass { - ClearAfterClass.stateToClear = OptimizedBytecodeTest - - val compiler = OptimizedBytecodeTest.compiler + val args = "-Yopt:l:classpath -Yopt-warnings" + val compiler = cached("compiler", () => newCompiler(extraArgs = args)) @Test def t2171(): Unit = { @@ -127,7 +120,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { |object Warmup { def filter[A](p: Any => Boolean): Any = filter[Any](p) } """.stripMargin val c2 = "class C { def t = 
warmup.Warmup.filter[Any](x => false) }" - val List(c, _, _) = compileClassesSeparately(List(c1, c2), extraArgs = OptimizedBytecodeTest.args) + val List(c, _, _) = compileClassesSeparately(List(c1, c2), extraArgs = args) assertInvoke(getSingleMethod(c, "t"), "warmup/Warmup$", "filter") } @@ -268,7 +261,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { |} """.stripMargin - val cls = compileClassesSeparately(List(c1, c2), extraArgs = OptimizedBytecodeTest.args) + val cls = compileClassesSeparately(List(c1, c2), extraArgs = args) val c = cls.find(_.name == "C").get assertSameSummary(getSingleMethod(c, "t"), List( GETSTATIC, IFNONNULL, ACONST_NULL, ATHROW, // module load and null checks not yet eliminated diff --git a/test/junit/scala/issues/RunTest.scala b/test/junit/scala/issues/RunTest.scala index 0605947e630d..148009c9126f 100644 --- a/test/junit/scala/issues/RunTest.scala +++ b/test/junit/scala/issues/RunTest.scala @@ -9,22 +9,17 @@ import scala.reflect.runtime._ import scala.tools.reflect.ToolBox import scala.tools.testing.ClearAfterClass -object RunTest extends ClearAfterClass.Clearable { - var toolBox = universe.runtimeMirror(getClass.getClassLoader).mkToolBox() - override def clear(): Unit = { toolBox = null } - - // definitions for individual tests +object RunTest { class VC(val x: Any) extends AnyVal } @RunWith(classOf[JUnit4]) class RunTest extends ClearAfterClass { - ClearAfterClass.stateToClear = RunTest + val toolBox = cached("toolbox", () => universe.runtimeMirror(getClass.getClassLoader).mkToolBox()) def run[T](code: String): T = { - val tb = RunTest.toolBox - tb.eval(tb.parse(code)).asInstanceOf[T] + toolBox.eval(toolBox.parse(code)).asInstanceOf[T] } @Test diff --git a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala index 8b8e2b36de34..e7bbbb9a4f23 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala @@ -10,22 +10,15 @@ import org.junit.Assert._ import scala.tools.nsc.backend.jvm.CodeGenTools._ import scala.tools.testing.ClearAfterClass -object BTypesTest extends ClearAfterClass.Clearable { - var compiler = { +@RunWith(classOf[JUnit4]) +class BTypesTest extends ClearAfterClass { + val compiler = cached("compiler", () => { val comp = newCompiler(extraArgs = "-Yopt:l:none") new comp.Run() // initializes some of the compiler comp.exitingDelambdafy(comp.scalaPrimitives.init()) // needed: it's only done when running the backend, and we don't actually run the compiler comp.exitingDelambdafy(comp.genBCode.bTypes.initializeCoreBTypes()) comp - } - def clear(): Unit = { compiler = null } -} - -@RunWith(classOf[JUnit4]) -class BTypesTest extends ClearAfterClass { - ClearAfterClass.stateToClear = BTypesTest - - val compiler = BTypesTest.compiler + }) import compiler.genBCode.bTypes._ def classBTFS(sym: compiler.Symbol) = compiler.exitingDelambdafy(classBTypeFromSymbol(sym)) diff --git a/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala index 2ce9d213312e..7d4ae866fcaf 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala @@ -10,14 +10,8 @@ import scala.tools.nsc.backend.jvm.CodeGenTools._ import JavaConverters._ import scala.tools.testing.ClearAfterClass -object DefaultMethodTest extends ClearAfterClass.Clearable { - var compiler = newCompiler() - def clear(): Unit = { 
compiler = null } -} - class DefaultMethodTest extends ClearAfterClass { - ClearAfterClass.stateToClear = DefaultMethodTest - val compiler = DefaultMethodTest.compiler + val compiler = cached("compiler", () => newCompiler()) @Test def defaultMethodsViaGenBCode(): Unit = { diff --git a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala index 0cdc6ead10bd..e984b7551898 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala @@ -9,16 +9,9 @@ import scala.tools.asm.Opcodes._ import scala.tools.partest.ASMConverters._ import scala.tools.testing.ClearAfterClass -object DirectCompileTest extends ClearAfterClass.Clearable { - var compiler = newCompiler(extraArgs = "-Yopt:l:method") - def clear(): Unit = { compiler = null } -} - @RunWith(classOf[JUnit4]) class DirectCompileTest extends ClearAfterClass { - ClearAfterClass.stateToClear = DirectCompileTest - - val compiler = DirectCompileTest.compiler + val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:method")) @Test def testCompile(): Unit = { diff --git a/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala b/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala index d29f6b0a13c4..b906942ffa1a 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala @@ -10,17 +10,8 @@ import scala.tools.nsc.backend.jvm.CodeGenTools._ import scala.tools.testing.ClearAfterClass import scala.collection.JavaConverters._ -object IndyLambdaTest extends ClearAfterClass.Clearable { - var compiler = newCompiler() - - def clear(): Unit = { - compiler = null - } -} - class IndyLambdaTest extends ClearAfterClass { - ClearAfterClass.stateToClear = IndyLambdaTest - val compiler = IndyLambdaTest.compiler + val compiler = cached("compiler", () => newCompiler()) @Test def boxingBridgeMethodUsedSelectively(): Unit = { def implMethodDescriptorFor(code: String): String = { diff --git a/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala b/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala index b9e45a7dc945..5c2ab6a2c785 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala @@ -15,21 +15,13 @@ import ASMConverters._ import scala.tools.testing.ClearAfterClass -object IndySammyTest extends ClearAfterClass.Clearable { - var _compiler = newCompiler() - - def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = - compileClasses(_compiler)(scalaCode, javaCode, allowMessage) - - def clear(): Unit = { _compiler = null } -} @RunWith(classOf[JUnit4]) class IndySammyTest extends ClearAfterClass { - ClearAfterClass.stateToClear = IndySammyTest - import IndySammyTest._ - val compiler = _compiler + val compiler = cached("compiler", () => newCompiler()) + def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = + compileClasses(compiler)(scalaCode, javaCode, allowMessage) def funClassName(from: String, to: String) = s"Fun$from$to" def classPrologue(from: String, to: String) = diff --git a/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala b/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala index 2a9b8f719810..fc0c96e71a07 
100644 --- a/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala @@ -14,15 +14,9 @@ import scala.tools.partest.ASMConverters import ASMConverters._ import scala.tools.testing.ClearAfterClass -object StringConcatTest extends ClearAfterClass.Clearable { - var compiler = newCompiler() - def clear(): Unit = { compiler = null } -} - @RunWith(classOf[JUnit4]) class StringConcatTest extends ClearAfterClass { - ClearAfterClass.stateToClear = StringConcatTest - val compiler = StringConcatTest.compiler + val compiler = cached("compiler", () => newCompiler()) @Test def appendOverloadNoBoxing(): Unit = { diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala index 571d84c8726f..075f42d18f11 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala @@ -19,18 +19,9 @@ import AsmUtils._ import scala.collection.JavaConverters._ -object NullnessAnalyzerTest extends ClearAfterClass.Clearable { - var noOptCompiler = newCompiler(extraArgs = "-Yopt:l:none") - - def clear(): Unit = { - noOptCompiler = null - } -} - @RunWith(classOf[JUnit4]) class NullnessAnalyzerTest extends ClearAfterClass { - ClearAfterClass.stateToClear = NullnessAnalyzerTest - val noOptCompiler = NullnessAnalyzerTest.noOptCompiler + val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:none")) import noOptCompiler.genBCode.bTypes.backendUtils._ def newNullnessAnalyzer(methodNode: MethodNode, classInternalName: InternalName = "C") = new AsmAnalyzer(methodNode, classInternalName, new NullnessAnalyzer(noOptCompiler.genBCode.bTypes)) diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala index d54b8ac56361..8d4bc19ec359 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala @@ -14,18 +14,9 @@ import scala.tools.testing.ClearAfterClass import CodeGenTools._ import AsmUtils._ -object ProdConsAnalyzerTest extends ClearAfterClass.Clearable { - var noOptCompiler = newCompiler(extraArgs = "-Yopt:l:none") - - def clear(): Unit = { - noOptCompiler = null - } -} - @RunWith(classOf[JUnit4]) class ProdConsAnalyzerTest extends ClearAfterClass { - ClearAfterClass.stateToClear = ProdConsAnalyzerTest - val noOptCompiler = ProdConsAnalyzerTest.noOptCompiler + val noOptCompiler =cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:none")) import noOptCompiler.genBCode.bTypes.backendUtils._ def prodToString(producer: AbstractInsnNode) = producer match { diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala index 930f7f2f1099..09675870f0ed 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala @@ -21,15 +21,9 @@ import BytecodeUtils._ import scala.collection.JavaConverters._ import scala.tools.testing.ClearAfterClass -object AnalyzerTest extends ClearAfterClass.Clearable { - var noOptCompiler = newCompiler(extraArgs = "-Yopt:l:none") - def clear(): Unit = { noOptCompiler = null } -} - @RunWith(classOf[JUnit4]) class AnalyzerTest extends 
ClearAfterClass { - ClearAfterClass.stateToClear = AnalyzerTest - val noOptCompiler = AnalyzerTest.noOptCompiler + val noOptCompiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:none")) @Test def aliasingOfPrimitives(): Unit = { diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala index 1d30e42e3c0a..9a27c42cac86 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala @@ -24,29 +24,23 @@ import BackendReporting._ import scala.collection.JavaConverters._ import scala.tools.testing.ClearAfterClass -object CallGraphTest extends ClearAfterClass.Clearable { - var compiler = newCompiler(extraArgs = "-Yopt:inline-global -Yopt-warnings") - def clear(): Unit = { compiler = null } - - // allows inspecting the caches after a compilation run - val notPerRun: List[Clearable] = List( - compiler.genBCode.bTypes.classBTypeFromInternalName, - compiler.genBCode.bTypes.byteCodeRepository.compilingClasses, - compiler.genBCode.bTypes.byteCodeRepository.parsedClasses, - compiler.genBCode.bTypes.callGraph.callsites) - notPerRun foreach compiler.perRunCaches.unrecordCache -} - @RunWith(classOf[JUnit4]) class CallGraphTest extends ClearAfterClass { - ClearAfterClass.stateToClear = CallGraphTest + val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:inline-global -Yopt-warnings") + ) + import compiler.genBCode.bTypes + val notPerRun: List[Clearable] = List( + bTypes.classBTypeFromInternalName, + bTypes.byteCodeRepository.compilingClasses, + bTypes.byteCodeRepository.parsedClasses, + bTypes.callGraph.callsites) + notPerRun foreach compiler.perRunCaches.unrecordCache - val compiler = CallGraphTest.compiler import compiler.genBCode.bTypes._ import callGraph._ def compile(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = { - CallGraphTest.notPerRun.foreach(_.clear()) + notPerRun.foreach(_.clear()) compileClasses(compiler)(code, allowMessage = allowMessage).map(c => byteCodeRepository.classNode(c.name).get) } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala index 12bfba71a804..e8530af4e0e9 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala @@ -27,16 +27,9 @@ import BackendReporting._ import scala.collection.JavaConverters._ import scala.tools.testing.ClearAfterClass -object ClosureOptimizerTest extends ClearAfterClass.Clearable { - var compiler = newCompiler(extraArgs = "-Yopt:l:classpath -Yopt-warnings:_") - def clear(): Unit = { compiler = null } -} - @RunWith(classOf[JUnit4]) class ClosureOptimizerTest extends ClearAfterClass { - ClearAfterClass.stateToClear = ClosureOptimizerTest - - val compiler = ClosureOptimizerTest.compiler + val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:classpath -Yopt-warnings:_")) @Test def nothingTypedClosureBody(): Unit = { diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala index 22aed4207fc2..6d566c722f4f 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala @@ -13,21 +13,11 @@ 
import scala.tools.partest.ASMConverters import ASMConverters._ import scala.tools.testing.ClearAfterClass -object EmptyExceptionHandlersTest extends ClearAfterClass.Clearable { - var noOptCompiler = newCompiler(extraArgs = "-Yopt:l:none") - var dceCompiler = newCompiler(extraArgs = "-Yopt:unreachable-code") - def clear(): Unit = { - noOptCompiler = null - dceCompiler = null - } -} @RunWith(classOf[JUnit4]) class EmptyExceptionHandlersTest extends ClearAfterClass { - ClearAfterClass.stateToClear = EmptyExceptionHandlersTest - - val noOptCompiler = EmptyExceptionHandlersTest.noOptCompiler - val dceCompiler = EmptyExceptionHandlersTest.dceCompiler + val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:none")) + val dceCompiler = cached("dceCompiler", () => newCompiler(extraArgs = "-Yopt:unreachable-code")) val exceptionDescriptor = "java/lang/Exception" diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala index 23386bb5aeef..5cb1aab4a90f 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala @@ -18,25 +18,19 @@ import BackendReporting._ import scala.collection.JavaConverters._ -object InlineInfoTest extends ClearAfterClass.Clearable { - var compiler = newCompiler(extraArgs = "-Yopt:l:classpath") - def clear(): Unit = { compiler = null } - - def notPerRun: List[Clearable] = List( - compiler.genBCode.bTypes.classBTypeFromInternalName, - compiler.genBCode.bTypes.byteCodeRepository.compilingClasses, - compiler.genBCode.bTypes.byteCodeRepository.parsedClasses) - notPerRun foreach compiler.perRunCaches.unrecordCache -} - @RunWith(classOf[JUnit4]) class InlineInfoTest extends ClearAfterClass { - ClearAfterClass.stateToClear = InlineInfoTest + val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:classpath")) - val compiler = InlineInfoTest.compiler + import compiler.genBCode.bTypes + def notPerRun: List[Clearable] = List( + bTypes.classBTypeFromInternalName, + bTypes.byteCodeRepository.compilingClasses, + bTypes.byteCodeRepository.parsedClasses) + notPerRun foreach compiler.perRunCaches.unrecordCache def compile(code: String) = { - InlineInfoTest.notPerRun.foreach(_.clear()) + notPerRun.foreach(_.clear()) compileClasses(compiler)(code) } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala index 1597c75a7e22..6dd0a33289a0 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala @@ -27,20 +27,12 @@ import BackendReporting._ import scala.collection.JavaConverters._ import scala.tools.testing.ClearAfterClass -object InlineWarningTest extends ClearAfterClass.Clearable { - val argsNoWarn = "-Yopt:l:classpath" - val args = argsNoWarn + " -Yopt-warnings" - var compiler = newCompiler(extraArgs = args) - var compilerWarnAll = newCompiler(extraArgs = argsNoWarn + " -Yopt-warnings:_") - def clear(): Unit = { compiler = null; compilerWarnAll = null } -} - @RunWith(classOf[JUnit4]) class InlineWarningTest extends ClearAfterClass { - ClearAfterClass.stateToClear = InlineWarningTest - - val compiler = InlineWarningTest.compiler - val compilerWarnAll = InlineWarningTest.compilerWarnAll + val argsNoWarn = "-Yopt:l:classpath" + val args = argsNoWarn + " -Yopt-warnings" + val compiler = 
cached("compiler", () => newCompiler(extraArgs = args)) + val compilerWarnAll = cached("compilerWarnAll", () => newCompiler(extraArgs = argsNoWarn + " -Yopt-warnings:_")) def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false, compiler: Global = compiler): List[ClassNode] = { compileClasses(compiler)(scalaCode, javaCode, allowMessage) @@ -115,10 +107,10 @@ class InlineWarningTest extends ClearAfterClass { assert(c == 1, c) // no warnings here - compileClasses(newCompiler(extraArgs = InlineWarningTest.argsNoWarn + " -Yopt-warnings:none"))(scalaCode, List((javaCode, "A.java"))) + compileClasses(newCompiler(extraArgs = argsNoWarn + " -Yopt-warnings:none"))(scalaCode, List((javaCode, "A.java"))) c = 0 - compileClasses(newCompiler(extraArgs = InlineWarningTest.argsNoWarn + " -Yopt-warnings:no-inline-mixed"))(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.exists(i.msg contains _)}) + compileClasses(newCompiler(extraArgs = argsNoWarn + " -Yopt-warnings:no-inline-mixed"))(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.exists(i.msg contains _)}) assert(c == 2, c) } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala index 6460158e7152..ab1aef47cde3 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala @@ -19,16 +19,9 @@ import AsmUtils._ import scala.collection.JavaConverters._ import scala.tools.testing.ClearAfterClass -object InlinerIllegalAccessTest extends ClearAfterClass.Clearable { - var compiler = newCompiler(extraArgs = "-Yopt:l:none") - def clear(): Unit = { compiler = null } -} - @RunWith(classOf[JUnit4]) class InlinerIllegalAccessTest extends ClearAfterClass { - ClearAfterClass.stateToClear = InlinerIllegalAccessTest - - val compiler = InlinerIllegalAccessTest.compiler + val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:none")) import compiler.genBCode.bTypes._ def addToRepo(cls: List[ClassNode]): Unit = for (c <- cls) byteCodeRepository.add(c, ByteCodeRepository.Classfile) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index e2a495fb2b1c..b7641b5ec717 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -22,35 +22,27 @@ import BackendReporting._ import scala.collection.JavaConverters._ import scala.tools.testing.ClearAfterClass -object InlinerTest extends ClearAfterClass.Clearable { +@RunWith(classOf[JUnit4]) +class InlinerTest extends ClearAfterClass { val args = "-Yopt:l:classpath -Yopt-warnings" - var compiler = newCompiler(extraArgs = args) - var inlineOnlyCompiler = newCompiler(extraArgs = "-Yopt:inline-project") - + val compiler = cached("compiler", () => newCompiler(extraArgs = args)) + val inlineOnlyCompiler = cached("inlineOnlyCompiler", () => newCompiler(extraArgs = "-Yopt:inline-project")) + import compiler.genBCode.bTypes // allows inspecting the caches after a compilation run def notPerRun: List[Clearable] = List( - compiler.genBCode.bTypes.classBTypeFromInternalName, - compiler.genBCode.bTypes.byteCodeRepository.compilingClasses, - compiler.genBCode.bTypes.byteCodeRepository.parsedClasses, - 
compiler.genBCode.bTypes.callGraph.callsites) + bTypes.classBTypeFromInternalName, + bTypes.byteCodeRepository.compilingClasses, + bTypes.byteCodeRepository.parsedClasses, + bTypes.callGraph.callsites) notPerRun foreach compiler.perRunCaches.unrecordCache - def clear(): Unit = { compiler = null; inlineOnlyCompiler = null } -} - -@RunWith(classOf[JUnit4]) -class InlinerTest extends ClearAfterClass { - ClearAfterClass.stateToClear = InlinerTest - - val compiler = InlinerTest.compiler import compiler.genBCode.bTypes._ import compiler.genBCode.bTypes.backendUtils._ import inlinerHeuristics._ - val inlineOnlyCompiler = InlinerTest.inlineOnlyCompiler def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = { - InlinerTest.notPerRun.foreach(_.clear()) + notPerRun.foreach(_.clear()) compileClasses(compiler)(scalaCode, javaCode, allowMessage) // Use the class nodes stored in the byteCodeRepository. The ones returned by compileClasses are not the same, // these are created new from the classfile byte array. They are completely separate instances which cannot @@ -837,7 +829,7 @@ class InlinerTest extends ClearAfterClass { var c = 0 - compileClasses(newCompiler(extraArgs = InlinerTest.args + " -Yopt-warnings:_"))( + compileClasses(newCompiler(extraArgs = args + " -Yopt-warnings:_"))( scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; i.msg contains warn}) @@ -899,7 +891,7 @@ class InlinerTest extends ClearAfterClass { | def t = System.arraycopy(null, 0, null, 0, 0) |} """.stripMargin - val List(c) = compileClasses(newCompiler(extraArgs = InlinerTest.args + " -Yopt-inline-heuristics:everything"))(code) + val List(c) = compileClasses(newCompiler(extraArgs = args + " -Yopt-inline-heuristics:everything"))(code) assertInvoke(getSingleMethod(c, "t"), "java/lang/System", "arraycopy") } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala index dd7fbd9977da..003b2d48803f 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala @@ -18,16 +18,9 @@ import ASMConverters._ import scala.tools.testing.ClearAfterClass import scala.collection.JavaConverters._ -object MethodLevelOptsTest extends ClearAfterClass.Clearable { - var methodOptCompiler = newCompiler(extraArgs = "-Yopt:l:method") - def clear(): Unit = { methodOptCompiler = null } -} - @RunWith(classOf[JUnit4]) class MethodLevelOptsTest extends ClearAfterClass { - ClearAfterClass.stateToClear = MethodLevelOptsTest - - val methodOptCompiler = MethodLevelOptsTest.methodOptCompiler + val methodOptCompiler = cached("methodOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:method")) def wrapInDefault(code: Instruction*) = List(Label(0), LineNumber(1, Label(0))) ::: code.toList ::: List(Label(1)) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala index 8dd23ec3ce26..6cb3fd3bba49 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala @@ -16,15 +16,9 @@ import ASMConverters._ import scala.collection.JavaConverters._ import scala.tools.testing.ClearAfterClass -object ScalaInlineInfoTest extends ClearAfterClass.Clearable { - var compiler = newCompiler(extraArgs = 
"-Yopt:l:none") - def clear(): Unit = { compiler = null } -} - @RunWith(classOf[JUnit4]) class ScalaInlineInfoTest extends ClearAfterClass { - ClearAfterClass.stateToClear = ScalaInlineInfoTest - val compiler = ScalaInlineInfoTest.compiler + val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:none")) def inlineInfo(c: ClassNode): InlineInfo = c.attrs.asScala.collect({ case a: InlineInfoAttribute => a.inlineInfo }).head diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala index 0021a1784d0e..46f06d1d3977 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala @@ -15,27 +15,13 @@ import scala.tools.partest.ASMConverters import ASMConverters._ import scala.tools.testing.ClearAfterClass -object UnreachableCodeTest extends ClearAfterClass.Clearable { - // jvm-1.6 enables emitting stack map frames, which impacts the code generation wrt dead basic blocks, - // see comment in BCodeBodyBuilder - var methodOptCompiler = newCompiler(extraArgs = "-Yopt:l:method") - var dceCompiler = newCompiler(extraArgs = "-Yopt:unreachable-code") - var noOptCompiler = newCompiler(extraArgs = "-Yopt:l:none") - - def clear(): Unit = { - methodOptCompiler = null - dceCompiler = null - noOptCompiler = null - } -} - @RunWith(classOf[JUnit4]) class UnreachableCodeTest extends ClearAfterClass { - ClearAfterClass.stateToClear = UnreachableCodeTest - - val methodOptCompiler = UnreachableCodeTest.methodOptCompiler - val dceCompiler = UnreachableCodeTest.dceCompiler - val noOptCompiler = UnreachableCodeTest.noOptCompiler + // jvm-1.6 enables emitting stack map frames, which impacts the code generation wrt dead basic blocks, + // see comment in BCodeBodyBuilder + val methodOptCompiler = cached("methodOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:method")) + val dceCompiler = cached("dceCompiler", () => newCompiler(extraArgs = "-Yopt:unreachable-code")) + val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:none")) def assertEliminateDead(code: (Instruction, Boolean)*): Unit = { val method = genMethod()(code.map(_._1): _*) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala index 4f71df182285..77e73e64b99c 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala @@ -14,16 +14,9 @@ import scala.tools.partest.ASMConverters import ASMConverters._ import scala.tools.testing.ClearAfterClass -object UnusedLocalVariablesTest extends ClearAfterClass.Clearable { - var dceCompiler = newCompiler(extraArgs = "-Yopt:unreachable-code") - def clear(): Unit = { dceCompiler = null } -} - @RunWith(classOf[JUnit4]) class UnusedLocalVariablesTest extends ClearAfterClass { - ClearAfterClass.stateToClear = UnusedLocalVariablesTest - - val dceCompiler = UnusedLocalVariablesTest.dceCompiler + val dceCompiler = cached("dceCompiler", () => newCompiler(extraArgs = "-Yopt:unreachable-code")) @Test def removeUnusedVar(): Unit = { diff --git a/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala b/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala index ac558e2e213f..aa83520efb2b 100644 --- 
a/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala +++ b/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala @@ -16,18 +16,10 @@ import scala.tools.partest.ASMConverters import ASMConverters._ import scala.tools.testing.ClearAfterClass -object PatmatBytecodeTest extends ClearAfterClass.Clearable { - var compiler = newCompiler() - var optCompiler = newCompiler(extraArgs = "-Yopt:l:project") - def clear(): Unit = { compiler = null; optCompiler = null } -} - @RunWith(classOf[JUnit4]) class PatmatBytecodeTest extends ClearAfterClass { - ClearAfterClass.stateToClear = PatmatBytecodeTest - - val compiler = PatmatBytecodeTest.compiler - val optCompiler = PatmatBytecodeTest.optCompiler + val compiler = cached("compiler", () => newCompiler()) + val optCompiler = cached("optCompiler", () => newCompiler(extraArgs = "-Yopt:l:project")) @Test def t6956(): Unit = { diff --git a/test/junit/scala/tools/testing/ClearAfterClass.java b/test/junit/scala/tools/testing/ClearAfterClass.java index 232d459c4e75..95e170ec138c 100644 --- a/test/junit/scala/tools/testing/ClearAfterClass.java +++ b/test/junit/scala/tools/testing/ClearAfterClass.java @@ -1,20 +1,53 @@ package scala.tools.testing; -import org.junit.AfterClass; +import org.junit.ClassRule; +import org.junit.rules.TestRule; +import org.junit.runners.model.Statement; + +import java.io.Closeable; +import java.io.IOException; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; /** - * Extend this class to use JUnit's @AfterClass. This annotation only works on static methods, + * Extend this class to use JUnit's @ClassRule. This annotation only works on static methods, * which cannot be written in Scala. * * Example: {@link scala.tools.nsc.backend.jvm.opt.InlinerTest} */ public class ClearAfterClass { - public static interface Clearable { - void clear(); + private static Map, Map> cache = new ConcurrentHashMap<>(); + + @ClassRule + public static TestRule clearClassCache() { + return (statement, desc) -> new Statement() { + @Override + public void evaluate() throws Throwable { + ConcurrentHashMap perClassCache = new ConcurrentHashMap<>(); + cache.put(desc.getTestClass(), perClassCache); + try { + statement.evaluate(); + } finally { + perClassCache.values().forEach(ClearAfterClass::closeIfClosable); + cache.remove(desc.getTestClass()); + } + } + }; } - public static Clearable stateToClear; + private static void closeIfClosable(Object o) { + if (o instanceof Closeable) { + try { + ((Closeable) o).close(); + } catch (IOException e) { + // ignore + } + } + } + + public T cached(String key, scala.Function0 t) { + Map perClassCache = cache.get(getClass()); + return (T) perClassCache.computeIfAbsent(key, s -> t.apply()); + } - @AfterClass - public static void clearState() { stateToClear.clear(); } } From 83864872d7b642520fdf522fd300151d8bc22da4 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 10 May 2016 15:07:45 +1000 Subject: [PATCH 0027/2793] Eliminate major sources of daily noise in SBT build. - Intercept incorrect "binary conflict" warning issued by SBT. Fixes https://github.com/scala/scala-dev/issues/100 - Bump to a new version of pantsbuild/jarjar to fix an incompatibility with Java 8 parameter names in class files, which we run into on the 2.12.x branch. See: https://github.com/pantsbuild/jarjar/pull/19 - Disable info level logging for dependency resolve/download. 
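For reference, the third item boils down to a single sbt setting; a minimal sketch of the same idea for any build.sbt (the full wiring, including the delegating `Logger` that downgrades the spurious binary-version warning, lives in `project/Quiet.scala` in the diff below):
```scala
// Only log Ivy resolve/download output at warning level or above.
logLevel in update := Level.Warn
```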
--- build.sbt | 10 +++++++--- project/Quiet.scala | 33 +++++++++++++++++++++++++++++++++ project/plugins.sbt | 2 +- 3 files changed, 41 insertions(+), 4 deletions(-) create mode 100644 project/Quiet.scala diff --git a/build.sbt b/build.sbt index 5b9036deb428..e2cf40dbbc5c 100644 --- a/build.sbt +++ b/build.sbt @@ -210,7 +210,9 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings + // Don't log process output (e.g. of forked `compiler/runMain ...Main`), just pass it // directly to stdout - outputStrategy in run := Some(StdoutOutput) + outputStrategy in run := Some(StdoutOutput), + Quiet.silenceScalaBinaryVersionWarning, + Quiet.silenceIvyUpdateInfoLogging ) /** Extra post-processing for the published POM files. These are needed to create POMs that @@ -475,6 +477,7 @@ lazy val replJlineEmbedded = Project("repl-jline-embedded", file(".") / "target" }), publishArtifact := false, connectInput in run := true + ) .dependsOn(replJline) @@ -677,8 +680,9 @@ lazy val root = (project in file(".")) publishArtifact := false, publish := {}, publishLocal := {}, - commands ++= ScriptCommands.all - ) + commands ++= ScriptCommands.all, + Quiet.silenceIvyUpdateInfoLogging +) .aggregate(library, forkjoin, reflect, compiler, interactive, repl, replJline, replJlineEmbedded, scaladoc, scalap, actors, partestExtras, junit, libraryAll, scalaDist).settings( sources in Compile := Seq.empty, diff --git a/project/Quiet.scala b/project/Quiet.scala new file mode 100644 index 000000000000..de30ebe6abac --- /dev/null +++ b/project/Quiet.scala @@ -0,0 +1,33 @@ +import sbt._ +import Keys._ + +object Quiet { + // Workaround SBT issue described: + // + // https://github.com/scala/scala-dev/issues/100 + def silenceScalaBinaryVersionWarning = ivyConfiguration := { + ivyConfiguration.value match { + case c: InlineIvyConfiguration => + val delegate = c.log + val logger = new Logger { + override def trace(t: => Throwable): Unit = delegate.trace(t) + override def log(level: sbt.Level.Value, message: => String): Unit = { + level match { + case sbt.Level.Warn => + val message0 = message + val newLevel = if (message.contains("differs from Scala binary version in project")) + delegate.log(sbt.Level.Debug, message) + else + delegate.log(level, message) + case _ => delegate.log(level, message) + } + } + override def success(message: => String): Unit = delegate.success(message) + } + new InlineIvyConfiguration(c.paths, c.resolvers, c.otherResolvers, c.moduleConfigurations, c.localOnly, c.lock, c.checksums, c.resolutionCacheDir, c.updateOptions, logger) + case x => x + } + } + + def silenceIvyUpdateInfoLogging = logLevel in update := Level.Warn +} diff --git a/project/plugins.sbt b/project/plugins.sbt index 2d91c2306b98..46203565b483 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,6 +1,6 @@ libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.3.2" -libraryDependencies += "org.pantsbuild" % "jarjar" % "1.6.0" +libraryDependencies += "org.pantsbuild" % "jarjar" % "1.6.3" libraryDependencies += "biz.aQute" % "bndlib" % "1.50.0" From 71a5bdaf57f90b46a6bed8af28deebb1174318c7 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 18 Mar 2016 14:55:23 -0700 Subject: [PATCH 0028/2793] [backport] sbt build targets build/ It avoids confusion with existing test/partest scripts that test the compiler in build/, while sbt it targeting build-sbt/. 
--- build.sbt | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/build.sbt b/build.sbt index e2cf40dbbc5c..82d23fed903a 100644 --- a/build.sbt +++ b/build.sbt @@ -4,8 +4,8 @@ * What you see below is very much work-in-progress. The following features are implemented: * - Compiling all classses for the compiler and library ("compile" in the respective subprojects) * - Running JUnit tests ("test") and partest ("test/it:test") - * - Creating build-sbt/quick with all compiled classes and launcher scripts ("dist/mkQuick") - * - Creating build-sbt/pack with all JARs and launcher scripts ("dist/mkPack") + * - Creating build/quick with all compiled classes and launcher scripts ("dist/mkQuick") + * - Creating build/pack with all JARs and launcher scripts ("dist/mkPack") * - Building all scaladoc sets ("doc") * - Publishing ("publishDists" and standard sbt tasks like "publish" and "publishLocal") * @@ -771,8 +771,8 @@ def configureAsForkOfJavaProject(project: Project): Project = { lazy val buildDirectory = settingKey[File]("The directory where all build products go. By default ./build") lazy val mkBin = taskKey[Seq[File]]("Generate shell script (bash or Windows batch).") -lazy val mkQuick = taskKey[Unit]("Generate a full build, including scripts, in build-sbt/quick") -lazy val mkPack = taskKey[Unit]("Generate a full build, including scripts, in build-sbt/pack") +lazy val mkQuick = taskKey[Unit]("Generate a full build, including scripts, in build/quick") +lazy val mkPack = taskKey[Unit]("Generate a full build, including scripts, in build/pack") // Defining these settings is somewhat redundant as we also redefine settings that depend on them. // However, IntelliJ's project import works better when these are set correctly. @@ -831,7 +831,7 @@ def generateServiceProviderResources(services: (String, String)*): Setting[_] = } }.taskValue -buildDirectory in ThisBuild := (baseDirectory in ThisBuild).value / "build-sbt" +buildDirectory in ThisBuild := (baseDirectory in ThisBuild).value / "build" // Add tab completion to partest commands += Command("partest")(_ => PartestUtil.partestParser((baseDirectory in ThisBuild).value, (baseDirectory in ThisBuild).value / "test")) { (state, parsed) => From 139f9a0f97a24c8f8b2db5947b6cb3d68d5682fb Mon Sep 17 00:00:00 2001 From: xuwei-k <6b656e6a69@gmail.com> Date: Tue, 17 May 2016 12:38:28 +0900 Subject: [PATCH 0029/2793] Make Range.Partial a value class --- src/library/scala/collection/immutable/Range.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index d3fe367e5035..2fe75343d153 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -512,7 +512,7 @@ object Range { // As there is no appealing default step size for not-really-integral ranges, // we offer a partially constructed object. - class Partial[T, U](f: T => U) { + class Partial[T, U](private val f: T => U) extends AnyVal { def by(x: T): U = f(x) } From 73ca44be579e5100706d174f18025fc4487e9cb9 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 16 May 2016 16:24:50 -0700 Subject: [PATCH 0030/2793] SI-4625 Recognize App in script Cheap name test: if the script object extends "App", take it for a main-bearing parent. Note that if `-Xscript` is not `Main`, the default, then the source is taken as a snippet and there is no attempt to locate an existing `main` method. 
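To illustrate the new behaviour (a hypothetical script, equivalent in shape to the bundled `t4625.script` test below): a top-level object that extends `App` is now picked up as the program even though it declares no `main` method of its own.
```scala
// hello.script (hypothetical file name)
// Compiled by the script runner with the default -Xscript Main.
object Main extends App {
  println("Recognized as the main-bearing object.")
}
```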
--- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 6 +++++- test/files/run/t4625.check | 1 + test/files/run/t4625.scala | 7 +++++++ test/files/run/t4625.script | 5 +++++ 4 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t4625.check create mode 100644 test/files/run/t4625.scala create mode 100644 test/files/run/t4625.script diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index c04d305f9e43..7af5c505de1d 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -380,11 +380,15 @@ self => case DefDef(_, nme.main, Nil, List(_), _, _) => true case _ => false } + def isApp(t: Tree) = t match { + case Template(ps, _, _) => ps.exists { case Ident(x) if x.decoded == "App" => true ; case _ => false } + case _ => false + } /* For now we require there only be one top level object. */ var seenModule = false val newStmts = stmts collect { case t @ Import(_, _) => t - case md @ ModuleDef(mods, name, template) if !seenModule && (md exists isMainMethod) => + case md @ ModuleDef(mods, name, template) if !seenModule && (isApp(template) || md.exists(isMainMethod)) => seenModule = true /* This slightly hacky situation arises because we have no way to communicate * back to the scriptrunner what the name of the program is. Even if we were diff --git a/test/files/run/t4625.check b/test/files/run/t4625.check new file mode 100644 index 000000000000..e4a4d15b8754 --- /dev/null +++ b/test/files/run/t4625.check @@ -0,0 +1 @@ +Test ran. diff --git a/test/files/run/t4625.scala b/test/files/run/t4625.scala new file mode 100644 index 000000000000..44f62252201c --- /dev/null +++ b/test/files/run/t4625.scala @@ -0,0 +1,7 @@ + +import scala.tools.partest.ScriptTest + +object Test extends ScriptTest { + // must be called Main to get probing treatment in parser + override def testmain = "Main" +} diff --git a/test/files/run/t4625.script b/test/files/run/t4625.script new file mode 100644 index 000000000000..600ceacbb653 --- /dev/null +++ b/test/files/run/t4625.script @@ -0,0 +1,5 @@ + +object Main extends Runnable with App { + def run() = println("Test ran.") + run() +} From 5bcefbe1889a1b8e1f9bac04090428eaaa7b2fd3 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 16 May 2016 22:57:51 -0700 Subject: [PATCH 0031/2793] SI-4625 App is a thing Scripting knows it by name. --- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 5 ++++- src/reflect/scala/reflect/internal/StdNames.scala | 1 + 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 7af5c505de1d..358ccb5dc3a8 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -365,12 +365,15 @@ self => val stmts = parseStats() def mainModuleName = newTermName(settings.script.value) + /* If there is only a single object template in the file and it has a * suitable main method, we will use it rather than building another object * around it. Since objects are loaded lazily the whole script would have * been a no-op, so we're not taking much liberty. 
*/ def searchForMain(): Option[Tree] = { + import PartialFunction.cond + /* Have to be fairly liberal about what constitutes a main method since * nothing has been typed yet - for instance we can't assume the parameter * type will look exactly like "Array[String]" as it could have been renamed @@ -381,7 +384,7 @@ self => case _ => false } def isApp(t: Tree) = t match { - case Template(ps, _, _) => ps.exists { case Ident(x) if x.decoded == "App" => true ; case _ => false } + case Template(ps, _, _) => ps.exists(cond(_) { case Ident(tpnme.App) => true }) case _ => false } /* For now we require there only be one top level object. */ diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 52558d939501..a0688e129cff 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -240,6 +240,7 @@ trait StdNames { final val Any: NameType = "Any" final val AnyVal: NameType = "AnyVal" + final val App: NameType = "App" final val FlagSet: NameType = "FlagSet" final val Mirror: NameType = "Mirror" final val Modifiers: NameType = "Modifiers" From ee365cccaf740d5ec353718556b010137f4cdd4d Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 16 May 2016 23:28:16 -0700 Subject: [PATCH 0032/2793] SI-4625 Permit arbitrary top-level in script In an unwrapped script, where a `main` entry point is discovered in a top-level object, retain all top-level classes. Everything winds up in the default package. --- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 4 +++- test/files/run/t4625b.check | 1 + test/files/run/t4625b.scala | 7 +++++++ test/files/run/t4625b.script | 8 ++++++++ 4 files changed, 19 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t4625b.check create mode 100644 test/files/run/t4625b.scala create mode 100644 test/files/run/t4625b.script diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 358ccb5dc3a8..1ece580b96d6 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -384,7 +384,7 @@ self => case _ => false } def isApp(t: Tree) = t match { - case Template(ps, _, _) => ps.exists(cond(_) { case Ident(tpnme.App) => true }) + case Template(parents, _, _) => parents.exists(cond(_) { case Ident(tpnme.App) => true }) case _ => false } /* For now we require there only be one top level object. */ @@ -402,6 +402,8 @@ self => */ if (name == mainModuleName) md else treeCopy.ModuleDef(md, mods, mainModuleName, template) + case md @ ModuleDef(_, _, _) => md + case cd @ ClassDef(_, _, _, _) => cd case _ => /* If we see anything but the above, fail. 
*/ return None diff --git a/test/files/run/t4625b.check b/test/files/run/t4625b.check new file mode 100644 index 000000000000..e79539a5c4ee --- /dev/null +++ b/test/files/run/t4625b.check @@ -0,0 +1 @@ +Misc top-level detritus diff --git a/test/files/run/t4625b.scala b/test/files/run/t4625b.scala new file mode 100644 index 000000000000..44f62252201c --- /dev/null +++ b/test/files/run/t4625b.scala @@ -0,0 +1,7 @@ + +import scala.tools.partest.ScriptTest + +object Test extends ScriptTest { + // must be called Main to get probing treatment in parser + override def testmain = "Main" +} diff --git a/test/files/run/t4625b.script b/test/files/run/t4625b.script new file mode 100644 index 000000000000..f21a553dd12d --- /dev/null +++ b/test/files/run/t4625b.script @@ -0,0 +1,8 @@ + +trait X { def x = "Misc top-level detritus" } + +object Bumpkus + +object Main extends X with App { + println(x) +} From e753135f02a8177e809937e56fed5c054091691f Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 17 May 2016 00:00:52 -0700 Subject: [PATCH 0033/2793] SI-4625 Warn when discarding script object It's pretty confusing when your script object becomes a local and then nothing happens. Such as when you're writing a test and it takes forever to figure out what's going on. --- .../scala/tools/nsc/ast/parser/Parsers.scala | 15 +++++++++++---- test/files/run/t4625c.check | 3 +++ test/files/run/t4625c.scala | 7 +++++++ test/files/run/t4625c.script | 6 ++++++ 4 files changed, 27 insertions(+), 4 deletions(-) create mode 100644 test/files/run/t4625c.check create mode 100644 test/files/run/t4625c.scala create mode 100644 test/files/run/t4625c.script diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 1ece580b96d6..c2f2141fd3ec 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -389,8 +389,8 @@ self => } /* For now we require there only be one top level object. */ var seenModule = false + var disallowed = EmptyTree: Tree val newStmts = stmts collect { - case t @ Import(_, _) => t case md @ ModuleDef(mods, name, template) if !seenModule && (isApp(template) || md.exists(isMainMethod)) => seenModule = true /* This slightly hacky situation arises because we have no way to communicate @@ -404,11 +404,18 @@ self => else treeCopy.ModuleDef(md, mods, mainModuleName, template) case md @ ModuleDef(_, _, _) => md case cd @ ClassDef(_, _, _, _) => cd - case _ => + case t @ Import(_, _) => t + case t => /* If we see anything but the above, fail. 
*/ - return None + disallowed = t + EmptyTree + } + if (disallowed.isEmpty) Some(makeEmptyPackage(0, newStmts)) + else { + if (seenModule) + warning(disallowed.pos.point, "Script has a main object but statement is disallowed") + None } - Some(makeEmptyPackage(0, newStmts)) } if (mainModuleName == newTermName(ScriptRunner.defaultScriptMain)) diff --git a/test/files/run/t4625c.check b/test/files/run/t4625c.check new file mode 100644 index 000000000000..6acb1710b907 --- /dev/null +++ b/test/files/run/t4625c.check @@ -0,0 +1,3 @@ +newSource1.scala:2: warning: Script has a main object but statement is disallowed +val x = "value x" + ^ diff --git a/test/files/run/t4625c.scala b/test/files/run/t4625c.scala new file mode 100644 index 000000000000..44f62252201c --- /dev/null +++ b/test/files/run/t4625c.scala @@ -0,0 +1,7 @@ + +import scala.tools.partest.ScriptTest + +object Test extends ScriptTest { + // must be called Main to get probing treatment in parser + override def testmain = "Main" +} diff --git a/test/files/run/t4625c.script b/test/files/run/t4625c.script new file mode 100644 index 000000000000..fa14f43950df --- /dev/null +++ b/test/files/run/t4625c.script @@ -0,0 +1,6 @@ + +val x = "value x" + +object Main extends App { + println(s"Test ran with $x.") +} From c2f67d2e14f90b0bda1bc2ae7245d104e7d0087b Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 17 May 2016 08:33:46 +0200 Subject: [PATCH 0034/2793] Move t8449 to correct place Follow-up for https://github.com/scala/scala/pull/4117 --- test/files/{ => pos}/t8449/Client.scala | 0 test/files/{ => pos}/t8449/Test.java | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename test/files/{ => pos}/t8449/Client.scala (100%) rename test/files/{ => pos}/t8449/Test.java (100%) diff --git a/test/files/t8449/Client.scala b/test/files/pos/t8449/Client.scala similarity index 100% rename from test/files/t8449/Client.scala rename to test/files/pos/t8449/Client.scala diff --git a/test/files/t8449/Test.java b/test/files/pos/t8449/Test.java similarity index 100% rename from test/files/t8449/Test.java rename to test/files/pos/t8449/Test.java From fe6886eb0ec9c02fa666e9e7af09bab92b985d05 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rui=20Gonc=CC=A7alves?= Date: Sun, 17 Apr 2016 17:51:17 +0100 Subject: [PATCH 0035/2793] Improve performance and behavior of ListMap and ListSet Makes the immutable `ListMap` and `ListSet` collections more alike one another, both in their semantics and in their performance. In terms of semantics, makes the `ListSet` iterator return the elements in their insertion order, as `ListMap` already does. While, as mentioned in SI-8985, `ListMap` and `ListSet` doesn't seem to make any guarantees in terms of iteration order, I believe users expect `ListSet` and `ListMap` to behave in the same way, particularly when they are implemented in the exact same way. In terms of performance, `ListSet` has a custom builder that avoids creation in O(N^2) time. However, this significantly reduces its performance in the creation of small sets, as its requires the instantiation and usage of an auxilliary HashSet. As `ListMap` and `ListSet` are only suitable for small sizes do to their performance characteristics, the builder is removed, the default `SetBuilder` being used instead. 
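As a quick illustration of the semantic change (the new JUnit tests below assert exactly this), `ListSet` now returns its elements in insertion order, matching `ListMap`:
```scala
import scala.collection.immutable.{ListMap, ListSet}

// Iteration now follows insertion order for both collections.
ListSet(1, 2, 3, 5, 4).iterator.toList          // List(1, 2, 3, 5, 4)
ListMap(1 -> 1, 2 -> 2, 3 -> 3).iterator.toList // List((1,1), (2,2), (3,3))
```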
--- .../scala/collection/immutable/ListMap.scala | 34 +++++----- .../scala/collection/immutable/ListSet.scala | 67 ++++++------------- test/files/jvm/serialization-new.check | 4 +- test/files/jvm/serialization.check | 4 +- test/files/run/t3822.scala | 19 ------ test/files/run/t6198.scala | 7 -- test/files/run/t7445.scala | 6 -- .../collection/immutable/ListMapTest.scala | 48 +++++++++++++ .../collection/immutable/ListSetTest.scala | 53 +++++++++++++++ 9 files changed, 144 insertions(+), 98 deletions(-) delete mode 100644 test/files/run/t3822.scala delete mode 100644 test/files/run/t7445.scala create mode 100644 test/junit/scala/collection/immutable/ListMapTest.scala create mode 100644 test/junit/scala/collection/immutable/ListSetTest.scala diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala index e1bcc0711ced..9af05183dd31 100644 --- a/src/library/scala/collection/immutable/ListMap.scala +++ b/src/library/scala/collection/immutable/ListMap.scala @@ -113,7 +113,8 @@ extends AbstractMap[A, B] * @param xs the traversable object. */ override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): ListMap[A, B1] = - ((repr: ListMap[A, B1]) /: xs.seq) (_ + _) + if (xs.isEmpty) this + else ((repr: ListMap[A, B1]) /: xs) (_ + _) /** This creates a new mapping without the given `key`. * If the map does not contain a mapping for the given key, the @@ -125,14 +126,18 @@ extends AbstractMap[A, B] /** Returns an iterator over key-value pairs. */ - def iterator: Iterator[(A,B)] = - new AbstractIterator[(A,B)] { - var self: ListMap[A,B] = ListMap.this - def hasNext = !self.isEmpty - def next(): (A,B) = - if (!hasNext) throw new NoSuchElementException("next on empty iterator") - else { val res = (self.key, self.value); self = self.next; res } - }.toList.reverseIterator + def iterator: Iterator[(A, B)] = { + def reverseList = { + var curr: ListMap[A, B] = this + var res: List[(A, B)] = Nil + while (!curr.isEmpty) { + res = (curr.key, curr.value) :: res + curr = curr.next + } + res + } + reverseList.iterator + } protected def key: A = throw new NoSuchElementException("empty map") protected def value: B = throw new NoSuchElementException("empty map") @@ -210,14 +215,9 @@ extends AbstractMap[A, B] override def - (k: A): ListMap[A, B1] = remove0(k, this, Nil) @tailrec private def remove0(k: A, cur: ListMap[A, B1], acc: List[ListMap[A, B1]]): ListMap[A, B1] = - if (cur.isEmpty) - acc.last - else if (k == cur.key) - (cur.next /: acc) { - case (t, h) => val tt = t; new tt.Node(h.key, h.value) // SI-7459 - } - else - remove0(k, cur.next, cur::acc) + if (cur.isEmpty) acc.last + else if (k == cur.key) (cur.next /: acc) { case (t, h) => new t.Node(h.key, h.value) } + else remove0(k, cur.next, cur::acc) override protected def next: ListMap[A, B1] = ListMap.this diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala index d20e7bc6d202..7803e055ed0e 100644 --- a/src/library/scala/collection/immutable/ListSet.scala +++ b/src/library/scala/collection/immutable/ListSet.scala @@ -12,7 +12,6 @@ package immutable import generic._ import scala.annotation.tailrec -import mutable.{Builder, ReusableBuilder} /** $factoryInfo * @define Coll immutable.ListSet @@ -23,33 +22,8 @@ object ListSet extends ImmutableSetFactory[ListSet] { /** setCanBuildFromInfo */ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListSet[A]] = setCanBuildFrom[A] - override def newBuilder[A]: Builder[A, ListSet[A]] = new 
ListSetBuilder[A] - private object EmptyListSet extends ListSet[Any] { } private[collection] def emptyInstance: ListSet[Any] = EmptyListSet - - /** A custom builder because forgetfully adding elements one at - * a time to a list backed set puts the "squared" in N^2. There is a - * temporary space cost, but it's improbable a list backed set could - * become large enough for this to matter given its pricy element lookup. - * - * This builder is reusable. - */ - class ListSetBuilder[Elem](initial: ListSet[Elem]) extends ReusableBuilder[Elem, ListSet[Elem]] { - def this() = this(empty[Elem]) - protected val elems = (new mutable.ListBuffer[Elem] ++= initial).reverse - protected val seen = new mutable.HashSet[Elem] ++= initial - - def +=(x: Elem): this.type = { - if (!seen(x)) { - elems += x - seen += x - } - this - } - def clear() = { elems.clear() ; seen.clear() } - def result() = elems.foldLeft(empty[Elem])(_ unchecked_+ _) - } } /** This class implements immutable sets using a list-based data @@ -104,9 +78,8 @@ sealed class ListSet[A] extends AbstractSet[A] */ override def ++(xs: GenTraversableOnce[A]): ListSet[A] = if (xs.isEmpty) this - else (new ListSet.ListSetBuilder(this) ++= xs.seq).result() + else (repr /: xs) (_ + _) - private[ListSet] def unchecked_+(e: A): ListSet[A] = new Node(e) private[ListSet] def unchecked_outer: ListSet[A] = throw new NoSuchElementException("Empty ListSet has no outer pointer") @@ -115,33 +88,34 @@ sealed class ListSet[A] extends AbstractSet[A] * @throws java.util.NoSuchElementException * @return the new iterator */ - def iterator: Iterator[A] = new AbstractIterator[A] { - var that: ListSet[A] = self - def hasNext = that.nonEmpty - def next: A = - if (hasNext) { - val res = that.head - that = that.tail - res + def iterator: Iterator[A] = { + def reverseList = { + var curr: ListSet[A] = self + var res: List[A] = Nil + while (!curr.isEmpty) { + res = curr.elem :: res + curr = curr.next } - else Iterator.empty.next() + res + } + reverseList.iterator } /** * @throws java.util.NoSuchElementException */ - override def head: A = throw new NoSuchElementException("Set has no elements") + protected def elem: A = throw new NoSuchElementException("elem of empty set") /** * @throws java.util.NoSuchElementException */ - override def tail: ListSet[A] = throw new NoSuchElementException("Next of an empty set") + protected def next: ListSet[A] = throw new NoSuchElementException("Next of an empty set") override def stringPrefix = "ListSet" /** Represents an entry in the `ListSet`. */ - protected class Node(override val head: A) extends ListSet[A] with Serializable { + protected class Node(override val elem: A) extends ListSet[A] with Serializable { override private[ListSet] def unchecked_outer = self /** Returns the number of elements in this set. @@ -166,7 +140,7 @@ sealed class ListSet[A] extends AbstractSet[A] */ override def contains(e: A) = containsInternal(this, e) @tailrec private def containsInternal(n: ListSet[A], e: A): Boolean = - !n.isEmpty && (n.head == e || containsInternal(n.unchecked_outer, e)) + !n.isEmpty && (n.elem == e || containsInternal(n.unchecked_outer, e)) /** This method creates a new set with an additional element. */ @@ -174,11 +148,14 @@ sealed class ListSet[A] extends AbstractSet[A] /** `-` can be used to remove a single element from a set. 
*/ - override def -(e: A): ListSet[A] = if (e == head) self else { - val tail = self - e; new tail.Node(head) - } + override def -(e: A): ListSet[A] = removeInternal(e, this, Nil) + + @tailrec private def removeInternal(k: A, cur: ListSet[A], acc: List[ListSet[A]]): ListSet[A] = + if (cur.isEmpty) acc.last + else if (k == cur.elem) (cur.next /: acc) { case (t, h) => new t.Node(h.elem) } + else removeInternal(k, cur.next, cur :: acc) - override def tail: ListSet[A] = self + override protected def next: ListSet[A] = self } override def toSet[B >: A]: Set[B] = this.asInstanceOf[ListSet[B]] diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check index cb26446f40a2..91248320d440 100644 --- a/test/files/jvm/serialization-new.check +++ b/test/files/jvm/serialization-new.check @@ -89,8 +89,8 @@ x = Map(buffers -> 20, layers -> 2, title -> 3) y = Map(buffers -> 20, layers -> 2, title -> 3) x equals y: true, y equals x: true -x = ListSet(5, 3) -y = ListSet(5, 3) +x = ListSet(3, 5) +y = ListSet(3, 5) x equals y: true, y equals x: true x = Queue(a, b, c) diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check index cb26446f40a2..91248320d440 100644 --- a/test/files/jvm/serialization.check +++ b/test/files/jvm/serialization.check @@ -89,8 +89,8 @@ x = Map(buffers -> 20, layers -> 2, title -> 3) y = Map(buffers -> 20, layers -> 2, title -> 3) x equals y: true, y equals x: true -x = ListSet(5, 3) -y = ListSet(5, 3) +x = ListSet(3, 5) +y = ListSet(3, 5) x equals y: true, y equals x: true x = Queue(a, b, c) diff --git a/test/files/run/t3822.scala b/test/files/run/t3822.scala deleted file mode 100644 index c35804035e82..000000000000 --- a/test/files/run/t3822.scala +++ /dev/null @@ -1,19 +0,0 @@ -import scala.collection.{ mutable, immutable, generic } -import immutable.ListSet - -object Test { - def main(args: Array[String]): Unit = { - val xs = ListSet(-100000 to 100001: _*) - - assert(xs.size == 200002) - assert(xs.sum == 100001) - - val ys = ListSet[Int]() - val ys1 = (1 to 12).grouped(3).foldLeft(ys)(_ ++ _) - val ys2 = (1 to 12).foldLeft(ys)(_ + _) - - assert(ys1 == ys2) - } -} - - diff --git a/test/files/run/t6198.scala b/test/files/run/t6198.scala index 5aa8f1c1cfd1..65dbaf816006 100644 --- a/test/files/run/t6198.scala +++ b/test/files/run/t6198.scala @@ -1,13 +1,6 @@ import scala.collection.immutable._ object Test extends App { - // test that ListSet.tail does not use a builder - // we can't test for O(1) behavior, so the best we can do is to - // check that ls.tail always returns the same instance - val ls = ListSet.empty[Int] + 1 + 2 - - if(ls.tail ne ls.tail) - println("ListSet.tail should not use a builder!") // class that always causes hash collisions case class Collision(value:Int) { override def hashCode = 0 } diff --git a/test/files/run/t7445.scala b/test/files/run/t7445.scala deleted file mode 100644 index e4ffeb8e1a89..000000000000 --- a/test/files/run/t7445.scala +++ /dev/null @@ -1,6 +0,0 @@ -import scala.collection.immutable.ListMap - -object Test extends App { - val a = ListMap(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5); - require(a.tail == ListMap(2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5)); -} diff --git a/test/junit/scala/collection/immutable/ListMapTest.scala b/test/junit/scala/collection/immutable/ListMapTest.scala new file mode 100644 index 000000000000..320a9767550f --- /dev/null +++ b/test/junit/scala/collection/immutable/ListMapTest.scala @@ -0,0 +1,48 @@ +package scala.collection.immutable + +import org.junit.Assert._ 
+import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +@RunWith(classOf[JUnit4]) +class ListMapTest { + + @Test + def t7445(): Unit = { + val m = ListMap(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5) + assertEquals(ListMap(2 -> 2, 3 -> 3, 4 -> 4, 5 -> 5), m.tail) + } + + @Test + def hasCorrectBuilder(): Unit = { + val m = ListMap("a" -> "1", "b" -> "2", "c" -> "3", "b" -> "2.2", "d" -> "4") + assertEquals(List("a" -> "1", "c" -> "3", "b" -> "2.2", "d" -> "4"), m.toList) + } + + @Test + def hasCorrectHeadTailLastInit(): Unit = { + val m = ListMap(1 -> 1, 2 -> 2, 3 -> 3) + assertEquals(1 -> 1, m.head) + assertEquals(ListMap(2 -> 2, 3 -> 3), m.tail) + assertEquals(3 -> 3, m.last) + assertEquals(ListMap(1 -> 1, 2 -> 2), m.init) + } + + @Test + def hasCorrectAddRemove(): Unit = { + val m = ListMap(1 -> 1, 2 -> 2, 3 -> 3) + assertEquals(ListMap(1 -> 1, 2 -> 2, 3 -> 3, 4 -> 4), m + (4 -> 4)) + assertEquals(ListMap(1 -> 1, 3 -> 3, 2 -> 4), m + (2 -> 4)) + assertEquals(ListMap(1 -> 1, 2 -> 2, 3 -> 3), m + (2 -> 2)) + assertEquals(ListMap(2 -> 2, 3 -> 3), m - 1) + assertEquals(ListMap(1 -> 1, 3 -> 3), m - 2) + assertEquals(ListMap(1 -> 1, 2 -> 2, 3 -> 3), m - 4) + } + + @Test + def hasCorrectIterator(): Unit = { + val m = ListMap(1 -> 1, 2 -> 2, 3 -> 3, 5 -> 5, 4 -> 4) + assertEquals(List(1 -> 1, 2 -> 2, 3 -> 3, 5 -> 5, 4 -> 4), m.iterator.toList) + } +} diff --git a/test/junit/scala/collection/immutable/ListSetTest.scala b/test/junit/scala/collection/immutable/ListSetTest.scala new file mode 100644 index 000000000000..395da88c75b6 --- /dev/null +++ b/test/junit/scala/collection/immutable/ListSetTest.scala @@ -0,0 +1,53 @@ +package scala.collection.immutable + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +@RunWith(classOf[JUnit4]) +class ListSetTest { + + @Test + def t7445(): Unit = { + val s = ListSet(1, 2, 3, 4, 5) + assertEquals(ListSet(2, 3, 4, 5), s.tail) + } + + @Test + def hasCorrectBuilder(): Unit = { + val m = ListSet("a", "b", "c", "b", "d") + assertEquals(List("a", "b", "c", "d"), m.toList) + } + + @Test + def hasTailRecursiveDelete(): Unit = { + val s = ListSet(1 to 50000: _*) + try s - 25000 catch { case e: StackOverflowError => fail("A stack overflow occurred") } + } + + @Test + def hasCorrectHeadTailLastInit(): Unit = { + val m = ListSet(1, 2, 3) + assertEquals(1, m.head) + assertEquals(ListSet(2, 3), m.tail) + assertEquals(3, m.last) + assertEquals(ListSet(1, 2), m.init) + } + + @Test + def hasCorrectAddRemove(): Unit = { + val m = ListSet(1, 2, 3) + assertEquals(ListSet(1, 2, 3, 4), m + 4) + assertEquals(ListSet(1, 2, 3), m + 2) + assertEquals(ListSet(2, 3), m - 1) + assertEquals(ListSet(1, 3), m - 2) + assertEquals(ListSet(1, 2, 3), m - 4) + } + + @Test + def hasCorrectIterator(): Unit = { + val s = ListSet(1, 2, 3, 5, 4) + assertEquals(List(1, 2, 3, 5, 4), s.iterator.toList) + } +} From 061acd3ce9e1af12695b7387b42218fc99a8d91b Mon Sep 17 00:00:00 2001 From: som-snytt Date: Tue, 17 May 2016 02:58:17 -0700 Subject: [PATCH 0036/2793] SI-2458 Clarify spec for package syms (#5151) Package definitions are priority 4. Update the big example to be cut/pastable. 
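
As a rough illustration (this snippet is not part of the spec diff below, and all names in it are made up), the lowest-precedence rule means that an explicit import wins over a member of the current package that is defined in a different compilation unit:

```scala
// a.scala
package p {
  class C             // member of p, but defined in another compilation unit
}

// b.scala
package p {
  import q.C          // explicit import: precedence 2
  object Use {
    val c = new C     // refers to q.C; the p.C from a.scala has lowest precedence (4)
  }
}
package q {
  class C
}
```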
--- spec/02-identifiers-names-and-scopes.md | 91 +++++++++++++++---------- 1 file changed, 54 insertions(+), 37 deletions(-) diff --git a/spec/02-identifiers-names-and-scopes.md b/spec/02-identifiers-names-and-scopes.md index 0a9c5dfe7720..6653be2ce5e9 100644 --- a/spec/02-identifiers-names-and-scopes.md +++ b/spec/02-identifiers-names-and-scopes.md @@ -17,12 +17,12 @@ which are collectively called _bindings_. Bindings of different kinds have a precedence defined on them: 1. Definitions and declarations that are local, inherited, or made - available by a package clause in the same compilation unit where the - definition occurs have highest precedence. + available by a package clause and also defined in the same compilation unit + as the reference, have highest precedence. 1. Explicit imports have next highest precedence. 1. Wildcard imports have next highest precedence. -1. Definitions made available by a package clause not in the - compilation unit where the definition occurs have lowest precedence. +1. Definitions made available by a package clause, but not also defined in the + same compilation unit as the reference, have lowest precedence. There are two different name spaces, one for [types](03-types.html#types) and one for [terms](06-expressions.html#expressions). The same name may designate a @@ -34,22 +34,18 @@ in some inner scope _shadows_ bindings of lower precedence in the same scope as well as bindings of the same or lower precedence in outer scopes. - - A reference to an unqualified (type- or term-) identifier $x$ is bound by the unique binding, which @@ -69,17 +65,36 @@ the member of the type $T$ of $e$ which has the name $x$ in the same namespace as the identifier. It is an error if $T$ is not a [value type](03-types.html#value-types). The type of $e.x$ is the member type of the referenced entity in $T$. +Binding precedence implies that the way source is bundled in files affects name resolution. +In particular, imported names have higher precedence than names, defined in other files, +that might otherwise be visible because they are defined in +either the current package or an enclosing package. + +Note that a package definition is taken as lowest precedence, since packages +are open and can be defined across arbitrary compilation units. + +```scala +package util { + import scala.util + class Random + object Test extends App { + println(new util.Random) // scala.util.Random + } +} +``` + ###### Example -Assume the following two definitions of objects named `X` in packages `P` and `Q`. +Assume the following two definitions of objects named `X` in packages `p` and `q` +in separate compilation units. ```scala -package P { +package p { object X { val x = 1; val y = 2 } } -package Q { - object X { val x = true; val y = "" } +package q { + object X { val x = true; val y = false } } ``` @@ -87,25 +102,27 @@ The following program illustrates different kinds of bindings and precedences between them. 
```scala -package P { // `X' bound by package clause -import Console._ // `println' bound by wildcard import -object A { - println("L4: "+X) // `X' refers to `P.X' here - object B { - import Q._ // `X' bound by wildcard import - println("L7: "+X) // `X' refers to `Q.X' here - import X._ // `x' and `y' bound by wildcard import - println("L8: "+x) // `x' refers to `Q.X.x' here - object C { - val x = 3 // `x' bound by local definition - println("L12: "+x) // `x' refers to constant `3' here - { import Q.X._ // `x' and `y' bound by wildcard import -// println("L14: "+x) // reference to `x' is ambiguous here - import X.y // `y' bound by explicit import - println("L16: "+y) // `y' refers to `Q.X.y' here - { val x = "abc" // `x' bound by local definition - import P.X._ // `x' and `y' bound by wildcard import -// println("L19: "+y) // reference to `y' is ambiguous here - println("L20: "+x) // `x' refers to string "abc" here +package p { // `X' bound by package clause +import Console._ // `println' bound by wildcard import +object Y { + println(s"L4: $X") // `X' refers to `p.X' here + locally { + import q._ // `X' bound by wildcard import + println(s"L7: $X") // `X' refers to `q.X' here + import X._ // `x' and `y' bound by wildcard import + println(s"L9: $x") // `x' refers to `q.X.x' here + locally { + val x = 3 // `x' bound by local definition + println(s"L12: $x") // `x' refers to constant `3' here + locally { + import q.X._ // `x' and `y' bound by wildcard import +// println(s"L15: $x") // reference to `x' is ambiguous here + import X.y // `y' bound by explicit import + println(s"L17: $y") // `y' refers to `q.X.y' here + locally { + val x = "abc" // `x' bound by local definition + import p.X._ // `x' and `y' bound by wildcard import +// println(s"L21: $y") // reference to `y' is ambiguous here + println(s"L22: $x") // `x' refers to string "abc" here }}}}}} ``` From 4d5589ca63686472b090958a0984f061baf2af8f Mon Sep 17 00:00:00 2001 From: som-snytt Date: Tue, 17 May 2016 03:07:31 -0700 Subject: [PATCH 0037/2793] SI-9773 Fix doc for "".lines (#5161) An empty string yields an empty iterator. --- .../scala/collection/immutable/StringLike.scala | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala index d92db689121d..8a9df0e8625d 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -100,11 +100,13 @@ self => /** Return all lines in this string in an iterator, including trailing * line end characters. * - * The number of strings returned is one greater than the number of line - * end characters in this string. For an empty string, a single empty - * line is returned. A line end character is one of - * - `LF` - line feed (`0x0A` hex) - * - `FF` - form feed (`0x0C` hex) + * This method is analogous to `s.split(EOL).toIterator`, + * except that any existing line endings are preserved in the result strings, + * and the empty string yields an empty iterator. 
+ * + * A line end character is one of + * - `LF` - line feed (`0x0A`) + * - `FF` - form feed (`0x0C`) */ def linesWithSeparators: Iterator[String] = new AbstractIterator[String] { val str = self.toString @@ -121,14 +123,14 @@ self => } /** Return all lines in this string in an iterator, excluding trailing line - * end characters, i.e., apply `.stripLineEnd` to all lines + * end characters; i.e., apply `.stripLineEnd` to all lines * returned by `linesWithSeparators`. */ def lines: Iterator[String] = linesWithSeparators map (line => new WrappedString(line).stripLineEnd) /** Return all lines in this string in an iterator, excluding trailing line - * end characters, i.e., apply `.stripLineEnd` to all lines + * end characters; i.e., apply `.stripLineEnd` to all lines * returned by `linesWithSeparators`. */ @deprecated("Use `lines` instead.","2.11.0") From ffbf063baf5db46484a23c2b91d2b8769f0f956b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rui=20Gonc=CC=A7alves?= Date: Sun, 15 May 2016 00:09:45 +0100 Subject: [PATCH 0038/2793] Make ListMap and ListSet implementations similar ListSet and ListMap are two collections which share the exact same internal structure. This commit makes the two approaches as similar as possible by renaming and reordering internal methods, improving their Scaladoc and their code style. The Scaladoc of the classes and companion objects is also improved in order to alert users of the time complexity of the collections' operations. --- .../scala/collection/immutable/ListMap.scala | 244 +++++++----------- .../scala/collection/immutable/ListSet.scala | 159 +++++------- test/files/jvm/serialization-new.check | 4 +- test/files/jvm/serialization.check | 4 +- 4 files changed, 160 insertions(+), 251 deletions(-) diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala index 9af05183dd31..589f8bbba94f 100644 --- a/src/library/scala/collection/immutable/ListMap.scala +++ b/src/library/scala/collection/immutable/ListMap.scala @@ -6,8 +6,6 @@ ** |/ ** \* */ - - package scala package collection package immutable @@ -15,117 +13,79 @@ package immutable import generic._ import scala.annotation.tailrec -/** $factoryInfo - * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#list_maps "Scala's Collection Library overview"]] - * section on `List Maps` for more information. - * - * Note that `ListMap` is built in reverse order to canonical traversal order (traversal order is oldest first). - * Thus, `head` and `tail` are O(n). To rapidly partition a `ListMap` into elements, use `last` and `init` instead. These are O(1). - * - * @define Coll immutable.ListMap - * @define coll immutable list map - */ +/** + * $factoryInfo + * + * Note that each element insertion takes O(n) time, which means that creating a list map with + * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of + * elements. + * + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#list_maps "Scala's Collection Library overview"]] + * section on `List Maps` for more information. 
+ * @since 1 + * @define Coll ListMap + * @define coll list map + */ object ListMap extends ImmutableMapFactory[ListMap] { - /** $mapCanBuildFromInfo */ + + /** + * $mapCanBuildFromInfo + */ implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), ListMap[A, B]] = new MapCanBuildFrom[A, B] + def empty[A, B]: ListMap[A, B] = EmptyListMap.asInstanceOf[ListMap[A, B]] @SerialVersionUID(-8256686706655863282L) - private object EmptyListMap extends ListMap[Any, Nothing] { - override def apply(key: Any) = throw new NoSuchElementException("key not found: " + key) - override def contains(key: Any) = false - override def last: (Any, Nothing) = throw new NoSuchElementException("Empty ListMap") - override def init: ListMap[Any, Nothing] = throw new NoSuchElementException("Empty ListMap") - } + private object EmptyListMap extends ListMap[Any, Nothing] } -/** This class implements immutable maps using a list-based data structure, which preserves insertion order. - * Instances of `ListMap` represent empty maps; they can be either created by - * calling the constructor directly, or by applying the function `ListMap.empty`. - * - * @tparam A the type of the keys in this list map. - * @tparam B the type of the values associated with the keys. - * - * @author Matthias Zenger - * @author Martin Odersky - * @version 2.0, 01/01/2007 - * @since 1 - * @define Coll immutable.ListMap - * @define coll immutable list map - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ +/** + * This class implements immutable maps using a list-based data structure. List map iterators and + * traversal methods visit key-value pairs in the order whey were first inserted. + * + * Entries are stored internally in reversed insertion order, which means the newest key is at the + * head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and `init` + * are O(1). Other operations, such as inserting or removing entries, are also O(n), which makes + * this collection suitable only for a small number of elements. + * + * Instances of `ListMap` represent empty maps; they can be either created by calling the + * constructor directly, or by applying the function `ListMap.empty`. + * + * @tparam A the type of the keys contained in this list map + * @tparam B the type of the values associated with the keys + * + * @author Matthias Zenger + * @author Martin Odersky + * @version 2.0, 01/01/2007 + * @since 1 + * @define Coll ListMap + * @define coll list map + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ @SerialVersionUID(301002838095710379L) -sealed class ListMap[A, +B] -extends AbstractMap[A, B] - with Map[A, B] - with MapLike[A, B, ListMap[A, B]] - with Serializable { +sealed class ListMap[A, +B] extends AbstractMap[A, B] + with Map[A, B] + with MapLike[A, B, ListMap[A, B]] + with Serializable { override def empty = ListMap.empty - /** Returns the number of mappings in this map. - * - * @return number of mappings in this map. - */ override def size: Int = 0 + override def isEmpty: Boolean = true - /** Checks if this map maps `key` to a value and return the - * value if it exists. - * - * @param key the key of the mapping of interest - * @return the value of the mapping, if it exists - */ def get(key: A): Option[B] = None - /** This method allows one to create a new map with an additional mapping - * from `key` to `value`. If the map contains already a mapping for `key`, - * it will be overridden by this function. - * - * @param key the key element of the updated entry. 
- * @param value the value element of the updated entry. - */ - override def updated [B1 >: B] (key: A, value: B1): ListMap[A, B1] = - new Node[B1](key, value) - - /** Add a key/value pair to this map. - * @param kv the key/value pair - * @return A new map with the new binding added to this map - */ - def + [B1 >: B] (kv: (A, B1)): ListMap[A, B1] = updated(kv._1, kv._2) - - /** Adds two or more elements to this collection and returns - * either the collection itself (if it is mutable), or a new collection - * with the added elements. - * - * @param elem1 the first element to add. - * @param elem2 the second element to add. - * @param elems the remaining elements to add. - */ - override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): ListMap[A, B1] = - this + elem1 + elem2 ++ elems - - /** Adds a number of elements provided by a traversable object - * and returns a new collection with the added elements. - * - * @param xs the traversable object. - */ + override def updated[B1 >: B](key: A, value: B1): ListMap[A, B1] = new Node[B1](key, value) + + def +[B1 >: B](kv: (A, B1)): ListMap[A, B1] = new Node[B1](kv._1, kv._2) + def -(key: A): ListMap[A, B] = this + override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): ListMap[A, B1] = if (xs.isEmpty) this else ((repr: ListMap[A, B1]) /: xs) (_ + _) - /** This creates a new mapping without the given `key`. - * If the map does not contain a mapping for the given key, the - * method returns the same map. - * - * @param key a map without a mapping for the given key. - */ - def - (key: A): ListMap[A, B] = this - - /** Returns an iterator over key-value pairs. - */ def iterator: Iterator[(A, B)] = { def reverseList = { var curr: ListMap[A, B] = this @@ -139,90 +99,68 @@ extends AbstractMap[A, B] reverseList.iterator } - protected def key: A = throw new NoSuchElementException("empty map") - protected def value: B = throw new NoSuchElementException("empty map") - protected def next: ListMap[A, B] = throw new NoSuchElementException("empty map") + protected def key: A = throw new NoSuchElementException("key of empty map") + protected def value: B = throw new NoSuchElementException("value of empty map") + protected def next: ListMap[A, B] = throw new NoSuchElementException("next of empty map") + + override def stringPrefix = "ListMap" - /** This class represents an entry in the `ListMap`. - */ + /** + * Represents an entry in the `ListMap`. + */ @SerialVersionUID(-6453056603889598734L) protected class Node[B1 >: B](override protected val key: A, override protected val value: B1) extends ListMap[A, B1] with Serializable { - /** Returns the number of mappings in this map. - * - * @return number of mappings. - */ - override def size: Int = size0(this, 0) - - // to allow tail recursion and prevent stack overflows - @tailrec private def size0(cur: ListMap[A, B1], acc: Int): Int = if (cur.isEmpty) acc else size0(cur.next, acc + 1) - - /** Is this an empty map? - * - * @return true, iff the map is empty. - */ - override def isEmpty: Boolean = false - /** Retrieves the value which is associated with the given key. This - * method throws an exception if there is no mapping from the given - * key to a value. - * - * @param k the key - * @return the value associated with the given key. 
- */ - override def apply(k: A): B1 = apply0(this, k) - - @tailrec private def apply0(cur: ListMap[A, B1], k: A): B1 = - if (cur.isEmpty) throw new NoSuchElementException("key not found: "+k) - else if (k == cur.key) cur.value - else apply0(cur.next, k) + override def size: Int = sizeInternal(this, 0) + + @tailrec private[this] def sizeInternal(cur: ListMap[A, B1], acc: Int): Int = + if (cur.isEmpty) acc + else sizeInternal(cur.next, acc + 1) + + override def isEmpty: Boolean = false - /** Checks if this map maps `key` to a value and return the - * value if it exists. - * - * @param k the key of the mapping of interest - * @return the value of the mapping, if it exists - */ - override def get(k: A): Option[B1] = get0(this, k) + override def apply(k: A): B1 = applyInternal(this, k) - @tailrec private def get0(cur: ListMap[A, B1], k: A): Option[B1] = - if (k == cur.key) Some(cur.value) - else if (cur.next.nonEmpty) get0(cur.next, k) else None + @tailrec private[this] def applyInternal(cur: ListMap[A, B1], k: A): B1 = + if (cur.isEmpty) throw new NoSuchElementException("key not found: " + k) + else if (k == cur.key) cur.value + else applyInternal(cur.next, k) + override def get(k: A): Option[B1] = getInternal(this, k) - override def contains(key: A): Boolean = contains0(this, key) + @tailrec private[this] def getInternal(cur: ListMap[A, B1], k: A): Option[B1] = + if (cur.isEmpty) None + else if (k == cur.key) Some(cur.value) + else getInternal(cur.next, k) - @tailrec private def contains0(cur: ListMap[A, B1], k: A): Boolean = - if (k == cur.key) true - else if (cur.next.nonEmpty) contains0(cur.next, k) - else false + override def contains(k: A): Boolean = containsInternal(this, k) + @tailrec private[this] def containsInternal(cur: ListMap[A, B1], k: A): Boolean = + if(cur.isEmpty) false + else if (k == cur.key) true + else containsInternal(cur.next, k) - /** This method allows one to create a new map with an additional mapping - * from `key` to `value`. If the map contains already a mapping for `key`, - * it will be overridden by this function. - */ - override def updated [B2 >: B1](k: A, v: B2): ListMap[A, B2] = { + override def updated[B2 >: B1](k: A, v: B2): ListMap[A, B2] = { val m = this - k new m.Node[B2](k, v) } + override def +[B2 >: B1](kv: (A, B2)): ListMap[A, B2] = { + val m = this - kv._1 + new m.Node[B2](kv._1, kv._2) + } - /** Creates a new mapping without the given `key`. - * If the map does not contain a mapping for the given key, the - * method returns the same map. 
- */ - override def - (k: A): ListMap[A, B1] = remove0(k, this, Nil) + override def -(k: A): ListMap[A, B1] = removeInternal(k, this, Nil) - @tailrec private def remove0(k: A, cur: ListMap[A, B1], acc: List[ListMap[A, B1]]): ListMap[A, B1] = + @tailrec private[this] def removeInternal(k: A, cur: ListMap[A, B1], acc: List[ListMap[A, B1]]): ListMap[A, B1] = if (cur.isEmpty) acc.last else if (k == cur.key) (cur.next /: acc) { case (t, h) => new t.Node(h.key, h.value) } - else remove0(k, cur.next, cur::acc) + else removeInternal(k, cur.next, cur :: acc) override protected def next: ListMap[A, B1] = ListMap.this override def last: (A, B1) = (key, value) - override def init: ListMap[A, B1] = next } } diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala index 7803e055ed0e..c9c6558bb992 100644 --- a/src/library/scala/collection/immutable/ListSet.scala +++ b/src/library/scala/collection/immutable/ListSet.scala @@ -13,84 +13,74 @@ package immutable import generic._ import scala.annotation.tailrec -/** $factoryInfo - * @define Coll immutable.ListSet - * @define coll immutable list set - * @since 1 - */ +/** + * $factoryInfo + * + * Note that each element insertion takes O(n) time, which means that creating a list set with + * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of + * elements. + * + * @since 1 + * @define Coll ListSet + * @define coll list set + */ object ListSet extends ImmutableSetFactory[ListSet] { - /** setCanBuildFromInfo */ - implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListSet[A]] = setCanBuildFrom[A] - private object EmptyListSet extends ListSet[Any] { } + /** + * $setCanBuildFromInfo + */ + implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListSet[A]] = + setCanBuildFrom[A] + + private object EmptyListSet extends ListSet[Any] private[collection] def emptyInstance: ListSet[Any] = EmptyListSet } -/** This class implements immutable sets using a list-based data - * structure. Instances of `ListSet` represent - * empty sets; they can be either created by calling the constructor - * directly, or by applying the function `ListSet.empty`. - * - * @tparam A the type of the elements contained in this list set. - * - * @author Matthias Zenger - * @version 1.0, 09/07/2003 - * @since 1 - * @define Coll immutable.ListSet - * @define coll immutable list set - * @define mayNotTerminateInf - * @define willNotTerminateInf - */ +/** + * This class implements immutable sets using a list-based data structure. List set iterators and + * traversal methods visit elements in the order whey were first inserted. + * + * Elements are stored internally in reversed insertion order, which means the newest element is at + * the head of the list. As such, methods such as `head` and `tail` are O(n), while `last` and + * `init` are O(1). Other operations, such as inserting or removing entries, are also O(n), which + * makes this collection suitable only for a small number of elements. + * + * Instances of `ListSet` represent empty sets; they can be either created by calling the + * constructor directly, or by applying the function `ListSet.empty`. 
+ * + * @tparam A the type of the elements contained in this list set + * + * @author Matthias Zenger + * @version 1.0, 09/07/2003 + * @since 1 + * @define Coll ListSet + * @define coll list set + * @define mayNotTerminateInf + * @define willNotTerminateInf + */ sealed class ListSet[A] extends AbstractSet[A] - with Set[A] - with GenericSetTemplate[A, ListSet] - with SetLike[A, ListSet[A]] - with Serializable{ self => + with Set[A] + with GenericSetTemplate[A, ListSet] + with SetLike[A, ListSet[A]] + with Serializable { + override def companion: GenericCompanion[ListSet] = ListSet - /** Returns the number of elements in this set. - * - * @return number of set elements. - */ override def size: Int = 0 override def isEmpty: Boolean = true - /** Checks if this set contains element `elem`. - * - * @param elem the element to check for membership. - * @return `'''true'''`, iff `elem` is contained in this set. - */ def contains(elem: A): Boolean = false - /** This method creates a new set with an additional element. - */ - def + (elem: A): ListSet[A] = new Node(elem) - - /** `-` can be used to remove a single element. - */ - def - (elem: A): ListSet[A] = this + def +(elem: A): ListSet[A] = new Node(elem) + def -(elem: A): ListSet[A] = this - /** If we are bulk adding elements and desire a runtime measured in - * sub-interstellar time units, we better find a way to avoid traversing - * the collection on each element. That's what the custom builder does, - * so we take the easy way out and add ourselves and the argument to - * a new builder. - */ override def ++(xs: GenTraversableOnce[A]): ListSet[A] = if (xs.isEmpty) this else (repr /: xs) (_ + _) - private[ListSet] def unchecked_outer: ListSet[A] = - throw new NoSuchElementException("Empty ListSet has no outer pointer") - - /** Creates a new iterator over all elements contained in this set. - * - * @throws java.util.NoSuchElementException - * @return the new iterator - */ def iterator: Iterator[A] = { def reverseList = { - var curr: ListSet[A] = self + var curr: ListSet[A] = this var res: List[A] = Nil while (!curr.isEmpty) { res = curr.elem :: res @@ -101,62 +91,43 @@ sealed class ListSet[A] extends AbstractSet[A] reverseList.iterator } - /** - * @throws java.util.NoSuchElementException - */ protected def elem: A = throw new NoSuchElementException("elem of empty set") + protected def next: ListSet[A] = throw new NoSuchElementException("next of empty set") - /** - * @throws java.util.NoSuchElementException - */ - protected def next: ListSet[A] = throw new NoSuchElementException("Next of an empty set") + override def toSet[B >: A]: Set[B] = this.asInstanceOf[ListSet[B]] override def stringPrefix = "ListSet" - /** Represents an entry in the `ListSet`. - */ - protected class Node(override val elem: A) extends ListSet[A] with Serializable { - override private[ListSet] def unchecked_outer = self + /** + * Represents an entry in the `ListSet`. + */ + protected class Node(override protected val elem: A) extends ListSet[A] with Serializable { - /** Returns the number of elements in this set. - * - * @return number of set elements. - */ override def size = sizeInternal(this, 0) - @tailrec private def sizeInternal(n: ListSet[A], acc: Int): Int = + + @tailrec private[this] def sizeInternal(n: ListSet[A], acc: Int): Int = if (n.isEmpty) acc - else sizeInternal(n.unchecked_outer, acc + 1) + else sizeInternal(n.next, acc + 1) - /** Checks if this set is empty. - * - * @return true, iff there is no element in the set. 
- */ override def isEmpty: Boolean = false - /** Checks if this set contains element `elem`. - * - * @param e the element to check for membership. - * @return `'''true'''`, iff `elem` is contained in this set. - */ override def contains(e: A) = containsInternal(this, e) - @tailrec private def containsInternal(n: ListSet[A], e: A): Boolean = - !n.isEmpty && (n.elem == e || containsInternal(n.unchecked_outer, e)) - /** This method creates a new set with an additional element. - */ + @tailrec private[this] def containsInternal(n: ListSet[A], e: A): Boolean = + !n.isEmpty && (n.elem == e || containsInternal(n.next, e)) + override def +(e: A): ListSet[A] = if (contains(e)) this else new Node(e) - /** `-` can be used to remove a single element from a set. - */ override def -(e: A): ListSet[A] = removeInternal(e, this, Nil) - @tailrec private def removeInternal(k: A, cur: ListSet[A], acc: List[ListSet[A]]): ListSet[A] = + @tailrec private[this] def removeInternal(k: A, cur: ListSet[A], acc: List[ListSet[A]]): ListSet[A] = if (cur.isEmpty) acc.last else if (k == cur.elem) (cur.next /: acc) { case (t, h) => new t.Node(h.elem) } else removeInternal(k, cur.next, cur :: acc) - override protected def next: ListSet[A] = self - } + override protected def next: ListSet[A] = ListSet.this - override def toSet[B >: A]: Set[B] = this.asInstanceOf[ListSet[B]] + override def last: A = elem + override def init: ListSet[A] = next + } } diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check index 91248320d440..1c5dd4828bea 100644 --- a/test/files/jvm/serialization-new.check +++ b/test/files/jvm/serialization-new.check @@ -85,8 +85,8 @@ x = List((buffers,20), (layers,2), (title,3)) y = List((buffers,20), (layers,2), (title,3)) x equals y: true, y equals x: true -x = Map(buffers -> 20, layers -> 2, title -> 3) -y = Map(buffers -> 20, layers -> 2, title -> 3) +x = ListMap(buffers -> 20, layers -> 2, title -> 3) +y = ListMap(buffers -> 20, layers -> 2, title -> 3) x equals y: true, y equals x: true x = ListSet(3, 5) diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check index 91248320d440..1c5dd4828bea 100644 --- a/test/files/jvm/serialization.check +++ b/test/files/jvm/serialization.check @@ -85,8 +85,8 @@ x = List((buffers,20), (layers,2), (title,3)) y = List((buffers,20), (layers,2), (title,3)) x equals y: true, y equals x: true -x = Map(buffers -> 20, layers -> 2, title -> 3) -y = Map(buffers -> 20, layers -> 2, title -> 3) +x = ListMap(buffers -> 20, layers -> 2, title -> 3) +y = ListMap(buffers -> 20, layers -> 2, title -> 3) x equals y: true, y equals x: true x = ListSet(3, 5) From 4552de451b48b65b8dca42d4167eb6f6aefbf408 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rui=20Gonc=CC=A7alves?= Date: Sun, 15 May 2016 00:19:35 +0100 Subject: [PATCH 0039/2793] Add SerialVersionUID to ListSet --- src/library/scala/collection/immutable/ListSet.scala | 3 +++ test/files/run/t8549.scala | 4 +++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala index c9c6558bb992..d9795e9161f0 100644 --- a/src/library/scala/collection/immutable/ListSet.scala +++ b/src/library/scala/collection/immutable/ListSet.scala @@ -32,6 +32,7 @@ object ListSet extends ImmutableSetFactory[ListSet] { implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, ListSet[A]] = setCanBuildFrom[A] + @SerialVersionUID(5010379588739277132L) private object EmptyListSet extends ListSet[Any] 
private[collection] def emptyInstance: ListSet[Any] = EmptyListSet } @@ -58,6 +59,7 @@ object ListSet extends ImmutableSetFactory[ListSet] { * @define mayNotTerminateInf * @define willNotTerminateInf */ +@SerialVersionUID(-8417059026623606218L) sealed class ListSet[A] extends AbstractSet[A] with Set[A] with GenericSetTemplate[A, ListSet] @@ -101,6 +103,7 @@ sealed class ListSet[A] extends AbstractSet[A] /** * Represents an entry in the `ListSet`. */ + @SerialVersionUID(-787710309854855049L) protected class Node(override protected val elem: A) extends ListSet[A] with Serializable { override def size = sizeInternal(this, 0) diff --git a/test/files/run/t8549.scala b/test/files/run/t8549.scala index e2d0d335b0ca..1ce8933efb13 100644 --- a/test/files/run/t8549.scala +++ b/test/files/run/t8549.scala @@ -79,7 +79,7 @@ object Test extends App { } } - // Generated on 20160328-17:47:35 with Scala version 2.12.0-20160328-174205-d46145c) + // Generated on 20160515-00:17:51 with Scala version 2.12.0-SNAPSHOT) overwrite.foreach(updateComment) check(Some(1))("rO0ABXNyAApzY2FsYS5Tb21lESLyaV6hi3QCAAFMAAF4dAASTGphdmEvbGFuZy9PYmplY3Q7eHIADHNjYWxhLk9wdGlvbv5pN/3bDmZ0AgAAeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAQ==") @@ -145,6 +145,8 @@ object Test extends App { check(immutable.HashSet(1, 2, 3))( "rO0ABXNyADVzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5IYXNoU2V0JFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAACAwAAeHB3BAAAAANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJzcQB+AAIAAAADeA==") // TODO provoke HashSetCollision1 + check(immutable.ListSet())( "rO0ABXNyADBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2V0JEVtcHR5TGlzdFNldCRFiHGwmKwhTAIAAHhyACJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2V0izCZaSia0jYCAAB4cA==") + check(immutable.ListSet(1))( "rO0ABXNyACdzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2V0JE5vZGX1EX2lizBAdwIAAkwABiRvdXRlcnQAJExzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9MaXN0U2V0O0wABGVsZW10ABJMamF2YS9sYW5nL09iamVjdDt4cgAic2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdFNldIswmWkomtI2AgAAeHBzcgAwc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdFNldCRFbXB0eUxpc3RTZXQkRYhxsJisIUwCAAB4cQB+AANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAQ==") check(immutable.ListMap())( "rO0ABXNyADBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0TWFwJEVtcHR5TGlzdE1hcCSNalsvpBZeDgIAAHhyACJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0TWFwBC1gfIkUSKsCAAB4cA==") check(immutable.ListMap(1 -> 2))( "rO0ABXNyACdzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0TWFwJE5vZGWmciM1Yav+8gIAA0wABiRvdXRlcnQAJExzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9MaXN0TWFwO0wAA2tleXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wABXZhbHVlcQB+AAJ4cgAic2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdE1hcAQtYHyJFEirAgAAeHBzcgAwc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdE1hcCRFbXB0eUxpc3RNYXAkjWpbL6QWXg4CAAB4cQB+AANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABwAAAAI=") check(immutable.Queue())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5RdWV1ZZY146W3qSuhAgACTAACaW50ACFMc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvTGlzdDtMAANvdXRxAH4AAXhwc3IAMnNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3QkU2VyaWFsaXphdGlvblByb3h5AAAAAAAAAAEDAAB4cHNyACxzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2VyaWFsaXplRW5kJIpcY1v3UwttAgAAeHB4cQB+AAQ=") From 443fa04653f8f2d80df721885192ee4adefce3c5 Mon Sep 17 00:00:00 2001 From: Krzysztof Romanowski Date: Tue, 17 May 2016 12:22:30 +0200 Subject: [PATCH 0040/2793] Remove default value for 
sourcepath in scalac (ant version). (#5166) --- src/compiler/scala/tools/ant/Scalac.scala | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala index f46f014096b0..a6024d438870 100644 --- a/src/compiler/scala/tools/ant/Scalac.scala +++ b/src/compiler/scala/tools/ant/Scalac.scala @@ -577,8 +577,6 @@ class Scalac extends ScalaMatchingTask with ScalacShared { settings.classpath.value = asString(getClasspath) if (!sourcepath.isEmpty) settings.sourcepath.value = asString(getSourcepath) - else if (origin.get.size() > 0) - settings.sourcepath.value = origin.get.list()(0) if (!bootclasspath.isEmpty) settings.bootclasspath.value = asString(getBootclasspath) if (!extdirs.isEmpty) settings.extdirs.value = asString(getExtdirs) From 883fdd74d63a54495f4013eef81c9b5ebc850d1c Mon Sep 17 00:00:00 2001 From: peterz Date: Tue, 10 May 2016 11:54:21 +0300 Subject: [PATCH 0041/2793] SI-5463 Check .jars before using them Make broken JAR files on compiler classpath cause a fatal error --- .../nsc/classpath/AggregateClassPath.scala | 12 ++++++++++- test/files/run/t5463.scala | 21 +++++++++++++++++++ 2 files changed, 32 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t5463.scala diff --git a/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala index 6b435542a309..a1af3413ead6 100644 --- a/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala @@ -6,6 +6,7 @@ package scala.tools.nsc.classpath import java.net.URL import scala.annotation.tailrec import scala.collection.mutable.ArrayBuffer +import scala.reflect.internal.FatalError import scala.reflect.io.AbstractFile import scala.tools.nsc.util.ClassPath import scala.tools.nsc.util.ClassRepresentation @@ -72,7 +73,16 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { getDistinctEntries(_.sources(inPackage)) override private[nsc] def list(inPackage: String): ClassPathEntries = { - val (packages, classesAndSources) = aggregates.map(_.list(inPackage)).unzip + val (packages, classesAndSources) = aggregates.map { cp => + try { + cp.list(inPackage) + } catch { + case ex: java.io.IOException => + val e = new FatalError(ex.getMessage) + e.initCause(ex) + throw e + } + }.unzip val distinctPackages = packages.flatten.distinct val distinctClassesAndSources = mergeClassesAndSources(classesAndSources: _*) ClassPathEntries(distinctPackages, distinctClassesAndSources) diff --git a/test/files/run/t5463.scala b/test/files/run/t5463.scala new file mode 100644 index 000000000000..30b8306156d3 --- /dev/null +++ b/test/files/run/t5463.scala @@ -0,0 +1,21 @@ +import scala.reflect.internal.FatalError +import scala.tools.partest.DirectTest + +object Test extends DirectTest { + + def code = "class A" + + override def show(): Unit = { + // Create a broken JAR file and put it on compiler classpath + val jarpath = testOutput.path + "/notajar.jar" + scala.reflect.io.File(jarpath).writeAll("This isn't really a JAR file") + + val classpath = List(sys.props("partest.lib"), jarpath, testOutput.path) mkString sys.props("path.separator") + try { + compileString(newCompiler("-cp", classpath, "-d", testOutput.path))(code) + throw new Error("Compilation should have failed"); + } catch { + case ex: FatalError => // this is expected + } + } +} From 7f514bba9ff1993ccbfdcf4a37a8045849f1647a Mon Sep 17 00:00:00 2001 From: Som Snytt 
Date: Tue, 17 May 2016 14:12:57 -0700 Subject: [PATCH 0042/2793] SI-4625 Warn on first non-toplevel only Fixed the warning when main module is accompanied by snippets. Minor clean-up so even I can follow what is returned. --- .../scala/tools/nsc/ast/parser/Parsers.scala | 88 ++++++++++--------- test/files/run/t4625c.script | 1 + 2 files changed, 47 insertions(+), 42 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index c2f2141fd3ec..308669256dc7 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -371,7 +371,7 @@ self => * around it. Since objects are loaded lazily the whole script would have * been a no-op, so we're not taking much liberty. */ - def searchForMain(): Option[Tree] = { + def searchForMain(): Tree = { import PartialFunction.cond /* Have to be fairly liberal about what constitutes a main method since @@ -387,10 +387,10 @@ self => case Template(parents, _, _) => parents.exists(cond(_) { case Ident(tpnme.App) => true }) case _ => false } - /* For now we require there only be one top level object. */ + /* We allow only one main module. */ var seenModule = false var disallowed = EmptyTree: Tree - val newStmts = stmts collect { + val newStmts = stmts.map { case md @ ModuleDef(mods, name, template) if !seenModule && (isApp(template) || md.exists(isMainMethod)) => seenModule = true /* This slightly hacky situation arises because we have no way to communicate @@ -407,54 +407,58 @@ self => case t @ Import(_, _) => t case t => /* If we see anything but the above, fail. */ - disallowed = t + if (disallowed.isEmpty) disallowed = t EmptyTree } - if (disallowed.isEmpty) Some(makeEmptyPackage(0, newStmts)) + if (disallowed.isEmpty) makeEmptyPackage(0, newStmts) else { if (seenModule) warning(disallowed.pos.point, "Script has a main object but statement is disallowed") - None + EmptyTree } } - if (mainModuleName == newTermName(ScriptRunner.defaultScriptMain)) - searchForMain() foreach { return _ } + def mainModule: Tree = + if (mainModuleName == newTermName(ScriptRunner.defaultScriptMain)) searchForMain() else EmptyTree - /* Here we are building an AST representing the following source fiction, - * where `moduleName` is from -Xscript (defaults to "Main") and are - * the result of parsing the script file. - * - * {{{ - * object moduleName { - * def main(args: Array[String]): Unit = - * new AnyRef { - * stmts - * } - * } - * }}} - */ - def emptyInit = DefDef( - NoMods, - nme.CONSTRUCTOR, - Nil, - ListOfNil, - TypeTree(), - Block(List(Apply(gen.mkSuperInitCall, Nil)), literalUnit) - ) - - // def main - def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String))) - def mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.args, mainParamType, EmptyTree)) - def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), gen.mkAnonymousNew(stmts)) - - // object Main - def moduleName = newTermName(ScriptRunner scriptMain settings) - def moduleBody = Template(atInPos(scalaAnyRefConstr) :: Nil, noSelfType, List(emptyInit, mainDef)) - def moduleDef = ModuleDef(NoMods, moduleName, moduleBody) - - // package { ... } - makeEmptyPackage(0, moduleDef :: Nil) + def repackaged: Tree = { + /* Here we are building an AST representing the following source fiction, + * where `moduleName` is from -Xscript (defaults to "Main") and are + * the result of parsing the script file. 
+ * + * {{{ + * object moduleName { + * def main(args: Array[String]): Unit = + * new AnyRef { + * stmts + * } + * } + * }}} + */ + def emptyInit = DefDef( + NoMods, + nme.CONSTRUCTOR, + Nil, + ListOfNil, + TypeTree(), + Block(List(Apply(gen.mkSuperInitCall, Nil)), literalUnit) + ) + + // def main + def mainParamType = AppliedTypeTree(Ident(tpnme.Array), List(Ident(tpnme.String))) + def mainParameter = List(ValDef(Modifiers(Flags.PARAM), nme.args, mainParamType, EmptyTree)) + def mainDef = DefDef(NoMods, nme.main, Nil, List(mainParameter), scalaDot(tpnme.Unit), gen.mkAnonymousNew(stmts)) + + // object Main + def moduleName = newTermName(ScriptRunner scriptMain settings) + def moduleBody = Template(atInPos(scalaAnyRefConstr) :: Nil, noSelfType, List(emptyInit, mainDef)) + def moduleDef = ModuleDef(NoMods, moduleName, moduleBody) + + // package { ... } + makeEmptyPackage(0, moduleDef :: Nil) + } + + mainModule orElse repackaged } /* --------------- PLACEHOLDERS ------------------------------------------- */ diff --git a/test/files/run/t4625c.script b/test/files/run/t4625c.script index fa14f43950df..16159208e05c 100644 --- a/test/files/run/t4625c.script +++ b/test/files/run/t4625c.script @@ -1,5 +1,6 @@ val x = "value x" +val y = "value y" object Main extends App { println(s"Test ran with $x.") From 4a7f82c9047d04d79aa0fe4c0f8dc249ba221f76 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 18 May 2016 00:04:53 +0200 Subject: [PATCH 0043/2793] improve README (#5163) --- CONTRIBUTING.md | 28 ++- README.md | 380 +++++++++++++++++++++++------------------ src/intellij/README.md | 64 +++++-- 3 files changed, 287 insertions(+), 185 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 617734210f06..47d27886231e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -8,7 +8,7 @@ In 2014, you -- the Scala community -- matched the core team at EPFL in number o We are super happy about this, and are eager to make your experience contributing to Scala productive and satisfying, so that we can keep up this growth. We can't do this alone (nor do we want to)! -This is why we're collecting these notes on how to contribute, and we hope you'll share your experience to improve the process for the next contributor! (Feel free to send a PR for this note, send your thoughts to scala-internals, or tweet about it to @adriaanm.) +This is why we're collecting these notes on how to contribute, and we hope you'll share your experience to improve the process for the next contributor! (Feel free to send a PR for this note, send your thoughts to gitter, scala-internals, or tweet about it to @adriaanm.) By the way, the team at Lightbend is: @adriaanm, @lrytz, @retronym, @SethTisue, and @szeiger. @@ -117,14 +117,32 @@ See the [scala-jenkins-infra repo](https://github.com/scala/scala-jenkins-infra) ### Pass code review -Your PR will need to be assigned to one or more reviewers. You can suggest reviewers yourself; if you're not sure, see the list in [README.md](README.md) or ask on scala-internals. +Your PR will need to be assigned to one or more reviewers. You can suggest reviewers +yourself; if you're not sure, see the list in [README.md](README.md) or ask on gitter +or scala-internals. -To assign a reviewer, add a "review by @reviewer" to your PR description. +To assign a reviewer, add a "review by @reviewer" to the PR description or in a +comment on your PR. 
NOTE: it's best not to @mention in commit messages, as github pings you every time a commit with your @name on it shuffles through the system (even in other repos, on merges,...). A reviewer gives the green light by commenting "LGTM" (looks good to me). -A review feedback may be addressed by pushing new commits to the request, if these commits stand on their own. +When including review feedback, we typically amend the changes into the existing commit(s) +and `push -f` to the branch. This is to keep the git history clean. Additional commits +are OK if they stand on their own. -Once all these conditions are met, and we agree with the change (we are available on scala-internals to discuss this beforehand, before you put in the coding work!), we will merge your changes. +Once all these conditions are met, and we agree with the change (we are available on +gitter or scala-internals to discuss this beforehand, before you put in the coding work!), +we will merge your changes. + +We use the following labels: + +Label | Description +-------------------------|:----------- +`reviewed` | automatically added by scabot when a comment prefixed with LGTM is posted +`welcome` | added by reviewer / queue curator to welcome someone's first PR (for highlighting in the release notes) +`release-notes` | added by reviewer / queue curator to make sure this PR is highlighted in the release notes +`on-hold` | added when this PR should not yet be merged, even though CI is green +`WIP` | added by the author if a PR is submitted for CI testing, needs more work to be complete +`assistance-appreciated` | added by the author if help by the community is appreciated to move a change forward diff --git a/README.md b/README.md index dc869da0da79..6ebb4531765c 100644 --- a/README.md +++ b/README.md @@ -5,12 +5,12 @@ This is the official repository for the [Scala Programming Language](http://www. To contribute to the Scala Standard Library, Scala Compiler and Scala Language Specification, please send us a [pull request](https://help.github.com/articles/using-pull-requests/#fork--pull) from your fork of this repository! We do have to ask you to sign the [Scala CLA](http://www.lightbend.com/contribute/cla/scala) before we can merge any of your work into our code base, to protect its open source nature. -For more information on building and developing the core of Scala, read on! +For more information on building and developing the core of Scala, make sure to read +the rest of this README! -Please also check out: - -* our [guidelines for contributing](CONTRIBUTING.md). -* the ["Scala Hacker Guide"](http://scala-lang.org/contribute/hacker-guide.html) covers some of the same ground as this README, but in greater detail and in a more tutorial style, using a running example. +In order to get in touch with Scala contributors, join the +[scala/contributors](https://gitter.im/scala/contributors) gitter channel or post on the +[scala-internals mailing list](http://www.scala-lang.org/community/). # Reporting issues @@ -18,195 +18,247 @@ We're still using Jira for issue reporting, so please [report any issues](https: (We would love to start using GitHub Issues, but we're too resource-constrained to take on this migration right now.) # Get in touch! 
-If you need some help with your PR at any time, please feel free to @-mention anyone from the list below (or simply `@scala/team-core-scala`), and we will do our best to help you out: +If you need some help with your PR at any time, please feel free to @-mention anyone from the list below, and we will do our best to help you out: | username | talk to me about... | --------------------------------------------------------------------------------------------------|----------------------------------------------------------------|---------------------------------------------------| - | [`@adriaanm`](https://github.com/adriaanm) | type checker, pattern matcher, infrastructure, language spec | - | [`@SethTisue`](https://github.com/SethTisue) | build, developer docs, community build, Jenkins, library, the welcome-to-Scala experience | - | [`@retronym`](https://github.com/retronym) | compiler performance, weird compiler bugs, Java 8 lambdas, REPL | - | [`@Ichoran`](https://github.com/Ichoran) | collections library, performance | - | [`@lrytz`](https://github.com/lrytz) | optimizer, named & default arguments | - | [`@VladUreche`](https://github.com/VladUreche) | specialization, Scaladoc tool | - | [`@densh`](https://github.com/densh) | quasiquotes, parser, string interpolators, macros in standard library | - | [`@xeno-by`](https://github.com/xeno-by) | macros and reflection | - | [`@heathermiller`](https://github.com/heathermiller) | documentation | - | [`@dickwall`](https://github.com/dickwall) | process & community, documentation | - | [`@dragos`](https://github.com/dragos) | specialization, back end | - | [`@axel22`](https://github.com/axel22) | collections, concurrency, specialization | - | [`@janekdb`](https://github.com/janekdb) | documentation | + | [`@adriaanm`](https://github.com/adriaanm) | type checker, pattern matcher, infrastructure, language spec | + | [`@SethTisue`](https://github.com/SethTisue) | build, developer docs, community build, Jenkins, library, the welcome-to-Scala experience | + | [`@retronym`](https://github.com/retronym) | compiler performance, weird compiler bugs, Java 8 lambdas, REPL | + | [`@Ichoran`](https://github.com/Ichoran) | collections library, performance | + | [`@lrytz`](https://github.com/lrytz) | optimizer, named & default arguments | + | [`@VladUreche`](https://github.com/VladUreche) | specialization, Scaladoc tool | + | [`@densh`](https://github.com/densh) | quasiquotes, parser, string interpolators, macros in standard library | + | [`@xeno-by`](https://github.com/xeno-by) | macros and reflection | + | [`@heathermiller`](https://github.com/heathermiller) | documentation | + | [`@dickwall`](https://github.com/dickwall) | process & community, documentation | + | [`@dragos`](https://github.com/dragos) | specialization, back end | + | [`@axel22`](https://github.com/axel22) | collections, concurrency, specialization | + | [`@janekdb`](https://github.com/janekdb) | documentation | P.S.: If you have some spare time to help out around here, we would be delighted to add your name to this list! 
-# Handy Links - - [A wealth of documentation](http://docs.scala-lang.org) - - [mailing lists](http://www.scala-lang.org/community/) - - [Gitter room for Scala contributors](https://gitter.im/scala/contributors) - - [Scala CI](https://scala-ci.typesafe.com/) - - download the latest nightlies: - - [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/) - - [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/) - # Repository structure ``` scala/ -+--build.xml The main Ant build script, see also under src/build. -+--pull-binary-libs.sh Pulls binary artifacts from remote repository. -+--lib/ Pre-compiled libraries for the build. -+--src/ All sources. - +---/library Scala Standard Library. - +---/reflect Scala Reflection. - +---/compiler Scala Compiler. - +---/eclipse Eclipse project files. - +---/intellij IntelliJ project templates. ++--build.sbt The main sbt build script ++--build.xml The deprecated Ant build script ++--pull-binary-libs.sh Pulls binary artifacts from remote repository, used by build scripts ++--lib/ Pre-compiled libraries for the build ++--src/ All sources + +---/library Scala Standard Library + +---/reflect Scala Reflection + +---/compiler Scala Compiler + +---/eclipse Eclipse project files + +---/intellij IntelliJ project templates ++--spec/ The Scala language specification +--scripts/ Scripts for the CI jobs (including building releases) -+--test/ The Scala test suite. -+--build/ [Generated] Build products output directory for ant. -+--dist/ [Generated] The destination folder for Scala distributions. ++--test/ The Scala test suite + +---/files Partest tests + +---/junit JUnit tests ++--build/ [Generated] Build output directory ``` -# How we roll +# Get Ready to Contribute ## Requirements -You'll need a Java SDK. The baseline version is 6 for 2.11.x, 8 for -2.12.x. (It's also possible to use a later SDK for local development, -but the CI will verify against the baseline version.) - -You'll also need Apache Ant (version 1.9.3 or above) and curl (for `./pull-binary-libs.sh`). - -Mac OS X and Linux work. Windows may work if you use Cygwin. (Community help with keeping the build working on Windows is appreciated.) - -## Git Hygiene - -As git history is forever, we take great pride in the quality of the commits we merge into the repository. The title of your commit will be read hundreds (of thousands? :-)) of times, so it pays off to spend just a little bit more time to polish it, making it descriptive and concise. Please take a minute to read the advice [most projects agree on](https://github.com/erlang/otp/wiki/Writing-good-commit-messages), and stick to 72 or fewer characters for the first line, wrapping subsequent ones at 80 (at most). - -When not sure how to formulate your commit message, imagine you're writing a bullet item for the next release notes, or describing what the commit does to the code base (use active verbs in the present tense). When your commit title is featured in the next release notes, it will be read by a lot of curious Scala users, looking for the latest improvements. Satisfy their thirst for information with as few words as possible! Also, a commit should convey clearly to your (future) fellow contributors what it does to the code base. - -Writing the commit message is a great sanity check that the commit is of the right size. If it does too many things, the description will be unwieldy and tedious to write. Chop it up (`git add -u --patch` and `git rebase` are your friends) and simplify! 
- -To pinpoint bugs, we often use git bisect, which is only effective when we can count on each commit building (and passing the test suite). Thus, the CI bot enforces this. Please rebase your development history into a sensible list of self-contained commits that tell the story of your bug fix or improvement. Carve them up so that the riskier bits can be reverted independently. Keep changes focussed by splitting out cleanups from refactorings from actual changes to the logic. - -This facilitates reviewing: a commit that reformats code can be judged quickly not to affect anything, so we can focus on the meat of the PR. It also helps when merging between long-running branches, reducing conflicts (or providing at least a limited scope for each one). - -Please do not @-mention anyone in the commit message -- that's what the PR description and comments are for. Every time a commit is shuffled through github (in a merge in some fork, say), every @-mention results in an email to that person (the core team treats them as personal email, straight to their inbox, so please don't flood us :-)). - - -## Reviews - -Please consider nominating a reviewer for your PR in the PR's description or a comment. If unsure, not to worry -- the core team will assign one for you. - -Your reviewer is also your mentor, who will help you rework your PR so that it meets our requirements. We strive to give timely feedback, and apologize for those times when we are overwhelmed by the volume of contributions. Please feel free to ping us. You are entitled to regular progress updates and at least a quick assessment of feasibility of a bigger PR. - -To help you plan your contributions, we communicate our plans on a regular basis on scala-internals, and deadlines are tracked as due dates for [GitHub milestones](https://github.com/scala/scala/milestones). - -## Reviewing - -Once you've gained some experience with the code base and the process, the next step is to review the contributions of others. - -The main goal of this whole process is to ensure the health of the Scala project by improving the quality of the code base, the documentation, as well as this process itself. Thank you for doing your part! - -## [Labels](https://github.com/scala/scala/labels) - -Label | Description ---------------- | ----------- -`reviewed` | automatically added by scabot when a comment prefixed with LGTM is posted -`welcome` | reviewer / queue curator adds to welcome someone's first PR (for highlighting in the release notes) -`release-notes` | reviewer / queue curator adds to make sure this PR is highlighted in the release notes -`on-hold` | added when this PR should not yet be merged, even though CI is green - -### Tips & Tricks -Once the `publish-core` task has completed on a commit, you can try it out in sbt as follows: +You need the following tools: + - A Java SDK. The baseline version is 6 for 2.11.x, 8 for 2.12.x. It's possible + to use a later SDK for local development, but the CI will verify against the baseline + version. + - sbt, we recommend the [sbt-extras](https://github.com/paulp/sbt-extras) runner + script. It provides sensible default jvm options (stack and heap size). + - curl (for `./pull-binary-libs.sh`, used by the sbt / ant build). + - Apache Ant (version 1.9.3 or above) if you need to use the (deprecated) ant build. + +Mac OS X and Linux work. Windows may work if you use Cygwin. Community help with keeping +the build working on Windows is appreciated. 
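+
+If you want to double-check your setup before building, something along these lines
+should succeed (just a sketch; it assumes the tools above are already installed and on
+your `PATH`, and the exact output varies by system):
+
+```
+java -version    # should report the baseline JDK version or newer
+sbt about        # run inside the checkout; prints sbt / project info (downloads sbt artifacts on first use)
+curl --version
+ant -version     # only needed for the deprecated ant build
+```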
+
+## Build Setup
+
+### Basics
+
+Scala is built in layers, where each layer is a complete Scala compiler and library.
+Here is a short description of the layers, from bottom to top:
+
+  - `starr`: the stable reference Scala release. We use an official release of
+    Scala (specified by `starr.version` in [versions.properties](versions.properties)),
+    downloaded from the Central Repository.
+  - `locker` (deprecated, only in ant): an intermediate layer that existed in the
+    ant build to perform a bootstrap.
+  - `quick`: the development layer which is incrementally built when working on
+    changes in the compiler or library.
+  - `strap` (deprecated, only in ant): a test layer used to check stability of
+    the build.
+
+The sbt build uses `starr` to build `quick`. This is sufficient for most development
+scenarios: changes to the library or the compiler can be tested by running the `quick`
+Scala (see below for how to do that).
+
+However, a full build of Scala (a *bootstrap*, as performed by our CI) requires two
+layers. This guarantees that every Scala version can build itself. If you change the
+code generation part of the Scala compiler, your changes will only be reflected in the
+bytecode of the library and compiler after a bootstrap. See below for how to create
+a bootstrap build locally.
+
+### Using the Sbt Build
+
+Core commands:
+  - `compile` compiles all sub-projects (library, reflect, compiler, scaladoc, etc)
+  - `scala` / `scalac` run the REPL / compiler directly from sbt (accept options /
+    arguments)
+  - `dist/mkBin` generates runner scripts (`scala`, `scalac`, etc) in `build/quick/bin`
+  - `dist/mkPack` creates a build in the Scala distribution format in `build/pack`
+  - `test` runs the JUnit tests, `testOnly *immutable.ListTest` runs a subset
+  - `partest` runs partest tests (accepts options, try `partest --help`)
+  - `publishLocal` publishes a distribution locally (can be used as `scalaVersion` in
+    other sbt projects)
+    - Optionally `set VersionUtil.baseVersionSuffix in Global := "abcd123-SNAPSHOT"`
+      where `abcd123` is the git hash of the revision being published. You can also
+      use something custom like `"mypatch"`. This changes the version number from
+      `2.12.0-SNAPSHOT` to something more stable (`2.12.0-abcd123-SNAPSHOT`).
+    - Optionally `set publishArtifact in (Compile, packageDoc) in ThisBuild := false`
+      to skip generating / publishing API docs (speeds up the process).
+
+#### Sandbox
+
+We recommend keeping local test files in the `sandbox` directory, which is listed in
+the `.gitignore` of the Scala repo.
+
+#### Incremental Compilation
+
+Note that sbt's incremental compilation is often too coarse for the Scala compiler
+codebase and re-compiles too many files, resulting in long build times (check
+[sbt#1104](https://github.com/sbt/sbt/issues/1104) for progress on that front). In the
+meantime you can:
+  - Enable "ant mode" in which sbt only re-compiles source files that were modified.
+    Create a file `local.sbt` containing the line `(incOptions in ThisBuild) := (incOptions in ThisBuild).value.withNameHashing(false).withAntStyle(true)`.
+    Add an entry `local.sbt` to your `~/.gitignore` (see the example after this list).
+  - Use IntelliJ IDEA for incremental compiles (see [IDE Setup](#ide-setup) below) - its
+    incremental compiler is a bit less conservative, but usually correct.
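+
+For reference, here is a minimal sketch of the "ant mode" setup from the first tip
+above, assuming a POSIX shell and that you run it from the root of your checkout (the
+setting is exactly the line quoted in the tip):
+
+```
+# create local.sbt with the "ant mode" incremental-compilation setting
+echo '(incOptions in ThisBuild) := (incOptions in ThisBuild).value.withNameHashing(false).withAntStyle(true)' > local.sbt
+
+# keep local.sbt out of version control
+echo 'local.sbt' >> ~/.gitignore
+```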
+
+#### Local Bootstrap Build
+
+To perform a bootstrap using sbt:
+  - first a build is published either locally or on a temporary repository,
+  - then a separate invocation of sbt (using the previously built version as `starr`)
+    is used to build / publish the actual build.
+
+Assume the current `starr` version is `2.12.0-M4` (defined in
+[versions.properties](versions.properties)) and the current version is `2.12.0-SNAPSHOT`
+(defined in [build.sbt](build.sbt)). To perform a local bootstrap:
+  - Run `publishLocal` (you may want to specify a custom version suffix and skip
+    generating API docs, see above).
+  - Quit sbt and start a new sbt instance using `sbt -Dstarr.version=<version>` where
+    `<version>` is the version number you published locally.
+  - If the version number you published is not binary compatible with the current
+    `starr`, `set every scalaBinaryVersion := "2.12.0-M4"`. This is not required if
+    the version you published locally is binary compatible, i.e., if the current
+    `starr` is a 2.12.x release and not a milestone / RC.
+
+The last step is required to resolve modules (scala-xml, scala-partest, etc). It
+assumes that the module releases for the current `starr` work (in terms of binary
+compatibility) with the local `starr` that you published. A full bootstrap
+requires re-building all the modules. On our CI this is handled by the
+[bootstrap](scripts/jobs/integrate/bootstrap) script, but it (currently) cannot
+be easily executed locally.
+
+### IDE Setup
+
+You may use IntelliJ IDEA ([src/intellij/README.md](src/intellij/README.md)) or the
+Scala IDE for Eclipse (see [src/eclipse/README.md](src/eclipse/README.md)).
+
+In order to use IntelliJ's incremental compiler:
+  - run `dist/mkBin` in sbt to get a build and the runner scripts in `build/quick/bin`
+  - run "Build" - "Make Project" in IntelliJ
+
+Now you can edit and build in IntelliJ and use the scripts (compiler, REPL) to
+directly test your changes. You can also run the `scala`, `scalac` and `partest`
+commands in sbt. Enable "ant mode" (explained above) to prevent sbt's incremental
+compiler from re-compiling (too many) files before each `partest` invocation.
+
+# Coding Guidelines
+
+Our guidelines for contributing are explained in [CONTRIBUTING.md](CONTRIBUTING.md).
+It contains useful information on our coding standards, testing, documentation, how
+we use git and GitHub, and how to get your code reviewed.
+
+You may also want to check out the following resources:
+  - The ["Scala Hacker Guide"](http://scala-lang.org/contribute/hacker-guide.html)
+    covers some of the same ground as this README, but in greater detail and in a more
+    tutorial style, using a running example.
+  - [Scala documentation site](http://docs.scala-lang.org)
+
+# Scala CI
+
+Once you submit a PR, your commits are automatically tested by the Scala CI.
+
+If you see a spurious build failure, you can post `/rebuild` as a PR comment.
+The [scabot README](https://github.com/scala/scabot) lists all available commands.
+
+If you'd like to test your patch before having everything polished for review,
+feel free to submit a PR and add the `WIP` label. In case your WIP branch contains
+a large number of commits (that you didn't clean up / squash yet for review),
+consider adding `[ci: last-only]` to the PR title. That way only the last commit
+will be tested, saving some energy and CI resources. Note that inactive WIP PRs
+will be closed eventually, which does not mean the change is being rejected.
+
+CI performs a full bootstrap.
The first task, `validate-publish-core`, publishes +a build of your commit to the temporary repository +https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots. +Note that this build is not yet bootstrapped, its bytecode is built using the +current `starr`. The version number is `2.12.0-abcd123-SNAPSHOT` where `abcd123` +is the commit hash. + +You can use Scala builds in the validation repository locally by adding a resolver +and specifying the corresponding `scalaVersion`: ``` $ sbt - > set resolvers += "pr" at "https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots/" -> set scalaVersion := "--SNAPSHOT" +> set scalaVersion := "2.12.0-abcd123-SNAPSHOT" > console ``` -Here, `` is the milestone targeted by the PR (e.g., 2.11.6), and `` is the 7-character sha (the format used by GitHub on the web). - -## IDE Setup -### Eclipse -See [src/eclipse/README.md](src/eclipse/README.md). - -### IntelliJ 15 -See [src/intellij/README.md](src/intellij/README.md). - -## Building with sbt (EXPERIMENTAL) - -The experimental sbt-based build definition has arrived! Run `sbt package` -to build the compiler. You can run `sbt test` to run unit (JUnit) tests. -Use `sbt test/it:test` to run integration (partest) tests. - -We would like to migrate to sbt build as quickly as possible. If you would -like to help please use the scala-internals mailing list to discuss your -ideas and coordinate your effort with others. - -## Building with Ant +Note that the scala modules are currently not built / published against the +tested version during CI validation. -NOTE: we are working on migrating the build to sbt. +## Nightly Builds -If you are behind a HTTP proxy, include -[`ANT_ARGS=-autoproxy`](https://ant.apache.org/manual/proxy.html) in -your environment. +The Scala CI builds nightly download releases (including all modules) and publishes +them to the following locations: + - [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/?C=M;O=D) + - [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/?C=M;O=A) -Run `ant build-opt` to build an optimized version of the compiler. -Verify your build using `ant test-opt`. +The CI also publishes nightly API docs: + - [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/api/?C=M;O=D) + - [symlink to the latest](http://www.scala-lang.org/files/archive/nightly/2.12.x/api/2.12.x/) + - [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/api/?C=M;O=D) + - [symlink to the latest](http://www.scala-lang.org/files/archive/nightly/2.11.x/api/2.11.x/) -The Scala build system is based on Apache Ant. Most required pre-compiled -libraries are part of the repository (in 'lib/'). The following however is -assumed to be installed on the build machine: TODO +Note that we currently don't publish nightly (or SNAPSHOT) builds in maven or ivy +format to any repository. You can track progress on this front at +[scala-jenkins-infra#133](https://github.com/scala/scala-jenkins-infra/issues/133) +and [scala-dev#68](https://github.com/scala/scala-dev/issues/68). -### Ant Tips and tricks +## Scala CI Internals -Here are some common commands. Most ant targets offer a `-opt` variant that runs under `-optimise` (CI runs the -optimize variant). - -Command | Description ------------------------ | ----------- -`./pull-binary-libs.sh` | downloads all binary artifacts associated with this commit. -`ant -p` | prints out information about the commonly used ant targets. 
-`ant` or `ant build` | A quick compilation (to `build/quick`) of your changes using the locker compiler. -`ant dist` | builds a distribution in 'dists/latest'. -`ant all.clean` | removes all build files and all distributions. - -A typical debug cycle incrementally builds quick, then uses it to compile and run the file -`sandbox/test.scala` as follows: - - - `ant && build/quick/bin/scalac -d sandbox sandbox/test.scala && build/quick/bin/scala -cp sandbox Test` - -We typically alias `build/quick/bin/scalac -d sandbox` to `qsc` and `build/quick/bin/scala -cp sandbox` to `qs` in our shell. - -`ant test-opt` tests that your code is working and fit to be committed: - - - Runs the test suite and bootstrapping test on quick. - - You can run the suite only (skipping strap) with `ant test.suite`. - -`ant docs` generates the HTML documentation for the library from the sources using the scaladoc tool in quick. -Note: on most machines this requires more heap than is allocated by default. You can adjust the parameters with `ANT_OPTS`. Example command line: - -```sh -ANT_OPTS="-Xms512M -Xmx2048M -Xss1M" ant docs -``` +The Scala CI runs as a Jenkins instance on [scala-ci.typesafe.com](https://scala-ci.typesafe.com/), +configured by a chef cookbook at [scala/scala-jenkins-infra](https://github.com/scala/scala-jenkins-infra). -### Bootstrapping concepts -NOTE: This is somewhat outdated, but the ideas still hold. +The build bot that watches PRs, triggers testing builds and applies the "reviewed" label +after an LGTM comment is in the [scala/scabot](https://github.com/scala/scabot) repo. -In order to guarantee the bootstrapping of the Scala compiler, the ant build -compiles Scala in layers. Each layer is a complete compiled Scala compiler and library. -A superior layer is always compiled by the layer just below it. Here is a short -description of the four layers that the build uses, from bottom to top: +## Community Build - - `starr`: the stable reference Scala release. We use an official version of Scala (specified by `starr.version` in `versions.properties`), downloaded from the Central Repository. - - `locker`: the local reference which is compiled by starr and is the work compiler in a typical development cycle. Add `locker.skip=true` to `build.properties` to skip this step and speed up development when you're not changing code generation. In any case, after it has been built once, it is “frozen” in this state. Updating it to fit the current source code must be explicitly requested (`ant locker.unlock`). - - `quick`: the layer which is incrementally built when testing changes in the compiler or library. This is considered an actual new version when locker is up-to-date in relation to the source code. - - `strap`: a test layer used to check stability of the build. +The community build is a central element for testing Scala releases. A community +build can be launched for any Scala revision / commit. It first builds the Scala +library and compiler and then uses that Scala version to build a large number of +open-source projects from source. -For each layer, the Scala library is compiled first and the compiler next. -That means that any changes in the library can immediately be used in the -compiler without an intermediate build. On the other hand, if building the -library requires changes in the compiler, a new locker must be built if -bootstrapping is still possible, or a new starr if it is not. 
+Community builds run on the Scala Jenkins instance; the jobs are named
+`..-integrate-community-build`. The community build definitions specifying which
+projects are built are in the
+[scala/community-builds](https://github.com/scala/community-builds) repo.

diff --git a/src/intellij/README.md b/src/intellij/README.md
index dcad699d43f6..41fef0418362 100644
--- a/src/intellij/README.md
+++ b/src/intellij/README.md
@@ -1,25 +1,25 @@
-# Building Scala in IntelliJ IDEA
-
-## Requirements
+# Developing Scala in IntelliJ IDEA
 
 Use the latest IntelliJ release and install the Scala plugin from within the IDE.
 
-## Initial setup
+## Initial Setup
 
 To create the IntelliJ project files:
   - Run `sbt intellij`
   - Open `src/intellij/scala.ipr` in IntelliJ
-  - In `File` → `Project Structure` → `Project` → `Project SDK`, create an SDK entry named "1.8" containing the Java 1.8 SDK
+  - In `File` → `Project Structure` → `Project` → `Project SDK`, create an SDK entry
+    named "1.8" containing the Java 1.8 SDK (1.6 if you're on the Scala 2.11.x branch)
 
-The project files are created by as copies of the `.SAMPLE` files, which are under version control.
-The actual IntelliJ project files are in `.gitignore` so that local changes are ignored.
+The project files are created as copies of the `.SAMPLE` files, which are under version
+control. The actual IntelliJ project files are in `.gitignore` so that local changes
+are ignored.
 
 ## Dependencies
 
 For every module in the IntelliJ project there is a corresponding `-deps` library, for example `compiler-deps` provides `ant.jar` for the compiler codebase.
 The `.jar` files in these `-deps` libraries can be easily kept up-to-date by running `sbt intellij` again.
-This is necessary whenever the dependencies in the sbt build change, for example when the STARR version is updated.
+This is necessary whenever the dependencies in the sbt build change, for example when the `starr` version is updated.
 
 Note that this command only patches the dependency lists, all other settings in the IntelliJ project definition are unchanged.
 To overwrite the project definition files by copying the `.SAMPLE` files again run `sbt intellijFromSample`.
@@ -33,17 +33,49 @@ When switching between 2.11.x and 2.12.x, make sure to run `sbt intellij`.
 Note that the `Project SDK` is not updated in this process. If you want to use the Java 1.6 SDK while working on 2.11.x you need to change it manually (`File` → `Project Structure` → `Project` → `Project SDK`).
 
-## Usage
+If you switch between 2.11.x and 2.12.x often, it makes sense to have a separate clone
+of the repository for each branch.
+
+## Incremental Compilation
+
+Run `Build` → `Make Project` to build all modules of the Scala repository (library,
+compiler, etc). Note that compilation in IntelliJ is performed in a single pass (no
+bootstrap), like the sbt build.
+
+Note that the output directory when compiling in IntelliJ is the same as for the
+sbt and (deprecated) ant builds. This allows building incrementally in IntelliJ
+and using the changes directly via the command-line scripts in `build/quick/bin/`.
+
+## Running JUnit Tests
+
+JUnit tests can be executed by right-clicking on a test class or test method and
+selecting "Run" or "Debug". The debugger will allow you to stop at breakpoints
+within the Scala library.
+
+It is possible to invoke the Scala compiler from a JUnit test (passing the source
+code as a string) and inspect the generated bytecode; see for example
+`scala.issues.BytecodeTest`. Debugging such a test is an easy way to stop at
+breakpoints within the Scala compiler.
+
+## Running the Compiler and REPL
+
+You can create run/debug configurations to run the compiler and REPL directly within
+IntelliJ, which might accelerate development and debugging of the compiler.
 
-Compiling, running, JUnit tests and debugging should all work.
-You can work on the compiler, the standard library, and other components as well.
+To debug the Scala codebase you can also use a "Remote" debug configuration and pass
+the corresponding arguments to the JVM running the compiler / program.
 
-Note that compilation within IntelliJ is performed in a single pass.
-The code is compiled using the "STARR" (stable reference) compiler, as specified by `starr.version` in `versions.properties`.
-This is consistent with the sbt build.
+To run the compiler create an "Application" configuration with
+  - Main class: `scala.tools.nsc.Main`
+  - Program arguments: `-usejavacp -cp sandbox -d sandbox sandbox/Test.scala`
+  - Working directory: the path of your checkout
+  - Use classpath of module: `compiler`
 
-Note that the output directory when compiling in IntelliJ is the same as for the sbt build.
-This allows building incrementally in IntelliJ and directly use the changes using the command-line scripts in `build/quick/bin/`.
+To run the REPL create an "Application" configuration with
+  - Main class: `scala.tools.nsc.MainGenericRunner`
+  - Program arguments: `-usejavacp`
+  - Working directory: the path of your checkout
+  - Use classpath of module: `repl`
 
 ## Updating the `.SAMPLE` files

From d3e10c0b0aa95408873072262f0d728b96cfd885 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Wed, 18 May 2016 09:42:37 +1000
Subject: [PATCH 0044/2793] SI-8756 Test to demonstrate the status quo

Java generic signatures assume that refinement types should be boxed.

Why did `g2` in the test seem to be immune to this bug demonstrated by
`f2`? Because we opt to elide the generic signature altogether when no
generics are involved.
---
 test/files/run/t8756.check | 9 +++++++++
 test/files/run/t8756.scala | 22 ++++++++++++++++++++++
 2 files changed, 31 insertions(+)
 create mode 100644 test/files/run/t8756.check
 create mode 100644 test/files/run/t8756.scala

diff --git a/test/files/run/t8756.check b/test/files/run/t8756.check
new file mode 100644
index 000000000000..89337543bd70
--- /dev/null
+++ b/test/files/run/t8756.check
@@ -0,0 +1,9 @@
+public Bippy Test.f1(long)
+public Bippy Test.f2(java.lang.Object)
+public Bippy Test.i1(Bippy)
+public Bippy Test.i2(Bippy)
+public int Test.g1(long)
+public int Test.g2(long)
+public java.lang.Object Test.h1(long)
+public java.lang.Object Test.h2(long)
+public static void Test.main(java.lang.String[])
diff --git a/test/files/run/t8756.scala b/test/files/run/t8756.scala
new file mode 100644
index 000000000000..edd243473ab3
--- /dev/null
+++ b/test/files/run/t8756.scala
@@ -0,0 +1,22 @@
+trait Bippy[A]
+
+class Test {
+  type T1 = Long
+  type T2 = Long { type Tag = Nothing }
+
+  def f1(t: T1): Bippy[Object] = ???
+  def f2(t: T2): Bippy[Object] = ???
+  def g1(t: T1): Int = ???
+  def g2(t: T2): Int = ???
+  def h1(t: T1): Object = ???
+  def h2(t: T2): Object = ???
+  def i1(t: Bippy[T1]): Bippy[T1] = ???
+  def i2(t: Bippy[T2]): Bippy[T2] = ???
+ +} + +object Test { + def main(args: Array[String]) { + println(classOf[Test].getDeclaredMethods.map(_.toGenericString).toList.sorted.mkString("\n")) + } +} From af972a5019d0c5e8f4be4363eeed590c8fb384c3 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 18 May 2016 12:57:49 +1000 Subject: [PATCH 0045/2793] SI-8756 Fix generic signature for refinement of primitive Java generic signature generation was making the wrong assumption about how refinement types should erase to Java generics. This commit passes through the current value of `primitiveOk`, rather than forcing it to `true`. This flag is true when generating the signature for `f2`, but false in `i2` (as we are in a type argument position). --- src/compiler/scala/tools/nsc/transform/Erasure.scala | 2 +- test/files/run/t8756.check | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 0301e06c87a3..bc614dfc31e0 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -344,7 +344,7 @@ abstract class Erasure extends AddInterfaces buf.toString case RefinedType(parents, decls) => - boxedSig(intersectionDominator(parents)) + jsig(intersectionDominator(parents), primitiveOK = primitiveOK) case ClassInfoType(parents, _, _) => superSig(parents) case AnnotatedType(_, atp) => diff --git a/test/files/run/t8756.check b/test/files/run/t8756.check index 89337543bd70..9b9dcafe7d6a 100644 --- a/test/files/run/t8756.check +++ b/test/files/run/t8756.check @@ -1,5 +1,5 @@ public Bippy Test.f1(long) -public Bippy Test.f2(java.lang.Object) +public Bippy Test.f2(long) public Bippy Test.i1(Bippy) public Bippy Test.i2(Bippy) public int Test.g1(long) From 38ca9dec8807ddce36a988bf13b367f4d6f03b9e Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 18 May 2016 15:24:45 +0200 Subject: [PATCH 0046/2793] SI-9671, SI-7397 fix null.asInstanceOf[Int] when pt erases to Object Erasure first replaces null.asInstanceOf[Int] by unbox(null). If the expected type erases to object, erasure then introduces a box operation, yielding box(unbox(null)). Note that this value is a box of zero, not null. Erasure has an optimization to replace box(unbox(x)) in case x is of primitive type. 60f1b4b extended this to the case when x is null, which is incorrect in general. The reason was to prevent creating a primitive box to be stored in the unused generic field when creating an instance of a specialized class. A special case ensures that this optimization is still performed. --- .../scala/tools/nsc/transform/Erasure.scala | 6 +- .../tools/nsc/transform/SpecializeTypes.scala | 21 +++- .../transform/TypeAdaptingTransformer.scala | 22 ++--- test/junit/scala/BoxUnboxTest.scala | 18 ++-- test/junit/scala/issues/BytecodeTest.scala | 10 ++ test/junit/scala/issues/RunTest.scala | 98 ++++++++++++++++++- 6 files changed, 145 insertions(+), 30 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index bc614dfc31e0..7bfe5a4740a5 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -606,10 +606,8 @@ abstract class Erasure extends AddInterfaces // !!! Make pending/run/t5866b.scala work. The fix might be here and/or in unbox1. 
if (isPrimitiveValueType(targ.tpe) || isErasedValueType(targ.tpe)) { val noNullCheckNeeded = targ.tpe match { - case ErasedValueType(_, underlying) => - isPrimitiveValueClass(underlying.typeSymbol) - case _ => - true + case ErasedValueType(_, underlying) => isPrimitiveValueType(underlying) + case _ => true } if (noNullCheckNeeded) unbox(qual1, targ.tpe) else { diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 4b1f1efee497..e894c58b1ac9 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1911,8 +1911,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { /** Forward to the generic class constructor. If the current class initializes * specialized fields corresponding to parameters, it passes null to the superclass - * constructor. This saves the boxing cost for initializing generic fields that are - * never used. + * constructor. * * For example: * {{{ @@ -1926,7 +1925,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * super.this(null.asInstanceOf[Int], null.asInstanceOf[Int]) * } * } - * }} + * }}} + * + * Note that erasure first transforms `null.asInstanceOf[Int]` to `unbox(null)`, which is 0. + * Then it adapts the argument `unbox(null)` of type Int to the erased parameter type of Tuple2, + * which is Object, so it inserts a `box` call and we get `box(unbox(null))`, which is + * `new Integer(0)` (not `null`). + * + * However it does not make sense to create an Integer instance to be stored in the generic field + * of the superclass: that field is never used. Therefore we mark the `null` tree with the + * [[SpecializedSuperConstructorCallArgument]] attachment and special-case erasure to replace + * `box(unbox(null))` by `null` in this case. */ private def forwardCtorCall(pos: scala.reflect.internal.util.Position, receiver: Tree, paramss: List[List[ValDef]], clazz: Symbol): Tree = { log(s"forwardCtorCall($pos, $receiver, $paramss, $clazz)") @@ -1945,7 +1954,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val argss = mmap(paramss)(x => if (initializesSpecializedField(x.symbol)) - gen.mkAsInstanceOf(Literal(Constant(null)), x.symbol.tpe) + gen.mkAsInstanceOf(Literal(Constant(null)).updateAttachment(SpecializedSuperConstructorCallArgument), x.symbol.tpe) else Ident(x.symbol) ) @@ -1989,5 +1998,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } resultTree - } } + } + } + object SpecializedSuperConstructorCallArgument } diff --git a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala index afafdedce75a..52d7c0b897bd 100644 --- a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala +++ b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala @@ -14,11 +14,18 @@ trait TypeAdaptingTransformer { self: TreeDSL => def typedPos(pos: Position)(tree: Tree): Tree + /** + * SI-4148: can't always replace box(unbox(x)) by x because + * - unboxing x may lead to throwing an exception, e.g. 
in "aah".asInstanceOf[Int] + * - box(unbox(null)) is not `null` but the box of zero + */ private def isSafelyRemovableUnbox(fn: Tree, arg: Tree): Boolean = { - currentRun.runDefinitions.isUnbox(fn.symbol) && { - val cls = arg.tpe.typeSymbol - (cls == NullClass) || isBoxedValueClass(cls) - } + currentRun.runDefinitions.isUnbox(fn.symbol) && { + // replace box(unbox(null)) by null when passed to the super constructor in a specialized + // class, see comment in SpecializeTypes.forwardCtorCall. + arg.hasAttachment[specializeTypes.SpecializedSuperConstructorCallArgument.type] || + isBoxedValueClass(arg.tpe.typeSymbol) + } } private def isPrimitiveValueType(tpe: Type) = isPrimitiveValueClass(tpe.typeSymbol) @@ -44,14 +51,7 @@ trait TypeAdaptingTransformer { self: TreeDSL => case x => assert(x != ArrayClass) tree match { - /* Can't always remove a Box(Unbox(x)) combination because the process of boxing x - * may lead to throwing an exception. - * - * This is important for specialization: calls to the super constructor should not box/unbox specialized - * fields (see TupleX). (ID) - */ case Apply(boxFun, List(arg)) if isSafelyRemovableUnbox(tree, arg) => - log(s"boxing an unbox: ${tree.symbol} -> ${arg.tpe}") arg case _ => (REF(currentRun.runDefinitions.boxMethod(x)) APPLY tree) setPos (tree.pos) setType ObjectTpe diff --git a/test/junit/scala/BoxUnboxTest.scala b/test/junit/scala/BoxUnboxTest.scala index 162d805a6b75..eb7a35e98cf1 100644 --- a/test/junit/scala/BoxUnboxTest.scala +++ b/test/junit/scala/BoxUnboxTest.scala @@ -48,22 +48,22 @@ class BoxUnboxTest { assertEquals(n4, 0) val n5 = null.asInstanceOf[Int] == 0 assertTrue(n5) - val n6 = null.asInstanceOf[Int] == null // SI-9671 -- should be false, but is true - assertThrows[AssertionError](assertFalse(n6)) // should not throw + val n6 = null.asInstanceOf[Int] == null + assertFalse(n6) val n7 = null.asInstanceOf[Int] != 0 assertFalse(n7) - val n8 = null.asInstanceOf[Int] != null // SI-9671 -- should be true, but is false - assertThrows[AssertionError](assertTrue(n8)) // should not throw + val n8 = null.asInstanceOf[Int] != null + assertTrue(n8) val mp = new java.util.HashMap[Int, Int] val n9 = mp.get(0) assertEquals(n9, 0) - val n10 = mp.get(0) == null // SI-602 -- maybe related to SI-9671 (test above)? + val n10 = mp.get(0) == null // SI-602 assertThrows[AssertionError](assertFalse(n10)) // should not throw def f(a: Any) = "" + a - val n11 = f(null.asInstanceOf[Int]) // "null", should be "0". probably same cause as SI-602. 
- assertThrows[AssertionError](assertEquals(n11, "0")) // should not throw + val n11 = f(null.asInstanceOf[Int]) + assertEquals(n11, "0") def n12 = genericNull[Int] assertEquals(n12, 0) @@ -81,8 +81,8 @@ class BoxUnboxTest { @Test def boxUnboxBoolean(): Unit = { - val n1 = Option(null.asInstanceOf[Boolean]) // SI-7397 -- should be Some(false), but is None - assertThrows[AssertionError](assertEquals(n1, Some(false))) // should not throw + val n1 = Option(null.asInstanceOf[Boolean]) + assertEquals(n1, Some(false)) } @Test diff --git a/test/junit/scala/issues/BytecodeTest.scala b/test/junit/scala/issues/BytecodeTest.scala index a720f2071816..7b9474b52e2b 100644 --- a/test/junit/scala/issues/BytecodeTest.scala +++ b/test/junit/scala/issues/BytecodeTest.scala @@ -419,6 +419,16 @@ class BytecodeTest extends ClearAfterClass { assertInvoke(getSingleMethod(c, "f3"), "java/lang/Object", "hashCode") assertInvoke(getSingleMethod(c, "f4"), "java/lang/Object", "toString") } + + @Test + def superConstructorArgumentInSpecializedClass(): Unit = { + // see comment in SpecializeTypes.forwardCtorCall + val code = "case class C[@specialized(Int) T](_1: T)" + val List(c, cMod, cSpec) = compileClasses(compiler)(code) + assertSameSummary(getSingleMethod(cSpec, ""), + // pass `null` to super constructor, no box-unbox, no Integer created + List(ALOAD, ILOAD, PUTFIELD, ALOAD, ACONST_NULL, "", RETURN)) + } } object invocationReceiversTestCode { diff --git a/test/junit/scala/issues/RunTest.scala b/test/junit/scala/issues/RunTest.scala index 148009c9126f..b81a3e1d6f8e 100644 --- a/test/junit/scala/issues/RunTest.scala +++ b/test/junit/scala/issues/RunTest.scala @@ -10,8 +10,8 @@ import scala.tools.reflect.ToolBox import scala.tools.testing.ClearAfterClass object RunTest { - class VC(val x: Any) extends AnyVal + class VCI(val x: Int) extends AnyVal { override def toString = "" + x } } @RunWith(classOf[JUnit4]) @@ -154,4 +154,100 @@ class RunTest extends ClearAfterClass { val u = Void.TYPE assertEquals(run[(Class[_], Class[_])](code), (u, u)) } + + @Test + def t9671(): Unit = { + val code = + """import scala.issues.RunTest.VCI + | + |def f1(a: Any) = "" + a + |def f2(a: AnyVal) = "" + a + |def f3[T](a: T) = "" + a + |def f4(a: Int) = "" + a + |def f5(a: VCI) = "" + a + |def f6(u: Unit) = "" + u + | + |def n1: AnyRef = null + |def n2: Null = null + |def n3: Any = null + |def n4[T]: T = null.asInstanceOf[T] + | + |def npe(s: => String) = try { s; throw new Error() } catch { case _: NullPointerException => "npe" } + | + | f1(null.asInstanceOf[Int]) + + | f1( n1.asInstanceOf[Int]) + + | f1( n2.asInstanceOf[Int]) + + | f1( n3.asInstanceOf[Int]) + + | f1( n4[Int]) + // "null" + |"-" + + | f1(null.asInstanceOf[VCI]) + + |npe(f1( n1.asInstanceOf[VCI])) + // SI-8097 + | f1( n2.asInstanceOf[VCI]) + + |npe(f1( n3.asInstanceOf[VCI])) + // SI-8097 + | f1( n4[VCI]) + // "null" + |"-" + + | f1(null.asInstanceOf[Unit]) + // "null", SI-9066 + | f1( n1.asInstanceOf[Unit]) + // "null", SI-9066 + | f1( n2.asInstanceOf[Unit]) + // "null", SI-9066 + | f1( n3.asInstanceOf[Unit]) + // "null", SI-9066 + | f1( n4[Unit]) + // "null" + |"-" + + | f2(null.asInstanceOf[Int]) + + | f2( n1.asInstanceOf[Int]) + + | f2( n2.asInstanceOf[Int]) + + | f2( n3.asInstanceOf[Int]) + + | f2( n4[Int]) + // "null" + |"-" + + | f2(null.asInstanceOf[VCI]) + + |npe(f2( n1.asInstanceOf[VCI])) + // SI-8097 + | f2( n2.asInstanceOf[VCI]) + + |npe(f2( n3.asInstanceOf[VCI])) + // SI-8097 + | f2( n4[VCI]) + // "null" + |"-" + + | f2(null.asInstanceOf[Unit]) + // "null", 
SI-9066 + | f2( n1.asInstanceOf[Unit]) + // "null", SI-9066 + | f2( n2.asInstanceOf[Unit]) + // "null", SI-9066 + | f2( n3.asInstanceOf[Unit]) + // "null", SI-9066 + | f2( n4[Unit]) + // "null" + |"-" + + | f3(null.asInstanceOf[Int]) + + | f3( n1.asInstanceOf[Int]) + + | f3( n2.asInstanceOf[Int]) + + | f3( n3.asInstanceOf[Int]) + + | f3( n4[Int]) + // "null" + |"-" + + | f3(null.asInstanceOf[VCI]) + + |npe(f3( n1.asInstanceOf[VCI])) + // SI-8097 + | f3( n2.asInstanceOf[VCI]) + + |npe(f3( n3.asInstanceOf[VCI])) + // SI-8097 + | f3( n4[VCI]) + // "null" + |"-" + + | f3(null.asInstanceOf[Unit]) + // "null", SI-9066 + | f3( n1.asInstanceOf[Unit]) + // "null", SI-9066 + | f3( n2.asInstanceOf[Unit]) + // "null", SI-9066 + | f3( n3.asInstanceOf[Unit]) + // "null", SI-9066 + | f3( n4[Unit]) + // "null" + |"-" + + | f4(null.asInstanceOf[Int]) + + | f4( n1.asInstanceOf[Int]) + + | f4( n2.asInstanceOf[Int]) + + | f4( n3.asInstanceOf[Int]) + + | f4( n4[Int]) + + |"-" + + | f5(null.asInstanceOf[VCI]) + + |npe(f5( n1.asInstanceOf[VCI])) + // SI-8097 + | f5( n2.asInstanceOf[VCI]) + + |npe(f5( n3.asInstanceOf[VCI])) + // SI-8097 + |npe(f5( n4[VCI])) + // SI-8097 + |"-" + + | f6(null.asInstanceOf[Unit]) + // "null", SI-9066 + | f6( n1.asInstanceOf[Unit]) + // "null", SI-9066 + | f6( n2.asInstanceOf[Unit]) + // "null", SI-9066 + | f6( n3.asInstanceOf[Unit]) + // "null", SI-9066 + | f6( n4[Unit]) // "null" + """.stripMargin + + assertEquals(run[String](code), + "0000null-0npe0npenull-nullnullnullnullnull-0000null-0npe0npenull-nullnullnullnullnull-0000null-0npe0npenull-nullnullnullnullnull-00000-0npe0npenpe-nullnullnullnullnull") + } } From a5fab1f588a6042ca924a78d225e85d0acddf5db Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 18 May 2016 21:54:24 +0200 Subject: [PATCH 0047/2793] SI-9066 fix null.asInstanceOf[Unit] --- .../scala/tools/nsc/transform/Erasure.scala | 4 +++ test/junit/scala/BoxUnboxTest.scala | 12 +++---- test/junit/scala/issues/RunTest.scala | 34 +++++++++---------- 3 files changed, 27 insertions(+), 23 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 7bfe5a4740a5..5e903946c1c4 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -1141,6 +1141,10 @@ abstract class Erasure extends AddInterfaces else { val tree1 = preErase(tree) tree1 match { + case TypeApply(fun, targs @ List(targ)) if fun.symbol == Any_asInstanceOf && targ.tpe == UnitTpe => + // SI-9066 prevent transforming `o.asInstanceOf[Unit]` to `o.asInstanceOf[BoxedUnit]`. + // adaptMember will then replace the call by a reference to BoxedUnit.UNIT. 
+ treeCopy.TypeApply(tree1, transform(fun), targs).clearType() case EmptyTree | TypeTree() => tree1 setType specialScalaErasure(tree1.tpe) case ArrayValue(elemtpt, trees) => diff --git a/test/junit/scala/BoxUnboxTest.scala b/test/junit/scala/BoxUnboxTest.scala index eb7a35e98cf1..88b3037e6990 100644 --- a/test/junit/scala/BoxUnboxTest.scala +++ b/test/junit/scala/BoxUnboxTest.scala @@ -106,14 +106,14 @@ class BoxUnboxTest { Unit.unbox({eff(); null}); chk() assertThrows[ClassCastException](Unit.unbox({eff(); ""})); chk() - val n1 = null.asInstanceOf[Unit] // SI-9066: should be UNIT, but currently null - assertThrows[AssertionError](assert(n1 == b)) // should not throw + val n1 = null.asInstanceOf[Unit] + assert(n1 == b) - val n2 = null.asInstanceOf[Unit] == b // SI-9066: should be true, but currently false - assertThrows[AssertionError](assert(n2)) // should not throw + val n2 = null.asInstanceOf[Unit] == b + assert(n2) def f(a: Any) = "" + a - val n3 = f(null.asInstanceOf[Unit]) // "null", should be "()". probably same cause as SI-602. - assertThrows[AssertionError](assertEquals(n3, "()")) // should not throw + val n3 = f(null.asInstanceOf[Unit]) + assertEquals(n3, "()") } } diff --git a/test/junit/scala/issues/RunTest.scala b/test/junit/scala/issues/RunTest.scala index b81a3e1d6f8e..3ebdc8a72ff6 100644 --- a/test/junit/scala/issues/RunTest.scala +++ b/test/junit/scala/issues/RunTest.scala @@ -186,10 +186,10 @@ class RunTest extends ClearAfterClass { |npe(f1( n3.asInstanceOf[VCI])) + // SI-8097 | f1( n4[VCI]) + // "null" |"-" + - | f1(null.asInstanceOf[Unit]) + // "null", SI-9066 - | f1( n1.asInstanceOf[Unit]) + // "null", SI-9066 - | f1( n2.asInstanceOf[Unit]) + // "null", SI-9066 - | f1( n3.asInstanceOf[Unit]) + // "null", SI-9066 + | f1(null.asInstanceOf[Unit]) + + | f1( n1.asInstanceOf[Unit]) + + | f1( n2.asInstanceOf[Unit]) + + | f1( n3.asInstanceOf[Unit]) + | f1( n4[Unit]) + // "null" |"-" + | f2(null.asInstanceOf[Int]) + @@ -204,10 +204,10 @@ class RunTest extends ClearAfterClass { |npe(f2( n3.asInstanceOf[VCI])) + // SI-8097 | f2( n4[VCI]) + // "null" |"-" + - | f2(null.asInstanceOf[Unit]) + // "null", SI-9066 - | f2( n1.asInstanceOf[Unit]) + // "null", SI-9066 - | f2( n2.asInstanceOf[Unit]) + // "null", SI-9066 - | f2( n3.asInstanceOf[Unit]) + // "null", SI-9066 + | f2(null.asInstanceOf[Unit]) + + | f2( n1.asInstanceOf[Unit]) + + | f2( n2.asInstanceOf[Unit]) + + | f2( n3.asInstanceOf[Unit]) + | f2( n4[Unit]) + // "null" |"-" + | f3(null.asInstanceOf[Int]) + @@ -222,10 +222,10 @@ class RunTest extends ClearAfterClass { |npe(f3( n3.asInstanceOf[VCI])) + // SI-8097 | f3( n4[VCI]) + // "null" |"-" + - | f3(null.asInstanceOf[Unit]) + // "null", SI-9066 - | f3( n1.asInstanceOf[Unit]) + // "null", SI-9066 - | f3( n2.asInstanceOf[Unit]) + // "null", SI-9066 - | f3( n3.asInstanceOf[Unit]) + // "null", SI-9066 + | f3(null.asInstanceOf[Unit]) + + | f3( n1.asInstanceOf[Unit]) + + | f3( n2.asInstanceOf[Unit]) + + | f3( n3.asInstanceOf[Unit]) + | f3( n4[Unit]) + // "null" |"-" + | f4(null.asInstanceOf[Int]) + @@ -240,14 +240,14 @@ class RunTest extends ClearAfterClass { |npe(f5( n3.asInstanceOf[VCI])) + // SI-8097 |npe(f5( n4[VCI])) + // SI-8097 |"-" + - | f6(null.asInstanceOf[Unit]) + // "null", SI-9066 - | f6( n1.asInstanceOf[Unit]) + // "null", SI-9066 - | f6( n2.asInstanceOf[Unit]) + // "null", SI-9066 - | f6( n3.asInstanceOf[Unit]) + // "null", SI-9066 + | f6(null.asInstanceOf[Unit]) + + | f6( n1.asInstanceOf[Unit]) + + | f6( n2.asInstanceOf[Unit]) + + | f6( n3.asInstanceOf[Unit]) + | f6( 
n4[Unit]) // "null" """.stripMargin assertEquals(run[String](code), - "0000null-0npe0npenull-nullnullnullnullnull-0000null-0npe0npenull-nullnullnullnullnull-0000null-0npe0npenull-nullnullnullnullnull-00000-0npe0npenpe-nullnullnullnullnull") + "0000null-0npe0npenull-()()()()null-0000null-0npe0npenull-()()()()null-0000null-0npe0npenull-()()()()null-00000-0npe0npenpe-()()()()null") } } From 41c9a17e4f211fc24a931949a0819a0474cc004a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 19 May 2016 16:23:23 +1000 Subject: [PATCH 0048/2793] Generate static forwarders for object members in companion interface (#5131) We used to disable generation of static forwarders when a object had a trait as a companion, as one could not add methods with bodies to an interface in JVM 6. The JVM lifted this restriction to support default methods in interfaces, so we can lift the restriction on static forwarders, too. Fixes https://github.com/scala/scala-dev/issues/59 --- .../scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala | 3 +-- test/files/run/trait-static-forwarder.check | 1 + test/files/run/trait-static-forwarder/Test.java | 5 +++++ test/files/run/trait-static-forwarder/forwarders.scala | 5 +++++ 4 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 test/files/run/trait-static-forwarder.check create mode 100644 test/files/run/trait-static-forwarder/Test.java create mode 100644 test/files/run/trait-static-forwarder/forwarders.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index f190c1f2de26..bddc41e5c6ac 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -164,8 +164,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { } else { - val skipStaticForwarders = (claszSymbol.isInterface || settings.noForwarders) - if (!skipStaticForwarders) { + if (!settings.noForwarders) { val lmoc = claszSymbol.companionModule // add static forwarders if there are no name conflicts; see bugs #363 and #1735 if (lmoc != NoSymbol) { diff --git a/test/files/run/trait-static-forwarder.check b/test/files/run/trait-static-forwarder.check new file mode 100644 index 000000000000..d81cc0710eb6 --- /dev/null +++ b/test/files/run/trait-static-forwarder.check @@ -0,0 +1 @@ +42 diff --git a/test/files/run/trait-static-forwarder/Test.java b/test/files/run/trait-static-forwarder/Test.java new file mode 100644 index 000000000000..89012c016209 --- /dev/null +++ b/test/files/run/trait-static-forwarder/Test.java @@ -0,0 +1,5 @@ +public final class Test { + public static void main(String... 
args) { + System.out.println(T.foo()); + } +} diff --git a/test/files/run/trait-static-forwarder/forwarders.scala b/test/files/run/trait-static-forwarder/forwarders.scala new file mode 100644 index 000000000000..d6ee9a081d02 --- /dev/null +++ b/test/files/run/trait-static-forwarder/forwarders.scala @@ -0,0 +1,5 @@ +trait T + +object T { + def foo = 42 +} From 15189d14953335f7a3a8310861d045d21ab22d48 Mon Sep 17 00:00:00 2001 From: Ben Hutchison Date: Thu, 19 May 2016 17:01:45 +1000 Subject: [PATCH 0049/2793] Test case for SI-5183, tagged primitives in case classes (#5144) Test for SI-7088, arrays containing tagged primitives --- test/files/pos/t5183.scala | 34 ++++++++++++++++++++++++++++++++++ test/files/pos/t7088.scala | 8 ++++++++ 2 files changed, 42 insertions(+) create mode 100644 test/files/pos/t5183.scala create mode 100644 test/files/pos/t7088.scala diff --git a/test/files/pos/t5183.scala b/test/files/pos/t5183.scala new file mode 100644 index 000000000000..783b8c28dcfd --- /dev/null +++ b/test/files/pos/t5183.scala @@ -0,0 +1,34 @@ +trait Day + +object Test { + def foo(t: Int with Day) = t == t +} + +class DayOps(val i: Int with Day) extends AnyVal + +case class Test1(d: Int with Day) +case class Test2(d1: Int with Day, d2: Int with Day) + +class User +class Checkin +object Example { + + type Tagged[U] = { type Tag = U } + type @@[T, U] = T with Tagged[U] // Thanks to @retronym for suggesting this type alias + + class Tagger[U] { + def apply[T](t : T) : T @@ U = t.asInstanceOf[T @@ U] + } + def tag[U] = new Tagger[U] + + // Manual specialization needed here ... specializing apply above doesn't help + def tag[U](i : Int) : Int @@ U = i.asInstanceOf[Int @@ U] + def tag[U](l : Long) : Long @@ U = l.asInstanceOf[Long @@ U] + def tag[U](d : Double) : Double @@ U = d.asInstanceOf[Double @@ U] + + def fetch[A](id: Int @@ A): A = null.asInstanceOf[A] + + def tag[U](arr: Array[Int]):Array[Int @@ U] = arr.asInstanceOf[Array[Int @@ U]] + + tag[User](Array(3, 4, 5)).map(_.toString) +} \ No newline at end of file diff --git a/test/files/pos/t7088.scala b/test/files/pos/t7088.scala new file mode 100644 index 000000000000..de9d1b7040c6 --- /dev/null +++ b/test/files/pos/t7088.scala @@ -0,0 +1,8 @@ +object Example extends App { + type Tag[X] = {type Tag = X} + type TaggedArray[T] = Array[T] with Tag[Any] + + def method[T: reflect.ClassTag](a: TaggedArray[T], value: T) {a.update(0, value)} + + method(Array(1, 2).asInstanceOf[TaggedArray[Int]], 1) +} From e483e49d7e53e15436890000b639cc06d31712e8 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 19 May 2016 12:13:38 +0200 Subject: [PATCH 0050/2793] [backport] identical readme in 2.11.x as in 2.12.x --- CONTRIBUTING.md | 28 ++- README.md | 380 +++++++++++++++++++++++------------------ src/intellij/README.md | 73 ++++++-- 3 files changed, 296 insertions(+), 185 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 617734210f06..47d27886231e 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -8,7 +8,7 @@ In 2014, you -- the Scala community -- matched the core team at EPFL in number o We are super happy about this, and are eager to make your experience contributing to Scala productive and satisfying, so that we can keep up this growth. We can't do this alone (nor do we want to)! -This is why we're collecting these notes on how to contribute, and we hope you'll share your experience to improve the process for the next contributor! 
(Feel free to send a PR for this note, send your thoughts to scala-internals, or tweet about it to @adriaanm.) +This is why we're collecting these notes on how to contribute, and we hope you'll share your experience to improve the process for the next contributor! (Feel free to send a PR for this note, send your thoughts to gitter, scala-internals, or tweet about it to @adriaanm.) By the way, the team at Lightbend is: @adriaanm, @lrytz, @retronym, @SethTisue, and @szeiger. @@ -117,14 +117,32 @@ See the [scala-jenkins-infra repo](https://github.com/scala/scala-jenkins-infra) ### Pass code review -Your PR will need to be assigned to one or more reviewers. You can suggest reviewers yourself; if you're not sure, see the list in [README.md](README.md) or ask on scala-internals. +Your PR will need to be assigned to one or more reviewers. You can suggest reviewers +yourself; if you're not sure, see the list in [README.md](README.md) or ask on gitter +or scala-internals. -To assign a reviewer, add a "review by @reviewer" to your PR description. +To assign a reviewer, add a "review by @reviewer" to the PR description or in a +comment on your PR. NOTE: it's best not to @mention in commit messages, as github pings you every time a commit with your @name on it shuffles through the system (even in other repos, on merges,...). A reviewer gives the green light by commenting "LGTM" (looks good to me). -A review feedback may be addressed by pushing new commits to the request, if these commits stand on their own. +When including review feedback, we typically amend the changes into the existing commit(s) +and `push -f` to the branch. This is to keep the git history clean. Additional commits +are OK if they stand on their own. -Once all these conditions are met, and we agree with the change (we are available on scala-internals to discuss this beforehand, before you put in the coding work!), we will merge your changes. +Once all these conditions are met, and we agree with the change (we are available on +gitter or scala-internals to discuss this beforehand, before you put in the coding work!), +we will merge your changes. + +We use the following labels: + +Label | Description +-------------------------|:----------- +`reviewed` | automatically added by scabot when a comment prefixed with LGTM is posted +`welcome` | added by reviewer / queue curator to welcome someone's first PR (for highlighting in the release notes) +`release-notes` | added by reviewer / queue curator to make sure this PR is highlighted in the release notes +`on-hold` | added when this PR should not yet be merged, even though CI is green +`WIP` | added by the author if a PR is submitted for CI testing, needs more work to be complete +`assistance-appreciated` | added by the author if help by the community is appreciated to move a change forward diff --git a/README.md b/README.md index 9ef47494184e..6ebb4531765c 100644 --- a/README.md +++ b/README.md @@ -5,12 +5,12 @@ This is the official repository for the [Scala Programming Language](http://www. To contribute to the Scala Standard Library, Scala Compiler and Scala Language Specification, please send us a [pull request](https://help.github.com/articles/using-pull-requests/#fork--pull) from your fork of this repository! We do have to ask you to sign the [Scala CLA](http://www.lightbend.com/contribute/cla/scala) before we can merge any of your work into our code base, to protect its open source nature. 
-For more information on building and developing the core of Scala, read on! +For more information on building and developing the core of Scala, make sure to read +the rest of this README! -Please also check out: - -* our [guidelines for contributing](CONTRIBUTING.md). -* the ["Scala Hacker Guide"](http://scala-lang.org/contribute/hacker-guide.html) covers some of the same ground as this README, but in greater detail and in a more tutorial style, using a running example. +In order to get in touch with Scala contributors, join the +[scala/contributors](https://gitter.im/scala/contributors) gitter channel or post on the +[scala-internals mailing list](http://www.scala-lang.org/community/). # Reporting issues @@ -18,195 +18,247 @@ We're still using Jira for issue reporting, so please [report any issues](https: (We would love to start using GitHub Issues, but we're too resource-constrained to take on this migration right now.) # Get in touch! -If you need some help with your PR at any time, please feel free to @-mention anyone from the list below (or simply `@scala/team-core-scala`), and we will do our best to help you out: +If you need some help with your PR at any time, please feel free to @-mention anyone from the list below, and we will do our best to help you out: | username | talk to me about... | --------------------------------------------------------------------------------------------------|----------------------------------------------------------------|---------------------------------------------------| - | [`@adriaanm`](https://github.com/adriaanm) | type checker, pattern matcher, infrastructure, language spec | - | [`@SethTisue`](https://github.com/SethTisue) | build, developer docs, community build, Jenkins, library, the welcome-to-Scala experience | - | [`@retronym`](https://github.com/retronym) | compiler performance, weird compiler bugs, Java 8 lambdas, REPL | - | [`@Ichoran`](https://github.com/Ichoran) | collections library, performance | - | [`@lrytz`](https://github.com/lrytz) | optimizer, named & default arguments | - | [`@VladUreche`](https://github.com/VladUreche) | specialization, Scaladoc tool | - | [`@densh`](https://github.com/densh) | quasiquotes, parser, string interpolators, macros in standard library | - | [`@xeno-by`](https://github.com/xeno-by) | macros and reflection | - | [`@heathermiller`](https://github.com/heathermiller) | documentation | - | [`@dickwall`](https://github.com/dickwall) | process & community, documentation | - | [`@dragos`](https://github.com/dragos) | specialization, back end | - | [`@axel22`](https://github.com/axel22) | collections, concurrency, specialization | - | [`@janekdb`](https://github.com/janekdb) | documentation | + | [`@adriaanm`](https://github.com/adriaanm) | type checker, pattern matcher, infrastructure, language spec | + | [`@SethTisue`](https://github.com/SethTisue) | build, developer docs, community build, Jenkins, library, the welcome-to-Scala experience | + | [`@retronym`](https://github.com/retronym) | compiler performance, weird compiler bugs, Java 8 lambdas, REPL | + | [`@Ichoran`](https://github.com/Ichoran) | collections library, performance | + | [`@lrytz`](https://github.com/lrytz) | optimizer, named & default arguments | + | 
[`@VladUreche`](https://github.com/VladUreche) | specialization, Scaladoc tool | + | [`@densh`](https://github.com/densh) | quasiquotes, parser, string interpolators, macros in standard library | + | [`@xeno-by`](https://github.com/xeno-by) | macros and reflection | + | [`@heathermiller`](https://github.com/heathermiller) | documentation | + | [`@dickwall`](https://github.com/dickwall) | process & community, documentation | + | [`@dragos`](https://github.com/dragos) | specialization, back end | + | [`@axel22`](https://github.com/axel22) | collections, concurrency, specialization | + | [`@janekdb`](https://github.com/janekdb) | documentation | P.S.: If you have some spare time to help out around here, we would be delighted to add your name to this list! -# Handy Links - - [A wealth of documentation](http://docs.scala-lang.org) - - [mailing lists](http://www.scala-lang.org/community/) - - [Gitter room for Scala contributors](https://gitter.im/scala/contributors) - - [Scala CI](https://scala-ci.typesafe.com/) - - download the latest nightlies: - - [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/) - - [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/) - # Repository structure ``` scala/ -+--build.xml The main Ant build script, see also under src/build. -+--pull-binary-libs.sh Pulls binary artifacts from remote repository. -+--lib/ Pre-compiled libraries for the build. -+--src/ All sources. - +---/library Scala Standard Library. - +---/reflect Scala Reflection. - +---/compiler Scala Compiler. - +---/eclipse Eclipse project files. - +---/intellij IntelliJ project templates. ++--build.sbt The main sbt build script ++--build.xml The deprecated Ant build script ++--pull-binary-libs.sh Pulls binary artifacts from remote repository, used by build scripts ++--lib/ Pre-compiled libraries for the build ++--src/ All sources + +---/library Scala Standard Library + +---/reflect Scala Reflection + +---/compiler Scala Compiler + +---/eclipse Eclipse project files + +---/intellij IntelliJ project templates ++--spec/ The Scala language specification +--scripts/ Scripts for the CI jobs (including building releases) -+--test/ The Scala test suite. -+--build/ [Generated] Build products output directory for ant. -+--dist/ [Generated] The destination folder for Scala distributions. ++--test/ The Scala test suite + +---/files Partest tests + +---/junit JUnit tests ++--build/ [Generated] Build output directory ``` -# How we roll +# Get Ready to Contribute ## Requirements -You'll need a Java SDK. The baseline version is 6 for 2.11.x, 8 for -2.12.x. (It's also possible to use a later SDK for local development, -but the CI will verify against the baseline version.) - -You'll also need Apache Ant (version 1.9.0 or above) and curl (for `./pull-binary-libs.sh`). - -Mac OS X and Linux work. Windows may work if you use Cygwin. (Community help with keeping the build working on Windows is appreciated.) - -## Git Hygiene - -As git history is forever, we take great pride in the quality of the commits we merge into the repository. The title of your commit will be read hundreds (of thousands? :-)) of times, so it pays off to spend just a little bit more time to polish it, making it descriptive and concise. 
Please take a minute to read the advice [most projects agree on](https://github.com/erlang/otp/wiki/Writing-good-commit-messages), and stick to 72 or fewer characters for the first line, wrapping subsequent ones at 80 (at most). - -When not sure how to formulate your commit message, imagine you're writing a bullet item for the next release notes, or describing what the commit does to the code base (use active verbs in the present tense). When your commit title is featured in the next release notes, it will be read by a lot of curious Scala users, looking for the latest improvements. Satisfy their thirst for information with as few words as possible! Also, a commit should convey clearly to your (future) fellow contributors what it does to the code base. - -Writing the commit message is a great sanity check that the commit is of the right size. If it does too many things, the description will be unwieldy and tedious to write. Chop it up (`git add -u --patch` and `git rebase` are your friends) and simplify! - -To pinpoint bugs, we often use git bisect, which is only effective when we can count on each commit building (and passing the test suite). Thus, the CI bot enforces this. Please rebase your development history into a sensible list of self-contained commits that tell the story of your bug fix or improvement. Carve them up so that the riskier bits can be reverted independently. Keep changes focussed by splitting out cleanups from refactorings from actual changes to the logic. - -This facilitates reviewing: a commit that reformats code can be judged quickly not to affect anything, so we can focus on the meat of the PR. It also helps when merging between long-running branches, reducing conflicts (or providing at least a limited scope for each one). - -Please do not @-mention anyone in the commit message -- that's what the PR description and comments are for. Every time a commit is shuffled through github (in a merge in some fork, say), every @-mention results in an email to that person (the core team treats them as personal email, straight to their inbox, so please don't flood us :-)). - - -## Reviews - -Please consider nominating a reviewer for your PR in the PR's description or a comment. If unsure, not to worry -- the core team will assign one for you. - -Your reviewer is also your mentor, who will help you rework your PR so that it meets our requirements. We strive to give timely feedback, and apologize for those times when we are overwhelmed by the volume of contributions. Please feel free to ping us. You are entitled to regular progress updates and at least a quick assessment of feasibility of a bigger PR. - -To help you plan your contributions, we communicate our plans on a regular basis on scala-internals, and deadlines are tracked as due dates for [GitHub milestones](https://github.com/scala/scala/milestones). - -## Reviewing - -Once you've gained some experience with the code base and the process, the next step is to review the contributions of others. - -The main goal of this whole process is to ensure the health of the Scala project by improving the quality of the code base, the documentation, as well as this process itself. Thank you for doing your part! 
- -## [Labels](https://github.com/scala/scala/labels) - -Label | Description ---------------- | ----------- -`reviewed` | automatically added by scabot when a comment prefixed with LGTM is posted -`welcome` | reviewer / queue curator adds to welcome someone's first PR (for highlighting in the release notes) -`release-notes` | reviewer / queue curator adds to make sure this PR is highlighted in the release notes -`on-hold` | added when this PR should not yet be merged, even though CI is green - -### Tips & Tricks -Once the `publish-core` task has completed on a commit, you can try it out in sbt as follows: +You need the following tools: + - A Java SDK. The baseline version is 6 for 2.11.x, 8 for 2.12.x. It's possible + to use a later SDK for local development, but the CI will verify against the baseline + version. + - sbt, we recommend the [sbt-extras](https://github.com/paulp/sbt-extras) runner + script. It provides sensible default jvm options (stack and heap size). + - curl (for `./pull-binary-libs.sh`, used by the sbt / ant build). + - Apache Ant (version 1.9.3 or above) if you need to use the (deprecated) ant build. + +Mac OS X and Linux work. Windows may work if you use Cygwin. Community help with keeping +the build working on Windows is appreciated. + +## Build Setup + +### Basics + +Scala is built in layers, where each layer is a complete Scala compiler and library. +Here is a short description of the layers, from bottom to top: + + - `starr`: the stable reference Scala release. We use an official release of + Scala (specified by `starr.version` in [versions.properties](versions.properties)), + downloaded from the Central Repository. + - `locker` (deprecated, only in ant): an intermediate layer that existed in the + ant build to perform a bootstrap. + - `quick`: the development layer which is incrementally built when working on + changes in the compiler or library. + - `strap` (deprecated, only in ant) : a test layer used to check stability of + the build. + +The sbt build uses `starr` to build `quick`. This is sufficient for most development +scenarios: changes to the library or the compiler can be tested by running the `quick` +Scala (see below for how to do that). + +However, a full build of Scala (a *bootstrap*, as performed by our CI) requires two +layers. This guarantees that every Scala version can build itself. If you change the +code generation part of the Scala compiler, your changes will only reflect in the +bytecode of the library and compiler after a bootstrap. See below for how to create +a bootstrap build locally. + +### Using the Sbt Build + +Core commands: + - `compile` compiles all sub-projects (library, reflect, compiler, scaladoc, etc) + - `scala` / `scalac` run the REPL / compiler directly from sbt (accept options / + arguments) + - `dist/mkBin` generates runner scripts (`scala`, `scalac`, etc) in `build/quick/bin` + - `dist/mkPack` creates a build in the Scala distribution format in `build/pack` + - `test` runs the JUnit test, `testOnly *immutable.ListTest` runs a subset + - `partest` runs partest tests (accepts options, try `partest --help`) + - `publishLocal` publishes a distribution locally (can be used as `scalaVersion` in + other sbt projects) + - Optionally `set VersionUtil.baseVersionSuffix in Global := "abcd123-SNAPSHOT"` + where `abcd123` is the git hash of the revision being published. You can also + use something custom like `"mypatch"`. 
This changes the version number from + `2.12.0-SNAPSHOT` to something more stable (`2.12.0-abcd123-SNAPSHOT`). + - Optionally `set publishArtifact in (Compile, packageDoc) in ThisBuild := false` + to skip generating / publishing API docs (speeds up the process). + +#### Sandbox + +We recommend keeping local test files in the `sandbox` directory which is listed in +the `.gitignore` of the Scala repo. + +#### Incremental Compilation + +Note that sbt's incremental compilation is often too coarse for the Scala compiler +codebase and re-compiles too many files, resulting in long build times (check +[sbt#1104](https://github.com/sbt/sbt/issues/1104) for progress on that front). In the +meantime you can: + - Enable "ant mode" in which sbt only re-compiles source files that were modified. + Create a file `local.sbt` containing the line `(incOptions in ThisBuild) := (incOptions in ThisBuild).value.withNameHashing(false).withAntStyle(true)`. + Add an entry `local.sbt` to your `~/.gitignore`. + - Use IntelliJ IDEA for incremental compiles (see [IDE Setup](#ide-setup) below) - its + incremental compiler is a bit less conservative, but usually correct. + +#### Local Bootstrap Build + +To perform a bootstrap using sbt + - first a build is published either locally or on a temporary repository, + - then a separate invocation of sbt (using the previously built version as `starr`) + is used to build / publish the actual build. + +Assume the current `starr` version is `2.12.0-M4` (defined in +[versions.properties](versions.properties)) and the current version is `2.12.0-SNAPSHOT` +(defined in [build.sbt](build.sbt)). To perform a local bootstrap: + - Run `publishLocal` (you may want to specify a custom version suffix and skip + generating API docs, see above). + - Quit sbt and start a new sbt instance using `sbt -Dstarr.version=` where + `` is the version number you published locally. + - If the version number you published is not binary compatible with the current + `starr`, `set every scalaBinaryVersion := "2.12.0-M4"`. This is not required if + the version you published locally is binary compatible, i.e., if the current + `starr` is a 2.12.x release and not a milestone / RC. + +The last step is required to resolve modules (scala-xml, scala-partest, etc). It +assumes that the module releases for the current `starr` work (in terms of binary +compatibility) with the local starr that you published locally. A full bootstrap +requires re-building all the modules. On our CI this is handled by the +[bootstrap](scripts/jobs/integrate/bootstrap) script, but it (currently) cannot +be easily executed locally. + +### IDE Setup + +You may use IntelliJ IDEA ([src/intellij/README.md](src/intellij/README.md)) or the +Scala IDE for Eclipse (see [src/eclipse/README.md](src/eclipse/README.md)). + +In order to use IntelliJ's incremental compiler: + - run `dist/mkBin` in sbt to get a build and the runner scripts in `build/quick/bin` + - run "Build" - "Make Project" in IntelliJ + +Now you can edit and build in IntelliJ and use the scripts (compiler, REPL) to +directly test your changes. You can also run the `scala`, `scalac` and `partest` +commands in sbt. Enable "ant mode" (explained above) to prevent sbt's incremental +compiler from re-compiling (too many) files before each `partest` invocation. + +# Coding Guidelines + +Our guidelines for contributing are explained in [CONTRIBUTING.md](CONTRIBUTING.md).
+It contains useful information on our coding standards, testing, documentation, how +we use git and GitHub and how to get your code reviewed. + +You may also want to check out the following resources: + - The ["Scala Hacker Guide"](http://scala-lang.org/contribute/hacker-guide.html) + covers some of the same ground as this README, but in greater detail and in a more + tutorial style, using a running example. + - [Scala documentation site](http://docs.scala-lang.org) + +# Scala CI + +Once you submit a PR your commits will are automatically tested by the Scala CI. + +If you see a spurious build failure, you can post `/rebuild` as a PR comment. +The [scabot README](https://github.com/scala/scabot) lists all available commands. + +If you'd like to test your patch before having everything polished for review, +feel free to submit a PR and add the `WIP` label. In case your WIP branch contains +a large number of commits (that you didn't clean up / squash yet for review), +consider adding `[ci: last-only]` to the PR title. That way only the last commit +will be tested, saving some energy and CI-resources. Note that inactive WIP PRs +will be closed eventually, which does not mean the change is being rejected. + +CI performs a full bootstrap. The first task, `validate-publish-core`, publishes +a build of your commit to the temporary repository +https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots. +Note that this build is not yet bootstrapped, its bytecode is built using the +current `starr`. The version number is `2.12.0-abcd123-SNAPSHOT` where `abcd123` +is the commit hash. + +You can use Scala builds in the validation repository locally by adding a resolver +and specifying the corresponding `scalaVersion`: ``` $ sbt - > set resolvers += "pr" at "https://scala-ci.typesafe.com/artifactory/scala-pr-validation-snapshots/" -> set scalaVersion := "--SNAPSHOT" +> set scalaVersion := "2.12.0-abcd123-SNAPSHOT" > console ``` -Here, `` is the milestone targeted by the PR (e.g., 2.11.6), and `` is the 7-character sha (the format used by GitHub on the web). - -## IDE Setup -### Eclipse -See [src/eclipse/README.md](src/eclipse/README.md). - -### IntelliJ 15 -See [src/intellij/README.md](src/intellij/README.md). - -## Building with sbt (EXPERIMENTAL) - -The experimental sbt-based build definition has arrived! Run `sbt package` -to build the compiler. You can run `sbt test` to run unit (JUnit) tests. -Use `sbt test/it:test` to run integration (partest) tests. - -We would like to migrate to sbt build as quickly as possible. If you would -like to help please use the scala-internals mailing list to discuss your -ideas and coordinate your effort with others. - -## Building with Ant +Note that the scala modules are currently not built / published against the +tested version during CI validation. -NOTE: we are working on migrating the build to sbt. +## Nightly Builds -If you are behind a HTTP proxy, include -[`ANT_ARGS=-autoproxy`](https://ant.apache.org/manual/proxy.html) in -your environment. +The Scala CI builds nightly download releases (including all modules) and publishes +them to the following locations: + - [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/?C=M;O=D) + - [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/?C=M;O=A) -Run `ant build-opt` to build an optimized version of the compiler. -Verify your build using `ant test-opt`. 
+The CI also publishes nightly API docs: + - [2.12.x](http://www.scala-lang.org/files/archive/nightly/2.12.x/api/?C=M;O=D) + - [symlink to the latest](http://www.scala-lang.org/files/archive/nightly/2.12.x/api/2.12.x/) + - [2.11.x](http://www.scala-lang.org/files/archive/nightly/2.11.x/api/?C=M;O=D) + - [symlink to the latest](http://www.scala-lang.org/files/archive/nightly/2.11.x/api/2.11.x/) -The Scala build system is based on Apache Ant. Most required pre-compiled -libraries are part of the repository (in 'lib/'). The following however is -assumed to be installed on the build machine: TODO +Note that we currently don't publish nightly (or SNAPSHOT) builds in maven or ivy +format to any repository. You can track progress on this front at +[scala-jenkins-infra#133](https://github.com/scala/scala-jenkins-infra/issues/133) +and [scala-dev#68](https://github.com/scala/scala-dev/issues/68). -### Ant Tips and tricks +## Scala CI Internals -Here are some common commands. Most ant targets offer a `-opt` variant that runs under `-optimise` (CI runs the -optimize variant). - -Command | Description ------------------------ | ----------- -`./pull-binary-libs.sh` | downloads all binary artifacts associated with this commit. -`ant -p` | prints out information about the commonly used ant targets. -`ant` or `ant build` | A quick compilation (to `build/quick`) of your changes using the locker compiler. -`ant dist` | builds a distribution in 'dists/latest'. -`ant all.clean` | removes all build files and all distributions. - -A typical debug cycle incrementally builds quick, then uses it to compile and run the file -`sandbox/test.scala` as follows: - - - `ant && build/quick/bin/scalac -d sandbox sandbox/test.scala && build/quick/bin/scala -cp sandbox Test` - -We typically alias `build/quick/bin/scalac -d sandbox` to `qsc` and `build/quick/bin/scala -cp sandbox` to `qs` in our shell. - -`ant test-opt` tests that your code is working and fit to be committed: - - - Runs the test suite and bootstrapping test on quick. - - You can run the suite only (skipping strap) with `ant test.suite`. - -`ant docs` generates the HTML documentation for the library from the sources using the scaladoc tool in quick. -Note: on most machines this requires more heap than is allocated by default. You can adjust the parameters with `ANT_OPTS`. Example command line: - -```sh -ANT_OPTS="-Xms512M -Xmx2048M -Xss1M -XX:MaxPermSize=128M" ant docs -``` +The Scala CI runs as a Jenkins instance on [scala-ci.typesafe.com](https://scala-ci.typesafe.com/), +configured by a chef cookbook at [scala/scala-jenkins-infra](https://github.com/scala/scala-jenkins-infra). -### Bootstrapping concepts -NOTE: This is somewhat outdated, but the ideas still hold. +The build bot that watches PRs, triggers testing builds and applies the "reviewed" label +after an LGTM comment is in the [scala/scabot](https://github.com/scala/scabot) repo. -In order to guarantee the bootstrapping of the Scala compiler, the ant build -compiles Scala in layers. Each layer is a complete compiled Scala compiler and library. -A superior layer is always compiled by the layer just below it. Here is a short -description of the four layers that the build uses, from bottom to top: +## Community Build - - `starr`: the stable reference Scala release. We use an official version of Scala (specified by `starr.version` in `versions.properties`), downloaded from the Central Repository. 
- - `locker`: the local reference which is compiled by starr and is the work compiler in a typical development cycle. Add `locker.skip=true` to `build.properties` to skip this step and speed up development when you're not changing code generation. In any case, after it has been built once, it is “frozen” in this state. Updating it to fit the current source code must be explicitly requested (`ant locker.unlock`). - - `quick`: the layer which is incrementally built when testing changes in the compiler or library. This is considered an actual new version when locker is up-to-date in relation to the source code. - - `strap`: a test layer used to check stability of the build. +The community build is a central element for testing Scala releases. A community +build can be launched for any Scala revision / commit. It first builds the Scala +library and compiler and then uses that Scala version to build a large number of +open-source projects from source. -For each layer, the Scala library is compiled first and the compiler next. -That means that any changes in the library can immediately be used in the -compiler without an intermediate build. On the other hand, if building the -library requires changes in the compiler, a new locker must be built if -bootstrapping is still possible, or a new starr if it is not. +Community builds run on the Scala Jenkins instance, the jobs are named +`..-integrate-community-build`. The community build definitions specifying which +projects are built are in the +[scala/community-builds](https://github.com/scala/community-builds) repo. diff --git a/src/intellij/README.md b/src/intellij/README.md index c311afda9c16..41fef0418362 100644 --- a/src/intellij/README.md +++ b/src/intellij/README.md @@ -1,40 +1,81 @@ -# Building Scala in IntelliJ IDEA - -## Requirements +# Developing Scala in IntelliJ IDEA Use the latest IntelliJ release and install the Scala plugin from within the IDE. -## Initial setup +## Initial Setup To create the IntelliJ project files: - Run `sbt intellij` - Open `src/intellij/scala.ipr` in IntelliJ - - In `File` → `Project Structure` → `Project` → `Project SDK`, create an SDK entry named "1.6" containing the Java 1.6 SDK + - In `File` → `Project Structure` → `Project` → `Project SDK`, create an SDK entry + named "1.8" containing the Java 1.8 SDK (1.6 if you're on the Scala 2.11.x branch) -The project files are created by as copies of the `.SAMPLE` files, which are under version control. -The actual IntelliJ project files are in `.gitignore` so that local changes are ignored. +The project files are created as copies of the `.SAMPLE` files, which are under version +control. The actual IntelliJ project files are in `.gitignore` so that local changes +are ignored. ## Dependencies For every module in the IntelliJ project there is a corresponding `-deps` library, for example `compiler-deps` provides `ant.jar` for the compiler codebase. The `.jar` files in these `-deps` libraries can be easily kept up-to-date by running `sbt intellij` again. -This is necessary whenever the dependencies in the sbt build change, for example when the STARR version is updated. +This is necessary whenever the dependencies in the sbt build change, for example when the `starr` version is updated. Note that this command only patches the dependency lists, all other settings in the IntelliJ project definition are unchanged. To overwrite the project definition files by copying the `.SAMPLE` files again run `sbt intellijFromSample`.
-## Usage +## Switching Branches + +The 2.12.x branch contains IntelliJ module files for `actors` and `forkjoin` even though these modules only exist in 2.11.x. +This allows using the same IntelliJ project files when switching to the 2.11.x branch (without causing any issues while working on 2.12.x). + +When switching between 2.11.x and 2.12.x, make sure to run `sbt intellij`. +Note that the `Project SDK` is not updated in this process. +If you want to use the Java 1.6 SDK while working on 2.11.x you need to change it manually (`File` → `Project Structure` → `Project` → `Project SDK`). + +If you switch between 2.11.x and 2.12.x often, it makes sense to have a separate clone +of the repository for each branch. + +## Incremental Compilation + +Run `Build` → `Make Project` to build all modules of the Scala repository (library, +compiler, etc). Note that compilation in IntelliJ is performed in a single pass (no +bootstrap), like the sbt build. + +Note that the output directory when compiling in IntelliJ is the same as for the +sbt and (deprecated) ant builds. This allows building incrementally in IntelliJ +and directly using the changes via the command-line scripts in `build/quick/bin/`. + +## Running JUnit Tests + +JUnit tests can be executed by right-clicking on a test class or test method and +selecting "Run" or "Debug". The debugger will allow you to stop at breakpoints +within the Scala library. + +It is possible to invoke the Scala compiler from a JUnit test (passing the source +code as a string) and inspect the generated bytecode, see for example +`scala.issues.BytecodeTest`. Debugging such a test is an easy way to stop at +breakpoints within the Scala compiler. + +## Running the Compiler and REPL + +You can create run/debug configurations to run the compiler and REPL directly within +IntelliJ, which might accelerate development and debugging of the compiler. -Compiling, running, JUnit tests and debugging should all work. -You can work on the compiler, the standard library, and other components as well. +To debug the Scala codebase you can also use "Remote" debug configuration and pass +the corresponding arguments to the jvm running the compiler / program. -Note that compilation within IntelliJ is performed in a single pass. -The code is compiled using the "STARR" (stable reference) compiler, as specified by `starr.version` in `versions.properties`. -This is consistent with the sbt build. +To run the compiler create an "Application" configuration with + - Main class: `scala.tools.nsc.Main` + - Program arguments: `-usejavacp -cp sandbox -d sandbox sandbox/Test.scala` + - Working directory: the path of your checkout + - Use classpath of module: `compiler` -Note that the output directory when compiling in IntelliJ is the same as for the sbt build. -This allows building incrementally in IntelliJ and directly use the changes using the command-line scripts in `build/quick/bin/`. +To run the REPL create an "Application" configuration with + - Main class: `scala.tools.nsc.MainGenericRunner` + - Program arguments: `-usejavacp` + - Working directory: the path of your checkout + - Use classpath of module: `repl` ## Updating the `.SAMPLE` files From 214ea82573624bffb4d0f16f3e5c49f9370ba7a7 Mon Sep 17 00:00:00 2001 From: Steve Robinson Date: Sat, 27 Feb 2016 09:13:45 -0800 Subject: [PATCH 0051/2793] SI-9656 Range.toString distinguishes Numeric step For Range and NumericRange, toString will indicate the step if it is not 1. Additionally, indicate empty ranges and ranges which are not "exact".
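For instance, step, emptiness and inexactness now show up directly in the printed form. The sketch below illustrates the intended behaviour described above; the exact strings are pinned down by the `t9656` check file added further down in this patch:

```scala
object RangeToStringExamples extends App {
  // A step of 1 is omitted from the output.
  println(1 to 10)        // Range 1 to 10
  // Stepping by 2 never lands exactly on the end point 10, so the range is "inexact".
  println(1 to 10 by 2)   // inexact Range 1 to 10 by 2
  // 1, 4, 7, 10 hits the end point exactly, so there is no "inexact" prefix.
  println(1 to 10 by 3)   // Range 1 to 10 by 3
  // Empty ranges are flagged explicitly.
  println(100 until 100)  // empty Range 100 until 100
  // NumericRange (here over Long) uses the analogous format.
  println(1L to 10L by 2) // NumericRange 1 to 10 by 2
}
```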
For a "mapped" range, used by `Range.Double`, toString includes the underlying range and the simple type of the step (to distinguish Double from BigDecimal). --- .../collection/immutable/NumericRange.scala | 14 ++++-- .../scala/collection/immutable/Range.scala | 17 ++++---- test/files/jvm/serialization-new.check | 8 ++-- test/files/jvm/serialization.check | 8 ++-- test/files/run/t9656.check | 14 ++++++ test/files/run/t9656.scala | 43 +++++++++++++++++++ 6 files changed, 84 insertions(+), 20 deletions(-) create mode 100644 test/files/run/t9656.check create mode 100644 test/files/run/t9656.scala diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala index c8d751925441..fdf50960a383 100644 --- a/src/library/scala/collection/immutable/NumericRange.scala +++ b/src/library/scala/collection/immutable/NumericRange.scala @@ -161,6 +161,12 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable { override def isEmpty = underlyingRange.isEmpty override def apply(idx: Int): A = fm(underlyingRange(idx)) override def containsTyped(el: A) = underlyingRange exists (x => fm(x) == el) + + override def toString = { + def simpleOf(x: Any): String = x.getClass.getName.split("\\.").last + val stepped = simpleOf(underlyingRange.step) + s"${super.toString} (using $underlyingRange of $stepped)" + } } } @@ -250,9 +256,11 @@ extends AbstractSeq[T] with IndexedSeq[T] with Serializable { super.equals(other) } - override def toString() = { - val endStr = if (length > Range.MAX_PRINT) ", ... )" else ")" - take(Range.MAX_PRINT).mkString("NumericRange(", ", ", endStr) + override def toString = { + val empty = if (isEmpty) "empty " else "" + val preposition = if (isInclusive) "to" else "until" + val stepped = if (step == 1) "" else s" by $step" + s"${empty}NumericRange $start $preposition $end$stepped" } } diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index d3fe367e5035..6eaf404fe8e2 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -396,22 +396,20 @@ extends scala.collection.AbstractSeq[Int] case _ => super.equals(other) } - /** Note: hashCode can't be overridden without breaking Seq's - * equals contract. - */ - override def toString() = { - val endStr = - if (numRangeElements > Range.MAX_PRINT || (!isEmpty && numRangeElements < 0)) ", ... )" else ")" - take(Range.MAX_PRINT).mkString("Range(", ", ", endStr) + /* Note: hashCode can't be overridden without breaking Seq's equals contract. */ + + override def toString = { + val preposition = if (isInclusive) "to" else "until" + val stepped = if (step == 1) "" else s" by $step" + val prefix = if (isEmpty) "empty " else if (!isExact) "inexact " else "" + s"${prefix}Range $start $preposition $end$stepped" } } /** A companion object for the `Range` class. */ object Range { - private[immutable] val MAX_PRINT = 512 // some arbitrary value - /** Counts the number of range elements. * @pre step != 0 * If the size of the range exceeds Int.MaxValue, the @@ -514,6 +512,7 @@ object Range { // we offer a partially constructed object. 
class Partial[T, U](f: T => U) { def by(x: T): U = f(x) + override def toString = "Range requires step" } // Illustrating genericity with Int Range, which should have the same behavior diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check index 1c5dd4828bea..ca91ec107384 100644 --- a/test/files/jvm/serialization-new.check +++ b/test/files/jvm/serialization-new.check @@ -97,12 +97,12 @@ x = Queue(a, b, c) y = Queue(a, b, c) x equals y: true, y equals x: true -x = Range(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) -y = Range(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) +x = Range 0 until 10 +y = Range 0 until 10 x equals y: true, y equals x: true -x = NumericRange(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) -y = NumericRange(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) +x = NumericRange 0 until 10 +y = NumericRange 0 until 10 x equals y: true, y equals x: true x = Map(1 -> A, 2 -> B, 3 -> C) diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check index 1c5dd4828bea..ca91ec107384 100644 --- a/test/files/jvm/serialization.check +++ b/test/files/jvm/serialization.check @@ -97,12 +97,12 @@ x = Queue(a, b, c) y = Queue(a, b, c) x equals y: true, y equals x: true -x = Range(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) -y = Range(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) +x = Range 0 until 10 +y = Range 0 until 10 x equals y: true, y equals x: true -x = NumericRange(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) -y = NumericRange(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) +x = NumericRange 0 until 10 +y = NumericRange 0 until 10 x equals y: true, y equals x: true x = Map(1 -> A, 2 -> B, 3 -> C) diff --git a/test/files/run/t9656.check b/test/files/run/t9656.check new file mode 100644 index 000000000000..03e3ff3b5fcb --- /dev/null +++ b/test/files/run/t9656.check @@ -0,0 +1,14 @@ +Range 1 to 10 +Range 1 to 10 +inexact Range 1 to 10 by 2 +Range 1 to 10 by 3 +inexact Range 1 until 10 by 2 +Range 100 to 100 +empty Range 100 until 100 +NumericRange 1 to 10 +NumericRange 1 to 10 by 2 +NumericRange 0.1 until 1.0 by 0.1 +NumericRange 0.1 until 1.0 by 0.1 +NumericRange 0.1 until 1.0 by 0.1 (using NumericRange 0.1 until 1.0 by 0.1 of BigDecimal) +NumericRange 0 days until 10 seconds by 1 second +empty NumericRange 0 days until 0 days by 1 second diff --git a/test/files/run/t9656.scala b/test/files/run/t9656.scala new file mode 100644 index 000000000000..373271955340 --- /dev/null +++ b/test/files/run/t9656.scala @@ -0,0 +1,43 @@ + +import scala.math.BigDecimal + +object Test extends App { + println(1 to 10) + println(1 to 10 by 1) + println(1 to 10 by 2) + println(1 to 10 by 3) + println(1 until 10 by 2) + println(100 to 100) + println(100 until 100) + + println(1L to 10L) + println(1L to 10L by 2) + + // want to know if this is BigDecimal or Double stepping by BigDecimal + println(0.1 until 1.0 by 0.1) + println(Range.BigDecimal(BigDecimal("0.1"), BigDecimal("1.0"), BigDecimal("0.1"))) + println(Range.Double(0.1, 1.0, 0.1)) + + import concurrent.duration.{SECONDS => Seconds, _}, collection.immutable.NumericRange + implicit val `duration is integerish`: math.Integral[FiniteDuration] = new math.Integral[FiniteDuration] { + def quot(x: scala.concurrent.duration.FiniteDuration,y: scala.concurrent.duration.FiniteDuration): scala.concurrent.duration.FiniteDuration = ??? + def rem(x: scala.concurrent.duration.FiniteDuration,y: scala.concurrent.duration.FiniteDuration): scala.concurrent.duration.FiniteDuration = ??? 
+ + // Members declared in scala.math.Numeric + def fromInt(x: Int): scala.concurrent.duration.FiniteDuration = Duration(x, Seconds) + def minus(x: scala.concurrent.duration.FiniteDuration,y: scala.concurrent.duration.FiniteDuration): scala.concurrent.duration.FiniteDuration = ??? + def negate(x: scala.concurrent.duration.FiniteDuration): scala.concurrent.duration.FiniteDuration = ??? + def plus(x: scala.concurrent.duration.FiniteDuration,y: scala.concurrent.duration.FiniteDuration): scala.concurrent.duration.FiniteDuration = ??? + def times(x: scala.concurrent.duration.FiniteDuration,y: scala.concurrent.duration.FiniteDuration): scala.concurrent.duration.FiniteDuration = ??? + def toDouble(x: scala.concurrent.duration.FiniteDuration): Double = ??? + def toFloat(x: scala.concurrent.duration.FiniteDuration): Float = ??? + def toInt(x: scala.concurrent.duration.FiniteDuration): Int = toLong(x).toInt + def toLong(x: scala.concurrent.duration.FiniteDuration): Long = x.length + + // Members declared in scala.math.Ordering + def compare(x: scala.concurrent.duration.FiniteDuration,y: scala.concurrent.duration.FiniteDuration): Int = + x.compare(y) + } + println(NumericRange(Duration.Zero, Duration(10, Seconds), Duration(1, Seconds))) + println(NumericRange(Duration.Zero, Duration.Zero, Duration(1, Seconds))) +} From 3cddeaa525fd6fe9860a27019fdf484297a8d3dd Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 13 Sep 2015 17:22:11 -0700 Subject: [PATCH 0052/2793] SI-7916: ScriptEngine support Refactor the ScriptEngine support to an adaptor atop the IMain API. Allow references to resolve to context attributes. (The attributes must be defined at compilation time, though they may resolve to updated values at evaluation time.) This means that attributes are not bound statically in REPL history. In particular, we forgo the trick of binding attributes named "name: Type" as typed values. Instead, an `x` bound in dynamic context is injected into the script as a dynamic selection `$ctx.x` where `ctx` performs the look-up in the script context. When a compiled script is re-evaluated, a new instance of the script class is created and defined symbols are rebound. The context stdout writer is handled with `Console.withOut`, with bytes decoded using the default charset. Compilation errors are thrown as ScriptException with the first reported error. This commit doesn't attempt dynamic selection from objects in context. Currently, script must cast. 
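Concretely, the adapted engine is driven through the standard `javax.script` API. The sketch below mirrors the `ScriptedTest` JUnit test added by this patch (it assumes the REPL adaptor is on the classpath); note the explicit cast of the context attribute, since, as stated above, scripts must currently cast values obtained from the dynamic context:

```scala
import javax.script._
import scala.tools.nsc.interpreter.Scripted

object ScriptedSketch {
  def main(args: Array[String]): Unit = {
    // Obtain the REPL-backed engine; service discovery via
    // new ScriptEngineManager().getEngineByName("scala") yields the same engine.
    val engine: ScriptEngine with Compilable = Scripted()

    // Attributes are resolved from the dynamic context at evaluation time.
    engine.put("foo", "bar")
    println(engine.eval("foo"))      // bar

    // A compiled script can be re-evaluated against different bindings;
    // the script casts, because the context attribute is typed as Object.
    val bindings = engine.createBindings()
    bindings.put("foo", "baz")
    val compiled = engine.compile("foo.asInstanceOf[String] * 2")
    println(compiled.eval())         // barbar
    println(compiled.eval(bindings)) // bazbaz
  }
}
```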
--- build.xml | 2 +- .../scala/tools/nsc/interpreter/IMain.scala | 203 +++-------- .../tools/nsc/interpreter/Scripted.scala | 343 ++++++++++++++++++ .../scala/tools/nsc/interpreter/package.scala | 2 +- test/files/run/repl-serialization.scala | 2 +- test/files/run/t1500.scala | 2 +- test/files/run/t7843-jsr223-service.check | 2 - test/files/run/t7843-jsr223-service.scala | 8 - test/files/run/t7933.check | 2 - test/files/run/t7933.scala | 11 - .../tools/nsc/interpreter/ScriptedTest.scala | 83 +++++ 11 files changed, 471 insertions(+), 189 deletions(-) create mode 100644 src/repl/scala/tools/nsc/interpreter/Scripted.scala delete mode 100644 test/files/run/t7843-jsr223-service.check delete mode 100644 test/files/run/t7843-jsr223-service.scala delete mode 100644 test/files/run/t7933.check delete mode 100644 test/files/run/t7933.scala create mode 100644 test/junit/scala/tools/nsc/interpreter/ScriptedTest.scala diff --git a/build.xml b/build.xml index 8790bf637d3f..778bcc561b14 100644 --- a/build.xml +++ b/build.xml @@ -1163,7 +1163,7 @@ TODO: - + diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index 8c91242b36ac..a42a12a6fc5c 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -20,7 +20,6 @@ import scala.tools.nsc.typechecker.{StructuredTypeStrings, TypeStrings} import scala.tools.nsc.util._ import ScalaClassLoader.URLClassLoader import scala.tools.nsc.util.Exceptional.unwrap -import javax.script.{AbstractScriptEngine, Bindings, Compilable, CompiledScript, ScriptContext, ScriptEngine, ScriptEngineFactory, ScriptException} import java.net.URL import scala.tools.util.PathResolver @@ -56,10 +55,11 @@ import scala.tools.util.PathResolver * @author Moez A. 
Abdel-Gawad * @author Lex Spoon */ -class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Settings, protected val out: JPrintWriter) extends AbstractScriptEngine with Compilable with Imports with PresentationCompilation { +class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports with PresentationCompilation { imain => - setBindings(createBindings, ScriptContext.ENGINE_SCOPE) + def this(initialSettings: Settings) = this(initialSettings, IMain.defaultOut) + object replOutput extends ReplOutput(settings.Yreploutdir) { } @deprecated("Use replOutput.dir instead", "2.11.0") @@ -104,13 +104,6 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set finally if (!saved) settings.nowarn.value = false } - /** construct an interpreter that reports to Console */ - def this(settings: Settings, out: JPrintWriter) = this(null, settings, out) - def this(factory: ScriptEngineFactory, settings: Settings) = this(factory, settings, new NewLinePrintWriter(new ConsoleWriter, true)) - def this(settings: Settings) = this(settings, new NewLinePrintWriter(new ConsoleWriter, true)) - def this(factory: ScriptEngineFactory) = this(factory, new Settings()) - def this() = this(new Settings()) - // the expanded prompt but without color escapes and without leading newline, for purposes of indenting lazy val formatting = Formatting.forPrompt(replProps.promptText) lazy val reporter: ReplReporter = new ReplReporter(this) @@ -464,7 +457,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set pos } - private[interpreter] def requestFromLine(line: String, synthetic: Boolean): Either[IR.Result, Request] = { + private[interpreter] def requestFromLine(line: String, synthetic: Boolean = false): Either[IR.Result, Request] = { val content = line val trees: List[global.Tree] = parse(content) match { @@ -559,77 +552,8 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set */ def interpret(line: String): IR.Result = interpret(line, synthetic = false) def interpretSynthetic(line: String): IR.Result = interpret(line, synthetic = true) - def interpret(line: String, synthetic: Boolean): IR.Result = compile(line, synthetic) match { - case Left(result) => result - case Right(req) => new WrappedRequest(req).loadAndRunReq - } - - private def compile(line: String, synthetic: Boolean): Either[IR.Result, Request] = { - if (global == null) Left(IR.Error) - else requestFromLine(line, synthetic) match { - case Left(result) => Left(result) - case Right(req) => - // null indicates a disallowed statement type; otherwise compile and - // fail if false (implying e.g. 
a type error) - if (req == null || !req.compile) Left(IR.Error) else Right(req) - } - } - - var code = "" - var bound = false - def compiled(script: String): CompiledScript = { - if (!bound) { - quietBind("engine" -> this.asInstanceOf[ScriptEngine]) - bound = true - } - val cat = code + script - compile(cat, false) match { - case Left(result) => result match { - case IR.Incomplete => { - code = cat + "\n" - new CompiledScript { - def eval(context: ScriptContext): Object = null - def getEngine: ScriptEngine = IMain.this - } - } - case _ => { - code = "" - throw new ScriptException("compile-time error") - } - } - case Right(req) => { - code = "" - new WrappedRequest(req) - } - } - } - - private class WrappedRequest(val req: Request) extends CompiledScript { - var recorded = false - - /** In Java we would have to wrap any checked exception in the declared - * ScriptException. Runtime exceptions and errors would be ok and would - * not need to be caught. So let us do the same in Scala : catch and - * wrap any checked exception, and let runtime exceptions and errors - * escape. We could have wrapped runtime exceptions just like other - * exceptions in ScriptException, this is a choice. - */ - @throws[ScriptException] - def eval(context: ScriptContext): Object = { - val result = req.lineRep.evalEither match { - case Left(e: RuntimeException) => throw e - case Left(e: Exception) => throw new ScriptException(e) - case Left(e) => throw e - case Right(result) => result.asInstanceOf[Object] - } - if (!recorded) { - recordRequest(req) - recorded = true - } - result - } - - def loadAndRunReq = classLoader.asContext { + def interpret(line: String, synthetic: Boolean): IR.Result = { + def loadAndRunReq(req: Request) = classLoader.asContext { val (result, succeeded) = req.loadAndRun /** To our displeasure, ConsoleReporter offers only printMessage, @@ -654,12 +578,32 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set } } - def getEngine: ScriptEngine = IMain.this + compile(line, synthetic) match { + case Left(result) => result + case Right(req) => loadAndRunReq(req) + } + } + + // create a Request and compile it + private[interpreter] def compile(line: String, synthetic: Boolean): Either[IR.Result, Request] = { + if (global == null) Left(IR.Error) + else requestFromLine(line, synthetic) match { + case Right(null) => Left(IR.Error) // disallowed statement type + case Right(req) if !req.compile => Left(IR.Error) // compile error + case ok @ Right(req) => ok + case err @ Left(result) => err + } } /** Bind a specified name to a specified value. The name may * later be used by expressions passed to interpret. * + * A fresh `ReadEvalPrint`, which defines a `line` package, is used to compile + * a custom `eval` object that wraps the bound value. + * + * If the bound value is successfully installed, then bind the name + * by interpreting `val name = $line42.$eval.value`. 
+ * * @param name the variable name to bind * @param boundType the type of the variable, as a string * @param value the object value to bind to it @@ -667,22 +611,22 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set */ def bind(name: String, boundType: String, value: Any, modifiers: List[String] = Nil): IR.Result = { val bindRep = new ReadEvalPrint() - bindRep.compile(""" - |object %s { - | var value: %s = _ - | def set(x: Any) = value = x.asInstanceOf[%s] + bindRep.compile(s""" + |object ${bindRep.evalName} { + | var value: $boundType = _ + | def set(x: Any) = value = x.asInstanceOf[$boundType] |} - """.stripMargin.format(bindRep.evalName, boundType, boundType) - ) + """.stripMargin + ) bindRep.callEither("set", value) match { case Left(ex) => repldbg("Set failed in bind(%s, %s, %s)".format(name, boundType, value)) repldbg(util.stackTraceString(ex)) IR.Error - case Right(_) => - val line = "%sval %s = %s.value".format(modifiers map (_ + " ") mkString, name, bindRep.evalPath) - repldbg("Interpreting: " + line) + val mods = if (modifiers.isEmpty) "" else modifiers.mkString("", " ", " ") + val line = s"${mods}val $name = ${ bindRep.evalPath }.value" + repldbg(s"Interpreting: $line") interpret(line) } } @@ -1046,31 +990,6 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set override def toString = "Request(line=%s, %s trees)".format(line, trees.size) } - def createBindings: Bindings = new IBindings { - override def put(name: String, value: Object): Object = { - val n = name.indexOf(":") - val p: NamedParam = if (n < 0) (name, value) else { - val nme = name.substring(0, n).trim - val tpe = name.substring(n + 1).trim - NamedParamClass(nme, tpe, value) - } - if (!p.name.startsWith("javax.script")) bind(p) - null - } - } - - @throws[ScriptException] - def compile(script: String): CompiledScript = eval("new javax.script.CompiledScript { def eval(context: javax.script.ScriptContext): Object = { " + script + " }.asInstanceOf[Object]; def getEngine: javax.script.ScriptEngine = engine }").asInstanceOf[CompiledScript] - - @throws[ScriptException] - def compile(reader: java.io.Reader): CompiledScript = compile(stringFromReader(reader)) - - @throws[ScriptException] - def eval(script: String, context: ScriptContext): Object = compiled(script).eval(context) - - @throws[ScriptException] - def eval(reader: java.io.Reader, context: ScriptContext): Object = eval(stringFromReader(reader), context) - override def finalize = close /** Returns the name of the most recent interpreter result. @@ -1267,54 +1186,9 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set /** Utility methods for the Interpreter. */ object IMain { - import java.util.Arrays.{ asList => asJavaList } /** Dummy identifier fragement inserted at the cursor before presentation compilation. 
Needed to support completion of `global.def` */ val DummyCursorFragment = "_CURSOR_" - class Factory extends ScriptEngineFactory { - @BeanProperty - val engineName = "Scala Interpreter" - - @BeanProperty - val engineVersion = "1.0" - - @BeanProperty - val extensions: JList[String] = asJavaList("scala") - - @BeanProperty - val languageName = "Scala" - - @BeanProperty - val languageVersion = scala.util.Properties.versionString - - def getMethodCallSyntax(obj: String, m: String, args: String*): String = null - - @BeanProperty - val mimeTypes: JList[String] = asJavaList("application/x-scala") - - @BeanProperty - val names: JList[String] = asJavaList("scala") - - def getOutputStatement(toDisplay: String): String = null - - def getParameter(key: String): Object = key match { - case ScriptEngine.ENGINE => engineName - case ScriptEngine.ENGINE_VERSION => engineVersion - case ScriptEngine.LANGUAGE => languageName - case ScriptEngine.LANGUAGE_VERSION => languageVersion - case ScriptEngine.NAME => names.get(0) - case _ => null - } - - def getProgram(statements: String*): String = null - - def getScriptEngine: ScriptEngine = { - val settings = new Settings() - settings.usemanifestcp.value = true - new IMain(this, settings) - } - } - // The two name forms this is catching are the two sides of this assignment: // // $line3.$read.$iw.$iw.Bippy = @@ -1366,5 +1240,10 @@ object IMain { def stripImpl(str: String): String = naming.unmangle(str) } + private[interpreter] def defaultSettings = new Settings() + private[scala] def defaultOut = new NewLinePrintWriter(new ConsoleWriter, true) + + /** construct an interpreter that reports to Console */ + def apply(initialSettings: Settings = defaultSettings, out: JPrintWriter = defaultOut) = new IMain(initialSettings, out) } diff --git a/src/repl/scala/tools/nsc/interpreter/Scripted.scala b/src/repl/scala/tools/nsc/interpreter/Scripted.scala new file mode 100644 index 000000000000..25d359bc0e1a --- /dev/null +++ b/src/repl/scala/tools/nsc/interpreter/Scripted.scala @@ -0,0 +1,343 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2016 LAMP/EPFL + */ +package scala +package tools.nsc +package interpreter + +import scala.language.dynamics + +import scala.beans.BeanProperty +import scala.collection.JavaConverters._ +import scala.reflect.classTag +import scala.reflect.internal.util.Position +import scala.tools.nsc.util.stringFromReader +import javax.script._, ScriptContext.{ ENGINE_SCOPE, GLOBAL_SCOPE } +import java.io.{ Closeable, Reader } + +/* A REPL adaptor for the javax.script API. */ +class Scripted(@BeanProperty val factory: ScriptEngineFactory, settings: Settings, out: JPrintWriter) + extends AbstractScriptEngine with Compilable { + + def createBindings: Bindings = new SimpleBindings + + // dynamic context bound under this name + final val ctx = "$ctx" + + // the underlying interpreter, tweaked to handle dynamic bindings + val intp = new IMain(settings, out) { + import global.{ Name, TermName } + + /* Modify the template to snag definitions from dynamic context. 
+ * So object $iw { x + 42 } becomes object $iw { def x = $ctx.x ; x + 42 } + */ + override protected def importsCode(wanted: Set[Name], wrapper: Request#Wrapper, definesClass: Boolean, generousImports: Boolean) = { + + // cull references that can be satisfied from the current dynamic context + val contextual = wanted & contextNames + + if (contextual.nonEmpty) { + val neededContext = (wanted &~ contextual) + TermName(ctx) + val ComputedImports(header, preamble, trailer, path) = super.importsCode(neededContext, wrapper, definesClass, generousImports) + val adjusted = contextual.map { n => + val valname = n.decodedName + s"""def `$valname` = $ctx.`$valname` + def `${valname}_=`(x: Object) = $ctx.`$valname` = x""" + }.mkString(preamble, "\n", "\n") + ComputedImports(header, adjusted, trailer, path) + } + else super.importsCode(wanted, wrapper, definesClass, generousImports) + } + + // names available in current dynamic context + def contextNames: Set[Name] = { + val ctx = compileContext + val terms = for { + scope <- ctx.getScopes.asScala + binding <- Option(ctx.getBindings(scope)) map (_.asScala) getOrElse Nil + key = binding._1 + } yield (TermName(key): Name) + terms.to[Set] + } + + // save first error for exception; console display only if debugging + override lazy val reporter: ReplReporter = new ReplReporter(this) { + override def display(pos: Position, msg: String, severity: Severity): Unit = + if (isReplDebug) super.display(pos, msg, severity) + override def error(pos: Position, msg: String): Unit = { + if (firstError.isEmpty) firstError = Some((pos, msg)) + super.error(pos, msg) + } + override def reset() = { super.reset() ; firstError = None } + } + } + intp.initializeSynchronous() + + var compileContext: ScriptContext = getContext + + val scriptContextRep = new intp.ReadEvalPrint + + def dynamicContext_=(ctx: ScriptContext): Unit = scriptContextRep.callEither("set", ctx) + + def dynamicContext: ScriptContext = scriptContextRep.callEither("value") match { + case Right(ctx: ScriptContext) => ctx + case Left(e) => throw e + case Right(other) => throw new ScriptException(s"Unexpected value for context: $other") + } + + if (intp.isInitializeComplete) { + // compile the dynamic ScriptContext object holder + scriptContextRep compile s""" + |import javax.script._ + |object ${scriptContextRep.evalName} { + | var value: ScriptContext = _ + | def set(x: Any) = value = x.asInstanceOf[ScriptContext] + |} + """.stripMargin + dynamicContext = getContext + + // Bridge dynamic references and script context + intp compileString s""" + |package scala.tools.nsc.interpreter + |import language.dynamics + |import javax.script._, ScriptContext.ENGINE_SCOPE + |object dynamicBindings extends Dynamic { + | def context: ScriptContext = ${ scriptContextRep.evalPath }.value + | // $ctx.x retrieves the attribute x + | def selectDynamic(field: String): Object = context.getAttribute(field) + | // $ctx.x = v + | def updateDynamic(field: String)(value: Object) = context.setAttribute(field, value, ENGINE_SCOPE) + |} + |""".stripMargin + intp beQuietDuring { + intp interpret s"val $ctx: scala.tools.nsc.interpreter.dynamicBindings.type = scala.tools.nsc.interpreter.dynamicBindings" + intp bind ("$engine" -> (this: ScriptEngine with Compilable)) + } + } + + // Set the context for dynamic resolution and run the body. + // Defines attributes available for evaluation. + // Avoid reflective access if using default context. 
+ def withScriptContext[A](context: ScriptContext)(body: => A): A = + if (context eq getContext) body else { + val saved = dynamicContext + dynamicContext = context + try body + finally dynamicContext = saved + } + // Defines attributes available for compilation. + def withCompileContext[A](context: ScriptContext)(body: => A): A = { + val saved = compileContext + compileContext = context + try body + finally compileContext = saved + } + + // not obvious that ScriptEngine should accumulate code text + private var code = "" + + private var firstError: Option[(Position, String)] = None + + /* All scripts are compiled. The supplied context defines what references + * not in REPL history are allowed, though a different context may be + * supplied for evaluation of a compiled script. + */ + def compile(script: String, context: ScriptContext): CompiledScript = + withCompileContext(context) { + val cat = code + script + intp.compile(cat, synthetic = false) match { + case Right(req) => + code = "" + new WrappedRequest(req) + case Left(IR.Incomplete) => + code = cat + "\n" + new CompiledScript { + def eval(context: ScriptContext): Object = null + def getEngine: ScriptEngine = Scripted.this + } + case Left(_) => + code = "" + throw firstError map { + case (pos, msg) => new ScriptException(msg, script, pos.line, pos.column) + } getOrElse new ScriptException("compile-time error") + } + } + + // documentation + //protected var context: ScriptContext + //def getContext: ScriptContext = context + + /* Compile with the default context. All references must be resolvable. */ + @throws[ScriptException] + def compile(script: String): CompiledScript = compile(script, context) + + @throws[ScriptException] + def compile(reader: Reader): CompiledScript = compile(stringFromReader(reader), context) + + /* Compile and evaluate with the given context. */ + @throws[ScriptException] + def eval(script: String, context: ScriptContext): Object = compile(script, context).eval(context) + + @throws[ScriptException] + def eval(reader: Reader, context: ScriptContext): Object = compile(stringFromReader(reader), context).eval(context) + + private class WrappedRequest(val req: intp.Request) extends CompiledScript { + var first = true + + private def evalEither(r: intp.Request, ctx: ScriptContext) = { + if (ctx.getWriter == null && ctx.getErrorWriter == null && ctx.getReader == null) r.lineRep.evalEither + else { + val closeables = Array.ofDim[Closeable](2) + val w = if (ctx.getWriter == null) Console.out else { + val v = new WriterOutputStream(ctx.getWriter) + closeables(0) = v + v + } + val e = if (ctx.getErrorWriter == null) Console.err else { + val v = new WriterOutputStream(ctx.getErrorWriter) + closeables(1) = v + v + } + val in = if (ctx.getReader == null) Console.in else ctx.getReader + try { + Console.withOut(w) { + Console.withErr(e) { + Console.withIn(in) { + r.lineRep.evalEither + } + } + } + } finally { + closeables foreach (c => if (c != null) c.close()) + } + } + } + + /* First time, cause lazy evaluation of a memoized result. + * Subsequently, instantiate a new object for evaluation. + * Per the API: Checked exception types thrown by underlying scripting implementations + * must be wrapped in instances of ScriptException. 
+ */ + @throws[ScriptException] + override def eval(context: ScriptContext) = withScriptContext(context) { + if (first) { + val result = evalEither(req, context) match { + case Left(e: RuntimeException) => throw e + case Left(e: Exception) => throw new ScriptException(e) + case Left(e) => throw e + case Right(result) => result.asInstanceOf[Object] + } + intp recordRequest req + first = false + result + } else { + val defines = req.defines + if (defines.isEmpty) { + Scripted.this.eval(s"new ${req.lineRep.readPath}") + intp recordRequest duplicate(req) + null + } else { + val instance = s"val $$INSTANCE = new ${req.lineRep.readPath};" + val newline = (defines map (s => s"val ${s.name} = $$INSTANCE${req.accessPath}.${s.name}")).mkString(instance, ";", ";") + val newreq = intp.requestFromLine(newline).right.get + val ok = newreq.compile + + val result = evalEither(newreq, context) match { + case Left(e: RuntimeException) => throw e + case Left(e: Exception) => throw new ScriptException(e) + case Left(e) => throw e + case Right(result) => intp recordRequest newreq ; result.asInstanceOf[Object] + } + result + } + } + } + + def duplicate(req: intp.Request) = new intp.Request(req.line, req.trees) + + def getEngine: ScriptEngine = Scripted.this + } +} + +object Scripted { + import IMain.{ defaultSettings, defaultOut } + import java.util.Arrays.asList + import scala.util.Properties.versionString + + class Factory extends ScriptEngineFactory { + @BeanProperty val engineName = "Scala REPL" + + @BeanProperty val engineVersion = "2.0" + + @BeanProperty val extensions = asList("scala") + + @BeanProperty val languageName = "Scala" + + @BeanProperty val languageVersion = versionString + + @BeanProperty val mimeTypes = asList("application/x-scala") + + @BeanProperty val names = asList("scala") + + def getMethodCallSyntax(obj: String, m: String, args: String*): String = args.mkString(s"$obj.$m(", ", ", ")") + + def getOutputStatement(toDisplay: String): String = s"Console.println($toDisplay)" + + def getParameter(key: String): Object = key match { + case ScriptEngine.ENGINE => engineName + case ScriptEngine.ENGINE_VERSION => engineVersion + case ScriptEngine.LANGUAGE => languageName + case ScriptEngine.LANGUAGE_VERSION => languageVersion + case ScriptEngine.NAME => names.get(0) + case _ => null + } + + def getProgram(statements: String*): String = statements.mkString("object Main extends App {\n\t", "\n\t", "\n}") + + def getScriptEngine: ScriptEngine = { + val settings = new Settings() + settings.usemanifestcp.value = true + Scripted(this, settings) + } + } + + def apply(factory: ScriptEngineFactory = new Factory, settings: Settings = defaultSettings, out: JPrintWriter = defaultOut) = { + settings.Yreplclassbased.value = true + settings.usejavacp.value = true + val s = new Scripted(factory, settings, out) + s.setBindings(s.createBindings, ScriptContext.ENGINE_SCOPE) + s + } +} + +import java.io.Writer +import java.nio.{ ByteBuffer, CharBuffer } +import java.nio.charset.{ Charset, CodingErrorAction } +import CodingErrorAction.{ REPLACE => Replace } + +/* An OutputStream that decodes bytes and flushes to the writer. 
*/ +class WriterOutputStream(writer: Writer) extends OutputStream { + val decoder = Charset.defaultCharset.newDecoder + decoder onMalformedInput Replace + decoder onUnmappableCharacter Replace + + val byteBuffer = ByteBuffer.allocate(64) + val charBuffer = CharBuffer.allocate(64) + + override def write(b: Int): Unit = { + byteBuffer.put(b.toByte) + byteBuffer.flip() + val result = decoder.decode(byteBuffer, charBuffer, /*eoi=*/ false) + if (byteBuffer.remaining == 0) byteBuffer.clear() + if (charBuffer.position > 0) { + charBuffer.flip() + writer write charBuffer.toString + charBuffer.clear() + } + } + override def close(): Unit = { + decoder.decode(byteBuffer, charBuffer, /*eoi=*/ true) + decoder.flush(charBuffer) + } + override def toString = charBuffer.toString +} diff --git a/src/repl/scala/tools/nsc/interpreter/package.scala b/src/repl/scala/tools/nsc/interpreter/package.scala index 97b32bfa8672..55949b81a5a5 100644 --- a/src/repl/scala/tools/nsc/interpreter/package.scala +++ b/src/repl/scala/tools/nsc/interpreter/package.scala @@ -204,7 +204,7 @@ package object interpreter extends ReplConfig with ReplStrings { /* An s-interpolator that uses `stringOf(arg)` instead of `String.valueOf(arg)`. */ private[nsc] implicit class `smart stringifier`(val sc: StringContext) extends AnyVal { - import StringContext._, runtime.ScalaRunTime.stringOf + import StringContext.treatEscapes, scala.runtime.ScalaRunTime.stringOf def ss(args: Any*): String = sc.standardInterpolator(treatEscapes, args map stringOf) } /* Try (body) lastly (more) */ diff --git a/test/files/run/repl-serialization.scala b/test/files/run/repl-serialization.scala index 55b7519631d1..8bc0dd3a8b82 100644 --- a/test/files/run/repl-serialization.scala +++ b/test/files/run/repl-serialization.scala @@ -36,7 +36,7 @@ object Test { |extract(() => new AA(x + getX() + y + z + zz + O.apply + u.x)) """.stripMargin - imain = new IMain(settings) + imain = IMain(settings) println("== evaluating lines") imain.directBind("extract", "(AnyRef => Unit)", extract) code.lines.foreach(imain.interpret) diff --git a/test/files/run/t1500.scala b/test/files/run/t1500.scala index 30c026f70f9f..5a2735fbf1ab 100644 --- a/test/files/run/t1500.scala +++ b/test/files/run/t1500.scala @@ -20,7 +20,7 @@ object Test { val settings = new Settings() settings.classpath.value = System.getProperty("java.class.path") - val tool = new interpreter.IMain(settings) + val tool = interpreter.IMain(settings) val global = tool.global import global._ diff --git a/test/files/run/t7843-jsr223-service.check b/test/files/run/t7843-jsr223-service.check deleted file mode 100644 index a668df3567bb..000000000000 --- a/test/files/run/t7843-jsr223-service.check +++ /dev/null @@ -1,2 +0,0 @@ -n: Object = 10 -12345678910 diff --git a/test/files/run/t7843-jsr223-service.scala b/test/files/run/t7843-jsr223-service.scala deleted file mode 100644 index 31112212eaf4..000000000000 --- a/test/files/run/t7843-jsr223-service.scala +++ /dev/null @@ -1,8 +0,0 @@ -import scala.tools.nsc.interpreter.IMain - -object Test extends App { - val engine = new IMain.Factory getScriptEngine() - engine.asInstanceOf[IMain].settings.usejavacp.value = true - engine put ("n", 10) - engine eval "1 to n.asInstanceOf[Int] foreach print" -} diff --git a/test/files/run/t7933.check b/test/files/run/t7933.check deleted file mode 100644 index 317e9677c3bc..000000000000 --- a/test/files/run/t7933.check +++ /dev/null @@ -1,2 +0,0 @@ -hello -hello diff --git a/test/files/run/t7933.scala b/test/files/run/t7933.scala deleted file 
mode 100644 index b06dffcd80a9..000000000000 --- a/test/files/run/t7933.scala +++ /dev/null @@ -1,11 +0,0 @@ -import scala.tools.nsc.interpreter.IMain - -object Test extends App { - val engine = new IMain.Factory getScriptEngine() - engine.asInstanceOf[IMain].settings.usejavacp.value = true - val res2 = engine.asInstanceOf[javax.script.Compilable] - res2 compile "8" eval() - val res5 = res2 compile """println("hello") ; 8""" - res5 eval() - res5 eval() -} diff --git a/test/junit/scala/tools/nsc/interpreter/ScriptedTest.scala b/test/junit/scala/tools/nsc/interpreter/ScriptedTest.scala new file mode 100644 index 000000000000..a8dc8eb3e071 --- /dev/null +++ b/test/junit/scala/tools/nsc/interpreter/ScriptedTest.scala @@ -0,0 +1,83 @@ +package scala.tools.nsc +package interpreter + +import org.junit._, Assert._, runner.RunWith, runners.JUnit4 +import scala.tools.testing.AssertUtil.assertThrows + +@RunWith(classOf[JUnit4]) +class ScriptedTest { + import javax.script._ + import scala.tools.nsc.interpreter.Scripted + + def scripted: ScriptEngine with Compilable = Scripted() + // same as by service discovery + //new ScriptEngineManager().getEngineByName("scala").asInstanceOf[ScriptEngine with Compilable] + + @Test def eval() = { + val engine = scripted + engine.put("foo","bar") + assert("bar" == engine.eval("foo")) + val bindings = engine.createBindings() + bindings.put("foo","baz") + assert("baz" == engine.eval("foo", bindings)) + val c = engine.compile("def f = foo.asInstanceOf[String] ; f * 2") + assert("barbar" == c.eval()) + assert("bazbaz" == c.eval(bindings)) + } + @Test def `SI-7933 multiple eval compiled script`() = { + val engine = scripted + val init = """val i = new java.util.concurrent.atomic.AtomicInteger""" + val code = """i.getAndIncrement()""" + engine eval init + val c = engine compile code + assert(0 == c.eval()) + assert(1 == c.eval()) + } + @Test def `SI-8422 captured i/o`() = { + import java.io.StringWriter + val engine = scripted + val ctx = new SimpleScriptContext + val w = new StringWriter + val code = """print("hello, world")""" + + ctx.setWriter(w) + engine.eval(code, ctx) + assertEquals("hello, world", w.toString) + } + @Test def `SI-8422 captured multi i/o`() = { + import java.io.{ StringWriter, StringReader } + import scala.compat.Platform.EOL + val engine = scripted + val ctx = new SimpleScriptContext + val out = new StringWriter + val err = new StringWriter + val text = + """Now is the time + |for all good + |dogs to come for supper.""".stripMargin + val in = new StringReader(text) + + val code = + """var s: String = _ + |var i: Int = 0 + |do { + | s = scala.io.StdIn.readLine() + | val out = if ((i & 1) == 0) Console.out else Console.err + | i += 1 + | Option(s) foreach out.println + |} while (s != null)""".stripMargin + + ctx.setWriter(out) + ctx.setErrorWriter(err) + ctx.setReader(in) + engine.eval(code, ctx) + val lines = text.lines.toList + assertEquals(lines.head + EOL + lines.last + EOL, out.toString) + assertEquals(lines(1) + EOL, err.toString) + } + @Test def `on compile error`(): Unit = { + val engine = scripted + val err = "not found: value foo in def f = foo at line number 11 at column number 16" + assertThrows[ScriptException](engine.compile("def f = foo"), _ == err) + } +} From 80d996b8ade4c0803d1fa7a7f0f733663ca292c3 Mon Sep 17 00:00:00 2001 From: Jakob Odersky Date: Mon, 11 Apr 2016 12:14:09 -0700 Subject: [PATCH 0053/2793] Include missing web assets in scaladoc --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt 
b/build.sbt index bd7616727885..d592b86aff5d 100644 --- a/build.sbt +++ b/build.sbt @@ -507,7 +507,7 @@ lazy val scaladoc = configureAsSubproject(project) name := "scala-compiler-doc", description := "Scala Documentation Generator", libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, partestDep), - includeFilter in unmanagedResources in Compile := "*.html" | "*.css" | "*.gif" | "*.png" | "*.js" | "*.txt" + includeFilter in unmanagedResources in Compile := "*.html" | "*.css" | "*.gif" | "*.png" | "*.js" | "*.txt" | "*.svg" | "*.eot" | "*.woff" | "*.ttf" ) .dependsOn(compiler) From 6be9fc678b8af5df915905059fd31f0fc1d9d821 Mon Sep 17 00:00:00 2001 From: Olli Helenius Date: Thu, 19 May 2016 23:05:45 +0300 Subject: [PATCH 0054/2793] SI-9781 Don't convert erroneous expression to assignment `convertToAssignment` is triggered on a type error but it doesn't seem to really care what the error is as long as the expression can be converted to an assignment expression. This patch fixes that by checking whether the qualifier of the selection contains any errors before deciding to continue with the conversion. --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 4 +++- test/files/neg/t9781.check | 4 ++++ test/files/neg/t9781.scala | 4 ++++ 3 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/t9781.check create mode 100644 test/files/neg/t9781.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 329ce8c23b46..1aed9c3a64cf 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4494,7 +4494,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val opeqStart = if (Statistics.canEnable) Statistics.startTimer(failedOpEqNanos) else null def onError(reportError: => Tree): Tree = fun match { - case Select(qual, name) if !mode.inPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) => + case Select(qual, name) + if !mode.inPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) && !qual.exists(_.isErroneous) => + val qual1 = typedQualifier(qual) if (treeInfo.isVariableOrGetter(qual1)) { if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart) diff --git a/test/files/neg/t9781.check b/test/files/neg/t9781.check new file mode 100644 index 000000000000..422c51013a74 --- /dev/null +++ b/test/files/neg/t9781.check @@ -0,0 +1,4 @@ +t9781.scala:3: error: not found: value undefinedSymbol + c(undefinedSymbol) += 1 + ^ +one error found diff --git a/test/files/neg/t9781.scala b/test/files/neg/t9781.scala new file mode 100644 index 000000000000..70234dcca54e --- /dev/null +++ b/test/files/neg/t9781.scala @@ -0,0 +1,4 @@ +object T9781 { + val c: collection.mutable.Map[Int, Int] = ??? 
+ c(undefinedSymbol) += 1 +} From 481a39010f63ccc5811c1d532eb510fd07e265a8 Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Fri, 13 May 2016 13:26:13 +0200 Subject: [PATCH 0055/2793] Improvements to scala.concurrent.Future * Enhanced Scaladocs with groupings and clarifications * traverse now runs the last step like sequence * A few minor non-semantic changes to method implementations --- src/library/scala/concurrent/Future.scala | 159 ++++++++++++++-------- 1 file changed, 101 insertions(+), 58 deletions(-) diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index d9d3d572e8ed..8abd7feeb7ac 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -20,11 +20,14 @@ import scala.collection.generic.CanBuildFrom import scala.reflect.ClassTag -/** The trait that represents futures. +/** A `Future` represents a value which may or may not *currently* be available, + * but will be available at some point, or an exception if that value could not be made available. * - * Asynchronous computations that yield futures are created with the `Future.apply` call: + * Asynchronous computations that yield futures are created with the `Future.apply` call and are computed using a supplied `ExecutionContext`, + * which can be backed by a Thread pool. * * {{{ + * import ExecutionContext.Implicits.global * val s = "Hello" * val f: Future[String] = Future { * s + " future!" @@ -88,6 +91,7 @@ import scala.reflect.ClassTag * thread. That is, the implementation may run multiple callbacks * in a batch within a single `execute()` and it may run * `execute()` either immediately or asynchronously. + * Completion of the Future must *happen-before* the invocation of the callback. */ trait Future[+T] extends Awaitable[T] { import Future.{ InternalCallbackExecutor => internalExecutor } @@ -101,9 +105,13 @@ trait Future[+T] extends Awaitable[T] { * If the future has already been completed with a value, * this will either be applied immediately or be scheduled asynchronously. * + * Note that the returned value of `pf` will be discarded. + * * $swallowsExceptions * $multipleCallbacks * $callbackInContext + * + * @group Callbacks */ @deprecated("use `foreach` or `onComplete` instead (keep in mind that they take total rather than partial functions)", "2.12") def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = onComplete { @@ -122,9 +130,13 @@ trait Future[+T] extends Awaitable[T] { * * Will not be called in case that the future is completed with a value. * + * Note that the returned value of `pf` will be discarded. + * * $swallowsExceptions * $multipleCallbacks * $callbackInContext + * + * @group Callbacks */ @deprecated("use `onComplete` or `failed.foreach` instead (keep in mind that they take total rather than partial functions)", "2.12") def onFailure[U](@deprecatedName('callback) pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete { @@ -139,24 +151,28 @@ trait Future[+T] extends Awaitable[T] { * If the future has already been completed, * this will either be applied immediately or be scheduled asynchronously. * + * Note that the returned value of `f` will be discarded. 
+ * * $swallowsExceptions * $multipleCallbacks * $callbackInContext * * @tparam U only used to accept any return type of the given callback function * @param f the function to be executed when this `Future` completes + * @group Callbacks */ def onComplete[U](@deprecatedName('func) f: Try[T] => U)(implicit executor: ExecutionContext): Unit /* Miscellaneous */ - /** Returns whether the future has already been completed with + /** Returns whether the future had already been completed with * a value or an exception. * * $nonDeterministic * - * @return `true` if the future is already completed, `false` otherwise + * @return `true` if the future was completed, `false` otherwise + * @group Polling */ def isCompleted: Boolean @@ -164,12 +180,13 @@ trait Future[+T] extends Awaitable[T] { * * $nonDeterministic * - * If the future is not completed the returned value will be `None`. - * If the future is completed the value will be `Some(Success(t))` - * if it contains a valid result, or `Some(Failure(error))` if it contains + * If the future was not completed the returned value will be `None`. + * If the future was completed the value will be `Some(Success(t))` + * if it contained a valid result, or `Some(Failure(error))` if it contained * an exception. * * @return `None` if the `Future` wasn't completed, `Some` if it was. + * @group Polling */ def value: Option[Try[T]] @@ -182,6 +199,7 @@ trait Future[+T] extends Awaitable[T] { * If the original `Future` is successful, the returned `Future` is failed with a `NoSuchElementException`. * * @return a failed projection of this `Future`. + * @group Transformations */ def failed: Future[Throwable] = transform({ @@ -201,6 +219,7 @@ trait Future[+T] extends Awaitable[T] { * @tparam U only used to accept any return type of the given callback function * @param f the function which will be executed if this `Future` completes with a result, * the return value of `f` will be discarded. + * @group Callbacks */ def foreach[U](f: T => U)(implicit executor: ExecutionContext): Unit = onComplete { _ foreach f } @@ -209,10 +228,11 @@ trait Future[+T] extends Awaitable[T] { * exception thrown when 's' or 'f' is applied, that exception will be propagated * to the resulting future. * - * @tparam S the type of the returned `Future` - * @param s function that transforms a successful result of the receiver into a successful result of the returned future - * @param f function that transforms a failure of the receiver into a failure of the returned future - * @return a `Future` that will be completed with the transformed value + * @tparam S the type of the returned `Future` + * @param s function that transforms a successful result of the receiver into a successful result of the returned future + * @param f function that transforms a failure of the receiver into a failure of the returned future + * @return a `Future` that will be completed with the transformed value + * @group Transformations */ def transform[S](s: T => S, f: Throwable => Throwable)(implicit executor: ExecutionContext): Future[S] = transform { @@ -224,9 +244,10 @@ trait Future[+T] extends Awaitable[T] { * of this Future. If there is any non-fatal exception thrown when 'f' * is applied then that exception will be propagated to the resulting future. 
* - * @tparam S the type of the returned `Future` - * @param f function that transforms the result of this future - * @return a `Future` that will be completed with the transformed value + * @tparam S the type of the returned `Future` + * @param f function that transforms the result of this future + * @return a `Future` that will be completed with the transformed value + * @group Transformations */ def transform[S](f: Try[T] => Try[S])(implicit executor: ExecutionContext): Future[S] @@ -234,9 +255,10 @@ trait Future[+T] extends Awaitable[T] { * of this Future. If there is any non-fatal exception thrown when 'f' * is applied then that exception will be propagated to the resulting future. * - * @tparam S the type of the returned `Future` - * @param f function that transforms the result of this future - * @return a `Future` that will be completed with the transformed value + * @tparam S the type of the returned `Future` + * @param f function that transforms the result of this future + * @return a `Future` that will be completed with the transformed value + * @group Transformations */ def transformWith[S](f: Try[T] => Future[S])(implicit executor: ExecutionContext): Future[S] @@ -257,11 +279,12 @@ trait Future[+T] extends Awaitable[T] { * and `withFilter`. See [[scala.concurrent.Future#flatMap]] for an example of such a comprehension. * * - * @tparam S the type of the returned `Future` - * @param f the function which will be applied to the successful result of this `Future` - * @return a `Future` which will be completed with the result of the application of the function + * @tparam S the type of the returned `Future` + * @param f the function which will be applied to the successful result of this `Future` + * @return a `Future` which will be completed with the result of the application of the function + * @group Transformations */ - def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = transform(_.map(f)) + def map[S](f: T => S)(implicit executor: ExecutionContext): Future[S] = transform(_ map f) /** Creates a new future by applying a function to the successful result of * this future, and returns the result of the function as the new future. @@ -270,9 +293,10 @@ trait Future[+T] extends Awaitable[T] { * * $forComprehensionExamples * - * @tparam S the type of the returned `Future` - * @param f the function which will be applied to the successful result of this `Future` - * @return a `Future` which will be completed with the result of the application of the function + * @tparam S the type of the returned `Future` + * @param f the function which will be applied to the successful result of this `Future` + * @return a `Future` which will be completed with the result of the application of the function + * @group Transformations */ def flatMap[S](f: T => Future[S])(implicit executor: ExecutionContext): Future[S] = transformWith { case Success(s) => f(s) @@ -282,7 +306,8 @@ trait Future[+T] extends Awaitable[T] { /** Creates a new future with one level of nesting flattened, this method is equivalent * to `flatMap(identity)`. 
* - * @tparam S the type of the returned `Future` + * @tparam S the type of the returned `Future` + * @group Transformations */ def flatten[S](implicit ev: T <:< Future[S]): Future[S] = flatMap(ev)(internalExecutor) @@ -302,13 +327,15 @@ trait Future[+T] extends Awaitable[T] { * Await.result(h, Duration.Zero) // throw a NoSuchElementException * }}} * - * @param p the predicate to apply to the successful result of this `Future` - * @return a `Future` which will hold the successful result of this `Future` if it matches the predicate or a `NoSuchElementException` + * @param p the predicate to apply to the successful result of this `Future` + * @return a `Future` which will hold the successful result of this `Future` if it matches the predicate or a `NoSuchElementException` + * @group Transformations */ def filter(@deprecatedName('pred) p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = map { r => if (p(r)) r else throw new NoSuchElementException("Future.filter predicate is not satisfied") } /** Used by for-comprehensions. + * @group Transformations */ final def withFilter(p: T => Boolean)(implicit executor: ExecutionContext): Future[T] = filter(p)(executor) @@ -332,9 +359,10 @@ trait Future[+T] extends Awaitable[T] { * Await.result(h, Duration.Zero) // throw a NoSuchElementException * }}} * - * @tparam S the type of the returned `Future` - *  @param pf the `PartialFunction` to apply to the successful result of this `Future` - * @return a `Future` holding the result of application of the `PartialFunction` or a `NoSuchElementException` + * @tparam S the type of the returned `Future` + * @param pf the `PartialFunction` to apply to the successful result of this `Future` + * @return a `Future` holding the result of application of the `PartialFunction` or a `NoSuchElementException` + * @group Transformations */ def collect[S](pf: PartialFunction[T, S])(implicit executor: ExecutionContext): Future[S] = map { @@ -353,9 +381,10 @@ trait Future[+T] extends Awaitable[T] { * Future (6 / 2) recover { case e: ArithmeticException => 0 } // result: 3 * }}} * - * @tparam U the type of the returned `Future` - * @param pf the `PartialFunction` to apply if this `Future` fails - * @return a `Future` with the successful value of this `Future` or the result of the `PartialFunction` + * @tparam U the type of the returned `Future` + * @param pf the `PartialFunction` to apply if this `Future` fails + * @return a `Future` with the successful value of this `Future` or the result of the `PartialFunction` + * @group Transformations */ def recover[U >: T](pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Future[U] = transform { _ recover pf } @@ -373,9 +402,10 @@ trait Future[+T] extends Awaitable[T] { * Future (6 / 0) recoverWith { case e: ArithmeticException => f } // result: Int.MaxValue * }}} * - * @tparam U the type of the returned `Future` - * @param pf the `PartialFunction` to apply if this `Future` fails - * @return a `Future` with the successful value of this `Future` or the outcome of the `Future` returned by the `PartialFunction` + * @tparam U the type of the returned `Future` + * @param pf the `PartialFunction` to apply if this `Future` fails + * @return a `Future` with the successful value of this `Future` or the outcome of the `Future` returned by the `PartialFunction` + * @group Transformations */ def recoverWith[U >: T](pf: PartialFunction[Throwable, Future[U]])(implicit executor: ExecutionContext): Future[U] = transformWith { @@ -391,9 +421,10 @@ trait Future[+T] 
extends Awaitable[T] { * Otherwise, if `that` future fails, the resulting future is failed * with the throwable stored in `that`. * - * @tparam U the type of the other `Future` - * @param that the other `Future` - * @return a `Future` with the results of both futures or the failure of the first of them that failed + * @tparam U the type of the other `Future` + * @param that the other `Future` + * @return a `Future` with the results of both futures or the failure of the first of them that failed + * @group Transformations */ def zip[U](that: Future[U]): Future[(T, U)] = { implicit val ec = internalExecutor @@ -410,11 +441,12 @@ trait Future[+T] extends Awaitable[T] { * If the application of `f` throws a throwable, the resulting future * is failed with that throwable if it is non-fatal. * - * @tparam U the type of the other `Future` - * @tparam R the type of the resulting `Future` - * @param that the other `Future` - * @param f the function to apply to the results of `this` and `that` - * @return a `Future` with the result of the application of `f` to the results of `this` and `that` + * @tparam U the type of the other `Future` + * @tparam R the type of the resulting `Future` + * @param that the other `Future` + * @param f the function to apply to the results of `this` and `that` + * @return a `Future` with the result of the application of `f` to the results of `this` and `that` + * @group Transformations */ def zipWith[U, R](that: Future[U])(f: (T, U) => R)(implicit executor: ExecutionContext): Future[R] = flatMap(r1 => that.map(r2 => f(r1, r2)))(internalExecutor) @@ -433,9 +465,10 @@ trait Future[+T] extends Awaitable[T] { * h foreach println // Eventually prints 5 * }}} * - * @tparam U the type of the other `Future` and the resulting `Future` - * @param that the `Future` whose result we want to use if this `Future` fails. - * @return a `Future` with the successful result of this or that `Future` or the failure of this `Future` if both fail + * @tparam U the type of the other `Future` and the resulting `Future` + * @param that the `Future` whose result we want to use if this `Future` fails. + * @return a `Future` with the successful result of this or that `Future` or the failure of this `Future` if both fail + * @group Transformations */ def fallbackTo[U >: T](that: Future[U]): Future[U] = if (this eq that) this @@ -447,9 +480,10 @@ trait Future[+T] extends Awaitable[T] { /** Creates a new `Future[S]` which is completed with this `Future`'s result if * that conforms to `S`'s erased type or a `ClassCastException` otherwise. 
* - * @tparam S the type of the returned `Future` - * @param tag the `ClassTag` which will be used to cast the result of this `Future` - * @return a `Future` holding the casted result of this `Future` or a `ClassCastException` otherwise + * @tparam S the type of the returned `Future` + * @param tag the `ClassTag` which will be used to cast the result of this `Future` + * @return a `Future` holding the casted result of this `Future` or a `ClassCastException` otherwise + * @group Transformations */ def mapTo[S](implicit tag: ClassTag[S]): Future[S] = { implicit val ec = internalExecutor @@ -484,9 +518,12 @@ trait Future[+T] extends Awaitable[T] { * } * }}} * - * @tparam U only used to accept any return type of the given `PartialFunction` - * @param pf a `PartialFunction` which will be conditionally applied to the outcome of this `Future` - * @return a `Future` which will be completed with the exact same outcome as this `Future` but after the `PartialFunction` has been executed. + * $swallowsExceptions + * + * @tparam U only used to accept any return type of the given `PartialFunction` + * @param pf a `PartialFunction` which will be conditionally applied to the outcome of this `Future` + * @return a `Future` which will be completed with the exact same outcome as this `Future` but after the `PartialFunction` has been executed. + * @group Callbacks */ def andThen[U](pf: PartialFunction[Try[T], U])(implicit executor: ExecutionContext): Future[T] = transform { @@ -598,6 +635,13 @@ object Future { /** Starts an asynchronous computation and returns a `Future` instance with the result of that computation. * + * The following expressions are equivalent: + * + * {{{ + * val f1 = Future(expr) + * val f2 = Future.unit.map(_ => expr) + * }}} + * * The result becomes available once the asynchronous computation is completed. 
* * @tparam T the type of the result @@ -618,7 +662,7 @@ object Future { */ def sequence[A, M[X] <: TraversableOnce[X]](in: M[Future[A]])(implicit cbf: CanBuildFrom[M[Future[A]], A, M[A]], executor: ExecutionContext): Future[M[A]] = { in.foldLeft(successful(cbf(in))) { - (fr, fa) => for (r <- fr; a <- fa) yield (r += a) + (fr, fa) => fr.zipWith(fa)(_ += _) }.map(_.result())(InternalCallbackExecutor) } @@ -791,10 +835,9 @@ object Future { * @return the `Future` of the `TraversableOnce` of results */ def traverse[A, B, M[X] <: TraversableOnce[X]](in: M[A])(fn: A => Future[B])(implicit cbf: CanBuildFrom[M[A], B, M[B]], executor: ExecutionContext): Future[M[B]] = - in.foldLeft(successful(cbf(in))) { (fr, a) => - val fb = fn(a) - for (r <- fr; b <- fb) yield (r += b) - }.map(_.result()) + in.foldLeft(successful(cbf(in))) { + (fr, a) => fr.zipWith(fn(a))(_ += _) + }.map(_.result())(InternalCallbackExecutor) // This is used to run callbacks which are internal From 72a59d932db6f16defd14bd729e0f6ec894c7e1b Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 19 May 2016 21:58:55 +0200 Subject: [PATCH 0056/2793] Rename nsc.backend.jvm.CodeGenTools to testing.BytecodeTesting --- test/junit/scala/issues/BytecodeTest.scala | 2 +- test/junit/scala/issues/OptimizedBytecodeTest.scala | 4 ++-- .../scala/tools/nsc/backend/jvm/BTypesTest.scala | 2 +- .../tools/nsc/backend/jvm/DefaultMethodTest.scala | 2 +- .../tools/nsc/backend/jvm/DirectCompileTest.scala | 2 +- .../scala/tools/nsc/backend/jvm/IndyLambdaTest.scala | 2 +- .../scala/tools/nsc/backend/jvm/IndySammyTest.scala | 2 +- .../tools/nsc/backend/jvm/StringConcatTest.scala | 2 +- .../backend/jvm/analysis/NullnessAnalyzerTest.scala | 2 +- .../backend/jvm/analysis/ProdConsAnalyzerTest.scala | 2 +- .../tools/nsc/backend/jvm/opt/AnalyzerTest.scala | 2 +- .../nsc/backend/jvm/opt/BTypesFromClassfileTest.scala | 2 +- .../tools/nsc/backend/jvm/opt/CallGraphTest.scala | 2 +- .../nsc/backend/jvm/opt/ClosureOptimizerTest.scala | 2 +- .../backend/jvm/opt/CompactLocalVariablesTest.scala | 2 +- .../backend/jvm/opt/EmptyExceptionHandlersTest.scala | 2 +- .../jvm/opt/EmptyLabelsAndLineNumbersTest.scala | 2 +- .../tools/nsc/backend/jvm/opt/InlineInfoTest.scala | 2 +- .../tools/nsc/backend/jvm/opt/InlineWarningTest.scala | 2 +- .../backend/jvm/opt/InlinerIllegalAccessTest.scala | 2 +- .../jvm/opt/InlinerSeparateCompilationTest.scala | 2 +- .../scala/tools/nsc/backend/jvm/opt/InlinerTest.scala | 2 +- .../nsc/backend/jvm/opt/MethodLevelOptsTest.scala | 2 +- .../nsc/backend/jvm/opt/ScalaInlineInfoTest.scala | 2 +- .../tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala | 2 +- .../nsc/backend/jvm/opt/UnreachableCodeTest.scala | 2 +- .../backend/jvm/opt/UnusedLocalVariablesTest.scala | 2 +- .../nsc/transform/delambdafy/DelambdafyTest.scala | 4 +--- .../nsc/transform/patmat/PatmatBytecodeTest.scala | 3 +-- .../BytecodeTesting.scala} | 11 +++++------ 30 files changed, 35 insertions(+), 39 deletions(-) rename test/junit/scala/tools/{nsc/backend/jvm/CodeGenTools.scala => testing/BytecodeTesting.scala} (98%) diff --git a/test/junit/scala/issues/BytecodeTest.scala b/test/junit/scala/issues/BytecodeTest.scala index 7b9474b52e2b..8aa76bbac24f 100644 --- a/test/junit/scala/issues/BytecodeTest.scala +++ b/test/junit/scala/issues/BytecodeTest.scala @@ -6,7 +6,7 @@ import org.junit.Test import scala.tools.asm.Opcodes._ import scala.tools.nsc.backend.jvm.AsmUtils -import scala.tools.nsc.backend.jvm.CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import org.junit.Assert._ import 
scala.collection.JavaConverters._ diff --git a/test/junit/scala/issues/OptimizedBytecodeTest.scala b/test/junit/scala/issues/OptimizedBytecodeTest.scala index c69229ae2295..9c0fbebde77e 100644 --- a/test/junit/scala/issues/OptimizedBytecodeTest.scala +++ b/test/junit/scala/issues/OptimizedBytecodeTest.scala @@ -6,9 +6,9 @@ import org.junit.Test import scala.tools.asm.Opcodes._ import org.junit.Assert._ -import scala.tools.nsc.backend.jvm.{AsmUtils, CodeGenTools} +import scala.tools.nsc.backend.jvm.AsmUtils -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import AsmUtils._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala index e7bbbb9a4f23..ebeb57714993 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala @@ -7,7 +7,7 @@ import org.junit.Test import scala.tools.asm.Opcodes import org.junit.Assert._ -import scala.tools.nsc.backend.jvm.CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.testing.ClearAfterClass @RunWith(classOf[JUnit4]) diff --git a/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala index 7d4ae866fcaf..0991e5fbcf9a 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala @@ -6,7 +6,7 @@ import org.junit.Test import scala.collection.JavaConverters import scala.tools.asm.Opcodes import scala.tools.asm.tree.ClassNode -import scala.tools.nsc.backend.jvm.CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import JavaConverters._ import scala.tools.testing.ClearAfterClass diff --git a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala index e984b7551898..ab57c5a1c552 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala @@ -4,7 +4,7 @@ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 import org.junit.Assert._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.asm.Opcodes._ import scala.tools.partest.ASMConverters._ import scala.tools.testing.ClearAfterClass diff --git a/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala b/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala index b906942ffa1a..66054f246f1a 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala @@ -6,7 +6,7 @@ import org.junit.{Assert, Test} import scala.tools.asm.{Handle, Opcodes} import scala.tools.asm.tree.InvokeDynamicInsnNode import scala.tools.nsc.backend.jvm.AsmUtils._ -import scala.tools.nsc.backend.jvm.CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.testing.ClearAfterClass import scala.collection.JavaConverters._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala b/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala index 5c2ab6a2c785..598899c7051d 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala @@ -9,7 +9,7 @@ import org.junit.Test import scala.tools.asm.Opcodes._ import 
scala.tools.asm.tree._ import scala.tools.nsc.reporters.StoreReporter -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala b/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala index fc0c96e71a07..f300090268b3 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala @@ -9,7 +9,7 @@ import org.junit.Assert._ import scala.tools.testing.AssertUtil._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import scala.tools.testing.ClearAfterClass diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala index 075f42d18f11..d37adb226581 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala @@ -8,7 +8,7 @@ import org.junit.Test import scala.tools.asm.Opcodes._ import org.junit.Assert._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.asm.tree.{AbstractInsnNode, MethodNode} import scala.tools.nsc.backend.jvm.BTypes._ import scala.tools.partest.ASMConverters diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala index 8d4bc19ec359..7f6aaca67cf2 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala @@ -11,7 +11,7 @@ import scala.tools.asm.Opcodes import scala.tools.asm.tree.AbstractInsnNode import scala.tools.partest.ASMConverters._ import scala.tools.testing.ClearAfterClass -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import AsmUtils._ @RunWith(classOf[JUnit4]) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala index 09675870f0ed..7f07ce51d3d3 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala @@ -11,7 +11,7 @@ import scala.tools.asm.tree._ import scala.tools.asm.tree.analysis._ import scala.tools.nsc.backend.jvm.analysis.{AliasingFrame, AliasingAnalyzer} -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import AsmUtils._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala index aba0aab0386e..30d5db06ddea 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala @@ -11,7 +11,7 @@ import org.junit.Assert._ import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.testing.AssertUtil._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala index 9a27c42cac86..e29d41f0614a 
100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala @@ -15,7 +15,7 @@ import scala.tools.asm.tree.analysis._ import scala.tools.nsc.reporters.StoreReporter import scala.tools.testing.AssertUtil._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import AsmUtils._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala index e8530af4e0e9..d1432318829d 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala @@ -17,7 +17,7 @@ import scala.tools.nsc.io._ import scala.tools.nsc.reporters.StoreReporter import scala.tools.testing.AssertUtil._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import AsmUtils._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala index ac1b759fe260..8ee2b2aa6bdf 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala @@ -8,7 +8,7 @@ import org.junit.Test import scala.tools.asm.Opcodes._ import org.junit.Assert._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala index 6d566c722f4f..d9479fde1df3 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala @@ -8,7 +8,7 @@ import org.junit.Test import scala.tools.asm.Opcodes._ import org.junit.Assert._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import scala.tools.testing.ClearAfterClass diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala index 7283e20745d8..a833192fb17d 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala @@ -9,7 +9,7 @@ import scala.tools.asm.Opcodes._ import org.junit.Assert._ import scala.tools.testing.AssertUtil._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala index 5cb1aab4a90f..dc3eede556bc 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala @@ -8,7 +8,7 @@ import org.junit.Test import scala.collection.generic.Clearable import org.junit.Assert._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import AsmUtils._ diff --git 
a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala index 6dd0a33289a0..428841e0e0f1 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala @@ -17,7 +17,7 @@ import scala.tools.nsc.io._ import scala.tools.nsc.reporters.StoreReporter import scala.tools.testing.AssertUtil._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import AsmUtils._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala index ab1aef47cde3..e0b1d758f7f9 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala @@ -11,7 +11,7 @@ import org.junit.Assert._ import scala.tools.asm.tree._ import scala.tools.testing.AssertUtil._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import AsmUtils._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala index 075513a2b712..748eff88eafa 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala @@ -8,7 +8,7 @@ import org.junit.Test import scala.tools.asm.Opcodes._ import org.junit.Assert._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import AsmUtils._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index b7641b5ec717..52ee118a9461 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -12,7 +12,7 @@ import org.junit.Assert._ import scala.tools.asm.tree._ import scala.tools.nsc.reporters.StoreReporter -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import AsmUtils._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala index 003b2d48803f..1ceaaf7f69de 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala @@ -12,7 +12,7 @@ import scala.tools.asm.tree.ClassNode import scala.tools.nsc.backend.jvm.AsmUtils._ import scala.tools.testing.AssertUtil._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import scala.tools.testing.ClearAfterClass diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala index 6cb3fd3bba49..ba6bdcf658b5 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala @@ -8,7 +8,7 @@ import org.junit.Test import scala.tools.asm.Opcodes._ import org.junit.Assert._ 
-import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.asm.tree.ClassNode import scala.tools.nsc.backend.jvm.BTypes.{MethodInlineInfo, InlineInfo} import scala.tools.partest.ASMConverters diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala index 99acb318de65..0133fc9dce2a 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala @@ -8,7 +8,7 @@ import org.junit.Test import scala.tools.asm.Opcodes._ import org.junit.Assert._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala index 46f06d1d3977..ca095b8a5177 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala @@ -10,7 +10,7 @@ import org.junit.Assert._ import scala.tools.testing.AssertUtil._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import scala.tools.testing.ClearAfterClass diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala index 77e73e64b99c..7ae946f581b5 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala @@ -9,7 +9,7 @@ import scala.tools.asm.Opcodes._ import org.junit.Assert._ import scala.collection.JavaConverters._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import scala.tools.testing.ClearAfterClass diff --git a/test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala b/test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala index e4bf038f327b..d30f45817773 100644 --- a/test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala +++ b/test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala @@ -1,9 +1,7 @@ package scala.tools.nsc.transform.delambdafy import scala.reflect.io.Path.jfile2path -import scala.tools.nsc.backend.jvm.CodeGenTools.getGeneratedClassfiles -import scala.tools.nsc.backend.jvm.CodeGenTools.makeSourceFile -import scala.tools.nsc.backend.jvm.CodeGenTools.newCompilerWithoutVirtualOutdir +import scala.tools.testing.BytecodeTesting._ import scala.tools.nsc.io.AbstractFile import scala.tools.testing.TempDir diff --git a/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala b/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala index aa83520efb2b..99975abc50b2 100644 --- a/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala +++ b/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala @@ -8,10 +8,9 @@ import scala.tools.asm.Opcodes._ import org.junit.Assert._ import scala.tools.nsc.backend.jvm.AsmUtils._ -import scala.tools.nsc.backend.jvm.CodeGenTools import scala.tools.testing.AssertUtil._ -import CodeGenTools._ +import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import scala.tools.testing.ClearAfterClass 
diff --git a/test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala b/test/junit/scala/tools/testing/BytecodeTesting.scala similarity index 98% rename from test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala rename to test/junit/scala/tools/testing/BytecodeTesting.scala index 389e5b2ead3b..21b1ce2e77f3 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/CodeGenTools.scala +++ b/test/junit/scala/tools/testing/BytecodeTesting.scala @@ -1,4 +1,4 @@ -package scala.tools.nsc.backend.jvm +package scala.tools.testing import org.junit.Assert._ @@ -10,14 +10,13 @@ import scala.tools.asm.tree.{AbstractInsnNode, ClassNode, MethodNode} import scala.tools.cmd.CommandLineParser import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.StoreReporter -import scala.tools.nsc.settings.MutableSettings -import scala.tools.nsc.{Settings, Global} +import scala.tools.nsc.{Global, Settings} import scala.tools.partest.ASMConverters import scala.collection.JavaConverters._ -import scala.tools.testing.TempDir -import AsmUtils._ +import scala.tools.nsc.backend.jvm.AsmUtils -object CodeGenTools { +object BytecodeTesting { + import AsmUtils._ import ASMConverters._ def genMethod( flags: Int = Opcodes.ACC_PUBLIC, From 5d5a6aa154ab533dca4a0e589a7ae67c3c037d61 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 20 May 2016 10:59:21 +0200 Subject: [PATCH 0057/2793] Better abstraction for bytecode tests. Also organize some imports. --- test/junit/scala/issues/BytecodeTest.scala | 44 +++-- .../scala/issues/OptimizedBytecodeTest.scala | 47 +++-- .../tools/nsc/backend/jvm/BTypesTest.scala | 33 ++-- .../nsc/backend/jvm/DefaultMethodTest.scala | 15 +- .../nsc/backend/jvm/DirectCompileTest.scala | 24 +-- .../nsc/backend/jvm/IndyLambdaTest.scala | 24 ++- .../tools/nsc/backend/jvm/IndySammyTest.scala | 24 +-- .../nsc/backend/jvm/StringConcatTest.scala | 18 +- .../jvm/analysis/NullnessAnalyzerTest.scala | 47 +++-- .../jvm/analysis/ProdConsAnalyzerTest.scala | 27 +-- .../nsc/backend/jvm/opt/AnalyzerTest.scala | 25 +-- .../jvm/opt/BTypesFromClassfileTest.scala | 28 ++- .../nsc/backend/jvm/opt/CallGraphTest.scala | 34 ++-- .../jvm/opt/ClosureOptimizerTest.scala | 37 ++-- .../jvm/opt/CompactLocalVariablesTest.scala | 20 +-- .../jvm/opt/EmptyExceptionHandlersTest.scala | 25 +-- .../opt/EmptyLabelsAndLineNumbersTest.scala | 10 +- .../nsc/backend/jvm/opt/InlineInfoTest.scala | 31 ++-- .../backend/jvm/opt/InlineWarningTest.scala | 42 ++--- .../jvm/opt/InlinerIllegalAccessTest.scala | 29 ++- .../opt/InlinerSeparateCompilationTest.scala | 9 +- .../nsc/backend/jvm/opt/InlinerTest.scala | 44 +++-- .../backend/jvm/opt/MethodLevelOptsTest.scala | 66 ++++--- .../backend/jvm/opt/ScalaInlineInfoTest.scala | 25 ++- .../backend/jvm/opt/SimplifyJumpsTest.scala | 10 +- .../backend/jvm/opt/UnreachableCodeTest.scala | 34 ++-- .../jvm/opt/UnusedLocalVariablesTest.scala | 23 ++- .../transform/delambdafy/DelambdafyTest.scala | 14 +- .../transform/patmat/PatmatBytecodeTest.scala | 30 ++-- .../scala/tools/testing/BytecodeTesting.scala | 169 +++++++++--------- 30 files changed, 453 insertions(+), 555 deletions(-) diff --git a/test/junit/scala/issues/BytecodeTest.scala b/test/junit/scala/issues/BytecodeTest.scala index 8aa76bbac24f..3fd5e3a22229 100644 --- a/test/junit/scala/issues/BytecodeTest.scala +++ b/test/junit/scala/issues/BytecodeTest.scala @@ -1,23 +1,22 @@ package scala.issues +import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test - -import 
scala.tools.asm.Opcodes._ -import scala.tools.nsc.backend.jvm.AsmUtils -import scala.tools.testing.BytecodeTesting._ -import org.junit.Assert._ import scala.collection.JavaConverters._ import scala.tools.asm.Opcodes +import scala.tools.asm.Opcodes._ import scala.tools.asm.tree.ClassNode +import scala.tools.nsc.backend.jvm.AsmUtils import scala.tools.partest.ASMConverters._ -import scala.tools.testing.ClearAfterClass +import scala.tools.testing.BytecodeTesting +import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) -class BytecodeTest extends ClearAfterClass { - val compiler = cached("compiler", () => newCompiler()) +class BytecodeTest extends BytecodeTesting { + import compiler._ @Test def t8731(): Unit = { @@ -37,7 +36,7 @@ class BytecodeTest extends ClearAfterClass { |} """.stripMargin - val List(c) = compileClasses(compiler)(code) + val List(c) = compileClasses(code) assertTrue(getSingleMethod(c, "f").instructions.count(_.isInstanceOf[TableSwitch]) == 1) assertTrue(getSingleMethod(c, "g").instructions.count(_.isInstanceOf[LookupSwitch]) == 1) @@ -64,9 +63,9 @@ class BytecodeTest extends ClearAfterClass { |@AnnotB class B """.stripMargin - val run = new compiler.Run() + val run = new global.Run() run.compileSources(List(new BatchSourceFile("AnnotA.java", annotA), new BatchSourceFile("AnnotB.java", annotB), new BatchSourceFile("Test.scala", scalaSrc))) - val outDir = compiler.settings.outputDirs.getSingleOutput.get + val outDir = global.settings.outputDirs.getSingleOutput.get val outfiles = (for (f <- outDir.iterator if !f.isDirectory) yield (f.name, f.toByteArray)).toList def check(classfile: String, annotName: String) = { @@ -98,7 +97,7 @@ class BytecodeTest extends ClearAfterClass { | } |} """.stripMargin - val List(mirror, module) = compileClasses(compiler)(code) + val List(mirror, module) = compileClasses(code) val unapplyLineNumbers = getSingleMethod(module, "unapply").instructions.filter(_.isInstanceOf[LineNumber]) assert(unapplyLineNumbers == List(LineNumber(2, Label(0))), unapplyLineNumbers) @@ -145,7 +144,7 @@ class BytecodeTest extends ClearAfterClass { |} """.stripMargin - val List(c) = compileClasses(compiler)(code) + val List(c) = compileClasses(code) // t1: no unnecessary GOTOs assertSameCode(getSingleMethod(c, "t1"), List( @@ -271,7 +270,7 @@ class BytecodeTest extends ClearAfterClass { |class C20 extends T8 """.stripMargin - implicit val classes = compileClasses(compiler)(code).map(c => (c.name, c)).toMap + implicit val classes = compileClasses(code).map(c => (c.name, c)).toMap val noForwarder = List('C1, 'C2, 'C3, 'C4, 'C10, 'C11, 'C12, 'C13, 'C16, 'C17) for (c <- noForwarder) assertEquals(findMethods(c, "f"), Nil) @@ -297,7 +296,7 @@ class BytecodeTest extends ClearAfterClass { |trait T2 { def f(x: String) = 1 } |class C extends T1 with T2 """.stripMargin - val List(c, t1, t2) = compileClasses(compiler)(code) + val List(c, t1, t2) = compileClasses(code) assertEquals(findMethods(c, "f"), Nil) } @@ -329,7 +328,7 @@ class BytecodeTest extends ClearAfterClass { | |class K12 extends J2 with T2 """.stripMargin - implicit val classes = compileClasses(compiler)(code, List(j1, j2, j3, j4)).map(c => (c.name, c)).toMap + implicit val classes = compileClasses(code, List(j1, j2, j3, j4)).map(c => (c.name, c)).toMap val noForwarder = List('K1, 'K2, 'K3, 'K4, 'K5, 'K6, 'K7, 'K8, 'K9, 'K10, 'K11) for (c <- noForwarder) assertEquals(findMethods(c, "f"), Nil) @@ -339,7 +338,7 @@ class BytecodeTest extends ClearAfterClass { @Test def invocationReceivers(): Unit = { - val 
List(c1, c2, t, u) = compileClasses(compiler)(invocationReceiversTestCode.definitions("Object")) + val List(c1, c2, t, u) = compileClasses(invocationReceiversTestCode.definitions("Object")) // mixin forwarder in C1 assertSameCode(getSingleMethod(c1, "clone"), List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, "T", "clone", "()Ljava/lang/Object;", false), Op(ARETURN))) assertInvoke(getSingleMethod(c1, "f1"), "T", "clone") @@ -349,7 +348,7 @@ class BytecodeTest extends ClearAfterClass { assertInvoke(getSingleMethod(c2, "f2"), "T", "clone") assertInvoke(getSingleMethod(c2, "f3"), "C1", "clone") - val List(c1b, c2b, tb, ub) = compileClasses(compiler)(invocationReceiversTestCode.definitions("String")) + val List(c1b, c2b, tb, ub) = compileClasses(invocationReceiversTestCode.definitions("String")) def ms(c: ClassNode, n: String) = c.methods.asScala.toList.filter(_.name == n) assert(ms(tb, "clone").length == 1) assert(ms(ub, "clone").isEmpty) @@ -396,9 +395,8 @@ class BytecodeTest extends ClearAfterClass { | def f3(j: a.J) = j.f |} """.stripMargin - val List(c) = compileClasses(compiler)(cC, javaCode = List((aC, "A.java"), (bC, "B.java"), (iC, "I.java"), (jC, "J.java"))) + val List(c) = compileClasses(cC, javaCode = List((aC, "A.java"), (bC, "B.java"), (iC, "I.java"), (jC, "J.java"))) assertInvoke(getSingleMethod(c, "f1"), "a/B", "f") // receiver needs to be B (A is not accessible in class C, package b) - println(getSingleMethod(c, "f2").instructions.stringLines) assertInvoke(getSingleMethod(c, "f3"), "a/J", "f") // receiver needs to be J } @@ -413,7 +411,7 @@ class BytecodeTest extends ClearAfterClass { | |} """.stripMargin - val List(c) = compileClasses(compiler)(code) + val List(c) = compileClasses(code) assertInvoke(getSingleMethod(c, "f1"), "[Ljava/lang/String;", "clone") // array descriptor as receiver assertInvoke(getSingleMethod(c, "f2"), "java/lang/Object", "hashCode") // object receiver assertInvoke(getSingleMethod(c, "f3"), "java/lang/Object", "hashCode") @@ -424,7 +422,7 @@ class BytecodeTest extends ClearAfterClass { def superConstructorArgumentInSpecializedClass(): Unit = { // see comment in SpecializeTypes.forwardCtorCall val code = "case class C[@specialized(Int) T](_1: T)" - val List(c, cMod, cSpec) = compileClasses(compiler)(code) + val List(c, cMod, cSpec) = compileClasses(code) assertSameSummary(getSingleMethod(cSpec, ""), // pass `null` to super constructor, no box-unbox, no Integer created List(ALOAD, ILOAD, PUTFIELD, ALOAD, ACONST_NULL, "", RETURN)) diff --git a/test/junit/scala/issues/OptimizedBytecodeTest.scala b/test/junit/scala/issues/OptimizedBytecodeTest.scala index 9c0fbebde77e..b074215534a5 100644 --- a/test/junit/scala/issues/OptimizedBytecodeTest.scala +++ b/test/junit/scala/issues/OptimizedBytecodeTest.scala @@ -3,22 +3,21 @@ package scala.issues import org.junit.runner.RunWith import org.junit.runners.JUnit4 import org.junit.Test + import scala.tools.asm.Opcodes._ import org.junit.Assert._ import scala.tools.nsc.backend.jvm.AsmUtils - import scala.tools.testing.BytecodeTesting._ import scala.tools.partest.ASMConverters import ASMConverters._ import AsmUtils._ - -import scala.tools.testing.ClearAfterClass +import scala.tools.testing.{BytecodeTesting, ClearAfterClass} @RunWith(classOf[JUnit4]) -class OptimizedBytecodeTest extends ClearAfterClass { - val args = "-Yopt:l:classpath -Yopt-warnings" - val compiler = cached("compiler", () => newCompiler(extraArgs = args)) +class OptimizedBytecodeTest extends BytecodeTesting { + override def compilerArgs = "-Yopt:l:classpath 
-Yopt-warnings" + import compiler._ @Test def t2171(): Unit = { @@ -28,7 +27,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { | def t(): Unit = while (true) m("...") |} """.stripMargin - val List(c) = compileClasses(compiler)(code) + val List(c) = compileClasses(code) assertSameCode(getSingleMethod(c, "t"), List(Label(0), Jump(GOTO, Label(0)))) } @@ -46,7 +45,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(compiler)(code) + val List(c) = compileClasses(code) assertSameSummary(getSingleMethod(c, "t"), List( LDC, ASTORE, ALOAD /*0*/, ALOAD /*1*/, "C$$$anonfun$1", IRETURN)) @@ -72,7 +71,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { | def h(block: => Unit): Nothing = ??? |} """.stripMargin - val List(c, t, tMod) = compileClasses(compiler)(code, allowMessage = _.msg.contains("not be exhaustive")) + val List(c, t, tMod) = compileClasses(code, allowMessage = _.msg.contains("not be exhaustive")) assertSameSummary(getSingleMethod(c, "t"), List(GETSTATIC, "$qmark$qmark$qmark", ATHROW)) } @@ -97,7 +96,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { |arguments expected by the callee ErrorHandler$::defaultIfIOException(Lscala/Function0;Lscala/Function0;)Ljava/lang/Object;. These values would be discarded |when entering an exception handler declared in the inlined method.""".stripMargin - compileClasses(compiler)(code, allowMessage = _.msg == msg) + compileClasses(code, allowMessage = _.msg == msg) } @Test @@ -110,7 +109,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { | } |} """.stripMargin - compileClasses(compiler)(code) + compileClasses(code) } @Test @@ -120,7 +119,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { |object Warmup { def filter[A](p: Any => Boolean): Any = filter[Any](p) } """.stripMargin val c2 = "class C { def t = warmup.Warmup.filter[Any](x => false) }" - val List(c, _, _) = compileClassesSeparately(List(c1, c2), extraArgs = args) + val List(c, _, _) = compileClassesSeparately(List(c1, c2), extraArgs = compilerArgs) assertInvoke(getSingleMethod(c, "t"), "warmup/Warmup$", "filter") } @@ -135,7 +134,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { | } |} """.stripMargin - compileClasses(compiler)(code) + compileClasses(code) } @Test @@ -163,7 +162,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { | } |} """.stripMargin - compileClasses(compiler)(code) + compileClasses(code) } @Test @@ -179,7 +178,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { | } |} """.stripMargin - compileClasses(compiler)(code) + compileClasses(code) } @Test @@ -201,7 +200,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { | val NoContext = self.analyzer.NoContext |} """.stripMargin - compileClasses(compiler)(code) + compileClasses(code) } @Test @@ -218,7 +217,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(compiler)(code) + val List(c) = compileClasses(code) assertSameSummary(getSingleMethod(c, "t"), List( ALOAD /*1*/, INSTANCEOF /*Some*/, IFNE /*A*/, ALOAD /*0*/, "getInt", POP, @@ -237,7 +236,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(compiler)(code) + val List(c) = compileClasses(code) assertSameSummary(getSingleMethod(c, "t"), List( -1 /*A*/, ILOAD /*1*/, TABLESWITCH, -1, ALOAD, "pr", RETURN, @@ -261,7 +260,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { |} """.stripMargin - val cls = 
compileClassesSeparately(List(c1, c2), extraArgs = args) + val cls = compileClassesSeparately(List(c1, c2), extraArgs = compilerArgs) val c = cls.find(_.name == "C").get assertSameSummary(getSingleMethod(c, "t"), List( GETSTATIC, IFNONNULL, ACONST_NULL, ATHROW, // module load and null checks not yet eliminated @@ -300,7 +299,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { | def f2b() = identity(wrapper2(5)) // not inlined |} """.stripMargin - val List(c) = compileClasses(compiler)(code, allowMessage = _.msg.contains("exception handler declared in the inlined method")) + val List(c) = compileClasses(code, allowMessage = _.msg.contains("exception handler declared in the inlined method")) assertInvoke(getSingleMethod(c, "f1a"), "C", "C$$$anonfun$1") assertInvoke(getSingleMethod(c, "f1b"), "C", "wrapper1") assertInvoke(getSingleMethod(c, "f2a"), "C", "C$$$anonfun$3") @@ -318,7 +317,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { | def t = mbarray_apply_minibox(null, 0) |} """.stripMargin - val List(c) = compileClasses(compiler)(code) + val List(c) = compileClasses(code) assertNoInvoke(getSingleMethod(c, "t")) } @@ -336,7 +335,7 @@ class OptimizedBytecodeTest extends ClearAfterClass { |object Nill extends Listt |class Listt """.stripMargin - val List(c, nil, nilMod, listt) = compileClasses(compiler)(code) + val List(c, nil, nilMod, listt) = compileClasses(code) assertInvoke(getSingleMethod(c, "t"), "C", "C$$$anonfun$1") } @@ -355,14 +354,14 @@ class OptimizedBytecodeTest extends ClearAfterClass { | final def apply(a: Any): Any = throw new RuntimeException(key) |} """.stripMargin - val List(c, f) = compileClasses(compiler)(code) + val List(c, f) = compileClasses(code) assertInvoke(getSingleMethod(c, "crash"), "C", "map") } @Test def optimiseEnablesNewOpt(): Unit = { val code = """class C { def t = (1 to 10) foreach println }""" - val List(c) = readAsmClasses(compile(newCompiler(extraArgs = "-optimise -deprecation"))(code, allowMessage = _.msg.contains("is deprecated"))) + val List(c) = readAsmClasses(newCompiler(extraArgs = "-optimise -deprecation").compile(code, allowMessage = _.msg.contains("is deprecated"))) assertInvoke(getSingleMethod(c, "t"), "C", "C$$$anonfun$1") // range-foreach inlined from classpath } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala index ebeb57714993..0144fa7366a3 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala @@ -1,30 +1,29 @@ package scala.tools.nsc package backend.jvm +import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.tools.asm.Opcodes -import org.junit.Assert._ -import scala.tools.testing.BytecodeTesting._ -import scala.tools.testing.ClearAfterClass +import scala.tools.asm.Opcodes +import scala.tools.testing.BytecodeTesting @RunWith(classOf[JUnit4]) -class BTypesTest extends ClearAfterClass { - val compiler = cached("compiler", () => { - val comp = newCompiler(extraArgs = "-Yopt:l:none") - new comp.Run() // initializes some of the compiler - comp.exitingDelambdafy(comp.scalaPrimitives.init()) // needed: it's only done when running the backend, and we don't actually run the compiler - comp.exitingDelambdafy(comp.genBCode.bTypes.initializeCoreBTypes()) - comp - }) - import compiler.genBCode.bTypes._ +class BTypesTest extends BytecodeTesting { + override def compilerArgs = 
"-Yopt:l:none" + import compiler.global + locally { + new global.Run() // initializes some of the compiler + global.exitingDelambdafy(global.scalaPrimitives.init()) // needed: it's only done when running the backend, and we don't actually run the compiler + global.exitingDelambdafy(global.genBCode.bTypes.initializeCoreBTypes()) + } + import global.genBCode.bTypes._ - def classBTFS(sym: compiler.Symbol) = compiler.exitingDelambdafy(classBTypeFromSymbol(sym)) + def classBTFS(sym: global.Symbol) = global.exitingDelambdafy(classBTypeFromSymbol(sym)) - def jlo = compiler.definitions.ObjectClass - def jls = compiler.definitions.StringClass + def jlo = global.definitions.ObjectClass + def jls = global.definitions.StringClass def o = classBTFS(jlo) def s = classBTFS(jls) def oArr = ArrayBType(o) diff --git a/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala index 0991e5fbcf9a..b538ae0bc603 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala @@ -4,30 +4,29 @@ import org.junit.Assert._ import org.junit.Test import scala.collection.JavaConverters +import scala.collection.JavaConverters._ import scala.tools.asm.Opcodes import scala.tools.asm.tree.ClassNode +import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ -import JavaConverters._ -import scala.tools.testing.ClearAfterClass - -class DefaultMethodTest extends ClearAfterClass { - val compiler = cached("compiler", () => newCompiler()) +class DefaultMethodTest extends BytecodeTesting { + import compiler._ @Test def defaultMethodsViaGenBCode(): Unit = { - import compiler._ + import global._ val code = "package pack { trait T { def foo: Int }}" object makeFooDefaultMethod extends Transformer { val Foo = TermName("foo") /** Transforms a single tree. 
*/ - override def transform(tree: compiler.Tree): compiler.Tree = tree match { + override def transform(tree: global.Tree): global.Tree = tree match { case dd @ DefDef(_, Foo, _, _, _, _) => dd.symbol.setFlag(reflect.internal.Flags.JAVA_DEFAULTMETHOD) copyDefDef(dd)(rhs = Literal(Constant(1)).setType(definitions.IntTpe)) case _ => super.transform(tree) } } - val asmClasses: List[ClassNode] = readAsmClasses(compileTransformed(compiler)(code, Nil, makeFooDefaultMethod.transform(_))) + val asmClasses: List[ClassNode] = readAsmClasses(compiler.compileTransformed(code, Nil, makeFooDefaultMethod.transform(_))) val foo = asmClasses.head.methods.iterator.asScala.toList.last assertTrue("default method should not be abstract", (foo.access & Opcodes.ACC_ABSTRACT) == 0) assertTrue("default method body emitted", foo.instructions.size() > 0) diff --git a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala index ab57c5a1c552..65b4264ee9e3 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala @@ -1,21 +1,23 @@ package scala.tools.nsc.backend.jvm +import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Assert._ -import scala.tools.testing.BytecodeTesting._ + import scala.tools.asm.Opcodes._ import scala.tools.partest.ASMConverters._ -import scala.tools.testing.ClearAfterClass +import scala.tools.testing.BytecodeTesting +import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) -class DirectCompileTest extends ClearAfterClass { - val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:method")) +class DirectCompileTest extends BytecodeTesting { + override def compilerArgs = "-Yopt:l:method" + import compiler._ @Test def testCompile(): Unit = { - val List(("C.class", bytes)) = compile(compiler)( + val List(("C.class", bytes)) = compile( """class C { | def f = 1 |} @@ -26,12 +28,12 @@ class DirectCompileTest extends ClearAfterClass { @Test def testCompileClasses(): Unit = { - val List(cClass, cModuleClass) = compileClasses(compiler)("class C; object C") + val List(cClass, cModuleClass) = compileClasses("class C; object C") assertTrue(cClass.name == "C") assertTrue(cModuleClass.name == "C$") - val List(dMirror, dModuleClass) = compileClasses(compiler)("object D") + val List(dMirror, dModuleClass) = compileClasses("object D") assertTrue(dMirror.name == "D") assertTrue(dModuleClass.name == "D$") @@ -39,7 +41,7 @@ class DirectCompileTest extends ClearAfterClass { @Test def testCompileMethods(): Unit = { - val List(f, g) = compileMethods(compiler)( + val List(f, g) = compileMethods( """def f = 10 |def g = f """.stripMargin) @@ -56,7 +58,7 @@ class DirectCompileTest extends ClearAfterClass { @Test def testDropNonOpAliveLabels(): Unit = { // makes sure that dropNoOp doesn't drop labels that are being used - val List(f) = compileMethods(compiler)("""def f(x: Int) = if (x == 0) "a" else "b"""") + val List(f) = compileMethods("""def f(x: Int) = if (x == 0) "a" else "b"""") assertSameCode(instructionsFromMethod(f).dropLinesFrames, List( Label(0), VarOp(ILOAD, 1), @@ -86,6 +88,6 @@ class DirectCompileTest extends ClearAfterClass { @Test def compileErroneous(): Unit = { - compileClasses(compiler)("class C { def f: String = 1 }", allowMessage = _.msg contains "type mismatch") + compileClasses("class C { def f: String = 1 }", allowMessage = _.msg contains 
"type mismatch") } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala b/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala index 66054f246f1a..22ced47a02f3 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala @@ -1,21 +1,19 @@ package scala.tools.nsc.backend.jvm import org.junit.Assert._ -import org.junit.{Assert, Test} +import org.junit.Test -import scala.tools.asm.{Handle, Opcodes} -import scala.tools.asm.tree.InvokeDynamicInsnNode -import scala.tools.nsc.backend.jvm.AsmUtils._ -import scala.tools.testing.BytecodeTesting._ -import scala.tools.testing.ClearAfterClass import scala.collection.JavaConverters._ +import scala.tools.asm.Handle +import scala.tools.asm.tree.InvokeDynamicInsnNode +import scala.tools.testing.BytecodeTesting -class IndyLambdaTest extends ClearAfterClass { - val compiler = cached("compiler", () => newCompiler()) +class IndyLambdaTest extends BytecodeTesting { + import compiler._ @Test def boxingBridgeMethodUsedSelectively(): Unit = { def implMethodDescriptorFor(code: String): String = { - val method = compileMethods(compiler)(s"""def f = $code """).find(_.name == "f").get + val method = compileMethods(s"""def f = $code """).find(_.name == "f").get val x = method.instructions.iterator.asScala.toList x.flatMap { case insn : InvokeDynamicInsnNode => insn.bsmArgs.collect { case h : Handle => h.getDesc } @@ -48,17 +46,17 @@ class IndyLambdaTest extends ClearAfterClass { assertEquals("(I)I", implMethodDescriptorFor("(x: Int) => x")) // non-builtin sams are like specialized functions - compileClasses(compiler)("class VC(private val i: Int) extends AnyVal; trait FunVC { def apply(a: VC): VC }") + compileClasses("class VC(private val i: Int) extends AnyVal; trait FunVC { def apply(a: VC): VC }") assertEquals("(I)I", implMethodDescriptorFor("((x: VC) => x): FunVC")) - compileClasses(compiler)("trait Fun1[T, U] { def apply(a: T): U }") + compileClasses("trait Fun1[T, U] { def apply(a: T): U }") assertEquals(s"($obj)$str", implMethodDescriptorFor("(x => x.toString): Fun1[Int, String]")) assertEquals(s"($obj)$obj", implMethodDescriptorFor("(x => println(x)): Fun1[Int, Unit]")) assertEquals(s"($obj)$str", implMethodDescriptorFor("((x: VC) => \"\") : Fun1[VC, String]")) assertEquals(s"($str)$obj", implMethodDescriptorFor("((x: String) => new VC(0)) : Fun1[String, VC]")) - compileClasses(compiler)("trait Coll[A, Repr] extends Any") - compileClasses(compiler)("final class ofInt(val repr: Array[Int]) extends AnyVal with Coll[Int, Array[Int]]") + compileClasses("trait Coll[A, Repr] extends Any") + compileClasses("final class ofInt(val repr: Array[Int]) extends AnyVal with Coll[Int, Array[Int]]") assertEquals(s"([I)$obj", implMethodDescriptorFor("((xs: Array[Int]) => new ofInt(xs)): Array[Int] => Coll[Int, Array[Int]]")) } diff --git a/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala b/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala index 598899c7051d..d7c1f191d0f9 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala @@ -2,26 +2,20 @@ package scala.tools.nsc package backend.jvm import org.junit.Assert.assertEquals +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test import scala.tools.asm.Opcodes._ -import scala.tools.asm.tree._ import scala.tools.nsc.reporters.StoreReporter +import 
scala.tools.partest.ASMConverters._ +import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ - -import scala.tools.testing.ClearAfterClass @RunWith(classOf[JUnit4]) -class IndySammyTest extends ClearAfterClass { - - val compiler = cached("compiler", () => newCompiler()) - def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = - compileClasses(compiler)(scalaCode, javaCode, allowMessage) +class IndySammyTest extends BytecodeTesting { + import compiler._ def funClassName(from: String, to: String) = s"Fun$from$to" def classPrologue(from: String, to: String) = @@ -45,8 +39,8 @@ class IndySammyTest extends ClearAfterClass { def test(from: String, to: String, arg: String, body: String => String = x => x) (expectedSig: String, lamBody: List[Instruction], appArgs: List[Instruction], ret: Instruction) (allowMessage: StoreReporter#Info => Boolean = _ => false) = { - val cls = compile(s"${classPrologue(from, to)}") - val methodNodes = compileMethods(compiler)(lamDef(from, to, body) +";"+ appDef(arg), allowMessage) + val cls = compileClasses(s"${classPrologue(from, to)}") + val methodNodes = compileMethods(lamDef(from, to, body) +";"+ appDef(arg), allowMessage) val applySig = cls.head.methods.get(0).desc val anonfun = methodNodes.find(_.name contains "$anonfun$").map(convertMethod).get @@ -64,7 +58,7 @@ class IndySammyTest extends ClearAfterClass { } // def testSpecial(lam: String, lamTp: String, arg: String)(allowMessage: StoreReporter#Info => Boolean = _ => false) = { -// val cls = compile("trait Special[@specialized A] { def apply(a: A): A}" ) +// val cls = compileClasses("trait Special[@specialized A] { def apply(a: A): A}" ) // val methodNodes = compileMethods(compiler)(s"def lam : $lamTp = $lam" +";"+ appDef(arg), allowMessage) // // val anonfun = methodNodes.filter(_.name contains "$anonfun$").map(convertMethod) @@ -146,7 +140,7 @@ class IndySammyTest extends ClearAfterClass { // Tests ThisReferringMethodsTraverser @Test def testStaticIfNoThisReference: Unit = { - val methodNodes = compileMethods(compiler)("def foo = () => () => () => 42") + val methodNodes = compileMethods("def foo = () => () => () => 42") methodNodes.forall(m => !m.name.contains("anonfun") || (m.access & ACC_STATIC) == ACC_STATIC) } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala b/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala index f300090268b3..f231df8af060 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala @@ -1,22 +1,18 @@ package scala.tools.nsc package backend.jvm +import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ - -import scala.tools.testing.AssertUtil._ +import scala.tools.partest.ASMConverters._ +import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ -import scala.tools.testing.ClearAfterClass @RunWith(classOf[JUnit4]) -class StringConcatTest extends ClearAfterClass { - val compiler = cached("compiler", () => newCompiler()) +class StringConcatTest extends BytecodeTesting { + import compiler._ @Test def appendOverloadNoBoxing(): Unit = { @@ -54,7 +50,7 
@@ class StringConcatTest extends ClearAfterClass { | chrs: Array[Char]) = this + str + v + z + c + b + s + i + f + l + d + sbuf + chsq + chrs |} """.stripMargin - val List(c) = compileClasses(compiler)(code) + val List(c) = compileClasses(code) def invokeNameDesc(m: String): List[String] = getSingleMethod(c, m).instructions collect { case Invoke(_, _, name, desc, _) => name + desc diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala index d37adb226581..358a46102605 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala @@ -2,29 +2,26 @@ package scala.tools.nsc package backend.jvm package analysis +import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ -import scala.tools.testing.BytecodeTesting._ -import scala.tools.asm.tree.{AbstractInsnNode, MethodNode} +import scala.collection.JavaConverters._ +import scala.tools.asm.tree.MethodNode +import scala.tools.nsc.backend.jvm.AsmUtils._ import scala.tools.nsc.backend.jvm.BTypes._ -import scala.tools.partest.ASMConverters -import ASMConverters._ -import scala.tools.testing.ClearAfterClass import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ -import AsmUtils._ - -import scala.collection.JavaConverters._ +import scala.tools.testing.BytecodeTesting +import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) -class NullnessAnalyzerTest extends ClearAfterClass { - val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:none")) - import noOptCompiler.genBCode.bTypes.backendUtils._ +class NullnessAnalyzerTest extends BytecodeTesting { + override def compilerArgs = "-Yopt:l:none" + import compiler._ + import global.genBCode.bTypes.backendUtils._ - def newNullnessAnalyzer(methodNode: MethodNode, classInternalName: InternalName = "C") = new AsmAnalyzer(methodNode, classInternalName, new NullnessAnalyzer(noOptCompiler.genBCode.bTypes)) + def newNullnessAnalyzer(methodNode: MethodNode, classInternalName: InternalName = "C") = new AsmAnalyzer(methodNode, classInternalName, new NullnessAnalyzer(global.genBCode.bTypes)) def testNullness(analyzer: AsmAnalyzer[NullnessValue], method: MethodNode, query: String, index: Int, nullness: NullnessValue): Unit = { for (i <- findInstr(method, query)) { @@ -53,7 +50,7 @@ class NullnessAnalyzerTest extends ClearAfterClass { @Test def showNullnessFramesTest(): Unit = { - val List(m) = compileMethods(noOptCompiler)("def f = this.toString") + val List(m) = compileMethods("def f = this.toString") // NOTE: the frame for an instruction represents the state *before* executing that instr. // So in the frame for `ALOAD 0`, the stack is still empty. 
@@ -71,14 +68,14 @@ class NullnessAnalyzerTest extends ClearAfterClass { @Test def thisNonNull(): Unit = { - val List(m) = compileMethods(noOptCompiler)("def f = this.toString") + val List(m) = compileMethods("def f = this.toString") val a = newNullnessAnalyzer(m) testNullness(a, m, "ALOAD 0", 0, NotNullValue) } @Test def instanceMethodCall(): Unit = { - val List(m) = compileMethods(noOptCompiler)("def f(a: String) = a.trim") + val List(m) = compileMethods("def f(a: String) = a.trim") val a = newNullnessAnalyzer(m) testNullness(a, m, "INVOKEVIRTUAL java/lang/String.trim", 1, UnknownValue1) testNullness(a, m, "ARETURN", 1, NotNullValue) @@ -86,7 +83,7 @@ class NullnessAnalyzerTest extends ClearAfterClass { @Test def constructorCall(): Unit = { - val List(m) = compileMethods(noOptCompiler)("def f = { val a = new Object; a.toString }") + val List(m) = compileMethods("def f = { val a = new Object; a.toString }") val a = newNullnessAnalyzer(m) // for reference, the output of showAllNullnessFrames(a, m) - note that the frame represents the state *before* executing the instr. @@ -111,7 +108,7 @@ class NullnessAnalyzerTest extends ClearAfterClass { @Test def explicitNull(): Unit = { - val List(m) = compileMethods(noOptCompiler)("def f = { var a: Object = null; a }") + val List(m) = compileMethods("def f = { var a: Object = null; a }") val a = newNullnessAnalyzer(m) for ((insn, index, nullness) <- List( ("+ACONST_NULL", 2, NullValue), @@ -122,14 +119,14 @@ class NullnessAnalyzerTest extends ClearAfterClass { @Test def stringLiteralsNotNull(): Unit = { - val List(m) = compileMethods(noOptCompiler)("""def f = { val a = "hi"; a.trim }""") + val List(m) = compileMethods("""def f = { val a = "hi"; a.trim }""") val a = newNullnessAnalyzer(m) testNullness(a, m, "+ASTORE 1", 1, NotNullValue) } @Test def newArraynotNull() { - val List(m) = compileMethods(noOptCompiler)("def f = { val a = new Array[Int](2); a(0) }") + val List(m) = compileMethods("def f = { val a = new Array[Int](2); a(0) }") val a = newNullnessAnalyzer(m) testNullness(a, m, "+NEWARRAY T_INT", 2, NotNullValue) // new array on stack testNullness(a, m, "+ASTORE 1", 1, NotNullValue) // local var (a) @@ -147,7 +144,7 @@ class NullnessAnalyzerTest extends ClearAfterClass { | a.toString |} """.stripMargin - val List(m) = compileMethods(noOptCompiler)(code) + val List(m) = compileMethods(code) val a = newNullnessAnalyzer(m) val toSt = "+INVOKEVIRTUAL java/lang/Object.toString" testNullness(a, m, toSt, 3, UnknownValue1) @@ -173,7 +170,7 @@ class NullnessAnalyzerTest extends ClearAfterClass { | // d is null here, assinged in both branches. 
|} """.stripMargin - val List(m) = compileMethods(noOptCompiler)(code) + val List(m) = compileMethods(code) val a = newNullnessAnalyzer(m) val trim = "INVOKEVIRTUAL java/lang/String.trim" @@ -209,7 +206,7 @@ class NullnessAnalyzerTest extends ClearAfterClass { | a.asInstanceOf[String].trim // the stack value (LOAD of local a) is still not-null after the CHECKCAST |} """.stripMargin - val List(m) = compileMethods(noOptCompiler)(code) + val List(m) = compileMethods(code) val a = newNullnessAnalyzer(m) val instof = "+INSTANCEOF" diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala index 7f6aaca67cf2..be10370312b5 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala @@ -2,22 +2,23 @@ package scala.tools.nsc package backend.jvm package analysis +import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Assert._ import scala.tools.asm.Opcodes import scala.tools.asm.tree.AbstractInsnNode +import scala.tools.nsc.backend.jvm.AsmUtils._ import scala.tools.partest.ASMConverters._ -import scala.tools.testing.ClearAfterClass +import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ -import AsmUtils._ @RunWith(classOf[JUnit4]) -class ProdConsAnalyzerTest extends ClearAfterClass { - val noOptCompiler =cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:none")) - import noOptCompiler.genBCode.bTypes.backendUtils._ +class ProdConsAnalyzerTest extends BytecodeTesting { + override def compilerArgs = "-Yopt:l:none" + import compiler._ + import global.genBCode.bTypes.backendUtils._ def prodToString(producer: AbstractInsnNode) = producer match { case p: InitialProducer => p.toString @@ -48,7 +49,7 @@ class ProdConsAnalyzerTest extends ClearAfterClass { @Test def parameters(): Unit = { - val List(m) = compileMethods(noOptCompiler)("def f = this.toString") + val List(m) = compileMethods("def f = this.toString") val a = new ProdConsAnalyzer(m, "C") val call = findInstr(m, "INVOKEVIRTUAL").head @@ -92,7 +93,7 @@ class ProdConsAnalyzerTest extends ClearAfterClass { @Test def branching(): Unit = { - val List(m) = compileMethods(noOptCompiler)("def f(x: Int) = { var a = x; if (a == 0) a = 12; a }") + val List(m) = compileMethods("def f(x: Int) = { var a = x; if (a == 0) a = 12; a }") val a = new ProdConsAnalyzer(m, "C") val List(ret) = findInstr(m, "IRETURN") @@ -106,7 +107,7 @@ class ProdConsAnalyzerTest extends ClearAfterClass { @Test def checkCast(): Unit = { - val List(m) = compileMethods(noOptCompiler)("def f(o: Object) = o.asInstanceOf[String]") + val List(m) = compileMethods("def f(o: Object) = o.asInstanceOf[String]") val a = new ProdConsAnalyzer(m, "C") assert(findInstr(m, "CHECKCAST java/lang/String").length == 1) @@ -116,7 +117,7 @@ class ProdConsAnalyzerTest extends ClearAfterClass { @Test def instanceOf(): Unit = { - val List(m) = compileMethods(noOptCompiler)("def f(o: Object) = o.isInstanceOf[String]") + val List(m) = compileMethods("def f(o: Object) = o.isInstanceOf[String]") val a = new ProdConsAnalyzer(m, "C") assert(findInstr(m, "INSTANCEOF java/lang/String").length == 1) @@ -126,7 +127,7 @@ class ProdConsAnalyzerTest extends ClearAfterClass { @Test def unInitLocal(): Unit = { - val List(m) = compileMethods(noOptCompiler)("def f(b: Boolean) = { if (b) { var a = 0; 
println(a) }; 1 }") + val List(m) = compileMethods("def f(b: Boolean) = { if (b) { var a = 0; println(a) }; 1 }") val a = new ProdConsAnalyzer(m, "C") val List(store) = findInstr(m, "ISTORE") @@ -140,7 +141,7 @@ class ProdConsAnalyzerTest extends ClearAfterClass { @Test def dupCopying(): Unit = { - val List(m) = compileMethods(noOptCompiler)("def f = new Object") + val List(m) = compileMethods("def f = new Object") val a = new ProdConsAnalyzer(m, "C") val List(newO) = findInstr(m, "NEW") @@ -222,7 +223,7 @@ class ProdConsAnalyzerTest extends ClearAfterClass { @Test def copyingInsns(): Unit = { - val List(m) = compileMethods(noOptCompiler)("def f = 0l.asInstanceOf[Int]") + val List(m) = compileMethods("def f = 0l.asInstanceOf[Int]") val a = new ProdConsAnalyzer(m, "C") val List(cnst) = findInstr(m, "LCONST_0") diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala index 7f07ce51d3d3..a5fb1e7d17d6 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala @@ -2,28 +2,21 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import org.junit.Assert._ -import scala.tools.asm.tree._ import scala.tools.asm.tree.analysis._ -import scala.tools.nsc.backend.jvm.analysis.{AliasingFrame, AliasingAnalyzer} - +import scala.tools.nsc.backend.jvm.analysis.{AliasingAnalyzer, AliasingFrame} +import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ +import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ -import AsmUtils._ -import BackendReporting._ -import BytecodeUtils._ - -import scala.collection.JavaConverters._ -import scala.tools.testing.ClearAfterClass @RunWith(classOf[JUnit4]) -class AnalyzerTest extends ClearAfterClass { - val noOptCompiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:none")) +class AnalyzerTest extends BytecodeTesting { + override def compilerArgs = "-Yopt:l:none" + import compiler._ @Test def aliasingOfPrimitives(): Unit = { @@ -39,7 +32,7 @@ class AnalyzerTest extends ClearAfterClass { |} """.stripMargin - val List(c) = compileClasses(noOptCompiler)(code) + val List(c) = compileClasses(code) val a = new AliasingAnalyzer(new BasicInterpreter) val f = findAsmMethod(c, "f") a.analyze("C", f) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala index 30d5db06ddea..1169871ecda0 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala @@ -2,37 +2,29 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ +import scala.tools.asm.Opcodes._ import scala.tools.nsc.backend.jvm.BTypes.InternalName -import scala.tools.testing.AssertUtil._ - -import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ - -import BackendReporting._ - -import scala.collection.JavaConverters._ +import scala.tools.nsc.backend.jvm.BackendReporting._ +import 
scala.tools.testing.BytecodeTesting @RunWith(classOf[JUnit4]) -class BTypesFromClassfileTest { +class BTypesFromClassfileTest extends BytecodeTesting { // inliner enabled -> inlineInfos are collected (and compared) in ClassBTypes - val compiler = newCompiler(extraArgs = "-Yopt:inline-global") + override def compilerArgs = "-Yopt:inline-global" - import compiler._ + import compiler.global._ import definitions._ import genBCode.bTypes import bTypes._ - def duringBackend[T](f: => T) = compiler.exitingDelambdafy(f) + def duringBackend[T](f: => T) = global.exitingDelambdafy(f) - val run = new compiler.Run() // initializes some of the compiler - duringBackend(compiler.scalaPrimitives.init()) // needed: it's only done when running the backend, and we don't actually run the compiler + val run = new global.Run() // initializes some of the compiler + duringBackend(global.scalaPrimitives.init()) // needed: it's only done when running the backend, and we don't actually run the compiler duringBackend(bTypes.initializeCoreBTypes()) def clearCache() = bTypes.classBTypeFromInternalName.clear() diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala index e29d41f0614a..900608837f42 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala @@ -2,46 +2,38 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test + +import scala.collection.JavaConverters._ import scala.collection.generic.Clearable import scala.collection.immutable.IntMap -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ - import scala.tools.asm.tree._ -import scala.tools.asm.tree.analysis._ +import scala.tools.nsc.backend.jvm.BackendReporting._ import scala.tools.nsc.reporters.StoreReporter -import scala.tools.testing.AssertUtil._ - +import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ -import AsmUtils._ -import BackendReporting._ - -import scala.collection.JavaConverters._ -import scala.tools.testing.ClearAfterClass @RunWith(classOf[JUnit4]) -class CallGraphTest extends ClearAfterClass { - val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:inline-global -Yopt-warnings") - ) - import compiler.genBCode.bTypes +class CallGraphTest extends BytecodeTesting { + override def compilerArgs = "-Yopt:inline-global -Yopt-warnings" + import compiler._ + import global.genBCode.bTypes val notPerRun: List[Clearable] = List( bTypes.classBTypeFromInternalName, bTypes.byteCodeRepository.compilingClasses, bTypes.byteCodeRepository.parsedClasses, bTypes.callGraph.callsites) - notPerRun foreach compiler.perRunCaches.unrecordCache + notPerRun foreach global.perRunCaches.unrecordCache - import compiler.genBCode.bTypes._ + import global.genBCode.bTypes._ import callGraph._ def compile(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = { notPerRun.foreach(_.clear()) - compileClasses(compiler)(code, allowMessage = allowMessage).map(c => byteCodeRepository.classNode(c.name).get) + compileClasses(code, allowMessage = allowMessage).map(c => byteCodeRepository.classNode(c.name).get) } def callsInMethod(methodNode: MethodNode): List[MethodInsnNode] = methodNode.instructions.iterator.asScala.collect({ 
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala index d1432318829d..ddd95ddc0260 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala @@ -2,34 +2,19 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.collection.generic.Clearable -import scala.collection.mutable.ListBuffer -import scala.reflect.internal.util.BatchSourceFile -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ - -import scala.tools.asm.tree._ -import scala.tools.asm.tree.analysis._ -import scala.tools.nsc.io._ -import scala.tools.nsc.reporters.StoreReporter -import scala.tools.testing.AssertUtil._ +import scala.tools.asm.Opcodes._ +import scala.tools.partest.ASMConverters._ +import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ -import AsmUtils._ - -import BackendReporting._ - -import scala.collection.JavaConverters._ -import scala.tools.testing.ClearAfterClass @RunWith(classOf[JUnit4]) -class ClosureOptimizerTest extends ClearAfterClass { - val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:classpath -Yopt-warnings:_")) +class ClosureOptimizerTest extends BytecodeTesting { + override def compilerArgs = "-Yopt:l:classpath -Yopt-warnings:_" + import compiler._ @Test def nothingTypedClosureBody(): Unit = { @@ -41,7 +26,7 @@ class ClosureOptimizerTest extends ClearAfterClass { |} """.stripMargin - val List(c) = compileClasses(compiler)(code) + val List(c) = compileClasses(code) val t = findAsmMethod(c, "t") val List(bodyCall) = findInstr(t, "INVOKESTATIC C.C$$$anonfun$1 ()Lscala/runtime/Nothing$") assert(bodyCall.getNext.getOpcode == ATHROW) @@ -57,7 +42,7 @@ class ClosureOptimizerTest extends ClearAfterClass { |} """.stripMargin - val List(c) = compileClasses(compiler)(code) + val List(c) = compileClasses(code) val t = findAsmMethod(c, "t") val List(bodyCall) = findInstr(t, "INVOKESTATIC C.C$$$anonfun$1 ()Lscala/runtime/Null$") assert(bodyCall.getNext.getOpcode == POP) @@ -74,7 +59,7 @@ class ClosureOptimizerTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(compiler)(code) + val List(c) = compileClasses(code) assertSameCode(getSingleMethod(c, "t"), List(VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "scala/collection/immutable/List", "head", "()Ljava/lang/Object;", false), TypeOp(CHECKCAST, "java/lang/String"), Invoke(INVOKESTATIC, "C", "C$$$anonfun$1", "(Ljava/lang/String;)Ljava/lang/String;", false), @@ -95,7 +80,7 @@ class ClosureOptimizerTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(compiler)(code) + val List(c) = compileClasses(code) assertSameSummary(getSingleMethod(c, "t"), List(NEW, DUP, LDC, "", ATHROW)) } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala index 8ee2b2aa6bdf..50e3af6ee5ef 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala @@ -2,23 +2,21 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Assert._ +import 
org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ +import scala.tools.partest.ASMConverters._ import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ +import scala.tools.testing.ClearAfterClass @RunWith(classOf[JUnit4]) -class CompactLocalVariablesTest { - +class CompactLocalVariablesTest extends ClearAfterClass { // recurse-unreachable-jumps is required for eliminating catch blocks, in the first dce round they // are still live.only after eliminating the empty handler the catch blocks become unreachable. - val methodOptCompiler = newCompiler(extraArgs = "-Yopt:unreachable-code,compact-locals") - val noCompactVarsCompiler = newCompiler(extraArgs = "-Yopt:unreachable-code") + val methodOptCompiler = cached("methodOptCompiler", () => newCompiler(extraArgs = "-Yopt:unreachable-code,compact-locals")) + val noCompactVarsCompiler = cached("noCompactVarsCompiler", () => newCompiler(extraArgs = "-Yopt:unreachable-code")) @Test def compactUnused(): Unit = { @@ -58,8 +56,8 @@ class CompactLocalVariablesTest { |} |""".stripMargin - val List(noCompact) = compileMethods(noCompactVarsCompiler)(code) - val List(withCompact) = compileMethods(methodOptCompiler)(code) + val List(noCompact) = noCompactVarsCompiler.compileMethods(code) + val List(withCompact) = methodOptCompiler.compileMethods(code) // code is the same, except for local var indices assertTrue(noCompact.instructions.size == withCompact.instructions.size) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala index d9479fde1df3..9fb4aa1658d4 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala @@ -2,22 +2,23 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ +import scala.tools.asm.Opcodes._ +import scala.tools.partest.ASMConverters._ +import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ -import scala.tools.testing.ClearAfterClass @RunWith(classOf[JUnit4]) -class EmptyExceptionHandlersTest extends ClearAfterClass { +class EmptyExceptionHandlersTest extends BytecodeTesting { + override def compilerArgs = "-Yopt:unreachable-code" + def dceCompiler = compiler + val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:none")) - val dceCompiler = cached("dceCompiler", () => newCompiler(extraArgs = "-Yopt:unreachable-code")) val exceptionDescriptor = "java/lang/Exception" @@ -59,8 +60,8 @@ class EmptyExceptionHandlersTest extends ClearAfterClass { def eliminateUnreachableHandler(): Unit = { val code = "def f: Unit = try { } catch { case _: Exception => println(0) }; println(1)" - assertTrue(singleMethod(noOptCompiler)(code).handlers.length == 1) - val optMethod = singleMethod(dceCompiler)(code) + assertTrue(noOptCompiler.singleMethod(code).handlers.length == 1) + val optMethod = dceCompiler.singleMethod(code) assertTrue(optMethod.handlers.isEmpty) val code2 = @@ -72,7 +73,7 @@ class EmptyExceptionHandlersTest extends ClearAfterClass 
{ | println(2) |}""".stripMargin - assertTrue(singleMethod(dceCompiler)(code2).handlers.isEmpty) + assertTrue(dceCompiler.singleMethod(code2).handlers.isEmpty) } @Test @@ -84,6 +85,6 @@ class EmptyExceptionHandlersTest extends ClearAfterClass { | catch { case _: Exception => 2 } |}""".stripMargin - assertTrue(singleMethod(dceCompiler)(code).handlers.length == 1) + assertTrue(dceCompiler.singleMethod(code).handlers.length == 1) } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala index a833192fb17d..d57d44f2a3c2 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala @@ -2,16 +2,16 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test + import scala.tools.asm.Opcodes._ -import org.junit.Assert._ +import scala.tools.partest.ASMConverters +import scala.tools.partest.ASMConverters._ import scala.tools.testing.AssertUtil._ - import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ @RunWith(classOf[JUnit4]) class EmptyLabelsAndLineNumbersTest { diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala index dc3eede556bc..e45d7139a3be 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala @@ -2,36 +2,31 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.collection.generic.Clearable -import org.junit.Assert._ - -import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ -import AsmUtils._ -import scala.tools.testing.ClearAfterClass - -import BackendReporting._ import scala.collection.JavaConverters._ +import scala.collection.generic.Clearable +import scala.tools.nsc.backend.jvm.BackendReporting._ +import scala.tools.testing.BytecodeTesting @RunWith(classOf[JUnit4]) -class InlineInfoTest extends ClearAfterClass { - val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:classpath")) +class InlineInfoTest extends BytecodeTesting { + import compiler.global + import global.genBCode.bTypes + + override def compilerArgs = "-Yopt:l:classpath" - import compiler.genBCode.bTypes def notPerRun: List[Clearable] = List( bTypes.classBTypeFromInternalName, bTypes.byteCodeRepository.compilingClasses, bTypes.byteCodeRepository.parsedClasses) - notPerRun foreach compiler.perRunCaches.unrecordCache + notPerRun foreach global.perRunCaches.unrecordCache def compile(code: String) = { notPerRun.foreach(_.clear()) - compileClasses(compiler)(code) + compiler.compileClasses(code) } @Test @@ -55,11 +50,11 @@ class InlineInfoTest extends ClearAfterClass { """.stripMargin val classes = compile(code) - val fromSyms = classes.map(c => compiler.genBCode.bTypes.classBTypeFromInternalName(c.name).info.get.inlineInfo) + val fromSyms = classes.map(c => global.genBCode.bTypes.classBTypeFromInternalName(c.name).info.get.inlineInfo) val fromAttrs = classes.map(c => { 
assert(c.attrs.asScala.exists(_.isInstanceOf[InlineInfoAttribute]), c.attrs) - compiler.genBCode.bTypes.inlineInfoFromClassfile(c) + global.genBCode.bTypes.inlineInfoFromClassfile(c) }) assert(fromSyms == fromAttrs) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala index 428841e0e0f1..876c47a84ef1 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala @@ -2,41 +2,21 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.collection.generic.Clearable -import scala.collection.mutable.ListBuffer -import scala.reflect.internal.util.BatchSourceFile -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ - -import scala.tools.asm.tree._ -import scala.tools.asm.tree.analysis._ -import scala.tools.nsc.io._ -import scala.tools.nsc.reporters.StoreReporter -import scala.tools.testing.AssertUtil._ +import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ -import AsmUtils._ -import BackendReporting._ +@RunWith(classOf[JUnit4]) +class InlineWarningTest extends BytecodeTesting { + def optCp = "-Yopt:l:classpath" + override def compilerArgs = s"$optCp -Yopt-warnings" -import scala.collection.JavaConverters._ -import scala.tools.testing.ClearAfterClass + import compiler._ -@RunWith(classOf[JUnit4]) -class InlineWarningTest extends ClearAfterClass { - val argsNoWarn = "-Yopt:l:classpath" - val args = argsNoWarn + " -Yopt-warnings" - val compiler = cached("compiler", () => newCompiler(extraArgs = args)) - val compilerWarnAll = cached("compilerWarnAll", () => newCompiler(extraArgs = argsNoWarn + " -Yopt-warnings:_")) - - def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false, compiler: Global = compiler): List[ClassNode] = { - compileClasses(compiler)(scalaCode, javaCode, allowMessage) - } + val compilerWarnAll = cached("compilerWarnAll", () => newCompiler(extraArgs = s"$optCp -Yopt-warnings:_")) @Test def nonFinal(): Unit = { @@ -107,10 +87,10 @@ class InlineWarningTest extends ClearAfterClass { assert(c == 1, c) // no warnings here - compileClasses(newCompiler(extraArgs = argsNoWarn + " -Yopt-warnings:none"))(scalaCode, List((javaCode, "A.java"))) + newCompiler(extraArgs = s"$optCp -Yopt-warnings:none").compile(scalaCode, List((javaCode, "A.java"))) c = 0 - compileClasses(newCompiler(extraArgs = argsNoWarn + " -Yopt-warnings:no-inline-mixed"))(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.exists(i.msg contains _)}) + newCompiler(extraArgs = s"$optCp -Yopt-warnings:no-inline-mixed").compile(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.exists(i.msg contains _)}) assert(c == 2, c) } @@ -164,7 +144,7 @@ class InlineWarningTest extends ClearAfterClass { |that would cause an IllegalAccessError when inlined into class N""".stripMargin var c = 0 - compile(code, compiler = compilerWarnAll, allowMessage = i => { c += 1; i.msg contains warn }) + compilerWarnAll.compile(code, allowMessage = i => { c += 1; i.msg contains warn }) assert(c == 1, c) } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala 
b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala index e0b1d758f7f9..c2ada8afec1f 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala @@ -2,27 +2,22 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ - -import scala.tools.asm.tree._ -import scala.tools.testing.AssertUtil._ - -import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ -import AsmUtils._ import scala.collection.JavaConverters._ -import scala.tools.testing.ClearAfterClass +import scala.tools.asm.Opcodes._ +import scala.tools.asm.tree._ +import scala.tools.nsc.backend.jvm.AsmUtils._ +import scala.tools.testing.BytecodeTesting @RunWith(classOf[JUnit4]) -class InlinerIllegalAccessTest extends ClearAfterClass { - val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:none")) - import compiler.genBCode.bTypes._ +class InlinerIllegalAccessTest extends BytecodeTesting { + override def compilerArgs = "-Yopt:l:none" + + import compiler._ + import global.genBCode.bTypes._ def addToRepo(cls: List[ClassNode]): Unit = for (c <- cls) byteCodeRepository.add(c, ByteCodeRepository.Classfile) def assertEmpty(ins: Option[AbstractInsnNode]) = for (i <- ins) @@ -44,7 +39,7 @@ class InlinerIllegalAccessTest extends ClearAfterClass { |} """.stripMargin - val allClasses = compileClasses(compiler)(code) + val allClasses = compileClasses(code) val List(cClass, dClass, eClass) = allClasses assert(cClass.name == "a/C" && dClass.name == "a/D" && eClass.name == "b/E", s"${cClass.name}, ${dClass.name}, ${eClass.name}") addToRepo(allClasses) // they are not on the compiler's classpath, so we add them manually to the code repo @@ -120,7 +115,7 @@ class InlinerIllegalAccessTest extends ClearAfterClass { |} """.stripMargin - val allClasses = compileClasses(compiler)(code) + val allClasses = compileClasses(code) val List(cCl, dCl, eCl, fCl, gCl, hCl, iCl) = allClasses addToRepo(allClasses) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala index 748eff88eafa..8a44f12045c1 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala @@ -2,18 +2,11 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ -import AsmUtils._ - -import scala.collection.JavaConverters._ object InlinerSeparateCompilationTest { val args = "-Yopt:l:classpath" diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index 52ee118a9461..4db7695fddd3 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -2,48 +2,44 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Assert._ +import org.junit.Test 
import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test + +import scala.collection.JavaConverters._ import scala.collection.generic.Clearable import scala.tools.asm.Opcodes._ -import org.junit.Assert._ - import scala.tools.asm.tree._ +import scala.tools.nsc.backend.jvm.BackendReporting._ import scala.tools.nsc.reporters.StoreReporter - +import scala.tools.partest.ASMConverters._ +import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ -import AsmUtils._ - -import BackendReporting._ - -import scala.collection.JavaConverters._ -import scala.tools.testing.ClearAfterClass @RunWith(classOf[JUnit4]) -class InlinerTest extends ClearAfterClass { - val args = "-Yopt:l:classpath -Yopt-warnings" - val compiler = cached("compiler", () => newCompiler(extraArgs = args)) +class InlinerTest extends BytecodeTesting { + override def compilerArgs = "-Yopt:l:classpath -Yopt-warnings" + val inlineOnlyCompiler = cached("inlineOnlyCompiler", () => newCompiler(extraArgs = "-Yopt:inline-project")) - import compiler.genBCode.bTypes + + import compiler._ + import global.genBCode.bTypes // allows inspecting the caches after a compilation run def notPerRun: List[Clearable] = List( bTypes.classBTypeFromInternalName, bTypes.byteCodeRepository.compilingClasses, bTypes.byteCodeRepository.parsedClasses, bTypes.callGraph.callsites) - notPerRun foreach compiler.perRunCaches.unrecordCache + notPerRun foreach global.perRunCaches.unrecordCache - import compiler.genBCode.bTypes._ - import compiler.genBCode.bTypes.backendUtils._ + import global.genBCode.bTypes.{byteCodeRepository, callGraph, inliner, inlinerHeuristics} import inlinerHeuristics._ def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = { notPerRun.foreach(_.clear()) - compileClasses(compiler)(scalaCode, javaCode, allowMessage) + compileClasses(scalaCode, javaCode, allowMessage) // Use the class nodes stored in the byteCodeRepository. The ones returned by compileClasses are not the same, // these are created new from the classfile byte array. They are completely separate instances which cannot // be used to look up methods / callsites in the callGraph hash maps for example. 
@@ -138,7 +134,7 @@ class InlinerTest extends ClearAfterClass { assertSameCode(convertMethod(g), gBeforeLocalOpt) - compiler.genBCode.bTypes.localOpt.methodOptimizations(g, "C") + global.genBCode.bTypes.localOpt.methodOptimizations(g, "C") assertSameCode(convertMethod(g), invokeQQQ :+ Op(ATHROW)) } @@ -380,7 +376,7 @@ class InlinerTest extends ClearAfterClass { """.stripMargin // use a compiler without local optimizations (cleanups) - val List(c) = compileClasses(inlineOnlyCompiler)(code) + val List(c) = inlineOnlyCompiler.compileClasses(code) val ms @ List(f1, f2, g1, g2) = c.methods.asScala.filter(_.name.length == 2).toList // stack height at callsite of f1 is 1, so max of g1 after inlining is max of f1 + 1 @@ -829,7 +825,7 @@ class InlinerTest extends ClearAfterClass { var c = 0 - compileClasses(newCompiler(extraArgs = args + " -Yopt-warnings:_"))( + newCompiler(extraArgs = compilerArgs + " -Yopt-warnings:_").compileClasses( scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; i.msg contains warn}) @@ -891,7 +887,7 @@ class InlinerTest extends ClearAfterClass { | def t = System.arraycopy(null, 0, null, 0, 0) |} """.stripMargin - val List(c) = compileClasses(newCompiler(extraArgs = args + " -Yopt-inline-heuristics:everything"))(code) + val List(c) = newCompiler(extraArgs = compilerArgs + " -Yopt-inline-heuristics:everything").compileClasses(code) assertInvoke(getSingleMethod(c, "t"), "java/lang/System", "arraycopy") } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala index 1ceaaf7f69de..3867f101451d 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala @@ -2,25 +2,23 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ +import scala.collection.JavaConverters._ +import scala.tools.asm.Opcodes._ import scala.tools.asm.tree.ClassNode import scala.tools.nsc.backend.jvm.AsmUtils._ -import scala.tools.testing.AssertUtil._ - +import scala.tools.partest.ASMConverters._ +import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ -import scala.tools.testing.ClearAfterClass -import scala.collection.JavaConverters._ @RunWith(classOf[JUnit4]) -class MethodLevelOptsTest extends ClearAfterClass { - val methodOptCompiler = cached("methodOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:method")) +class MethodLevelOptsTest extends BytecodeTesting { + override def compilerArgs = "-Yopt:l:method" + import compiler._ def wrapInDefault(code: Instruction*) = List(Label(0), LineNumber(1, Label(0))) ::: code.toList ::: List(Label(1)) @@ -30,14 +28,14 @@ class MethodLevelOptsTest extends ClearAfterClass { def eliminateEmptyTry(): Unit = { val code = "def f = { try {} catch { case _: Throwable => 0; () }; 1 }" val warn = "a pure expression does nothing in statement position" - assertSameCode(singleMethodInstructions(methodOptCompiler)(code, allowMessage = _.msg contains warn), wrapInDefault(Op(ICONST_1), Op(IRETURN))) + assertSameCode(singleMethodInstructions(code, allowMessage = _.msg contains warn), wrapInDefault(Op(ICONST_1), Op(IRETURN))) } @Test def eliminateLoadBoxedUnit(): Unit = { // the 
compiler inserts a boxed into the try block. it's therefore non-empty (and live) and not eliminated. val code = "def f = { try {} catch { case _: Throwable => 0 }; 1 }" - val m = singleMethod(methodOptCompiler)(code) + val m = singleMethod(code) assertTrue(m.handlers.length == 0) assertSameCode(m, List(Op(ICONST_1), Op(IRETURN))) } @@ -46,7 +44,7 @@ class MethodLevelOptsTest extends ClearAfterClass { def inlineThrowInCatchNotTry(): Unit = { // the try block does not contain the `ATHROW` instruction, but in the catch block, `ATHROW` is inlined val code = "def f(e: Exception) = throw { try e catch { case _: Throwable => e } }" - val m = singleMethod(methodOptCompiler)(code) + val m = singleMethod(code) assertHandlerLabelPostions(m.handlers.head, m.instructions, 0, 3, 5) assertSameCode(m.instructions, wrapInDefault(VarOp(ALOAD, 1), Label(3), Op(ATHROW), Label(5), FrameEntry(4, List(), List("java/lang/Throwable")), Op(POP), VarOp(ALOAD, 1), Op(ATHROW)) @@ -57,7 +55,7 @@ class MethodLevelOptsTest extends ClearAfterClass { def inlineReturnInCatchNotTry(): Unit = { val code = "def f: Int = return { try 1 catch { case _: Throwable => 2 } }" // cannot inline the IRETURN into the try block (because RETURN may throw IllegalMonitorState) - val m = singleMethod(methodOptCompiler)(code) + val m = singleMethod(code) assertHandlerLabelPostions(m.handlers.head, m.instructions, 0, 3, 5) assertSameCode(m.instructions, wrapInDefault(Op(ICONST_1), Label(3), Op(IRETURN), Label(5), FrameEntry(4, List(), List("java/lang/Throwable")), Op(POP), Op(ICONST_2), Op(IRETURN))) @@ -79,7 +77,7 @@ class MethodLevelOptsTest extends ClearAfterClass { | println(x) | } """.stripMargin - val m = singleMethod(methodOptCompiler)(code) + val m = singleMethod(code) assertTrue(m.handlers.isEmpty) assertSameCode(m, List(Op(ICONST_3), Op(IRETURN))) } @@ -99,7 +97,7 @@ class MethodLevelOptsTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) assertSameCode(getSingleMethod(c, "t"), List( Op(ACONST_NULL), Invoke(INVOKEVIRTUAL, "java/lang/Object", "toString", "()Ljava/lang/String;", false), Op(ARETURN))) } @@ -116,7 +114,7 @@ class MethodLevelOptsTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) assertSameCode( getSingleMethod(c, "t"), List(Ldc(LDC, "c"), Op(ARETURN))) } @@ -136,7 +134,7 @@ class MethodLevelOptsTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) assertSameCode(getSingleMethod(c, "t"), List( Ldc(LDC, "el"), VarOp(ASTORE, 1), @@ -160,7 +158,7 @@ class MethodLevelOptsTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) assertSameCode(getSingleMethod(c, "t"), List( IntOp(BIPUSH, 23), IntOp(NEWARRAY, 5), Op(POP), VarOp(ILOAD, 1), VarOp(ILOAD, 2), Op(IADD), Op(IRETURN))) } @@ -175,7 +173,7 @@ class MethodLevelOptsTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) assertSameCode(getSingleMethod(c, "t"), List( TypeOp(NEW, "java/lang/Integer"), Ldc(LDC, "nono"), Invoke(INVOKESPECIAL, "java/lang/Integer", "", "(Ljava/lang/String;)V", false), VarOp(ILOAD, 1), VarOp(ILOAD, 2), Op(IADD), Op(IRETURN))) @@ -201,7 +199,7 @@ class MethodLevelOptsTest extends 
ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) assertSameCode(getSingleMethod(c, "t"), List(Op(ICONST_0), Op(IRETURN))) } @@ -217,7 +215,7 @@ class MethodLevelOptsTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) assertSameCode(getSingleMethod(c, "t"), List( IntOp(BIPUSH, 30), VarOp(ISTORE, 3), // no constant propagation, so we keep the store (and load below) of a const VarOp(ILOAD, 1), @@ -238,7 +236,7 @@ class MethodLevelOptsTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) val t = getSingleMethod(c, "t") assert(!t.instructions.exists(_.opcode == INVOKEDYNAMIC), t) } @@ -319,7 +317,7 @@ class MethodLevelOptsTest extends ClearAfterClass { |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) assertNoInvoke(getSingleMethod(c, "t1")) assertNoInvoke(getSingleMethod(c, "t2")) @@ -395,7 +393,7 @@ class MethodLevelOptsTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) assertSameSummary(getSingleMethod(c, "t1"), List(ICONST_0, IRETURN)) assertNoInvoke(getSingleMethod(c, "t2")) assertSameSummary(getSingleMethod(c, "t3"), List(LDC, LDC, LADD, LRETURN)) @@ -459,7 +457,7 @@ class MethodLevelOptsTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) assertNoInvoke(getSingleMethod(c, "t1")) assertSameSummary(getSingleMethod(c, "t2"), List(ICONST_1, ICONST_3, IADD, IRETURN)) assertSameSummary(getSingleMethod(c, "t3"), List(ICONST_3, ICONST_4, IADD, IRETURN)) @@ -524,7 +522,7 @@ class MethodLevelOptsTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) assertSameSummary(getSingleMethod(c, "t1"), List(NEW, DUP, "", ARETURN)) assertSameCode(getSingleMethod(c, "t2"), List(Op(LCONST_0), Op(LRETURN))) assertSameCode(getSingleMethod(c, "t3"), List(Op(ICONST_1), Op(IRETURN))) @@ -544,7 +542,7 @@ class MethodLevelOptsTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) assertSameCode( getSingleMethod(c, "t"), List( VarOp(ALOAD, 1), Jump(IFNULL, Label(6)), Op(ICONST_1), Op(IRETURN), Label(6), Op(ICONST_0), Op(IRETURN))) @@ -615,7 +613,7 @@ class MethodLevelOptsTest extends ClearAfterClass { |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) def stores(m: String) = getSingleMethod(c, m).instructions.filter(_.opcode == ASTORE) assertEquals(locals(c, "t1"), List(("this",0), ("kept1",1), ("result",2))) @@ -683,7 +681,7 @@ class MethodLevelOptsTest extends ClearAfterClass { |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) assertEquals(locals(c, "t1"), List(("this", 0), ("x", 1))) assertEquals(locals(c, "t2"), List(("this", 0), ("x", 1))) @@ -711,7 +709,7 @@ class MethodLevelOptsTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) val t = getSingleMethod(c, "t") assertEquals(t.handlers, Nil) 
assertEquals(locals(c, "t"), List(("this", 0))) @@ -729,7 +727,7 @@ class MethodLevelOptsTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) assertNoInvoke(getSingleMethod(c, "compare")) } @@ -743,7 +741,7 @@ class MethodLevelOptsTest extends ClearAfterClass { | } |} """.stripMargin - val List(c) = compileClasses(methodOptCompiler)(code) + val List(c) = compileClasses(code) assertSameSummary(getSingleMethod(c, "t"), List( BIPUSH, ILOAD, IF_ICMPNE, diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala index ba6bdcf658b5..5bd285f97f7d 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala @@ -2,23 +2,20 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ -import scala.tools.testing.BytecodeTesting._ -import scala.tools.asm.tree.ClassNode -import scala.tools.nsc.backend.jvm.BTypes.{MethodInlineInfo, InlineInfo} -import scala.tools.partest.ASMConverters -import ASMConverters._ import scala.collection.JavaConverters._ -import scala.tools.testing.ClearAfterClass +import scala.tools.asm.tree.ClassNode +import scala.tools.nsc.backend.jvm.BTypes.{InlineInfo, MethodInlineInfo} +import scala.tools.testing.BytecodeTesting @RunWith(classOf[JUnit4]) -class ScalaInlineInfoTest extends ClearAfterClass { - val compiler = cached("compiler", () => newCompiler(extraArgs = "-Yopt:l:none")) +class ScalaInlineInfoTest extends BytecodeTesting { + override def compilerArgs = "-Yopt:l:none" + import compiler._ def inlineInfo(c: ClassNode): InlineInfo = c.attrs.asScala.collect({ case a: InlineInfoAttribute => a.inlineInfo }).head @@ -72,7 +69,7 @@ class ScalaInlineInfoTest extends ClearAfterClass { |} """.stripMargin - val cs @ List(c, t, tl, to) = compileClasses(compiler)(code) + val cs @ List(c, t, tl, to) = compileClasses(code) val infoT = inlineInfo(t) val expectT = InlineInfo ( false, // final class @@ -149,7 +146,7 @@ class ScalaInlineInfoTest extends ClearAfterClass { | def nullary: Int |} """.stripMargin - val cs = compileClasses(compiler)(code) + val cs = compileClasses(code) val sams = cs.map(c => (c.name, inlineInfo(c).sam)) assertEquals(sams, List( @@ -165,7 +162,7 @@ class ScalaInlineInfoTest extends ClearAfterClass { @Test def lzyComputeInlineInfo(): Unit = { val code = "class C { object O }" - val List(c, om) = compileClasses(compiler)(code) + val List(c, om) = compileClasses(code) val infoC = inlineInfo(c) val expected = Map( "()V" -> MethodInlineInfo(false,false,false), diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala index 0133fc9dce2a..992a0e541bbc 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala @@ -2,15 +2,15 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ -import 
scala.tools.testing.BytecodeTesting._ +import scala.tools.asm.Opcodes._ import scala.tools.partest.ASMConverters -import ASMConverters._ +import scala.tools.partest.ASMConverters._ +import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class SimplifyJumpsTest { diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala index ca095b8a5177..99a662b8977a 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala @@ -2,17 +2,15 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ +import scala.tools.asm.Opcodes._ +import scala.tools.partest.ASMConverters._ import scala.tools.testing.AssertUtil._ - import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ import scala.tools.testing.ClearAfterClass @RunWith(classOf[JUnit4]) @@ -20,12 +18,12 @@ class UnreachableCodeTest extends ClearAfterClass { // jvm-1.6 enables emitting stack map frames, which impacts the code generation wrt dead basic blocks, // see comment in BCodeBodyBuilder val methodOptCompiler = cached("methodOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:method")) - val dceCompiler = cached("dceCompiler", () => newCompiler(extraArgs = "-Yopt:unreachable-code")) - val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:none")) + val dceCompiler = cached("dceCompiler", () => newCompiler(extraArgs = "-Yopt:unreachable-code")) + val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:none")) def assertEliminateDead(code: (Instruction, Boolean)*): Unit = { val method = genMethod()(code.map(_._1): _*) - dceCompiler.genBCode.bTypes.localOpt.removeUnreachableCodeImpl(method, "C") + dceCompiler.global.genBCode.bTypes.localOpt.removeUnreachableCodeImpl(method, "C") val nonEliminated = instructionsFromMethod(method) val expectedLive = code.filter(_._2).map(_._1).toList assertSameCode(nonEliminated, expectedLive) @@ -112,10 +110,10 @@ class UnreachableCodeTest extends ClearAfterClass { @Test def basicEliminationCompiler(): Unit = { val code = "def f: Int = { return 1; 2 }" - val withDce = singleMethodInstructions(dceCompiler)(code) + val withDce = dceCompiler.singleMethodInstructions(code) assertSameCode(withDce.dropNonOp, List(Op(ICONST_1), Op(IRETURN))) - val noDce = singleMethodInstructions(noOptCompiler)(code) + val noDce = noOptCompiler.singleMethodInstructions(code) // The emitted code is ICONST_1, IRETURN, ICONST_2, IRETURN. The latter two are dead. 
// @@ -141,23 +139,23 @@ class UnreachableCodeTest extends ClearAfterClass { def wrapInDefault(code: Instruction*) = List(Label(0), LineNumber(1, Label(0))) ::: code.toList ::: List(Label(1)) val code = "def f: Int = { return 0; try { 1 } catch { case _: Exception => 2 } }" - val m = singleMethod(dceCompiler)(code) + val m = dceCompiler.singleMethod(code) assertTrue(m.handlers.isEmpty) // redundant (if code is gone, handler is gone), but done once here for extra safety assertSameCode(m.instructions, wrapInDefault(Op(ICONST_0), Op(IRETURN))) val code2 = "def f: Unit = { try { } catch { case _: Exception => () }; () }" // requires fixpoint optimization of methodOptCompiler (dce alone is not enough): first the handler is eliminated, then it's dead catch block. - assertSameCode(singleMethodInstructions(methodOptCompiler)(code2), wrapInDefault(Op(RETURN))) + assertSameCode(methodOptCompiler.singleMethodInstructions(code2), wrapInDefault(Op(RETURN))) val code3 = "def f: Unit = { try { } catch { case _: Exception => try { } catch { case _: Exception => () } }; () }" - assertSameCode(singleMethodInstructions(methodOptCompiler)(code3), wrapInDefault(Op(RETURN))) + assertSameCode(methodOptCompiler.singleMethodInstructions(code3), wrapInDefault(Op(RETURN))) // this example requires two iterations to get rid of the outer handler. // the first iteration of DCE cannot remove the inner handler. then the inner (empty) handler is removed. // then the second iteration of DCE removes the inner catch block, and then the outer handler is removed. val code4 = "def f: Unit = { try { try { } catch { case _: Exception => () } } catch { case _: Exception => () }; () }" - assertSameCode(singleMethodInstructions(methodOptCompiler)(code4), wrapInDefault(Op(RETURN))) + assertSameCode(methodOptCompiler.singleMethodInstructions(code4), wrapInDefault(Op(RETURN))) } @Test // test the dce-testing tools @@ -174,7 +172,7 @@ class UnreachableCodeTest extends ClearAfterClass { } @Test - def bytecodeEquivalence: Unit = { + def bytecodeEquivalence(): Unit = { assertTrue(List(VarOp(ILOAD, 1)) === List(VarOp(ILOAD, 2))) assertTrue(List(VarOp(ILOAD, 1), VarOp(ISTORE, 1)) === @@ -216,7 +214,7 @@ class UnreachableCodeTest extends ClearAfterClass { | def t4 = cons(nt) |} """.stripMargin - val List(c) = compileClasses(noOptCompiler)(code) + val List(c) = noOptCompiler.compileClasses(code) assertSameSummary(getSingleMethod(c, "nl"), List(ACONST_NULL, ARETURN)) @@ -243,7 +241,7 @@ class UnreachableCodeTest extends ClearAfterClass { assertSameSummary(getSingleMethod(c, "t4"), List( ALOAD, ALOAD, "nt", ATHROW, NOP, NOP, NOP, ATHROW)) - val List(cDCE) = compileClasses(dceCompiler)(code) + val List(cDCE) = dceCompiler.compileClasses(code) assertSameSummary(getSingleMethod(cDCE, "t3"), List(ALOAD, NEW, DUP, LDC, "", ATHROW)) assertSameSummary(getSingleMethod(cDCE, "t4"), List(ALOAD, ALOAD, "nt", ATHROW)) } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala index 7ae946f581b5..303600aa70fd 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala @@ -2,21 +2,20 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ -import 
scala.collection.JavaConverters._ -import scala.tools.testing.BytecodeTesting._ +import scala.collection.JavaConverters._ import scala.tools.partest.ASMConverters -import ASMConverters._ -import scala.tools.testing.ClearAfterClass +import scala.tools.partest.ASMConverters._ +import scala.tools.testing.BytecodeTesting @RunWith(classOf[JUnit4]) -class UnusedLocalVariablesTest extends ClearAfterClass { - val dceCompiler = cached("dceCompiler", () => newCompiler(extraArgs = "-Yopt:unreachable-code")) +class UnusedLocalVariablesTest extends BytecodeTesting { + override def compilerArgs = "-Yopt:unreachable-code" + import compiler._ @Test def removeUnusedVar(): Unit = { @@ -49,7 +48,7 @@ class UnusedLocalVariablesTest extends ClearAfterClass { | } |} |""".stripMargin - val cls = compileClasses(dceCompiler)(code).head + val cls = compileClasses(code).head val m = convertMethod(cls.methods.asScala.toList.find(_.desc == "(I)V").get) assertTrue(m.localVars.length == 2) // this, a, but not y @@ -70,7 +69,7 @@ class UnusedLocalVariablesTest extends ClearAfterClass { |} """.stripMargin - val clss2 = compileClasses(dceCompiler)(code2) + val clss2 = compileClasses(code2) val cls2 = clss2.find(_.name == "C").get val companion2 = clss2.find(_.name == "C$").get @@ -82,7 +81,7 @@ class UnusedLocalVariablesTest extends ClearAfterClass { } def assertLocalVarCount(code: String, numVars: Int): Unit = { - assertTrue(singleMethod(dceCompiler)(code).localVars.length == numVars) + assertTrue(singleMethod(code).localVars.length == numVars) } } diff --git a/test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala b/test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala index d30f45817773..609f481721dd 100644 --- a/test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala +++ b/test/junit/scala/tools/nsc/transform/delambdafy/DelambdafyTest.scala @@ -1,15 +1,15 @@ package scala.tools.nsc.transform.delambdafy -import scala.reflect.io.Path.jfile2path -import scala.tools.testing.BytecodeTesting._ -import scala.tools.nsc.io.AbstractFile -import scala.tools.testing.TempDir - import org.junit.Assert.assertTrue import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 +import scala.reflect.io.Path.jfile2path +import scala.tools.nsc.io.AbstractFile +import scala.tools.testing.BytecodeTesting._ +import scala.tools.testing.TempDir + @RunWith(classOf[JUnit4]) class DelambdafyTest { def compileToMultipleOutputWithDelamdbafyMethod(): List[(String, Array[Byte])] = { @@ -53,9 +53,9 @@ object Delambdafy { val extraArgs = "-Ydelambdafy:method" val argsWithOutDir = extraArgs + s" -d $outDirPath -cp $outDirPath" val compiler = newCompilerWithoutVirtualOutdir(extraArgs = argsWithOutDir) - compiler.settings.outputDirs.add(srcFile.file, outDir) + compiler.global.settings.outputDirs.add(srcFile.file, outDir) - new compiler.Run().compileSources(List(srcFile)) + new compiler.global.Run().compileSources(List(srcFile)) val classfiles = getGeneratedClassfiles(outDir) outDir.delete() diff --git a/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala b/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala index 99975abc50b2..cc6d1d748356 100644 --- a/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala +++ b/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala @@ -1,25 +1,21 @@ package scala.tools.nsc package transform.patmat +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import 
org.junit.Test -import scala.tools.asm.Opcodes._ -import org.junit.Assert._ +import scala.tools.asm.Opcodes._ import scala.tools.nsc.backend.jvm.AsmUtils._ -import scala.tools.testing.AssertUtil._ - +import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ -import scala.tools.testing.ClearAfterClass @RunWith(classOf[JUnit4]) -class PatmatBytecodeTest extends ClearAfterClass { - val compiler = cached("compiler", () => newCompiler()) +class PatmatBytecodeTest extends BytecodeTesting { val optCompiler = cached("optCompiler", () => newCompiler(extraArgs = "-Yopt:l:project")) + import compiler._ + @Test def t6956(): Unit = { val code = @@ -42,7 +38,7 @@ class PatmatBytecodeTest extends ClearAfterClass { |} """.stripMargin - val List(c) = compileClasses(compiler)(code) + val List(c) = compileClasses(code) assert(getSingleMethod(c, "s1").instructions.count(_.opcode == TABLESWITCH) == 1, textify(c)) assert(getSingleMethod(c, "s2").instructions.count(_.opcode == TABLESWITCH) == 1, textify(c)) } @@ -70,7 +66,7 @@ class PatmatBytecodeTest extends ClearAfterClass { |} """.stripMargin - val List(c) = compileClasses(compiler)(code) + val List(c) = compileClasses(code) assert(getSingleMethod(c, "s1").instructions.count(_.opcode == TABLESWITCH) == 1, textify(c)) assert(getSingleMethod(c, "s2").instructions.count(_.opcode == TABLESWITCH) == 1, textify(c)) } @@ -85,7 +81,7 @@ class PatmatBytecodeTest extends ClearAfterClass { | } |} """.stripMargin - val c = compileClasses(optCompiler)(code).head + val c = optCompiler.compileClasses(code).head assertSameSummary(getSingleMethod(c, "a"), List( NEW, DUP, ICONST_1, LDC, "", @@ -102,7 +98,7 @@ class PatmatBytecodeTest extends ClearAfterClass { | } |} """.stripMargin - val c = compileClasses(optCompiler)(code).head + val c = optCompiler.compileClasses(code).head assert(!getSingleMethod(c, "a").instructions.exists(i => i.opcode == IFNULL || i.opcode == IFNONNULL), textify(findAsmMethod(c, "a"))) } @@ -116,7 +112,7 @@ class PatmatBytecodeTest extends ClearAfterClass { | } |} """.stripMargin - val c = compileClasses(optCompiler)(code).head + val c = optCompiler.compileClasses(code).head assertSameSummary(getSingleMethod(c, "a"), List( NEW, DUP, ICONST_1, "boxToInteger", LDC, "", ASTORE /*1*/, ALOAD /*1*/, "y", ASTORE /*2*/, @@ -137,7 +133,7 @@ class PatmatBytecodeTest extends ClearAfterClass { | } |} """.stripMargin - val c = compileClasses(optCompiler)(code, allowMessage = _.msg.contains("may not be exhaustive")).head + val c = optCompiler.compileClasses(code, allowMessage = _.msg.contains("may not be exhaustive")).head val expected = List( ALOAD /*1*/ , INSTANCEOF /*::*/ , IFEQ /*A*/ , @@ -169,7 +165,7 @@ class PatmatBytecodeTest extends ClearAfterClass { | def t9 = { val C(a, _) = C("hi", 23); a.toString } |} """.stripMargin - val List(c, cMod) = compileClasses(optCompiler)(code) + val List(c, cMod) = optCompiler.compileClasses(code) assertSameSummary(getSingleMethod(c, "t1"), List(ICONST_1, ICONST_2, IADD, IRETURN)) assertSameSummary(getSingleMethod(c, "t2"), List(ICONST_1, IRETURN)) assertInvokedMethods(getSingleMethod(c, "t3"), List("C.tplCall", "scala/Tuple2._1", "scala/Tuple2._2$mcI$sp", "scala/MatchError.", "java/lang/String.length")) diff --git a/test/junit/scala/tools/testing/BytecodeTesting.scala b/test/junit/scala/tools/testing/BytecodeTesting.scala index 21b1ce2e77f3..d6f8dbc21937 100644 --- a/test/junit/scala/tools/testing/BytecodeTesting.scala +++ 
b/test/junit/scala/tools/testing/BytecodeTesting.scala @@ -2,30 +2,99 @@ package scala.tools.testing import org.junit.Assert._ +import scala.collection.JavaConverters._ import scala.collection.mutable.ListBuffer import scala.reflect.internal.util.BatchSourceFile import scala.reflect.io.VirtualDirectory import scala.tools.asm.Opcodes import scala.tools.asm.tree.{AbstractInsnNode, ClassNode, MethodNode} import scala.tools.cmd.CommandLineParser +import scala.tools.nsc.backend.jvm.AsmUtils +import scala.tools.nsc.backend.jvm.AsmUtils._ import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.StoreReporter import scala.tools.nsc.{Global, Settings} -import scala.tools.partest.ASMConverters -import scala.collection.JavaConverters._ -import scala.tools.nsc.backend.jvm.AsmUtils +import scala.tools.partest.ASMConverters._ + +trait BytecodeTesting extends ClearAfterClass { + def compilerArgs = "" // to be overridden + val compiler = cached("compiler", () => BytecodeTesting.newCompiler(extraArgs = compilerArgs)) +} + +class Compiler(val global: Global) { + import BytecodeTesting._ + + def resetOutput(): Unit = { + global.settings.outputDirs.setSingleOutput(new VirtualDirectory("(memory)", None)) + } + + private def newRun: global.Run = { + global.reporter.reset() + resetOutput() + new global.Run() + } + + private def reporter = global.reporter.asInstanceOf[StoreReporter] + + def checkReport(allowMessage: StoreReporter#Info => Boolean = _ => false): Unit = { + val disallowed = reporter.infos.toList.filter(!allowMessage(_)) // toList prevents an infer-non-wildcard-existential warning. + if (disallowed.nonEmpty) { + val msg = disallowed.mkString("\n") + assert(false, "The compiler issued non-allowed warnings or errors:\n" + msg) + } + } + + def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[(String, Array[Byte])] = { + val run = newRun + run.compileSources(makeSourceFile(scalaCode, "unitTestSource.scala") :: javaCode.map(p => makeSourceFile(p._1, p._2))) + checkReport(allowMessage) + getGeneratedClassfiles(global.settings.outputDirs.getSingleOutput.get) + } + + def compileTransformed(scalaCode: String, javaCode: List[(String, String)] = Nil, beforeBackend: global.Tree => global.Tree): List[(String, Array[Byte])] = { + import global._ + settings.stopBefore.value = "jvm" :: Nil + val run = newRun + val scalaUnit = newCompilationUnit(scalaCode, "unitTestSource.scala") + val javaUnits = javaCode.map(p => newCompilationUnit(p._1, p._2)) + val units = scalaUnit :: javaUnits + run.compileUnits(units, run.parserPhase) + settings.stopBefore.value = Nil + scalaUnit.body = beforeBackend(scalaUnit.body) + checkReport(_ => false) + val run1 = newRun + run1.compileUnits(units, run1.phaseNamed("jvm")) + checkReport(_ => false) + getGeneratedClassfiles(settings.outputDirs.getSingleOutput.get) + } + + def compileClasses(code: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = { + readAsmClasses(compile(code, javaCode, allowMessage)) + } + + def compileMethods(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[MethodNode] = { + compileClasses(s"class C { $code }", allowMessage = allowMessage).head.methods.asScala.toList.filterNot(_.name == "") + } + + def singleMethodInstructions(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[Instruction] = { + val List(m) = compileMethods(code, allowMessage = 
allowMessage) + instructionsFromMethod(m) + } + + def singleMethod(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): Method = { + val List(m) = compileMethods(code, allowMessage = allowMessage) + convertMethod(m) + } +} object BytecodeTesting { - import AsmUtils._ - import ASMConverters._ - - def genMethod( flags: Int = Opcodes.ACC_PUBLIC, - name: String = "m", - descriptor: String = "()V", - genericSignature: String = null, - throwsExceptions: Array[String] = null, - handlers: List[ExceptionHandler] = Nil, - localVars: List[LocalVariable] = Nil)(body: Instruction*): MethodNode = { + def genMethod(flags: Int = Opcodes.ACC_PUBLIC, + name: String = "m", + descriptor: String = "()V", + genericSignature: String = null, + throwsExceptions: Array[String] = null, + handlers: List[ExceptionHandler] = Nil, + localVars: List[LocalVariable] = Nil)(body: Instruction*): MethodNode = { val node = new MethodNode(flags, name, descriptor, genericSignature, throwsExceptions) applyToMethod(node, Method(body.toList, handlers, localVars)) node @@ -38,33 +107,21 @@ object BytecodeTesting { cls } - private def resetOutput(compiler: Global): Unit = { - compiler.settings.outputDirs.setSingleOutput(new VirtualDirectory("(memory)", None)) - } - - def newCompiler(defaultArgs: String = "-usejavacp", extraArgs: String = ""): Global = { + def newCompiler(defaultArgs: String = "-usejavacp", extraArgs: String = ""): Compiler = { val compiler = newCompilerWithoutVirtualOutdir(defaultArgs, extraArgs) - resetOutput(compiler) + compiler.resetOutput() compiler } - def newCompilerWithoutVirtualOutdir(defaultArgs: String = "-usejavacp", extraArgs: String = ""): Global = { + def newCompilerWithoutVirtualOutdir(defaultArgs: String = "-usejavacp", extraArgs: String = ""): Compiler = { def showError(s: String) = throw new Exception(s) val settings = new Settings(showError) val args = (CommandLineParser tokenize defaultArgs) ++ (CommandLineParser tokenize extraArgs) val (_, nonSettingsArgs) = settings.processArguments(args, processAll = true) if (nonSettingsArgs.nonEmpty) showError("invalid compiler flags: " + nonSettingsArgs.mkString(" ")) - new Global(settings, new StoreReporter) - } - - def newRun(compiler: Global): compiler.Run = { - compiler.reporter.reset() - resetOutput(compiler) - new compiler.Run() + new Compiler(new Global(settings, new StoreReporter)) } - def reporter(compiler: Global) = compiler.reporter.asInstanceOf[StoreReporter] - def makeSourceFile(code: String, filename: String): BatchSourceFile = new BatchSourceFile(filename, code) def getGeneratedClassfiles(outDir: AbstractFile): List[(String, Array[Byte])] = { @@ -79,38 +136,6 @@ object BytecodeTesting { files(outDir) } - def checkReport(compiler: Global, allowMessage: StoreReporter#Info => Boolean = _ => false): Unit = { - val disallowed = reporter(compiler).infos.toList.filter(!allowMessage(_)) // toList prevents an infer-non-wildcard-existential warning. 
- if (disallowed.nonEmpty) { - val msg = disallowed.mkString("\n") - assert(false, "The compiler issued non-allowed warnings or errors:\n" + msg) - } - } - - def compile(compiler: Global)(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[(String, Array[Byte])] = { - val run = newRun(compiler) - run.compileSources(makeSourceFile(scalaCode, "unitTestSource.scala") :: javaCode.map(p => makeSourceFile(p._1, p._2))) - checkReport(compiler, allowMessage) - getGeneratedClassfiles(compiler.settings.outputDirs.getSingleOutput.get) - } - - def compileTransformed(compiler: Global)(scalaCode: String, javaCode: List[(String, String)] = Nil, beforeBackend: compiler.Tree => compiler.Tree): List[(String, Array[Byte])] = { - compiler.settings.stopBefore.value = "jvm" :: Nil - val run = newRun(compiler) - import compiler._ - val scalaUnit = newCompilationUnit(scalaCode, "unitTestSource.scala") - val javaUnits = javaCode.map(p => newCompilationUnit(p._1, p._2)) - val units = scalaUnit :: javaUnits - run.compileUnits(units, run.parserPhase) - compiler.settings.stopBefore.value = Nil - scalaUnit.body = beforeBackend(scalaUnit.body) - checkReport(compiler, _ => false) - val run1 = newRun(compiler) - run1.compileUnits(units, run1.phaseNamed("jvm")) - checkReport(compiler, _ => false) - getGeneratedClassfiles(compiler.settings.outputDirs.getSingleOutput.get) - } - /** * Compile multiple Scala files separately into a single output directory. * @@ -127,8 +152,8 @@ object BytecodeTesting { for (code <- codes) { val compiler = newCompilerWithoutVirtualOutdir(extraArgs = argsWithOutDir) - new compiler.Run().compileSources(List(makeSourceFile(code, "unitTestSource.scala"))) - checkReport(compiler, allowMessage) + new compiler.global.Run().compileSources(List(makeSourceFile(code, "unitTestSource.scala"))) + compiler.checkReport(allowMessage) afterEach(outDir) } @@ -145,24 +170,6 @@ object BytecodeTesting { classfiles.map(p => AsmUtils.readClass(p._2)).sortBy(_.name) } - def compileClasses(compiler: Global)(code: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = { - readAsmClasses(compile(compiler)(code, javaCode, allowMessage)) - } - - def compileMethods(compiler: Global)(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[MethodNode] = { - compileClasses(compiler)(s"class C { $code }", allowMessage = allowMessage).head.methods.asScala.toList.filterNot(_.name == "") - } - - def singleMethodInstructions(compiler: Global)(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[Instruction] = { - val List(m) = compileMethods(compiler)(code, allowMessage = allowMessage) - instructionsFromMethod(m) - } - - def singleMethod(compiler: Global)(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): Method = { - val List(m) = compileMethods(compiler)(code, allowMessage = allowMessage) - convertMethod(m) - } - def assertSameCode(method: Method, expected: List[Instruction]): Unit = assertSameCode(method.instructions.dropNonOp, expected) def assertSameCode(actual: List[Instruction], expected: List[Instruction]): Unit = { assert(actual === expected, s"\nExpected: $expected\nActual : $actual") From d9ce4dc1eeb351a52e98c6c0fa1551a5cc3b87f5 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 20 May 2016 08:20:41 +0200 Subject: [PATCH 0058/2793] Better abstraction for JUnit run tests --- test/junit/scala/issues/RunTest.scala | 
16 +++++----------- test/junit/scala/tools/testing/RunTesting.scala | 17 +++++++++++++++++ 2 files changed, 22 insertions(+), 11 deletions(-) create mode 100644 test/junit/scala/tools/testing/RunTesting.scala diff --git a/test/junit/scala/issues/RunTest.scala b/test/junit/scala/issues/RunTest.scala index 3ebdc8a72ff6..0686d73d9bdf 100644 --- a/test/junit/scala/issues/RunTest.scala +++ b/test/junit/scala/issues/RunTest.scala @@ -1,13 +1,11 @@ package scala.issues +import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.{AfterClass, BeforeClass, Test} -import org.junit.Assert._ -import scala.reflect.runtime._ -import scala.tools.reflect.ToolBox -import scala.tools.testing.ClearAfterClass +import scala.tools.testing.RunTesting object RunTest { class VC(val x: Any) extends AnyVal @@ -15,12 +13,8 @@ object RunTest { } @RunWith(classOf[JUnit4]) -class RunTest extends ClearAfterClass { - val toolBox = cached("toolbox", () => universe.runtimeMirror(getClass.getClassLoader).mkToolBox()) - - def run[T](code: String): T = { - toolBox.eval(toolBox.parse(code)).asInstanceOf[T] - } +class RunTest extends RunTesting { + import runner._ @Test def classOfValueClassAlias(): Unit = { diff --git a/test/junit/scala/tools/testing/RunTesting.scala b/test/junit/scala/tools/testing/RunTesting.scala new file mode 100644 index 000000000000..1320db42302e --- /dev/null +++ b/test/junit/scala/tools/testing/RunTesting.scala @@ -0,0 +1,17 @@ +package scala.tools.testing + +import scala.reflect.runtime._ +import scala.tools.reflect.ToolBox + +trait RunTesting extends ClearAfterClass { + def compilerArgs = "" // to be overridden + val runner = cached("toolbox", () => Runner.make(compilerArgs)) +} + +class Runner(val toolBox: ToolBox[universe.type]) { + def run[T](code: String): T = toolBox.eval(toolBox.parse(code)).asInstanceOf[T] +} + +object Runner { + def make(compilerArgs: String) = new Runner(universe.runtimeMirror(getClass.getClassLoader).mkToolBox(options = compilerArgs)) +} From ba510abcdcf176c06ba93c8e9dc6398015877f5e Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 20 May 2016 12:22:06 +0200 Subject: [PATCH 0059/2793] Clean up bytecode testing methods. 
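A rough sketch of the resulting test style (the test class `ExampleTest` and the compiled code string are made up for illustration; the helper names `compilerArgs`, `compileClass`, and `getMethod` follow the updated tests in the hunks below, so treat the exact imports as approximate):

```scala
import org.junit.Test
import org.junit.runner.RunWith
import org.junit.runners.JUnit4

import scala.tools.asm.Opcodes._
import scala.tools.partest.ASMConverters._
import scala.tools.testing.BytecodeTesting
import scala.tools.testing.BytecodeTesting._

@RunWith(classOf[JUnit4])
class ExampleTest extends BytecodeTesting {
  // per-suite compiler flags instead of a manually cached compiler instance
  override def compilerArgs = "-Yopt:l:method"
  import compiler._

  @Test
  def constantMethod(): Unit = {
    // compileClass compiles a single class and returns its ClassNode
    val c = compileClass("class C { def f = 1 }")
    // getMethod looks up the method by name; assertSameCode compares its
    // (non-label, non-frame) instructions against the expected list
    assertSameCode(getMethod(c, "f"), List(Op(ICONST_1), Op(IRETURN)))
  }
}
```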
--- test/junit/scala/issues/BytecodeTest.scala | 84 +++--- .../scala/issues/OptimizedBytecodeTest.scala | 58 ++-- .../nsc/backend/jvm/DefaultMethodTest.scala | 2 +- .../nsc/backend/jvm/DirectCompileTest.scala | 16 +- .../nsc/backend/jvm/IndyLambdaTest.scala | 10 +- .../tools/nsc/backend/jvm/IndySammyTest.scala | 14 +- .../nsc/backend/jvm/StringConcatTest.scala | 4 +- .../jvm/analysis/NullnessAnalyzerTest.scala | 22 +- .../jvm/analysis/ProdConsAnalyzerTest.scala | 68 ++--- .../nsc/backend/jvm/opt/AnalyzerTest.scala | 8 +- .../nsc/backend/jvm/opt/CallGraphTest.scala | 10 +- .../jvm/opt/ClosureOptimizerTest.scala | 20 +- .../jvm/opt/CompactLocalVariablesTest.scala | 4 +- .../jvm/opt/EmptyExceptionHandlersTest.scala | 8 +- .../backend/jvm/opt/InlineWarningTest.scala | 18 +- .../opt/InlinerSeparateCompilationTest.scala | 14 +- .../nsc/backend/jvm/opt/InlinerTest.scala | 260 +++++++++--------- .../backend/jvm/opt/MethodLevelOptsTest.scala | 156 +++++------ .../backend/jvm/opt/UnreachableCodeTest.scala | 32 +-- .../jvm/opt/UnusedLocalVariablesTest.scala | 17 +- .../transform/patmat/PatmatBytecodeTest.scala | 48 ++-- .../scala/tools/testing/BytecodeTesting.scala | 85 ++++-- 22 files changed, 496 insertions(+), 462 deletions(-) diff --git a/test/junit/scala/issues/BytecodeTest.scala b/test/junit/scala/issues/BytecodeTest.scala index 3fd5e3a22229..0bb87a4ea6a2 100644 --- a/test/junit/scala/issues/BytecodeTest.scala +++ b/test/junit/scala/issues/BytecodeTest.scala @@ -36,10 +36,10 @@ class BytecodeTest extends BytecodeTesting { |} """.stripMargin - val List(c) = compileClasses(code) + val c = compileClass(code) - assertTrue(getSingleMethod(c, "f").instructions.count(_.isInstanceOf[TableSwitch]) == 1) - assertTrue(getSingleMethod(c, "g").instructions.count(_.isInstanceOf[LookupSwitch]) == 1) + assertTrue(getInstructions(c, "f").count(_.isInstanceOf[TableSwitch]) == 1) + assertTrue(getInstructions(c, "g").count(_.isInstanceOf[LookupSwitch]) == 1) } @Test @@ -99,7 +99,7 @@ class BytecodeTest extends BytecodeTesting { """.stripMargin val List(mirror, module) = compileClasses(code) - val unapplyLineNumbers = getSingleMethod(module, "unapply").instructions.filter(_.isInstanceOf[LineNumber]) + val unapplyLineNumbers = getInstructions(module, "unapply").filter(_.isInstanceOf[LineNumber]) assert(unapplyLineNumbers == List(LineNumber(2, Label(0))), unapplyLineNumbers) val expected = List( @@ -122,7 +122,7 @@ class BytecodeTest extends BytecodeTesting { Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false) ) - val mainIns = getSingleMethod(module, "main").instructions filter { + val mainIns = getInstructions(module, "main") filter { case _: LineNumber | _: Invoke | _: Jump => true case _ => false } @@ -144,24 +144,24 @@ class BytecodeTest extends BytecodeTesting { |} """.stripMargin - val List(c) = compileClasses(code) + val c = compileClass(code) // t1: no unnecessary GOTOs - assertSameCode(getSingleMethod(c, "t1"), List( + assertSameCode(getMethod(c, "t1"), List( VarOp(ILOAD, 1), Jump(IFEQ, Label(6)), Op(ICONST_1), Jump(GOTO, Label(9)), Label(6), Op(ICONST_2), Label(9), Op(IRETURN))) // t2: no unnecessary GOTOs - assertSameCode(getSingleMethod(c, "t2"), List( + assertSameCode(getMethod(c, "t2"), List( VarOp(ILOAD, 1), IntOp(SIPUSH, 393), Jump(IF_ICMPNE, Label(7)), Op(ICONST_1), Jump(GOTO, Label(10)), Label(7), Op(ICONST_2), Label(10), Op(IRETURN))) // t3: Array == is translated to reference equality, AnyRef == to null checks and equals - assertSameCode(getSingleMethod(c, "t3"), 
List( + assertSameCode(getMethod(c, "t3"), List( // Array == VarOp(ALOAD, 1), VarOp(ALOAD, 2), Jump(IF_ACMPEQ, Label(23)), // AnyRef == @@ -180,13 +180,13 @@ class BytecodeTest extends BytecodeTesting { Label(13), Op(IRETURN)) // t4: one side is known null, so just a null check on the other - assertSameCode(getSingleMethod(c, "t4"), t4t5) + assertSameCode(getMethod(c, "t4"), t4t5) // t5: one side known null, so just a null check on the other - assertSameCode(getSingleMethod(c, "t5"), t4t5) + assertSameCode(getMethod(c, "t5"), t4t5) // t6: no unnecessary GOTOs - assertSameCode(getSingleMethod(c, "t6"), List( + assertSameCode(getMethod(c, "t6"), List( VarOp(ILOAD, 1), IntOp(BIPUSH, 10), Jump(IF_ICMPNE, Label(7)), VarOp(ILOAD, 2), Jump(IFNE, Label(12)), Label(7), VarOp(ILOAD, 1), Op(ICONST_1), Jump(IF_ICMPEQ, Label(16)), @@ -195,10 +195,10 @@ class BytecodeTest extends BytecodeTesting { Label(19), Op(IRETURN))) // t7: universal equality - assertInvoke(getSingleMethod(c, "t7"), "scala/runtime/BoxesRunTime", "equals") + assertInvoke(getMethod(c, "t7"), "scala/runtime/BoxesRunTime", "equals") // t8: no null checks invoking equals on modules and constants - assertSameCode(getSingleMethod(c, "t8"), List( + assertSameCode(getMethod(c, "t8"), List( Field(GETSTATIC, "scala/collection/immutable/Nil$", "MODULE$", "Lscala/collection/immutable/Nil$;"), VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "java/lang/Object", "equals", "(Ljava/lang/Object;)Z", false), Jump(IFNE, Label(10)), Ldc(LDC, ""), VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "java/lang/Object", "equals", "(Ljava/lang/Object;)Z", false), Jump(IFNE, Label(14)), Label(10), Op(ICONST_1), Jump(GOTO, Label(17)), @@ -207,13 +207,11 @@ class BytecodeTest extends BytecodeTesting { } object forwarderTestUtils { - def findMethods(cls: ClassNode, name: String): List[Method] = cls.methods.iterator.asScala.find(_.name == name).map(convertMethod).toList - import language.implicitConversions implicit def s2c(s: Symbol)(implicit classes: Map[String, ClassNode]): ClassNode = classes(s.name) def checkForwarder(c: ClassNode, target: String) = { - val List(f) = findMethods(c, "f") + val List(f) = getMethods(c, "f") assertSameCode(f, List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, target, "f", "()I", false), Op(IRETURN))) } } @@ -273,7 +271,7 @@ class BytecodeTest extends BytecodeTesting { implicit val classes = compileClasses(code).map(c => (c.name, c)).toMap val noForwarder = List('C1, 'C2, 'C3, 'C4, 'C10, 'C11, 'C12, 'C13, 'C16, 'C17) - for (c <- noForwarder) assertEquals(findMethods(c, "f"), Nil) + for (c <- noForwarder) assertEquals(getMethods(c, "f"), Nil) checkForwarder('C5, "T3") checkForwarder('C6, "T4") @@ -282,10 +280,10 @@ class BytecodeTest extends BytecodeTesting { checkForwarder('C9, "T5") checkForwarder('C14, "T4") checkForwarder('C15, "T5") - assertSameSummary(getSingleMethod('C18, "f"), List(BIPUSH, IRETURN)) + assertSameSummary(getMethod('C18, "f"), List(BIPUSH, IRETURN)) checkForwarder('C19, "T7") - assertSameCode(getSingleMethod('C19, "T7$$super$f"), List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, "C18", "f", "()I", false), Op(IRETURN))) - assertInvoke(getSingleMethod('C20, "clone"), "T8", "clone") // mixin forwarder + assertSameCode(getMethod('C19, "T7$$super$f"), List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, "C18", "f", "()I", false), Op(IRETURN))) + assertInvoke(getMethod('C20, "clone"), "T8", "clone") // mixin forwarder } @Test @@ -297,7 +295,7 @@ class BytecodeTest extends BytecodeTesting { |class C extends T1 with T2 """.stripMargin val List(c, t1, t2) = 
compileClasses(code) - assertEquals(findMethods(c, "f"), Nil) + assertEquals(getMethods(c, "f"), Nil) } @Test @@ -331,7 +329,7 @@ class BytecodeTest extends BytecodeTesting { implicit val classes = compileClasses(code, List(j1, j2, j3, j4)).map(c => (c.name, c)).toMap val noForwarder = List('K1, 'K2, 'K3, 'K4, 'K5, 'K6, 'K7, 'K8, 'K9, 'K10, 'K11) - for (c <- noForwarder) assertEquals(findMethods(c, "f"), Nil) + for (c <- noForwarder) assertEquals(getMethods(c, "f"), Nil) checkForwarder('K12, "T2") } @@ -340,13 +338,13 @@ class BytecodeTest extends BytecodeTesting { def invocationReceivers(): Unit = { val List(c1, c2, t, u) = compileClasses(invocationReceiversTestCode.definitions("Object")) // mixin forwarder in C1 - assertSameCode(getSingleMethod(c1, "clone"), List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, "T", "clone", "()Ljava/lang/Object;", false), Op(ARETURN))) - assertInvoke(getSingleMethod(c1, "f1"), "T", "clone") - assertInvoke(getSingleMethod(c1, "f2"), "T", "clone") - assertInvoke(getSingleMethod(c1, "f3"), "C1", "clone") - assertInvoke(getSingleMethod(c2, "f1"), "T", "clone") - assertInvoke(getSingleMethod(c2, "f2"), "T", "clone") - assertInvoke(getSingleMethod(c2, "f3"), "C1", "clone") + assertSameCode(getMethod(c1, "clone"), List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, "T", "clone", "()Ljava/lang/Object;", false), Op(ARETURN))) + assertInvoke(getMethod(c1, "f1"), "T", "clone") + assertInvoke(getMethod(c1, "f2"), "T", "clone") + assertInvoke(getMethod(c1, "f3"), "C1", "clone") + assertInvoke(getMethod(c2, "f1"), "T", "clone") + assertInvoke(getMethod(c2, "f2"), "T", "clone") + assertInvoke(getMethod(c2, "f3"), "C1", "clone") val List(c1b, c2b, tb, ub) = compileClasses(invocationReceiversTestCode.definitions("String")) def ms(c: ClassNode, n: String) = c.methods.asScala.toList.filter(_.name == n) @@ -357,11 +355,11 @@ class BytecodeTest extends BytecodeTesting { assert((c1Clone.access | Opcodes.ACC_BRIDGE) != 0) assertSameCode(convertMethod(c1Clone), List(VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "C1", "clone", "()Ljava/lang/String;", false), Op(ARETURN))) - def iv(m: Method) = getSingleMethod(c1b, "f1").instructions.collect({case i: Invoke => i}) - assertSameCode(iv(getSingleMethod(c1b, "f1")), List(Invoke(INVOKEINTERFACE, "T", "clone", "()Ljava/lang/String;", true))) - assertSameCode(iv(getSingleMethod(c1b, "f2")), List(Invoke(INVOKEINTERFACE, "T", "clone", "()Ljava/lang/String;", true))) + def iv(m: Method) = getInstructions(c1b, "f1").collect({case i: Invoke => i}) + assertSameCode(iv(getMethod(c1b, "f1")), List(Invoke(INVOKEINTERFACE, "T", "clone", "()Ljava/lang/String;", true))) + assertSameCode(iv(getMethod(c1b, "f2")), List(Invoke(INVOKEINTERFACE, "T", "clone", "()Ljava/lang/String;", true))) // invokeinterface T.clone in C1 is OK here because it is not an override of Object.clone (different siganture) - assertSameCode(iv(getSingleMethod(c1b, "f3")), List(Invoke(INVOKEINTERFACE, "T", "clone", "()Ljava/lang/String;", true))) + assertSameCode(iv(getMethod(c1b, "f3")), List(Invoke(INVOKEINTERFACE, "T", "clone", "()Ljava/lang/String;", true))) } @Test @@ -395,9 +393,9 @@ class BytecodeTest extends BytecodeTesting { | def f3(j: a.J) = j.f |} """.stripMargin - val List(c) = compileClasses(cC, javaCode = List((aC, "A.java"), (bC, "B.java"), (iC, "I.java"), (jC, "J.java"))) - assertInvoke(getSingleMethod(c, "f1"), "a/B", "f") // receiver needs to be B (A is not accessible in class C, package b) - assertInvoke(getSingleMethod(c, "f3"), "a/J", "f") // receiver needs to be J + val c = 
compileClass(cC, javaCode = List((aC, "A.java"), (bC, "B.java"), (iC, "I.java"), (jC, "J.java"))) + assertInvoke(getMethod(c, "f1"), "a/B", "f") // receiver needs to be B (A is not accessible in class C, package b) + assertInvoke(getMethod(c, "f3"), "a/J", "f") // receiver needs to be J } @Test @@ -411,11 +409,11 @@ class BytecodeTest extends BytecodeTesting { | |} """.stripMargin - val List(c) = compileClasses(code) - assertInvoke(getSingleMethod(c, "f1"), "[Ljava/lang/String;", "clone") // array descriptor as receiver - assertInvoke(getSingleMethod(c, "f2"), "java/lang/Object", "hashCode") // object receiver - assertInvoke(getSingleMethod(c, "f3"), "java/lang/Object", "hashCode") - assertInvoke(getSingleMethod(c, "f4"), "java/lang/Object", "toString") + val c = compileClass(code) + assertInvoke(getMethod(c, "f1"), "[Ljava/lang/String;", "clone") // array descriptor as receiver + assertInvoke(getMethod(c, "f2"), "java/lang/Object", "hashCode") // object receiver + assertInvoke(getMethod(c, "f3"), "java/lang/Object", "hashCode") + assertInvoke(getMethod(c, "f4"), "java/lang/Object", "toString") } @Test @@ -423,7 +421,7 @@ class BytecodeTest extends BytecodeTesting { // see comment in SpecializeTypes.forwardCtorCall val code = "case class C[@specialized(Int) T](_1: T)" val List(c, cMod, cSpec) = compileClasses(code) - assertSameSummary(getSingleMethod(cSpec, ""), + assertSameSummary(getMethod(cSpec, ""), // pass `null` to super constructor, no box-unbox, no Integer created List(ALOAD, ILOAD, PUTFIELD, ALOAD, ACONST_NULL, "", RETURN)) } diff --git a/test/junit/scala/issues/OptimizedBytecodeTest.scala b/test/junit/scala/issues/OptimizedBytecodeTest.scala index b074215534a5..af1c50acac37 100644 --- a/test/junit/scala/issues/OptimizedBytecodeTest.scala +++ b/test/junit/scala/issues/OptimizedBytecodeTest.scala @@ -27,8 +27,8 @@ class OptimizedBytecodeTest extends BytecodeTesting { | def t(): Unit = while (true) m("...") |} """.stripMargin - val List(c) = compileClasses(code) - assertSameCode(getSingleMethod(c, "t"), List(Label(0), Jump(GOTO, Label(0)))) + val c = compileClass(code) + assertSameCode(getMethod(c, "t"), List(Label(0), Jump(GOTO, Label(0)))) } @Test @@ -45,12 +45,12 @@ class OptimizedBytecodeTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) + val c = compileClass(code) - assertSameSummary(getSingleMethod(c, "t"), List( + assertSameSummary(getMethod(c, "t"), List( LDC, ASTORE, ALOAD /*0*/, ALOAD /*1*/, "C$$$anonfun$1", IRETURN)) - assertSameSummary(getSingleMethod(c, "C$$$anonfun$1"), List(LDC, "C$$$anonfun$2", IRETURN)) - assertSameSummary(getSingleMethod(c, "C$$$anonfun$2"), List(-1 /*A*/, GOTO /*A*/)) + assertSameSummary(getMethod(c, "C$$$anonfun$1"), List(LDC, "C$$$anonfun$2", IRETURN)) + assertSameSummary(getMethod(c, "C$$$anonfun$2"), List(-1 /*A*/, GOTO /*A*/)) } @Test @@ -72,7 +72,7 @@ class OptimizedBytecodeTest extends BytecodeTesting { |} """.stripMargin val List(c, t, tMod) = compileClasses(code, allowMessage = _.msg.contains("not be exhaustive")) - assertSameSummary(getSingleMethod(c, "t"), List(GETSTATIC, "$qmark$qmark$qmark", ATHROW)) + assertSameSummary(getMethod(c, "t"), List(GETSTATIC, "$qmark$qmark$qmark", ATHROW)) } @Test @@ -109,7 +109,7 @@ class OptimizedBytecodeTest extends BytecodeTesting { | } |} """.stripMargin - compileClasses(code) + compileToBytes(code) } @Test @@ -120,7 +120,7 @@ class OptimizedBytecodeTest extends BytecodeTesting { """.stripMargin val c2 = "class C { def t = warmup.Warmup.filter[Any](x => false) 
}" val List(c, _, _) = compileClassesSeparately(List(c1, c2), extraArgs = compilerArgs) - assertInvoke(getSingleMethod(c, "t"), "warmup/Warmup$", "filter") + assertInvoke(getMethod(c, "t"), "warmup/Warmup$", "filter") } @Test @@ -134,7 +134,7 @@ class OptimizedBytecodeTest extends BytecodeTesting { | } |} """.stripMargin - compileClasses(code) + compileToBytes(code) } @Test @@ -162,7 +162,7 @@ class OptimizedBytecodeTest extends BytecodeTesting { | } |} """.stripMargin - compileClasses(code) + compileToBytes(code) } @Test @@ -178,7 +178,7 @@ class OptimizedBytecodeTest extends BytecodeTesting { | } |} """.stripMargin - compileClasses(code) + compileToBytes(code) } @Test @@ -217,8 +217,8 @@ class OptimizedBytecodeTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) - assertSameSummary(getSingleMethod(c, "t"), List( + val c = compileClass(code) + assertSameSummary(getMethod(c, "t"), List( ALOAD /*1*/, INSTANCEOF /*Some*/, IFNE /*A*/, ALOAD /*0*/, "getInt", POP, -1 /*A*/, BIPUSH, IRETURN)) @@ -236,8 +236,8 @@ class OptimizedBytecodeTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) - assertSameSummary(getSingleMethod(c, "t"), List( + val c = compileClass(code) + assertSameSummary(getMethod(c, "t"), List( -1 /*A*/, ILOAD /*1*/, TABLESWITCH, -1, ALOAD, "pr", RETURN, -1, ALOAD, "pr", RETURN, @@ -261,8 +261,8 @@ class OptimizedBytecodeTest extends BytecodeTesting { """.stripMargin val cls = compileClassesSeparately(List(c1, c2), extraArgs = compilerArgs) - val c = cls.find(_.name == "C").get - assertSameSummary(getSingleMethod(c, "t"), List( + val c = findClass(cls, "C") + assertSameSummary(getMethod(c, "t"), List( GETSTATIC, IFNONNULL, ACONST_NULL, ATHROW, // module load and null checks not yet eliminated -1, ICONST_1, GETSTATIC, IFNONNULL, ACONST_NULL, ATHROW, -1, ICONST_2, IADD, IRETURN)) @@ -299,11 +299,11 @@ class OptimizedBytecodeTest extends BytecodeTesting { | def f2b() = identity(wrapper2(5)) // not inlined |} """.stripMargin - val List(c) = compileClasses(code, allowMessage = _.msg.contains("exception handler declared in the inlined method")) - assertInvoke(getSingleMethod(c, "f1a"), "C", "C$$$anonfun$1") - assertInvoke(getSingleMethod(c, "f1b"), "C", "wrapper1") - assertInvoke(getSingleMethod(c, "f2a"), "C", "C$$$anonfun$3") - assertInvoke(getSingleMethod(c, "f2b"), "C", "wrapper2") + val c = compileClass(code, allowMessage = _.msg.contains("exception handler declared in the inlined method")) + assertInvoke(getMethod(c, "f1a"), "C", "C$$$anonfun$1") + assertInvoke(getMethod(c, "f1b"), "C", "wrapper1") + assertInvoke(getMethod(c, "f2a"), "C", "C$$$anonfun$3") + assertInvoke(getMethod(c, "f2b"), "C", "wrapper2") } @Test @@ -317,8 +317,8 @@ class OptimizedBytecodeTest extends BytecodeTesting { | def t = mbarray_apply_minibox(null, 0) |} """.stripMargin - val List(c) = compileClasses(code) - assertNoInvoke(getSingleMethod(c, "t")) + val c = compileClass(code) + assertNoInvoke(getMethod(c, "t")) } @Test @@ -336,7 +336,7 @@ class OptimizedBytecodeTest extends BytecodeTesting { |class Listt """.stripMargin val List(c, nil, nilMod, listt) = compileClasses(code) - assertInvoke(getSingleMethod(c, "t"), "C", "C$$$anonfun$1") + assertInvoke(getMethod(c, "t"), "C", "C$$$anonfun$1") } @Test @@ -355,13 +355,13 @@ class OptimizedBytecodeTest extends BytecodeTesting { |} """.stripMargin val List(c, f) = compileClasses(code) - assertInvoke(getSingleMethod(c, "crash"), "C", "map") + assertInvoke(getMethod(c, "crash"), "C", 
"map") } @Test def optimiseEnablesNewOpt(): Unit = { val code = """class C { def t = (1 to 10) foreach println }""" - val List(c) = readAsmClasses(newCompiler(extraArgs = "-optimise -deprecation").compile(code, allowMessage = _.msg.contains("is deprecated"))) - assertInvoke(getSingleMethod(c, "t"), "C", "C$$$anonfun$1") // range-foreach inlined from classpath + val List(c) = readAsmClasses(newCompiler(extraArgs = "-optimise -deprecation").compileToBytes(code, allowMessage = _.msg.contains("is deprecated"))) + assertInvoke(getMethod(c, "t"), "C", "C$$$anonfun$1") // range-foreach inlined from classpath } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala index b538ae0bc603..c9a958ee4f4d 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala @@ -26,7 +26,7 @@ class DefaultMethodTest extends BytecodeTesting { case _ => super.transform(tree) } } - val asmClasses: List[ClassNode] = readAsmClasses(compiler.compileTransformed(code, Nil, makeFooDefaultMethod.transform(_))) + val asmClasses: List[ClassNode] = compiler.compileClassesTransformed(code, Nil, makeFooDefaultMethod.transform(_)) val foo = asmClasses.head.methods.iterator.asScala.toList.last assertTrue("default method should not be abstract", (foo.access & Opcodes.ACC_ABSTRACT) == 0) assertTrue("default method body emitted", foo.instructions.size() > 0) diff --git a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala index 65b4264ee9e3..7fdfb315779d 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala @@ -17,7 +17,7 @@ class DirectCompileTest extends BytecodeTesting { @Test def testCompile(): Unit = { - val List(("C.class", bytes)) = compile( + val List(("C.class", bytes)) = compileToBytes( """class C { | def f = 1 |} @@ -45,21 +45,19 @@ class DirectCompileTest extends BytecodeTesting { """def f = 10 |def g = f """.stripMargin) - assertTrue(f.name == "f") - assertTrue(g.name == "g") - assertSameCode(instructionsFromMethod(f).dropNonOp, + assertSameCode(f.instructions.dropNonOp, List(IntOp(BIPUSH, 10), Op(IRETURN))) - assertSameCode(instructionsFromMethod(g).dropNonOp, + assertSameCode(g.instructions.dropNonOp, List(VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "C", "f", "()I", itf = false), Op(IRETURN))) } @Test def testDropNonOpAliveLabels(): Unit = { // makes sure that dropNoOp doesn't drop labels that are being used - val List(f) = compileMethods("""def f(x: Int) = if (x == 0) "a" else "b"""") - assertSameCode(instructionsFromMethod(f).dropLinesFrames, List( + val is = compileInstructions("""def f(x: Int) = if (x == 0) "a" else "b"""") + assertSameCode(is.dropLinesFrames, List( Label(0), VarOp(ILOAD, 1), Op(ICONST_0), @@ -79,7 +77,7 @@ class DirectCompileTest extends BytecodeTesting { val codeA = "class A { def f = 1 }" val codeB = "class B extends A { def g = f }" val List(a, b) = compileClassesSeparately(List(codeA, codeB)) - val ins = getSingleMethod(b, "g").instructions + val ins = getInstructions(b, "g") assert(ins exists { case Invoke(_, "B", "f", _, _) => true case _ => false @@ -88,6 +86,6 @@ class DirectCompileTest extends BytecodeTesting { @Test def compileErroneous(): Unit = { - compileClasses("class C { def f: String = 1 }", allowMessage = _.msg contains "type mismatch") + 
compileToBytes("class C { def f: String = 1 }", allowMessage = _.msg contains "type mismatch") } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala b/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala index 22ced47a02f3..ac2aab01dc4b 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/IndyLambdaTest.scala @@ -13,7 +13,7 @@ class IndyLambdaTest extends BytecodeTesting { @Test def boxingBridgeMethodUsedSelectively(): Unit = { def implMethodDescriptorFor(code: String): String = { - val method = compileMethods(s"""def f = $code """).find(_.name == "f").get + val method = compileAsmMethods(s"""def f = $code """).find(_.name == "f").get val x = method.instructions.iterator.asScala.toList x.flatMap { case insn : InvokeDynamicInsnNode => insn.bsmArgs.collect { case h : Handle => h.getDesc } @@ -46,17 +46,17 @@ class IndyLambdaTest extends BytecodeTesting { assertEquals("(I)I", implMethodDescriptorFor("(x: Int) => x")) // non-builtin sams are like specialized functions - compileClasses("class VC(private val i: Int) extends AnyVal; trait FunVC { def apply(a: VC): VC }") + compileToBytes("class VC(private val i: Int) extends AnyVal; trait FunVC { def apply(a: VC): VC }") assertEquals("(I)I", implMethodDescriptorFor("((x: VC) => x): FunVC")) - compileClasses("trait Fun1[T, U] { def apply(a: T): U }") + compileToBytes("trait Fun1[T, U] { def apply(a: T): U }") assertEquals(s"($obj)$str", implMethodDescriptorFor("(x => x.toString): Fun1[Int, String]")) assertEquals(s"($obj)$obj", implMethodDescriptorFor("(x => println(x)): Fun1[Int, Unit]")) assertEquals(s"($obj)$str", implMethodDescriptorFor("((x: VC) => \"\") : Fun1[VC, String]")) assertEquals(s"($str)$obj", implMethodDescriptorFor("((x: String) => new VC(0)) : Fun1[String, VC]")) - compileClasses("trait Coll[A, Repr] extends Any") - compileClasses("final class ofInt(val repr: Array[Int]) extends AnyVal with Coll[Int, Array[Int]]") + compileToBytes("trait Coll[A, Repr] extends Any") + compileToBytes("final class ofInt(val repr: Array[Int]) extends AnyVal with Coll[Int, Array[Int]]") assertEquals(s"([I)$obj", implMethodDescriptorFor("((xs: Array[Int]) => new ofInt(xs)): Array[Int] => Coll[Int, Array[Int]]")) } diff --git a/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala b/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala index d7c1f191d0f9..2bcbcc870cf5 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala @@ -39,13 +39,13 @@ class IndySammyTest extends BytecodeTesting { def test(from: String, to: String, arg: String, body: String => String = x => x) (expectedSig: String, lamBody: List[Instruction], appArgs: List[Instruction], ret: Instruction) (allowMessage: StoreReporter#Info => Boolean = _ => false) = { - val cls = compileClasses(s"${classPrologue(from, to)}") - val methodNodes = compileMethods(lamDef(from, to, body) +";"+ appDef(arg), allowMessage) + val List(funClass, vcClass, vcCompanion) = compileClasses(s"${classPrologue(from, to)}") + val c = compileClass(s"class C { ${lamDef(from, to, body)}; ${appDef(arg)} }", allowMessage = allowMessage) - val applySig = cls.head.methods.get(0).desc - val anonfun = methodNodes.find(_.name contains "$anonfun$").map(convertMethod).get - val lamInsn = methodNodes.find(_.name == "lam").map(instructionsFromMethod).get.dropNonOp - val applyInvoke = methodNodes.find(_.name == "app").map(convertMethod).get + val 
applySig = getAsmMethod(funClass, "apply").desc + val anonfun = getMethod(c, "C$$$anonfun$1") + val lamInsn = getInstructions(c, "lam").dropNonOp + val applyInvoke = getMethod(c, "app") assertEquals(expectedSig, applySig) assert(lamInsn.length == 2 && lamInsn.head.isInstanceOf[InvokeDynamic], lamInsn) @@ -140,7 +140,7 @@ class IndySammyTest extends BytecodeTesting { // Tests ThisReferringMethodsTraverser @Test def testStaticIfNoThisReference: Unit = { - val methodNodes = compileMethods("def foo = () => () => () => 42") + val methodNodes = compileAsmMethods("def foo = () => () => () => 42") methodNodes.forall(m => !m.name.contains("anonfun") || (m.access & ACC_STATIC) == ACC_STATIC) } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala b/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala index f231df8af060..af2c8f9ce008 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala @@ -50,9 +50,9 @@ class StringConcatTest extends BytecodeTesting { | chrs: Array[Char]) = this + str + v + z + c + b + s + i + f + l + d + sbuf + chsq + chrs |} """.stripMargin - val List(c) = compileClasses(code) + val c = compileClass(code) - def invokeNameDesc(m: String): List[String] = getSingleMethod(c, m).instructions collect { + def invokeNameDesc(m: String): List[String] = getInstructions(c, m) collect { case Invoke(_, _, name, desc, _) => name + desc } assertEquals(invokeNameDesc("t1"), List( diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala index 358a46102605..b0a86dfd28b3 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala @@ -24,7 +24,7 @@ class NullnessAnalyzerTest extends BytecodeTesting { def newNullnessAnalyzer(methodNode: MethodNode, classInternalName: InternalName = "C") = new AsmAnalyzer(methodNode, classInternalName, new NullnessAnalyzer(global.genBCode.bTypes)) def testNullness(analyzer: AsmAnalyzer[NullnessValue], method: MethodNode, query: String, index: Int, nullness: NullnessValue): Unit = { - for (i <- findInstr(method, query)) { + for (i <- findInstrs(method, query)) { val r = analyzer.frameAt(i).getValue(index) assertTrue(s"Expected: $nullness, found: $r. At instr ${textify(i)}", nullness == r) } @@ -50,7 +50,7 @@ class NullnessAnalyzerTest extends BytecodeTesting { @Test def showNullnessFramesTest(): Unit = { - val List(m) = compileMethods("def f = this.toString") + val m = compileAsmMethod("def f = this.toString") // NOTE: the frame for an instruction represents the state *before* executing that instr. // So in the frame for `ALOAD 0`, the stack is still empty. 
@@ -68,14 +68,14 @@ class NullnessAnalyzerTest extends BytecodeTesting { @Test def thisNonNull(): Unit = { - val List(m) = compileMethods("def f = this.toString") + val m = compileAsmMethod("def f = this.toString") val a = newNullnessAnalyzer(m) testNullness(a, m, "ALOAD 0", 0, NotNullValue) } @Test def instanceMethodCall(): Unit = { - val List(m) = compileMethods("def f(a: String) = a.trim") + val m = compileAsmMethod("def f(a: String) = a.trim") val a = newNullnessAnalyzer(m) testNullness(a, m, "INVOKEVIRTUAL java/lang/String.trim", 1, UnknownValue1) testNullness(a, m, "ARETURN", 1, NotNullValue) @@ -83,7 +83,7 @@ class NullnessAnalyzerTest extends BytecodeTesting { @Test def constructorCall(): Unit = { - val List(m) = compileMethods("def f = { val a = new Object; a.toString }") + val m = compileAsmMethod("def f = { val a = new Object; a.toString }") val a = newNullnessAnalyzer(m) // for reference, the output of showAllNullnessFrames(a, m) - note that the frame represents the state *before* executing the instr. @@ -108,7 +108,7 @@ class NullnessAnalyzerTest extends BytecodeTesting { @Test def explicitNull(): Unit = { - val List(m) = compileMethods("def f = { var a: Object = null; a }") + val m = compileAsmMethod("def f = { var a: Object = null; a }") val a = newNullnessAnalyzer(m) for ((insn, index, nullness) <- List( ("+ACONST_NULL", 2, NullValue), @@ -119,14 +119,14 @@ class NullnessAnalyzerTest extends BytecodeTesting { @Test def stringLiteralsNotNull(): Unit = { - val List(m) = compileMethods("""def f = { val a = "hi"; a.trim }""") + val m = compileAsmMethod("""def f = { val a = "hi"; a.trim }""") val a = newNullnessAnalyzer(m) testNullness(a, m, "+ASTORE 1", 1, NotNullValue) } @Test def newArraynotNull() { - val List(m) = compileMethods("def f = { val a = new Array[Int](2); a(0) }") + val m = compileAsmMethod("def f = { val a = new Array[Int](2); a(0) }") val a = newNullnessAnalyzer(m) testNullness(a, m, "+NEWARRAY T_INT", 2, NotNullValue) // new array on stack testNullness(a, m, "+ASTORE 1", 1, NotNullValue) // local var (a) @@ -144,7 +144,7 @@ class NullnessAnalyzerTest extends BytecodeTesting { | a.toString |} """.stripMargin - val List(m) = compileMethods(code) + val m = compileAsmMethod(code) val a = newNullnessAnalyzer(m) val toSt = "+INVOKEVIRTUAL java/lang/Object.toString" testNullness(a, m, toSt, 3, UnknownValue1) @@ -170,7 +170,7 @@ class NullnessAnalyzerTest extends BytecodeTesting { | // d is null here, assigned in both branches. 
|} """.stripMargin - val List(m) = compileMethods(code) + val m = compileAsmMethod(code) val a = newNullnessAnalyzer(m) val trim = "INVOKEVIRTUAL java/lang/String.trim" @@ -206,7 +206,7 @@ class NullnessAnalyzerTest extends BytecodeTesting { | a.asInstanceOf[String].trim // the stack value (LOAD of local a) is still not-null after the CHECKCAST |} """.stripMargin - val List(m) = compileMethods(code) + val m = compileAsmMethod(code) val a = newNullnessAnalyzer(m) val instof = "+INSTANCEOF" diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala index be10370312b5..fc2678523764 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala @@ -49,9 +49,9 @@ class ProdConsAnalyzerTest extends BytecodeTesting { @Test def parameters(): Unit = { - val List(m) = compileMethods("def f = this.toString") + val m = compileAsmMethod("def f = this.toString") val a = new ProdConsAnalyzer(m, "C") - val call = findInstr(m, "INVOKEVIRTUAL").head + val call = findInstr(m, "INVOKEVIRTUAL") testSingleInsn(a.producersForValueAt(call, 1), "ALOAD 0") // producer of stack value testSingleInsn(a.producersForInputsOf(call), "ALOAD 0") @@ -84,55 +84,55 @@ class ProdConsAnalyzerTest extends BytecodeTesting { m.maxStack = 1 val a = new ProdConsAnalyzer(m, "C") - val ifne = findInstr(m, "IFNE").head + val ifne = findInstr(m, "IFNE") testSingleInsn(a.producersForValueAt(ifne, 1), "ParameterProducer") - val ret = findInstr(m, "IRETURN").head + val ret = findInstr(m, "IRETURN") testMultiInsns(a.producersForValueAt(ret, 1), List("ParameterProducer", "ISTORE 1")) } @Test def branching(): Unit = { - val List(m) = compileMethods("def f(x: Int) = { var a = x; if (a == 0) a = 12; a }") + val m = compileAsmMethod("def f(x: Int) = { var a = x; if (a == 0) a = 12; a }") val a = new ProdConsAnalyzer(m, "C") - val List(ret) = findInstr(m, "IRETURN") + val ret = findInstr(m, "IRETURN") testMultiInsns(a.producersForValueAt(ret, 2), List("ISTORE 2", "ISTORE 2")) testMultiInsns(a.initialProducersForValueAt(ret, 2), List("BIPUSH 12", "ParameterProducer")) - val List(bipush) = findInstr(m, "BIPUSH 12") + val bipush = findInstr(m, "BIPUSH 12") testSingleInsn(a.consumersOfOutputsFrom(bipush), "ISTORE 2") testSingleInsn(a.ultimateConsumersOfValueAt(bipush.getNext, 3), "IRETURN") } @Test def checkCast(): Unit = { - val List(m) = compileMethods("def f(o: Object) = o.asInstanceOf[String]") + val m = compileAsmMethod("def f(o: Object) = o.asInstanceOf[String]") val a = new ProdConsAnalyzer(m, "C") - assert(findInstr(m, "CHECKCAST java/lang/String").length == 1) + assert(findInstrs(m, "CHECKCAST java/lang/String").length == 1) - val List(ret) = findInstr(m, "ARETURN") + val ret = findInstr(m, "ARETURN") testSingleInsn(a.initialProducersForInputsOf(ret), "ParameterProducer(1)") } @Test def instanceOf(): Unit = { - val List(m) = compileMethods("def f(o: Object) = o.isInstanceOf[String]") + val m = compileAsmMethod("def f(o: Object) = o.isInstanceOf[String]") val a = new ProdConsAnalyzer(m, "C") - assert(findInstr(m, "INSTANCEOF java/lang/String").length == 1) + assert(findInstrs(m, "INSTANCEOF java/lang/String").length == 1) - val List(ret) = findInstr(m, "IRETURN") + val ret = findInstr(m, "IRETURN") testSingleInsn(a.initialProducersForInputsOf(ret), "INSTANCEOF") } @Test def unInitLocal(): Unit = { - val List(m) = compileMethods("def 
f(b: Boolean) = { if (b) { var a = 0; println(a) }; 1 }") + val m = compileAsmMethod("def f(b: Boolean) = { if (b) { var a = 0; println(a) }; 1 }") val a = new ProdConsAnalyzer(m, "C") - val List(store) = findInstr(m, "ISTORE") - val List(call) = findInstr(m, "INVOKEVIRTUAL") - val List(ret) = findInstr(m, "IRETURN") + val store = findInstr(m, "ISTORE") + val call = findInstr(m, "INVOKEVIRTUAL") + val ret = findInstr(m, "IRETURN") testSingleInsn(a.producersForValueAt(store, 2), "UninitializedLocalProducer(2)") testSingleInsn(a.producersForValueAt(call, 2), "ISTORE") @@ -141,11 +141,11 @@ class ProdConsAnalyzerTest extends BytecodeTesting { @Test def dupCopying(): Unit = { - val List(m) = compileMethods("def f = new Object") + val m = compileAsmMethod("def f = new Object") val a = new ProdConsAnalyzer(m, "C") - val List(newO) = findInstr(m, "NEW") - val List(constr) = findInstr(m, "INVOKESPECIAL") + val newO = findInstr(m, "NEW") + val constr = findInstr(m, "INVOKESPECIAL") testSingleInsn(a.producersForInputsOf(constr), "DUP") testSingleInsn(a.initialProducersForInputsOf(constr), "NEW") @@ -170,11 +170,11 @@ class ProdConsAnalyzerTest extends BytecodeTesting { m.maxStack = 4 val a = new ProdConsAnalyzer(m, "C") - val List(dup2) = findInstr(m, "DUP2") - val List(add) = findInstr(m, "IADD") - val List(swap) = findInstr(m, "SWAP") - val List(store) = findInstr(m, "ISTORE") - val List(ret) = findInstr(m, "IRETURN") + val dup2 = findInstr(m, "DUP2") + val add = findInstr(m, "IADD") + val swap = findInstr(m, "SWAP") + val store = findInstr(m, "ISTORE") + val ret = findInstr(m, "IRETURN") testMultiInsns(a.producersForInputsOf(dup2), List("ILOAD", "ILOAD")) testSingleInsn(a.consumersOfValueAt(dup2.getNext, 4), "IADD") @@ -205,9 +205,9 @@ class ProdConsAnalyzerTest extends BytecodeTesting { m.maxStack = 1 val a = new ProdConsAnalyzer(m, "C") - val List(inc) = findInstr(m, "IINC") - val List(load) = findInstr(m, "ILOAD") - val List(ret) = findInstr(m, "IRETURN") + val inc = findInstr(m, "IINC") + val load = findInstr(m, "ILOAD") + val ret = findInstr(m, "IRETURN") testSingleInsn(a.producersForInputsOf(inc), "ParameterProducer(1)") testSingleInsn(a.consumersOfOutputsFrom(inc), "ILOAD") @@ -223,12 +223,12 @@ class ProdConsAnalyzerTest extends BytecodeTesting { @Test def copyingInsns(): Unit = { - val List(m) = compileMethods("def f = 0l.asInstanceOf[Int]") + val m = compileAsmMethod("def f = 0l.asInstanceOf[Int]") val a = new ProdConsAnalyzer(m, "C") - val List(cnst) = findInstr(m, "LCONST_0") - val List(l2i) = findInstr(m, "L2I") // l2i is not a copying instruction - val List(ret) = findInstr(m, "IRETURN") + val cnst = findInstr(m, "LCONST_0") + val l2i = findInstr(m, "L2I") // l2i is not a copying instruction + val ret = findInstr(m, "IRETURN") testSingleInsn(a.consumersOfOutputsFrom(cnst), "L2I") testSingleInsn(a.ultimateConsumersOfOutputsFrom(cnst), "L2I") @@ -264,10 +264,10 @@ class ProdConsAnalyzerTest extends BytecodeTesting { m.maxStack = 2 val a = new ProdConsAnalyzer(m, "C") - val List(iadd) = findInstr(m, "IADD") + val iadd = findInstr(m, "IADD") val firstLoad = iadd.getPrevious.getPrevious assert(firstLoad.getOpcode == ILOAD) - val secondLoad = findInstr(m, "ISTORE").head.getPrevious + val secondLoad = findInstr(m, "ISTORE").getPrevious assert(secondLoad.getOpcode == ILOAD) testSingleInsn(a.producersForValueAt(iadd, 2), "ILOAD") diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala index 
a5fb1e7d17d6..025248ac2880 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala @@ -32,17 +32,17 @@ class AnalyzerTest extends BytecodeTesting { |} """.stripMargin - val List(c) = compileClasses(code) + val c = compileClass(code) val a = new AliasingAnalyzer(new BasicInterpreter) - val f = findAsmMethod(c, "f") + val f = getAsmMethod(c, "f") a.analyze("C", f) - val List(_, i2l) = findInstr(f, "I2L") + val List(_, i2l) = findInstrs(f, "I2L") val aliasesAtI2l = a.frameAt(i2l, f).asInstanceOf[AliasingFrame[_]].aliases assertEquals(aliasesAtI2l(1).iterator.toList, List(1, 8, 9)) // a, e and stack top assertEquals(aliasesAtI2l(4).iterator.toList, List(4, 6)) - val List(add) = findInstr(f, "LADD") + val add = findInstr(f, "LADD") val aliasesAtAdd = a.frameAt(add, f).asInstanceOf[AliasingFrame[_]].aliases assertEquals(aliasesAtAdd(1).iterator.toList, List(1, 8)) // after i2l the value on the stack is no longer an alias assertEquals(aliasesAtAdd(4).iterator.toList, List(4, 6, 10)) // c, d and stack top diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala index 900608837f42..630416a92582 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala @@ -104,10 +104,10 @@ class CallGraphTest extends BytecodeTesting { val List(cCls, cMod, dCls, testCls) = compile(code, checkMsg) assert(msgCount == 6, msgCount) - val List(cf1, cf2, cf3, cf4, cf5, cf6, cf7) = findAsmMethods(cCls, _.startsWith("f")) - val List(df1, df3) = findAsmMethods(dCls, _.startsWith("f")) - val g1 = findAsmMethod(cMod, "g1") - val List(t1, t2) = findAsmMethods(testCls, _.startsWith("t")) + val List(cf1, cf2, cf3, cf4, cf5, cf6, cf7) = getAsmMethods(cCls, _.startsWith("f")) + val List(df1, df3) = getAsmMethods(dCls, _.startsWith("f")) + val g1 = getAsmMethod(cMod, "g1") + val List(t1, t2) = getAsmMethods(testCls, _.startsWith("t")) val List(cf1Call, cf2Call, cf3Call, cf4Call, cf5Call, cf6Call, cf7Call, cg1Call) = callsInMethod(t1) val List(df1Call, df2Call, df3Call, df4Call, df5Call, df6Call, df7Call, dg1Call) = callsInMethod(t2) @@ -143,7 +143,7 @@ class CallGraphTest extends BytecodeTesting { |} """.stripMargin val List(c) = compile(code) - val m = findAsmMethod(c, "m") + val m = getAsmMethod(c, "m") val List(fn) = callsInMethod(m) val forNameMeth = byteCodeRepository.methodNode("java/lang/Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;").get._1 val classTp = classBTypeFromInternalName("java/lang/Class") diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala index ddd95ddc0260..218b02f82259 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala @@ -26,9 +26,9 @@ class ClosureOptimizerTest extends BytecodeTesting { |} """.stripMargin - val List(c) = compileClasses(code) - val t = findAsmMethod(c, "t") - val List(bodyCall) = findInstr(t, "INVOKESTATIC C.C$$$anonfun$1 ()Lscala/runtime/Nothing$") + val c = compileClass(code) + val t = getAsmMethod(c, "t") + val bodyCall = findInstr(t, "INVOKESTATIC C.C$$$anonfun$1 ()Lscala/runtime/Nothing$") assert(bodyCall.getNext.getOpcode == ATHROW) } @@ -42,9 +42,9 @@ class ClosureOptimizerTest extends BytecodeTesting { |} 
""".stripMargin - val List(c) = compileClasses(code) - val t = findAsmMethod(c, "t") - val List(bodyCall) = findInstr(t, "INVOKESTATIC C.C$$$anonfun$1 ()Lscala/runtime/Null$") + val c = compileClass(code) + val t = getAsmMethod(c, "t") + val bodyCall = findInstr(t, "INVOKESTATIC C.C$$$anonfun$1 ()Lscala/runtime/Null$") assert(bodyCall.getNext.getOpcode == POP) assert(bodyCall.getNext.getNext.getOpcode == ACONST_NULL) } @@ -59,8 +59,8 @@ class ClosureOptimizerTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) - assertSameCode(getSingleMethod(c, "t"), + val c = compileClass(code) + assertSameCode(getMethod(c, "t"), List(VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "scala/collection/immutable/List", "head", "()Ljava/lang/Object;", false), TypeOp(CHECKCAST, "java/lang/String"), Invoke(INVOKESTATIC, "C", "C$$$anonfun$1", "(Ljava/lang/String;)Ljava/lang/String;", false), Op(ARETURN))) @@ -80,7 +80,7 @@ class ClosureOptimizerTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) - assertSameSummary(getSingleMethod(c, "t"), List(NEW, DUP, LDC, "", ATHROW)) + val c = compileClass(code) + assertSameSummary(getMethod(c, "t"), List(NEW, DUP, LDC, "", ATHROW)) } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala index 50e3af6ee5ef..c3748a05bd46 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala @@ -56,8 +56,8 @@ class CompactLocalVariablesTest extends ClearAfterClass { |} |""".stripMargin - val List(noCompact) = noCompactVarsCompiler.compileMethods(code) - val List(withCompact) = methodOptCompiler.compileMethods(code) + val noCompact = noCompactVarsCompiler.compileAsmMethod(code) + val withCompact = methodOptCompiler.compileAsmMethod(code) // code is the same, except for local var indices assertTrue(noCompact.instructions.size == withCompact.instructions.size) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala index 9fb4aa1658d4..3324058cb762 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala @@ -60,8 +60,8 @@ class EmptyExceptionHandlersTest extends BytecodeTesting { def eliminateUnreachableHandler(): Unit = { val code = "def f: Unit = try { } catch { case _: Exception => println(0) }; println(1)" - assertTrue(noOptCompiler.singleMethod(code).handlers.length == 1) - val optMethod = dceCompiler.singleMethod(code) + assertTrue(noOptCompiler.compileMethod(code).handlers.length == 1) + val optMethod = dceCompiler.compileMethod(code) assertTrue(optMethod.handlers.isEmpty) val code2 = @@ -73,7 +73,7 @@ class EmptyExceptionHandlersTest extends BytecodeTesting { | println(2) |}""".stripMargin - assertTrue(dceCompiler.singleMethod(code2).handlers.isEmpty) + assertTrue(dceCompiler.compileMethod(code2).handlers.isEmpty) } @Test @@ -85,6 +85,6 @@ class EmptyExceptionHandlersTest extends BytecodeTesting { | catch { case _: Exception => 2 } |}""".stripMargin - assertTrue(dceCompiler.singleMethod(code).handlers.length == 1) + assertTrue(dceCompiler.compileMethod(code).handlers.length == 1) } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala 
b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala index 876c47a84ef1..f0913f36318d 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala @@ -38,7 +38,7 @@ class InlineWarningTest extends BytecodeTesting { "C::m1()I is annotated @inline but cannot be inlined: the method is not final and may be overridden", "T::m2()I is annotated @inline but cannot be inlined: the method is not final and may be overridden", "D::m2()I is annotated @inline but cannot be inlined: the method is not final and may be overridden") - compile(code, allowMessage = i => {count += 1; warns.exists(i.msg contains _)}) + compileToBytes(code, allowMessage = i => {count += 1; warns.exists(i.msg contains _)}) assert(count == 4, count) } @@ -53,7 +53,7 @@ class InlineWarningTest extends BytecodeTesting { """.stripMargin var c = 0 - compile(code, allowMessage = i => {c += 1; i.msg contains "operand stack at the callsite in C::t1()V contains more values"}) + compileToBytes(code, allowMessage = i => {c += 1; i.msg contains "operand stack at the callsite in C::t1()V contains more values"}) assert(c == 1, c) } @@ -83,14 +83,14 @@ class InlineWarningTest extends BytecodeTesting { |Note that the parent class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin) var c = 0 - val List(b) = compile(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.tail.exists(i.msg contains _)}) + val List(b) = compileToBytes(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.tail.exists(i.msg contains _)}) assert(c == 1, c) // no warnings here - newCompiler(extraArgs = s"$optCp -Yopt-warnings:none").compile(scalaCode, List((javaCode, "A.java"))) + newCompiler(extraArgs = s"$optCp -Yopt-warnings:none").compileToBytes(scalaCode, List((javaCode, "A.java"))) c = 0 - newCompiler(extraArgs = s"$optCp -Yopt-warnings:no-inline-mixed").compile(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.exists(i.msg contains _)}) + newCompiler(extraArgs = s"$optCp -Yopt-warnings:no-inline-mixed").compileToBytes(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.exists(i.msg contains _)}) assert(c == 2, c) } @@ -117,7 +117,7 @@ class InlineWarningTest extends BytecodeTesting { |that would cause an IllegalAccessError when inlined into class N""".stripMargin var c = 0 - compile(code, allowMessage = i => { c += 1; i.msg contains warn }) + compileToBytes(code, allowMessage = i => { c += 1; i.msg contains warn }) assert(c == 1, c) } @@ -136,7 +136,7 @@ class InlineWarningTest extends BytecodeTesting { | def t(a: M) = a.f(x => x + 1) |} """.stripMargin - compile(code, allowMessage = _ => false) // no warnings allowed + compileToBytes(code, allowMessage = _ => false) // no warnings allowed val warn = """M::f(Lscala/Function1;)I could not be inlined: @@ -144,7 +144,7 @@ class InlineWarningTest extends BytecodeTesting { |that would cause an IllegalAccessError when inlined into class N""".stripMargin var c = 0 - compilerWarnAll.compile(code, allowMessage = i => { c += 1; i.msg contains warn }) + compilerWarnAll.compileToBytes(code, allowMessage = i => { c += 1; i.msg contains warn }) assert(c == 1, c) } @@ -165,7 +165,7 @@ class InlineWarningTest extends BytecodeTesting { |does not have the same strictfp mode as the callee C::f()I.""".stripMargin var c = 0 - compile(code, allowMessage = i => { c += 1; i.msg contains warn }) + 
compileToBytes(code, allowMessage = i => { c += 1; i.msg contains warn }) assert(c == 1, c) } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala index 8a44f12045c1..e7c3bab62f6b 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala @@ -37,9 +37,9 @@ class InlinerSeparateCompilationTest { val warn = "T::f()I is annotated @inline but cannot be inlined: the method is not final and may be overridden" val List(c, o, oMod, t) = compileClassesSeparately(List(codeA, codeB), args + " -Yopt-warnings", _.msg contains warn) - assertInvoke(getSingleMethod(c, "t1"), "T", "f") - assertNoInvoke(getSingleMethod(c, "t2")) - assertNoInvoke(getSingleMethod(c, "t3")) + assertInvoke(getMethod(c, "t1"), "T", "f") + assertNoInvoke(getMethod(c, "t2")) + assertNoInvoke(getMethod(c, "t3")) } @Test @@ -57,7 +57,7 @@ class InlinerSeparateCompilationTest { """.stripMargin val List(c, t) = compileClassesSeparately(List(codeA, codeB), args) - assertNoInvoke(getSingleMethod(c, "t1")) + assertNoInvoke(getMethod(c, "t1")) } @Test @@ -80,7 +80,7 @@ class InlinerSeparateCompilationTest { """.stripMargin val List(c, t, u) = compileClassesSeparately(List(codeA, codeB), args) - for (m <- List("t1", "t2", "t3")) assertNoInvoke(getSingleMethod(c, m)) + for (m <- List("t1", "t2", "t3")) assertNoInvoke(getMethod(c, m)) } @Test @@ -101,7 +101,7 @@ class InlinerSeparateCompilationTest { """.stripMargin val List(a, t) = compileClassesSeparately(List(codeA, assembly), args) - assertNoInvoke(getSingleMethod(t, "f")) - assertNoInvoke(getSingleMethod(a, "n")) + assertNoInvoke(getMethod(t, "f")) + assertNoInvoke(getMethod(a, "n")) } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index 4db7695fddd3..24e889cf186e 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -39,7 +39,7 @@ class InlinerTest extends BytecodeTesting { def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = { notPerRun.foreach(_.clear()) - compileClasses(scalaCode, javaCode, allowMessage) + compileToBytes(scalaCode, javaCode, allowMessage) // Use the class nodes stored in the byteCodeRepository. The ones returned by compileClasses are not the same, // these are created new from the classfile byte array. They are completely separate instances which cannot // be used to look up methods / callsites in the callGraph hash maps for example. 
@@ -60,7 +60,7 @@ class InlinerTest extends BytecodeTesting { def gMethAndFCallsite(code: String, mod: ClassNode => Unit = _ => ()) = { val List(c) = compile(code) mod(c) - val gMethod = findAsmMethod(c, "g") + val gMethod = getAsmMethod(c, "g") val fCall = getCallsite(gMethod, "f") (gMethod, fCall) } @@ -148,7 +148,7 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val can = canInlineTest(code, cls => { - val f = cls.methods.asScala.find(_.name == "f").get + val f = getAsmMethod(cls, "f") f.access |= ACC_SYNCHRONIZED }) assert(can.nonEmpty && can.get.isInstanceOf[SynchronizedMethod], can) @@ -197,7 +197,7 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c, d) = compile(code) - val hMeth = findAsmMethod(d, "h") + val hMeth = getAsmMethod(d, "h") val gCall = getCallsite(hMeth, "g") val r = inliner.canInlineBody(gCall) assert(r.nonEmpty && r.get.isInstanceOf[IllegalAccessInstruction], r) @@ -214,7 +214,7 @@ class InlinerTest extends BytecodeTesting { |} """.stripMargin val List(cCls) = compile(code) - val instructions = getSingleMethod(cCls, "test").instructions + val instructions = getInstructions(cCls, "test") assert(instructions.contains(Op(ICONST_0)), instructions.stringLines) assert(!instructions.contains(Op(ICONST_1)), instructions) } @@ -280,7 +280,7 @@ class InlinerTest extends BytecodeTesting { |} """.stripMargin val List(c, _, _) = compile(code) - val ins = getSingleMethod(c, "f").instructions + val ins = getInstructions(c, "f") val invokeSysArraycopy = Invoke(INVOKESTATIC, "java/lang/System", "arraycopy", "(Ljava/lang/Object;ILjava/lang/Object;II)V", false) assert(ins contains invokeSysArraycopy, ins.stringLines) } @@ -312,7 +312,7 @@ class InlinerTest extends BytecodeTesting { |} """.stripMargin val List(c, t) = compile(code) - assertNoInvoke(getSingleMethod(c, "g")) + assertNoInvoke(getMethod(c, "g")) } @Test @@ -325,7 +325,7 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c) = compile(code) // no more invoke, f is inlined - assertNoInvoke(getSingleMethod(c, "g")) + assertNoInvoke(getMethod(c, "g")) } @Test @@ -337,7 +337,7 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c) = compile(code) - val fMeth = findAsmMethod(c, "f") + val fMeth = getAsmMethod(c, "f") val call = getCallsite(fMeth, "lowestOneBit") val warning = inliner.canInlineBody(call) @@ -376,7 +376,7 @@ class InlinerTest extends BytecodeTesting { """.stripMargin // use a compiler without local optimizations (cleanups) - val List(c) = inlineOnlyCompiler.compileClasses(code) + val c = inlineOnlyCompiler.compileClass(code) val ms @ List(f1, f2, g1, g2) = c.methods.asScala.filter(_.name.length == 2).toList // stack height at callsite of f1 is 1, so max of g1 after inlining is max of f1 + 1 @@ -421,7 +421,7 @@ class InlinerTest extends BytecodeTesting { var c = 0 val List(b) = compile(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; i.msg contains warn}) assert(c == 1, c) - val ins = getSingleMethod(b, "g").instructions + val ins = getInstructions(b, "g") val invokeFlop = Invoke(INVOKEVIRTUAL, "B", "flop", "()I", false) assert(ins contains invokeFlop, ins.stringLines) } @@ -441,8 +441,8 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c, t) = compile(code) // both are just `return 1`, no more calls - assertNoInvoke(getSingleMethod(c, "t1")) - assertNoInvoke(getSingleMethod(c, "t2")) + assertNoInvoke(getMethod(c, "t1")) + assertNoInvoke(getMethod(c, "t2")) } @Test @@ -460,8 +460,8 @@ class 
InlinerTest extends BytecodeTesting { |} """.stripMargin val List(c, t, u) = compile(code) - assertNoInvoke(getSingleMethod(c, "t1")) - assertNoInvoke(getSingleMethod(c, "t2")) + assertNoInvoke(getMethod(c, "t1")) + assertNoInvoke(getMethod(c, "t2")) } @Test @@ -481,8 +481,8 @@ class InlinerTest extends BytecodeTesting { var count = 0 val List(c, t) = compile(code, allowMessage = i => {count += 1; warns.exists(i.msg contains _)}) assert(count == 2, count) - assertInvoke(getSingleMethod(c, "t1"), "T", "f") - assertInvoke(getSingleMethod(c, "t2"), "C", "f") + assertInvoke(getMethod(c, "t1"), "T", "f") + assertInvoke(getMethod(c, "t2"), "C", "f") } @Test @@ -496,7 +496,7 @@ class InlinerTest extends BytecodeTesting { |} """.stripMargin val List(c, t) = compile(code) - assertNoInvoke(getSingleMethod(c, "t1")) + assertNoInvoke(getMethod(c, "t1")) } @Test @@ -520,11 +520,11 @@ class InlinerTest extends BytecodeTesting { val List(c, oMirror, oModule, t) = compile(code, allowMessage = i => {count += 1; i.msg contains warn}) assert(count == 1, count) - assertNoInvoke(getSingleMethod(t, "f")) + assertNoInvoke(getMethod(t, "f")) - assertNoInvoke(getSingleMethod(c, "t1")) - assertNoInvoke(getSingleMethod(c, "t2")) - assertInvoke(getSingleMethod(c, "t3"), "T", "f") + assertNoInvoke(getMethod(c, "t1")) + assertNoInvoke(getMethod(c, "t2")) + assertInvoke(getMethod(c, "t3"), "T", "f") } @Test @@ -546,12 +546,12 @@ class InlinerTest extends BytecodeTesting { val List(assembly, c, t) = compile(code) - assertNoInvoke(getSingleMethod(t, "f")) + assertNoInvoke(getMethod(t, "f")) - assertNoInvoke(getSingleMethod(assembly, "n")) + assertNoInvoke(getMethod(assembly, "n")) - assertNoInvoke(getSingleMethod(c, "t1")) - assertNoInvoke(getSingleMethod(c, "t2")) + assertNoInvoke(getMethod(c, "t1")) + assertNoInvoke(getMethod(c, "t2")) } @Test @@ -624,20 +624,20 @@ class InlinerTest extends BytecodeTesting { val List(ca, cb, t1, t2a, t2b) = compile(code, allowMessage = i => {count += 1; i.msg contains warning}) assert(count == 4, count) // see comments, f is not inlined 4 times - assertNoInvoke(getSingleMethod(t2a, "g2a")) - assertInvoke(getSingleMethod(t2b, "g2b"), "T1", "f") + assertNoInvoke(getMethod(t2a, "g2a")) + assertInvoke(getMethod(t2b, "g2b"), "T1", "f") - assertInvoke(getSingleMethod(ca, "m1a"), "T1", "f") - assertNoInvoke(getSingleMethod(ca, "m2a")) // no invoke, see comment on def g2a - assertNoInvoke(getSingleMethod(ca, "m3a")) - assertInvoke(getSingleMethod(ca, "m4a"), "T1", "f") - assertNoInvoke(getSingleMethod(ca, "m5a")) + assertInvoke(getMethod(ca, "m1a"), "T1", "f") + assertNoInvoke(getMethod(ca, "m2a")) // no invoke, see comment on def g2a + assertNoInvoke(getMethod(ca, "m3a")) + assertInvoke(getMethod(ca, "m4a"), "T1", "f") + assertNoInvoke(getMethod(ca, "m5a")) - assertInvoke(getSingleMethod(cb, "m1b"), "T1", "f") - assertInvoke(getSingleMethod(cb, "m2b"), "T1", "f") // invoke, see comment on def g2b - assertNoInvoke(getSingleMethod(cb, "m3b")) - assertInvoke(getSingleMethod(cb, "m4b"), "T1", "f") - assertNoInvoke(getSingleMethod(cb, "m5b")) + assertInvoke(getMethod(cb, "m1b"), "T1", "f") + assertInvoke(getMethod(cb, "m2b"), "T1", "f") // invoke, see comment on def g2b + assertNoInvoke(getMethod(cb, "m3b")) + assertInvoke(getMethod(cb, "m4b"), "T1", "f") + assertNoInvoke(getMethod(cb, "m5b")) } @Test @@ -654,7 +654,7 @@ class InlinerTest extends BytecodeTesting { |} // so d.f can be resolved statically. 
same for E.f """.stripMargin val List(c, d, e, eModule, t) = compile(code) - assertNoInvoke(getSingleMethod(t, "t1")) + assertNoInvoke(getMethod(t, "t1")) } @Test @@ -669,8 +669,8 @@ class InlinerTest extends BytecodeTesting { |} """.stripMargin val List(c, d, t) = compile(code) - assertNoInvoke(getSingleMethod(d, "m")) - assertNoInvoke(getSingleMethod(c, "m")) + assertNoInvoke(getMethod(d, "m")) + assertNoInvoke(getMethod(c, "m")) } @Test @@ -684,8 +684,8 @@ class InlinerTest extends BytecodeTesting { |} """.stripMargin val List(c, t) = compile(code) - val t1 = getSingleMethod(t, "t1") - val t2 = getSingleMethod(t, "t2") + val t1 = getMethod(t, "t1") + val t2 = getMethod(t, "t2") val cast = TypeOp(CHECKCAST, "C") Set(t1, t2).foreach(m => assert(m.instructions.contains(cast), m.instructions)) } @@ -765,27 +765,27 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c, t, u) = compile(code, allowMessage = _.msg contains "i()I is annotated @inline but cannot be inlined") - val m1 = getSingleMethod(c, "m1") + val m1 = getMethod(c, "m1") assertInvoke(m1, "T", "a") assertInvoke(m1, "T", "b") assertInvoke(m1, "T", "c") - assertNoInvoke(getSingleMethod(c, "m2")) + assertNoInvoke(getMethod(c, "m2")) - val m3 = getSingleMethod(c, "m3") + val m3 = getMethod(c, "m3") assertInvoke(m3, "T", "f") assertInvoke(m3, "T", "g") assertInvoke(m3, "T", "h") assertInvoke(m3, "T", "i") - val m4 = getSingleMethod(c, "m4") + val m4 = getMethod(c, "m4") assertInvoke(m4, "U", "a") assertInvoke(m4, "U", "b") assertInvoke(m4, "U", "c") - assertNoInvoke(getSingleMethod(c, "m5")) + assertNoInvoke(getMethod(c, "m5")) - val m6 = getSingleMethod(c, "m6") + val m6 = getMethod(c, "m6") assertInvoke(m6, "U", "f") assertInvoke(m6, "U", "g") assertInvoke(m6, "U", "h") @@ -869,15 +869,15 @@ class InlinerTest extends BytecodeTesting { val List(a, b, t) = compile(code, allowMessage = i => {c += 1; i.msg contains warn}) assert(c == 1, c) - assertInvoke(getSingleMethod(b, "t1"), "Aa", "f1") - assertInvoke(getSingleMethod(b, "t2"), "B", "B$$f2m") - assertInvoke(getSingleMethod(b, "t3"), "B", "") - assertInvoke(getSingleMethod(b, "t4"), "B", "") + assertInvoke(getMethod(b, "t1"), "Aa", "f1") + assertInvoke(getMethod(b, "t2"), "B", "B$$f2m") + assertInvoke(getMethod(b, "t3"), "B", "") + assertInvoke(getMethod(b, "t4"), "B", "") - assertInvoke(getSingleMethod(t, "t1"), "B", "f1") - assertInvoke(getSingleMethod(t, "t2"), "B", "B$$f2m") - assertInvoke(getSingleMethod(t, "t3"), "B", "") - assertInvoke(getSingleMethod(t, "t4"), "B", "") + assertInvoke(getMethod(t, "t1"), "B", "f1") + assertInvoke(getMethod(t, "t2"), "B", "B$$f2m") + assertInvoke(getMethod(t, "t3"), "B", "") + assertInvoke(getMethod(t, "t4"), "B", "") } @Test @@ -887,8 +887,8 @@ class InlinerTest extends BytecodeTesting { | def t = System.arraycopy(null, 0, null, 0, 0) |} """.stripMargin - val List(c) = newCompiler(extraArgs = compilerArgs + " -Yopt-inline-heuristics:everything").compileClasses(code) - assertInvoke(getSingleMethod(c, "t"), "java/lang/System", "arraycopy") + val c = newCompiler(extraArgs = compilerArgs + " -Yopt-inline-heuristics:everything").compileClass(code) + assertInvoke(getMethod(c, "t"), "java/lang/System", "arraycopy") } @Test @@ -902,7 +902,7 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c) = compile(code) - assertInvoke(getSingleMethod(c, "t"), "java/lang/Error", "") + assertInvoke(getMethod(c, "t"), "java/lang/Error", "") } @Test @@ -915,7 +915,7 @@ class InlinerTest extends BytecodeTesting { 
""".stripMargin val List(c) = compile(code) - val t = getSingleMethod(c, "t").instructions + val t = getInstructions(c, "t") assertNoInvoke(t) assert(1 == t.collect({case Ldc(_, "hai!") => }).size) // push-pop eliminates the first LDC("hai!") assert(1 == t.collect({case Jump(IFNONNULL, _) => }).size) // one single null check @@ -942,12 +942,12 @@ class InlinerTest extends BytecodeTesting { val List(c, _, _) = compile(code) - val t1 = getSingleMethod(c, "t1") + val t1 = getMethod(c, "t1") assertNoIndy(t1) // the indy call is inlined into t, and the closure elimination rewrites the closure invocation to the body method assertInvoke(t1, "C", "C$$$anonfun$2") - val t2 = getSingleMethod(c, "t2") + val t2 = getMethod(c, "t2") assertNoIndy(t2) assertInvoke(t2, "M$", "M$$$anonfun$1") } @@ -964,9 +964,9 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c) = compile(code) - val hMeth = findAsmMethod(c, "h") - val gMeth = findAsmMethod(c, "g") - val iMeth = findAsmMethod(c, "i") + val hMeth = getAsmMethod(c, "h") + val gMeth = getAsmMethod(c, "g") + val iMeth = getAsmMethod(c, "i") val fCall = getCallsite(gMeth, "f") val gCall = getCallsite(hMeth, "g") val hCall = getCallsite(iMeth, "h") @@ -993,7 +993,7 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(cl) = compile(code) - val List(b, c, d) = List("b", "c", "d").map(findAsmMethod(cl, _)) + val List(b, c, d) = List("b", "c", "d").map(getAsmMethod(cl, _)) val aCall = getCallsite(b, "a") val bCall = getCallsite(c, "b") val cCall = getCallsite(d, "c") @@ -1033,15 +1033,15 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c) = compile(code) - assertInvoke(getSingleMethod(c, "t1"), "C", "C$$$anonfun$1") - assertInvoke(getSingleMethod(c, "t2"), "C", "a") - assertInvoke(getSingleMethod(c, "t3"), "C", "b") - assertNoInvoke(getSingleMethod(c, "t4")) - assertNoInvoke(getSingleMethod(c, "t5")) - assertNoInvoke(getSingleMethod(c, "t6")) - assertInvoke(getSingleMethod(c, "t7"), "C", "c") - assertInvoke(getSingleMethod(c, "t8"), "scala/Predef$", "println") - assertNoInvoke(getSingleMethod(c, "t9")) + assertInvoke(getMethod(c, "t1"), "C", "C$$$anonfun$1") + assertInvoke(getMethod(c, "t2"), "C", "a") + assertInvoke(getMethod(c, "t3"), "C", "b") + assertNoInvoke(getMethod(c, "t4")) + assertNoInvoke(getMethod(c, "t5")) + assertNoInvoke(getMethod(c, "t6")) + assertInvoke(getMethod(c, "t7"), "C", "c") + assertInvoke(getMethod(c, "t8"), "scala/Predef$", "println") + assertNoInvoke(getMethod(c, "t9")) } @Test @@ -1066,15 +1066,15 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c) = compile(code) - assertNoInvoke(getSingleMethod(c, "t1")) - assertInvoke(getSingleMethod(c, "t2"), "C", "f2") - assertInvoke(getSingleMethod(c, "t3"), "C", "f1") - assertInvoke(getSingleMethod(c, "t4"), "C", "f2") - assertNoInvoke(getSingleMethod(c, "t5")) - assertInvoke(getSingleMethod(c, "t6"), "C", "f3") - assertNoInvoke(getSingleMethod(c, "t7")) - assertInvoke(getSingleMethod(c, "t8"), "C", "f1") - assertNoInvoke(getSingleMethod(c, "t9")) + assertNoInvoke(getMethod(c, "t1")) + assertInvoke(getMethod(c, "t2"), "C", "f2") + assertInvoke(getMethod(c, "t3"), "C", "f1") + assertInvoke(getMethod(c, "t4"), "C", "f2") + assertNoInvoke(getMethod(c, "t5")) + assertInvoke(getMethod(c, "t6"), "C", "f3") + assertNoInvoke(getMethod(c, "t7")) + assertInvoke(getMethod(c, "t8"), "C", "f1") + assertNoInvoke(getMethod(c, "t9")) } @Test @@ -1097,11 +1097,11 @@ class InlinerTest extends BytecodeTesting { """.stripMargin 
val List(c) = compile(code) - assertInvoke(getSingleMethod(c, "t1"), "C", "C$$$anonfun$1") - assertInvoke(getSingleMethod(c, "t2"), "C", "C$$$anonfun$2") - assertInvoke(getSingleMethod(c, "t3"), "scala/Function1", "apply$mcII$sp") - assertInvoke(getSingleMethod(c, "t4"), "scala/Function1", "apply$mcII$sp") - assertInvoke(getSingleMethod(c, "t5"), "C", "h") + assertInvoke(getMethod(c, "t1"), "C", "C$$$anonfun$1") + assertInvoke(getMethod(c, "t2"), "C", "C$$$anonfun$2") + assertInvoke(getMethod(c, "t3"), "scala/Function1", "apply$mcII$sp") + assertInvoke(getMethod(c, "t4"), "scala/Function1", "apply$mcII$sp") + assertInvoke(getMethod(c, "t5"), "C", "h") } @Test @@ -1121,7 +1121,7 @@ class InlinerTest extends BytecodeTesting { |when entering an exception handler declared in the inlined method.""".stripMargin val List(c) = compile(code, allowMessage = _.msg contains warn) - assertInvoke(getSingleMethod(c, "t"), "C", "g") + assertInvoke(getMethod(c, "t"), "C", "g") } @Test @@ -1145,8 +1145,8 @@ class InlinerTest extends BytecodeTesting { |that would cause an IllegalAccessError when inlined into class D.""".stripMargin val List(c, d) = compile(code, allowMessage = _.msg contains warn) - assertInvoke(getSingleMethod(c, "h"), "C", "f$1") - assertInvoke(getSingleMethod(d, "t"), "C", "h") + assertInvoke(getMethod(c, "h"), "C", "f$1") + assertInvoke(getMethod(d, "t"), "C", "h") } @Test @@ -1164,8 +1164,8 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c, d) = compile(code) - assertNoInvoke(getSingleMethod(c, "g")) - assertNoInvoke(getSingleMethod(d, "t")) + assertNoInvoke(getMethod(c, "g")) + assertNoInvoke(getMethod(d, "t")) } @Test @@ -1273,40 +1273,40 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c, _, _) = compile(code) - assertSameSummary(getSingleMethod(c, "t1"), List(BIPUSH, "C$$$anonfun$1", IRETURN)) - assertSameSummary(getSingleMethod(c, "t1a"), List(LCONST_1, "C$$$anonfun$2", IRETURN)) - assertSameSummary(getSingleMethod(c, "t2"), List(ICONST_1, ICONST_2, "C$$$anonfun$3",IRETURN)) + assertSameSummary(getMethod(c, "t1"), List(BIPUSH, "C$$$anonfun$1", IRETURN)) + assertSameSummary(getMethod(c, "t1a"), List(LCONST_1, "C$$$anonfun$2", IRETURN)) + assertSameSummary(getMethod(c, "t2"), List(ICONST_1, ICONST_2, "C$$$anonfun$3",IRETURN)) // val a = new ValKl(n); new ValKl(anonfun(a.x)).x // value class instantiation-extraction should be optimized by boxing elim - assertSameSummary(getSingleMethod(c, "t3"), List( + assertSameSummary(getMethod(c, "t3"), List( NEW, DUP, ICONST_1, "", ASTORE, NEW, DUP, ALOAD, "x", "C$$$anonfun$4", "", "x", IRETURN)) - assertSameSummary(getSingleMethod(c, "t4"), List(BIPUSH, "C$$$anonfun$5", "boxToInteger", ARETURN)) - assertSameSummary(getSingleMethod(c, "t4a"), List(ICONST_1, LDC, "C$$$anonfun$6", LRETURN)) - assertSameSummary(getSingleMethod(c, "t5"), List(BIPUSH, ICONST_3, "C$$$anonfun$7", "boxToInteger", ARETURN)) - assertSameSummary(getSingleMethod(c, "t5a"), List(BIPUSH, BIPUSH, I2B, "C$$$anonfun$8", IRETURN)) - assertSameSummary(getSingleMethod(c, "t6"), List(BIPUSH, "C$$$anonfun$9", RETURN)) - assertSameSummary(getSingleMethod(c, "t7"), List(ICONST_1, "C$$$anonfun$10", RETURN)) - assertSameSummary(getSingleMethod(c, "t8"), List(ICONST_1, LDC, "C$$$anonfun$11", LRETURN)) - assertSameSummary(getSingleMethod(c, "t9"), List(ICONST_1, "boxToInteger", "C$$$anonfun$12", RETURN)) + assertSameSummary(getMethod(c, "t4"), List(BIPUSH, "C$$$anonfun$5", "boxToInteger", ARETURN)) + assertSameSummary(getMethod(c, "t4a"), 
List(ICONST_1, LDC, "C$$$anonfun$6", LRETURN)) + assertSameSummary(getMethod(c, "t5"), List(BIPUSH, ICONST_3, "C$$$anonfun$7", "boxToInteger", ARETURN)) + assertSameSummary(getMethod(c, "t5a"), List(BIPUSH, BIPUSH, I2B, "C$$$anonfun$8", IRETURN)) + assertSameSummary(getMethod(c, "t6"), List(BIPUSH, "C$$$anonfun$9", RETURN)) + assertSameSummary(getMethod(c, "t7"), List(ICONST_1, "C$$$anonfun$10", RETURN)) + assertSameSummary(getMethod(c, "t8"), List(ICONST_1, LDC, "C$$$anonfun$11", LRETURN)) + assertSameSummary(getMethod(c, "t9"), List(ICONST_1, "boxToInteger", "C$$$anonfun$12", RETURN)) // t9a inlines Range.foreach, which is quite a bit of code, so just testing the core - assertInvoke(getSingleMethod(c, "t9a"), "C", "C$$$anonfun$13") - assertInvoke(getSingleMethod(c, "t9a"), "scala/runtime/BoxesRunTime", "boxToInteger") + assertInvoke(getMethod(c, "t9a"), "C", "C$$$anonfun$13") + assertInvoke(getMethod(c, "t9a"), "scala/runtime/BoxesRunTime", "boxToInteger") - assertSameSummary(getSingleMethod(c, "t10"), List( + assertSameSummary(getMethod(c, "t10"), List( ICONST_1, ISTORE, ALOAD, ILOAD, "C$$$anonfun$14", RETURN)) // t10a inlines Range.foreach - assertInvoke(getSingleMethod(c, "t10a"), "C", "C$$$anonfun$15") - assertDoesNotInvoke(getSingleMethod(c, "t10a"), "boxToInteger") + assertInvoke(getMethod(c, "t10a"), "C", "C$$$anonfun$15") + assertDoesNotInvoke(getMethod(c, "t10a"), "boxToInteger") } @Test @@ -1329,8 +1329,8 @@ class InlinerTest extends BytecodeTesting { |} """.stripMargin val List(c) = compile(code) - assertSameCode(getSingleMethod(c, "t1"), List(Op(ICONST_0), Op(ICONST_1), Op(IADD), Op(IRETURN))) - assertEquals(getSingleMethod(c, "t2").instructions collect { case i: Invoke => i.owner +"."+ i.name }, List( + assertSameCode(getMethod(c, "t1"), List(Op(ICONST_0), Op(ICONST_1), Op(IADD), Op(IRETURN))) + assertEquals(getInstructions(c, "t2") collect { case i: Invoke => i.owner +"."+ i.name }, List( "scala/runtime/IntRef.create", "C.C$$$anonfun$1")) } @@ -1370,11 +1370,11 @@ class InlinerTest extends BytecodeTesting { |} """.stripMargin val List(c) = compile(code) - assertSameCode(getSingleMethod(c, "t1"), List(Op(ICONST_3), Op(ICONST_4), Op(IADD), Op(IRETURN))) - assertSameCode(getSingleMethod(c, "t2"), List(Op(ICONST_1), Op(ICONST_2), Op(IADD), Op(IRETURN))) - assertSameCode(getSingleMethod(c, "t3"), List(Op(ICONST_1), Op(ICONST_3), Op(ISUB), Op(IRETURN))) - assertNoInvoke(getSingleMethod(c, "t4")) - assertNoInvoke(getSingleMethod(c, "t5")) + assertSameCode(getMethod(c, "t1"), List(Op(ICONST_3), Op(ICONST_4), Op(IADD), Op(IRETURN))) + assertSameCode(getMethod(c, "t2"), List(Op(ICONST_1), Op(ICONST_2), Op(IADD), Op(IRETURN))) + assertSameCode(getMethod(c, "t3"), List(Op(ICONST_1), Op(ICONST_3), Op(ISUB), Op(IRETURN))) + assertNoInvoke(getMethod(c, "t4")) + assertNoInvoke(getMethod(c, "t5")) } @Test @@ -1400,10 +1400,10 @@ class InlinerTest extends BytecodeTesting { |class D extends C """.stripMargin val List(c, _) = compile(code) - def casts(m: String) = getSingleMethod(c, m).instructions collect { case TypeOp(CHECKCAST, tp) => tp } - assertSameCode(getSingleMethod(c, "t1"), List(VarOp(ALOAD, 1), Op(ARETURN))) - assertSameCode(getSingleMethod(c, "t2"), List(VarOp(ALOAD, 1), Op(ARETURN))) - assertSameCode(getSingleMethod(c, "t3"), List(VarOp(ALOAD, 1), TypeOp(CHECKCAST, "C"), Op(ARETURN))) + def casts(m: String) = getInstructions(c, m) collect { case TypeOp(CHECKCAST, tp) => tp } + assertSameCode(getMethod(c, "t1"), List(VarOp(ALOAD, 1), Op(ARETURN))) + assertSameCode(getMethod(c, 
"t2"), List(VarOp(ALOAD, 1), Op(ARETURN))) + assertSameCode(getMethod(c, "t3"), List(VarOp(ALOAD, 1), TypeOp(CHECKCAST, "C"), Op(ARETURN))) assertEquals(casts("t4"), List("C")) assertEquals(casts("t5"), Nil) assertEquals(casts("t6"), Nil) @@ -1428,8 +1428,8 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val cls = compile(code) - val test = cls.find(_.name == "Test$").get - assertSameSummary(getSingleMethod(test, "f"), List( + val test = findClass(cls, "Test$") + assertSameSummary(getMethod(test, "f"), List( GETSTATIC, "mkFoo", BIPUSH, ISTORE, IFNONNULL, ACONST_NULL, ATHROW, -1 /*label*/, @@ -1448,7 +1448,7 @@ class InlinerTest extends BytecodeTesting { val List(c) = compile(code) // box-unbox will clean it up - assertSameSummary(getSingleMethod(c, "t"), List( + assertSameSummary(getMethod(c, "t"), List( ALOAD, "C$$$anonfun$1", IFEQ /*A*/, "C$$$anonfun$2", IRETURN, -1 /*A*/, "C$$$anonfun$3", IRETURN)) @@ -1460,7 +1460,7 @@ class InlinerTest extends BytecodeTesting { val codeB = "class B { def t(a: A) = a.f }" // tests that no warning is emitted val List(a, b) = compileClassesSeparately(List(codeA, codeB), extraArgs = "-Yopt:l:project -Yopt-warnings") - assertInvoke(getSingleMethod(b, "t"), "A", "f") + assertInvoke(getMethod(b, "t"), "A", "f") } @Test @@ -1472,7 +1472,7 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c, t1, t2) = compile(code, allowMessage = _ => true) // the forwarder C.f is inlined, so there's no invocation - assertSameSummary(getSingleMethod(c, "f"), List(ICONST_1, IRETURN)) + assertSameSummary(getMethod(c, "f"), List(ICONST_1, IRETURN)) } @Test @@ -1485,7 +1485,7 @@ class InlinerTest extends BytecodeTesting { |class C { def t = (new K).f } """.stripMargin val c :: _ = compile(code) - assertSameSummary(getSingleMethod(c, "t"), List(NEW, "", ICONST_1, IRETURN)) // ICONST_1, U.f is inlined (not T.f) + assertSameSummary(getMethod(c, "t"), List(NEW, "", ICONST_1, IRETURN)) // ICONST_1, U.f is inlined (not T.f) } @Test @@ -1497,7 +1497,7 @@ class InlinerTest extends BytecodeTesting { |} """.stripMargin val List(c) = compile(code) - val t = getSingleMethod(c, "t") + val t = getMethod(c, "t") assertNoIndy(t) assertInvoke(t, "C", "C$$$anonfun$1") } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala index 3867f101451d..fa76c0d93086 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala @@ -22,20 +22,20 @@ class MethodLevelOptsTest extends BytecodeTesting { def wrapInDefault(code: Instruction*) = List(Label(0), LineNumber(1, Label(0))) ::: code.toList ::: List(Label(1)) - def locals(c: ClassNode, m: String) = findAsmMethod(c, m).localVariables.asScala.toList.map(l => (l.name, l.index)).sortBy(_._2) + def locals(c: ClassNode, m: String) = getAsmMethod(c, m).localVariables.asScala.toList.map(l => (l.name, l.index)).sortBy(_._2) @Test def eliminateEmptyTry(): Unit = { val code = "def f = { try {} catch { case _: Throwable => 0; () }; 1 }" val warn = "a pure expression does nothing in statement position" - assertSameCode(singleMethodInstructions(code, allowMessage = _.msg contains warn), wrapInDefault(Op(ICONST_1), Op(IRETURN))) + assertSameCode(compileInstructions(code, allowMessage = _.msg contains warn), wrapInDefault(Op(ICONST_1), Op(IRETURN))) } @Test def eliminateLoadBoxedUnit(): Unit = { // the compiler inserts a boxed into the try block. 
it's therefore non-empty (and live) and not eliminated. val code = "def f = { try {} catch { case _: Throwable => 0 }; 1 }" - val m = singleMethod(code) + val m = compileMethod(code) assertTrue(m.handlers.length == 0) assertSameCode(m, List(Op(ICONST_1), Op(IRETURN))) } @@ -44,7 +44,7 @@ class MethodLevelOptsTest extends BytecodeTesting { def inlineThrowInCatchNotTry(): Unit = { // the try block does not contain the `ATHROW` instruction, but in the catch block, `ATHROW` is inlined val code = "def f(e: Exception) = throw { try e catch { case _: Throwable => e } }" - val m = singleMethod(code) + val m = compileMethod(code) assertHandlerLabelPostions(m.handlers.head, m.instructions, 0, 3, 5) assertSameCode(m.instructions, wrapInDefault(VarOp(ALOAD, 1), Label(3), Op(ATHROW), Label(5), FrameEntry(4, List(), List("java/lang/Throwable")), Op(POP), VarOp(ALOAD, 1), Op(ATHROW)) @@ -55,7 +55,7 @@ class MethodLevelOptsTest extends BytecodeTesting { def inlineReturnInCatchNotTry(): Unit = { val code = "def f: Int = return { try 1 catch { case _: Throwable => 2 } }" // cannot inline the IRETURN into the try block (because RETURN may throw IllegalMonitorState) - val m = singleMethod(code) + val m = compileMethod(code) assertHandlerLabelPostions(m.handlers.head, m.instructions, 0, 3, 5) assertSameCode(m.instructions, wrapInDefault(Op(ICONST_1), Label(3), Op(IRETURN), Label(5), FrameEntry(4, List(), List("java/lang/Throwable")), Op(POP), Op(ICONST_2), Op(IRETURN))) @@ -77,7 +77,7 @@ class MethodLevelOptsTest extends BytecodeTesting { | println(x) | } """.stripMargin - val m = singleMethod(code) + val m = compileMethod(code) assertTrue(m.handlers.isEmpty) assertSameCode(m, List(Op(ICONST_3), Op(IRETURN))) } @@ -97,8 +97,8 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) - assertSameCode(getSingleMethod(c, "t"), List( + val c = compileClass(code) + assertSameCode(getMethod(c, "t"), List( Op(ACONST_NULL), Invoke(INVOKEVIRTUAL, "java/lang/Object", "toString", "()Ljava/lang/String;", false), Op(ARETURN))) } @@ -114,9 +114,9 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) + val c = compileClass(code) assertSameCode( - getSingleMethod(c, "t"), List(Ldc(LDC, "c"), Op(ARETURN))) + getMethod(c, "t"), List(Ldc(LDC, "c"), Op(ARETURN))) } @Test @@ -134,9 +134,9 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) + val c = compileClass(code) - assertSameCode(getSingleMethod(c, "t"), List( + assertSameCode(getMethod(c, "t"), List( Ldc(LDC, "el"), VarOp(ASTORE, 1), Field(GETSTATIC, "scala/Predef$", "MODULE$", "Lscala/Predef$;"), VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false), Op(ACONST_NULL), VarOp(ASTORE, 1), @@ -158,8 +158,8 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) - assertSameCode(getSingleMethod(c, "t"), List( + val c = compileClass(code) + assertSameCode(getMethod(c, "t"), List( IntOp(BIPUSH, 23), IntOp(NEWARRAY, 5), Op(POP), VarOp(ILOAD, 1), VarOp(ILOAD, 2), Op(IADD), Op(IRETURN))) } @@ -173,8 +173,8 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) - assertSameCode(getSingleMethod(c, "t"), List( + val c = compileClass(code) + assertSameCode(getMethod(c, "t"), List( TypeOp(NEW, "java/lang/Integer"), Ldc(LDC, "nono"), 
Invoke(INVOKESPECIAL, "java/lang/Integer", "", "(Ljava/lang/String;)V", false), VarOp(ILOAD, 1), VarOp(ILOAD, 2), Op(IADD), Op(IRETURN))) } @@ -199,8 +199,8 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) - assertSameCode(getSingleMethod(c, "t"), List(Op(ICONST_0), Op(IRETURN))) + val c = compileClass(code) + assertSameCode(getMethod(c, "t"), List(Op(ICONST_0), Op(IRETURN))) } @Test @@ -215,8 +215,8 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) - assertSameCode(getSingleMethod(c, "t"), List( + val c = compileClass(code) + assertSameCode(getMethod(c, "t"), List( IntOp(BIPUSH, 30), VarOp(ISTORE, 3), // no constant propagation, so we keep the store (and load below) of a const VarOp(ILOAD, 1), VarOp(ILOAD, 2), @@ -236,8 +236,8 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) - val t = getSingleMethod(c, "t") + val c = compileClass(code) + val t = getMethod(c, "t") assert(!t.instructions.exists(_.opcode == INVOKEDYNAMIC), t) } @@ -317,23 +317,23 @@ class MethodLevelOptsTest extends BytecodeTesting { |} """.stripMargin - val List(c) = compileClasses(code) - - assertNoInvoke(getSingleMethod(c, "t1")) - assertNoInvoke(getSingleMethod(c, "t2")) - assertInvoke(getSingleMethod(c, "t3"), "scala/runtime/BoxesRunTime", "unboxToInt") - assertInvoke(getSingleMethod(c, "t4"), "scala/runtime/BoxesRunTime", "boxToLong") - assertNoInvoke(getSingleMethod(c, "t5")) - assertNoInvoke(getSingleMethod(c, "t6")) - assertNoInvoke(getSingleMethod(c, "t7")) - assertSameSummary(getSingleMethod(c, "t8"), List(ICONST_0, IRETURN)) - assertNoInvoke(getSingleMethod(c, "t9")) + val c = compileClass(code) + + assertNoInvoke(getMethod(c, "t1")) + assertNoInvoke(getMethod(c, "t2")) + assertInvoke(getMethod(c, "t3"), "scala/runtime/BoxesRunTime", "unboxToInt") + assertInvoke(getMethod(c, "t4"), "scala/runtime/BoxesRunTime", "boxToLong") + assertNoInvoke(getMethod(c, "t5")) + assertNoInvoke(getMethod(c, "t6")) + assertNoInvoke(getMethod(c, "t7")) + assertSameSummary(getMethod(c, "t8"), List(ICONST_0, IRETURN)) + assertNoInvoke(getMethod(c, "t9")) // t10: no invocation of unbox - assertEquals(getSingleMethod(c, "t10").instructions collect { case Invoke(_, owner, name, _, _) => (owner, name) }, List( + assertEquals(getInstructions(c, "t10") collect { case Invoke(_, owner, name, _, _) => (owner, name) }, List( ("java/lang/Integer", "valueOf"), ("C", "escape"))) - assertSameSummary(getSingleMethod(c, "t11"), List( + assertSameSummary(getMethod(c, "t11"), List( BIPUSH, "valueOf", ASTORE /*2*/, BIPUSH, "valueOf", ASTORE /*3*/, ALOAD /*0*/, ALOAD /*2*/, "escape", @@ -341,7 +341,7 @@ class MethodLevelOptsTest extends BytecodeTesting { ASTORE /*4*/, GETSTATIC /*Predef*/, ALOAD /*4*/, "Integer2int", IRETURN)) // no unbox invocations - assertEquals(getSingleMethod(c, "t12").instructions collect { case Invoke(_, owner, name, _, _) => (owner, name) }, List( + assertEquals(getInstructions(c, "t12") collect { case Invoke(_, owner, name, _, _) => (owner, name) }, List( ("java/lang/Integer", "valueOf"), ("java/lang/Integer", "valueOf"), ("C", "escape"))) @@ -393,14 +393,14 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) - assertSameSummary(getSingleMethod(c, "t1"), List(ICONST_0, IRETURN)) - assertNoInvoke(getSingleMethod(c, "t2")) - assertSameSummary(getSingleMethod(c, "t3"), 
List(LDC, LDC, LADD, LRETURN)) - assertNoInvoke(getSingleMethod(c, "t4")) - assertEquals(getSingleMethod(c, "t5").instructions collect { case Field(_, owner, name, _) => s"$owner.$name" }, + val c = compileClass(code) + assertSameSummary(getMethod(c, "t1"), List(ICONST_0, IRETURN)) + assertNoInvoke(getMethod(c, "t2")) + assertSameSummary(getMethod(c, "t3"), List(LDC, LDC, LADD, LRETURN)) + assertNoInvoke(getMethod(c, "t4")) + assertEquals(getInstructions(c, "t5") collect { case Field(_, owner, name, _) => s"$owner.$name" }, List("scala/runtime/IntRef.elem")) - assertEquals(getSingleMethod(c, "t6").instructions collect { case Field(op, owner, name, _) => s"$op $owner.$name" }, + assertEquals(getInstructions(c, "t6") collect { case Field(op, owner, name, _) => s"$op $owner.$name" }, List(s"$PUTFIELD scala/runtime/IntRef.elem", s"$GETFIELD scala/runtime/IntRef.elem")) } @@ -457,23 +457,23 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) - assertNoInvoke(getSingleMethod(c, "t1")) - assertSameSummary(getSingleMethod(c, "t2"), List(ICONST_1, ICONST_3, IADD, IRETURN)) - assertSameSummary(getSingleMethod(c, "t3"), List(ICONST_3, ICONST_4, IADD, IRETURN)) - assertSameSummary(getSingleMethod(c, "t4"), List(ICONST_3, "boxToInteger", ARETURN)) - assertEquals(getSingleMethod(c, "t5").instructions collect { case Invoke(_, owner, name, _, _) => (owner, name) }, List( + val c = compileClass(code) + assertNoInvoke(getMethod(c, "t1")) + assertSameSummary(getMethod(c, "t2"), List(ICONST_1, ICONST_3, IADD, IRETURN)) + assertSameSummary(getMethod(c, "t3"), List(ICONST_3, ICONST_4, IADD, IRETURN)) + assertSameSummary(getMethod(c, "t4"), List(ICONST_3, "boxToInteger", ARETURN)) + assertEquals(getInstructions(c, "t5") collect { case Invoke(_, owner, name, _, _) => (owner, name) }, List( ("scala/runtime/BoxesRunTime", "boxToInteger"), ("scala/runtime/BoxesRunTime", "boxToInteger"), ("C", "tpl"), ("scala/Tuple2", "_1$mcI$sp"))) - assertSameSummary(getSingleMethod(c, "t6"), List(ICONST_1, ICONST_2, ISUB, IRETURN)) - assertSameSummary(getSingleMethod(c, "t7"), List( + assertSameSummary(getMethod(c, "t6"), List(ICONST_1, ICONST_2, ISUB, IRETURN)) + assertSameSummary(getMethod(c, "t7"), List( ICONST_1, ICONST_2, ISTORE, ISTORE, ICONST_3, ISTORE, ILOAD, ILOAD, IADD, ILOAD, IADD, IRETURN)) - assertNoInvoke(getSingleMethod(c, "t8")) - assertNoInvoke(getSingleMethod(c, "t9")) + assertNoInvoke(getMethod(c, "t8")) + assertNoInvoke(getMethod(c, "t9")) } @Test @@ -522,14 +522,14 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) - assertSameSummary(getSingleMethod(c, "t1"), List(NEW, DUP, "", ARETURN)) - assertSameCode(getSingleMethod(c, "t2"), List(Op(LCONST_0), Op(LRETURN))) - assertSameCode(getSingleMethod(c, "t3"), List(Op(ICONST_1), Op(IRETURN))) - assertSameCode(getSingleMethod(c, "t4"), List(Op(ICONST_1), Op(IRETURN))) - assertSameCode(getSingleMethod(c, "t5"), List(Op(DCONST_0), Op(DRETURN))) - assertSameCode(getSingleMethod(c, "t6"), List(Op(ACONST_NULL), Op(ARETURN))) - assertSameCode(getSingleMethod(c, "t7"), List(Op(ICONST_0), Op(IRETURN))) + val c = compileClass(code) + assertSameSummary(getMethod(c, "t1"), List(NEW, DUP, "", ARETURN)) + assertSameCode(getMethod(c, "t2"), List(Op(LCONST_0), Op(LRETURN))) + assertSameCode(getMethod(c, "t3"), List(Op(ICONST_1), Op(IRETURN))) + assertSameCode(getMethod(c, "t4"), List(Op(ICONST_1), Op(IRETURN))) + assertSameCode(getMethod(c, "t5"), 
List(Op(DCONST_0), Op(DRETURN))) + assertSameCode(getMethod(c, "t6"), List(Op(ACONST_NULL), Op(ARETURN))) + assertSameCode(getMethod(c, "t7"), List(Op(ICONST_0), Op(IRETURN))) } @Test @@ -542,9 +542,9 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) + val c = compileClass(code) assertSameCode( - getSingleMethod(c, "t"), List( + getMethod(c, "t"), List( VarOp(ALOAD, 1), Jump(IFNULL, Label(6)), Op(ICONST_1), Op(IRETURN), Label(6), Op(ICONST_0), Op(IRETURN))) } @@ -613,28 +613,28 @@ class MethodLevelOptsTest extends BytecodeTesting { |} """.stripMargin - val List(c) = compileClasses(code) - def stores(m: String) = getSingleMethod(c, m).instructions.filter(_.opcode == ASTORE) + val c = compileClass(code) + def stores(m: String) = getInstructions(c, m).filter(_.opcode == ASTORE) assertEquals(locals(c, "t1"), List(("this",0), ("kept1",1), ("result",2))) assert(stores("t1") == List(VarOp(ASTORE, 1), VarOp(ASTORE, 2), VarOp(ASTORE, 1), VarOp(ASTORE, 1)), - textify(findAsmMethod(c, "t1"))) + textify(getAsmMethod(c, "t1"))) assertEquals(locals(c, "t2"), List(("this",0), ("kept2",1), ("kept3",2))) assert(stores("t2") == List(VarOp(ASTORE, 1), VarOp(ASTORE, 2), VarOp(ASTORE, 1)), - textify(findAsmMethod(c, "t2"))) + textify(getAsmMethod(c, "t2"))) assertEquals(locals(c, "t3"), List(("this",0), ("kept4",1))) assert(stores("t3") == List(VarOp(ASTORE, 1), VarOp(ASTORE, 1)), - textify(findAsmMethod(c, "t3"))) + textify(getAsmMethod(c, "t3"))) assertEquals(locals(c, "t4"), List(("this",0), ("kept5",1))) assert(stores("t4") == List(VarOp(ASTORE, 1), VarOp(ASTORE, 1)), - textify(findAsmMethod(c, "t4"))) + textify(getAsmMethod(c, "t4"))) assertEquals(locals(c, "t5"), List(("this",0), ("kept6",1))) assert(stores("t5") == List(VarOp(ASTORE, 1), VarOp(ASTORE, 1)), - textify(findAsmMethod(c, "t5"))) + textify(getAsmMethod(c, "t5"))) } @Test @@ -681,13 +681,13 @@ class MethodLevelOptsTest extends BytecodeTesting { |} """.stripMargin - val List(c) = compileClasses(code) + val c = compileClass(code) assertEquals(locals(c, "t1"), List(("this", 0), ("x", 1))) assertEquals(locals(c, "t2"), List(("this", 0), ("x", 1))) // we don't have constant propagation (yet). 
// the local var can't be optimized as a store;laod sequence, there's a GETSTATIC between the two - assertSameSummary(getSingleMethod(c, "t2"), List( + assertSameSummary(getMethod(c, "t2"), List( ICONST_2, ISTORE, GETSTATIC, ILOAD, "boxToInteger", "println", RETURN)) assertEquals(locals(c, "t3"), List(("this", 0))) @@ -709,8 +709,8 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) - val t = getSingleMethod(c, "t") + val c = compileClass(code) + val t = getMethod(c, "t") assertEquals(t.handlers, Nil) assertEquals(locals(c, "t"), List(("this", 0))) assertSameSummary(t, List(GETSTATIC, LDC, "print", -1, GOTO)) @@ -727,8 +727,8 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) - assertNoInvoke(getSingleMethod(c, "compare")) + val c = compileClass(code) + assertNoInvoke(getMethod(c, "compare")) } @Test @@ -741,9 +741,9 @@ class MethodLevelOptsTest extends BytecodeTesting { | } |} """.stripMargin - val List(c) = compileClasses(code) + val c = compileClass(code) - assertSameSummary(getSingleMethod(c, "t"), List( + assertSameSummary(getMethod(c, "t"), List( BIPUSH, ILOAD, IF_ICMPNE, BIPUSH, ILOAD, IF_ICMPNE, LDC, ASTORE, GOTO, diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala index 99a662b8977a..63bbcc396bb6 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala @@ -110,10 +110,10 @@ class UnreachableCodeTest extends ClearAfterClass { @Test def basicEliminationCompiler(): Unit = { val code = "def f: Int = { return 1; 2 }" - val withDce = dceCompiler.singleMethodInstructions(code) + val withDce = dceCompiler.compileInstructions(code) assertSameCode(withDce.dropNonOp, List(Op(ICONST_1), Op(IRETURN))) - val noDce = noOptCompiler.singleMethodInstructions(code) + val noDce = noOptCompiler.compileInstructions(code) // The emitted code is ICONST_1, IRETURN, ICONST_2, IRETURN. The latter two are dead. // @@ -139,23 +139,23 @@ class UnreachableCodeTest extends ClearAfterClass { def wrapInDefault(code: Instruction*) = List(Label(0), LineNumber(1, Label(0))) ::: code.toList ::: List(Label(1)) val code = "def f: Int = { return 0; try { 1 } catch { case _: Exception => 2 } }" - val m = dceCompiler.singleMethod(code) + val m = dceCompiler.compileMethod(code) assertTrue(m.handlers.isEmpty) // redundant (if code is gone, handler is gone), but done once here for extra safety assertSameCode(m.instructions, wrapInDefault(Op(ICONST_0), Op(IRETURN))) val code2 = "def f: Unit = { try { } catch { case _: Exception => () }; () }" // requires fixpoint optimization of methodOptCompiler (dce alone is not enough): first the handler is eliminated, then it's dead catch block. - assertSameCode(methodOptCompiler.singleMethodInstructions(code2), wrapInDefault(Op(RETURN))) + assertSameCode(methodOptCompiler.compileInstructions(code2), wrapInDefault(Op(RETURN))) val code3 = "def f: Unit = { try { } catch { case _: Exception => try { } catch { case _: Exception => () } }; () }" - assertSameCode(methodOptCompiler.singleMethodInstructions(code3), wrapInDefault(Op(RETURN))) + assertSameCode(methodOptCompiler.compileInstructions(code3), wrapInDefault(Op(RETURN))) // this example requires two iterations to get rid of the outer handler. // the first iteration of DCE cannot remove the inner handler. 
then the inner (empty) handler is removed. // then the second iteration of DCE removes the inner catch block, and then the outer handler is removed. val code4 = "def f: Unit = { try { try { } catch { case _: Exception => () } } catch { case _: Exception => () }; () }" - assertSameCode(methodOptCompiler.singleMethodInstructions(code4), wrapInDefault(Op(RETURN))) + assertSameCode(methodOptCompiler.compileInstructions(code4), wrapInDefault(Op(RETURN))) } @Test // test the dce-testing tools @@ -214,35 +214,35 @@ class UnreachableCodeTest extends ClearAfterClass { | def t4 = cons(nt) |} """.stripMargin - val List(c) = noOptCompiler.compileClasses(code) + val c = noOptCompiler.compileClass(code) - assertSameSummary(getSingleMethod(c, "nl"), List(ACONST_NULL, ARETURN)) + assertSameSummary(getMethod(c, "nl"), List(ACONST_NULL, ARETURN)) - assertSameSummary(getSingleMethod(c, "nt"), List( + assertSameSummary(getMethod(c, "nt"), List( NEW, DUP, LDC, "<init>", ATHROW)) - assertSameSummary(getSingleMethod(c, "t1"), List( + assertSameSummary(getMethod(c, "t1"), List( ALOAD, ACONST_NULL, "cons", RETURN)) // GenBCode introduces POP; ACONST_NULL after loading an expression of type scala.runtime.Null$, // see comment in BCodeBodyBuilder.adapt - assertSameSummary(getSingleMethod(c, "t2"), List( + assertSameSummary(getMethod(c, "t2"), List( ALOAD, ALOAD, "nl", POP, ACONST_NULL, "cons", RETURN)) // the bytecode generated by GenBCode is ... ATHROW; INVOKEVIRTUAL C.cons; RETURN // the ASM classfile writer creates a new basic block (creates a label) right after the ATHROW // and replaces all instructions by NOP*; ATHROW, see comment in BCodeBodyBuilder.adapt // NOTE: DCE is enabled by default and gets rid of the redundant code (tested below) - assertSameSummary(getSingleMethod(c, "t3"), List( + assertSameSummary(getMethod(c, "t3"), List( ALOAD, NEW, DUP, LDC, "<init>", ATHROW, NOP, NOP, NOP, ATHROW)) // GenBCode introduces an ATHROW after the invocation of C.nt, see BCodeBodyBuilder.adapt // NOTE: DCE is enabled by default and gets rid of the redundant code (tested below) - assertSameSummary(getSingleMethod(c, "t4"), List( + assertSameSummary(getMethod(c, "t4"), List( ALOAD, ALOAD, "nt", ATHROW, NOP, NOP, NOP, ATHROW)) - val List(cDCE) = dceCompiler.compileClasses(code) - assertSameSummary(getSingleMethod(cDCE, "t3"), List(ALOAD, NEW, DUP, LDC, "<init>", ATHROW)) - assertSameSummary(getSingleMethod(cDCE, "t4"), List(ALOAD, ALOAD, "nt", ATHROW)) + val cDCE = dceCompiler.compileClass(code) + assertSameSummary(getMethod(cDCE, "t3"), List(ALOAD, NEW, DUP, LDC, "<init>", ATHROW)) + assertSameSummary(getMethod(cDCE, "t4"), List(ALOAD, ALOAD, "nt", ATHROW)) } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala index 303600aa70fd..c9c98b403b56 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala @@ -8,9 +8,9 @@ import org.junit.runner.RunWith import org.junit.runners.JUnit4 import scala.collection.JavaConverters._ -import scala.tools.partest.ASMConverters import scala.tools.partest.ASMConverters._ import scala.tools.testing.BytecodeTesting +import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class UnusedLocalVariablesTest extends BytecodeTesting { @@ -48,7 +48,7 @@ class UnusedLocalVariablesTest extends BytecodeTesting { | } |} |""".stripMargin - val cls = compileClasses(code).head + val cls = 
compileClass(code) val m = convertMethod(cls.methods.asScala.toList.find(_.desc == "(I)V").get) assertTrue(m.localVars.length == 2) // this, a, but not y @@ -69,19 +69,14 @@ class UnusedLocalVariablesTest extends BytecodeTesting { |} """.stripMargin - val clss2 = compileClasses(code2) - val cls2 = clss2.find(_.name == "C").get - val companion2 = clss2.find(_.name == "C$").get + val List(cls2, companion2) = compileClasses(code2) - val clsConstr = convertMethod(cls2.methods.asScala.toList.find(_.name == "<init>").get) - val companionConstr = convertMethod(companion2.methods.asScala.toList.find(_.name == "<init>").get) - - assertTrue(clsConstr.localVars.length == 1) // this - assertTrue(companionConstr.localVars.length == 1) // this + assertTrue(getMethod(cls2, "<init>").localVars.length == 1) // this + assertTrue(getMethod(companion2, "<init>").localVars.length == 1) // this } def assertLocalVarCount(code: String, numVars: Int): Unit = { - assertTrue(singleMethod(code).localVars.length == numVars) + assertTrue(compileMethod(code).localVars.length == numVars) } } diff --git a/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala b/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala index cc6d1d748356..b6e8d4fbf2d7 100644 --- a/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala +++ b/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala @@ -38,9 +38,9 @@ class PatmatBytecodeTest extends BytecodeTesting { |} """.stripMargin - val List(c) = compileClasses(code) - assert(getSingleMethod(c, "s1").instructions.count(_.opcode == TABLESWITCH) == 1, textify(c)) - assert(getSingleMethod(c, "s2").instructions.count(_.opcode == TABLESWITCH) == 1, textify(c)) + val c = compileClass(code) + assert(getInstructions(c, "s1").count(_.opcode == TABLESWITCH) == 1, textify(c)) + assert(getInstructions(c, "s2").count(_.opcode == TABLESWITCH) == 1, textify(c)) } @Test @@ -66,9 +66,9 @@ class PatmatBytecodeTest extends BytecodeTesting { |} """.stripMargin - val List(c) = compileClasses(code) - assert(getSingleMethod(c, "s1").instructions.count(_.opcode == TABLESWITCH) == 1, textify(c)) - assert(getSingleMethod(c, "s2").instructions.count(_.opcode == TABLESWITCH) == 1, textify(c)) + val c = compileClass(code) + assert(getInstructions(c, "s1").count(_.opcode == TABLESWITCH) == 1, textify(c)) + assert(getInstructions(c, "s2").count(_.opcode == TABLESWITCH) == 1, textify(c)) } @Test @@ -81,9 +81,9 @@ class PatmatBytecodeTest extends BytecodeTesting { | } |} """.stripMargin - val c = optCompiler.compileClasses(code).head + val c :: _ = optCompiler.compileClasses(code) - assertSameSummary(getSingleMethod(c, "a"), List( + assertSameSummary(getMethod(c, "a"), List( NEW, DUP, ICONST_1, LDC, "<init>", "y", ARETURN)) } @@ -98,8 +98,8 @@ class PatmatBytecodeTest extends BytecodeTesting { | } |} """.stripMargin - val c = optCompiler.compileClasses(code).head - assert(!getSingleMethod(c, "a").instructions.exists(i => i.opcode == IFNULL || i.opcode == IFNONNULL), textify(findAsmMethod(c, "a"))) + val c :: _ = optCompiler.compileClasses(code) + assert(!getInstructions(c, "a").exists(i => i.opcode == IFNULL || i.opcode == IFNONNULL), textify(getAsmMethod(c, "a"))) } @Test @@ -112,8 +112,8 @@ class PatmatBytecodeTest extends BytecodeTesting { | } |} """.stripMargin - val c = optCompiler.compileClasses(code).head - assertSameSummary(getSingleMethod(c, "a"), List( + val c :: _ = optCompiler.compileClasses(code) + assertSameSummary(getMethod(c, "a"), List( NEW, DUP, ICONST_1, "boxToInteger", LDC, "<init>",
ASTORE /*1*/, ALOAD /*1*/, "y", ASTORE /*2*/, ALOAD /*1*/, "x", INSTANCEOF, IFNE /*R*/, @@ -133,7 +133,7 @@ class PatmatBytecodeTest extends BytecodeTesting { | } |} """.stripMargin - val c = optCompiler.compileClasses(code, allowMessage = _.msg.contains("may not be exhaustive")).head + val c = optCompiler.compileClass(code, allowMessage = _.msg.contains("may not be exhaustive")) val expected = List( ALOAD /*1*/ , INSTANCEOF /*::*/ , IFEQ /*A*/ , ALOAD, "head", "unboxToInt", ISTORE, GOTO /*B*/ , -1 /*A*/ , NEW /*MatchError*/ , DUP, ALOAD /*1*/ , "<init>", ATHROW, -1 /*B*/ , ILOAD, IRETURN) - assertSameSummary(getSingleMethod(c, "a"), expected) - assertSameSummary(getSingleMethod(c, "b"), expected) + assertSameSummary(getMethod(c, "a"), expected) + assertSameSummary(getMethod(c, "b"), expected) } @Test @@ -166,17 +166,17 @@ class PatmatBytecodeTest extends BytecodeTesting { |} """.stripMargin val List(c, cMod) = optCompiler.compileClasses(code) - assertSameSummary(getSingleMethod(c, "t1"), List(ICONST_1, ICONST_2, IADD, IRETURN)) - assertSameSummary(getSingleMethod(c, "t2"), List(ICONST_1, IRETURN)) - assertInvokedMethods(getSingleMethod(c, "t3"), List("C.tplCall", "scala/Tuple2._1", "scala/Tuple2._2$mcI$sp", "scala/MatchError.<init>", "java/lang/String.length")) - assertInvokedMethods(getSingleMethod(c, "t4"), List("C.tplCall", "scala/Tuple2._2$mcI$sp", "scala/MatchError.<init>")) - assertNoInvoke(getSingleMethod(c, "t5")) - assertSameSummary(getSingleMethod(c, "t6"), List(BIPUSH, IRETURN)) + assertSameSummary(getMethod(c, "t1"), List(ICONST_1, ICONST_2, IADD, IRETURN)) + assertSameSummary(getMethod(c, "t2"), List(ICONST_1, IRETURN)) + assertInvokedMethods(getMethod(c, "t3"), List("C.tplCall", "scala/Tuple2._1", "scala/Tuple2._2$mcI$sp", "scala/MatchError.<init>", "java/lang/String.length")) + assertInvokedMethods(getMethod(c, "t4"), List("C.tplCall", "scala/Tuple2._2$mcI$sp", "scala/MatchError.<init>")) + assertNoInvoke(getMethod(c, "t5")) + assertSameSummary(getMethod(c, "t6"), List(BIPUSH, IRETURN)) // MatchError reachable because of the type pattern `s: String` - assertInvokedMethods(getSingleMethod(c, "t7"), List("C.a", "C.b", "scala/MatchError.<init>", "java/lang/String.length")) - assertSameSummary(getSingleMethod(c, "t8"), List(ALOAD, "b", IRETURN)) + assertInvokedMethods(getMethod(c, "t7"), List("C.a", "C.b", "scala/MatchError.<init>", "java/lang/String.length")) + assertSameSummary(getMethod(c, "t8"), List(ALOAD, "b", IRETURN)) // C allocation not eliminated - constructor may have side-effects. 
- assertSameSummary(getSingleMethod(c, "t9"), List(NEW, DUP, LDC, BIPUSH, "<init>", "a", "toString", ARETURN)) + assertSameSummary(getMethod(c, "t9"), List(NEW, DUP, LDC, BIPUSH, "<init>", "a", "toString", ARETURN)) } } diff --git a/test/junit/scala/tools/testing/BytecodeTesting.scala b/test/junit/scala/tools/testing/BytecodeTesting.scala index d6f8dbc21937..b11ad271483d 100644 --- a/test/junit/scala/tools/testing/BytecodeTesting.scala +++ b/test/junit/scala/tools/testing/BytecodeTesting.scala @@ -44,14 +44,23 @@ class Compiler(val global: Global) { } } - def compile(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[(String, Array[Byte])] = { + def compileToBytes(scalaCode: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[(String, Array[Byte])] = { val run = newRun run.compileSources(makeSourceFile(scalaCode, "unitTestSource.scala") :: javaCode.map(p => makeSourceFile(p._1, p._2))) checkReport(allowMessage) getGeneratedClassfiles(global.settings.outputDirs.getSingleOutput.get) } - def compileTransformed(scalaCode: String, javaCode: List[(String, String)] = Nil, beforeBackend: global.Tree => global.Tree): List[(String, Array[Byte])] = { + def compileClasses(code: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = { + readAsmClasses(compileToBytes(code, javaCode, allowMessage)) + } + + def compileClass(code: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): ClassNode = { + val List(c) = compileClasses(code, javaCode, allowMessage) + c + } + + def compileToBytesTransformed(scalaCode: String, javaCode: List[(String, String)] = Nil, beforeBackend: global.Tree => global.Tree): List[(String, Array[Byte])] = { import global._ settings.stopBefore.value = "jvm" :: Nil val run = newRun @@ -68,22 +77,30 @@ class Compiler(val global: Global) { getGeneratedClassfiles(settings.outputDirs.getSingleOutput.get) } - def compileClasses(code: String, javaCode: List[(String, String)] = Nil, allowMessage: StoreReporter#Info => Boolean = _ => false): List[ClassNode] = { - readAsmClasses(compile(code, javaCode, allowMessage)) + def compileClassesTransformed(scalaCode: String, javaCode: List[(String, String)] = Nil, beforeBackend: global.Tree => global.Tree): List[ClassNode] = + readAsmClasses(compileToBytesTransformed(scalaCode, javaCode, beforeBackend)) + + def compileAsmMethods(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[MethodNode] = { + val c = compileClass(s"class C { $code }", allowMessage = allowMessage) + getAsmMethods(c, _ != "<init>") } - def compileMethods(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[MethodNode] = { - compileClasses(s"class C { $code }", allowMessage = allowMessage).head.methods.asScala.toList.filterNot(_.name == "<init>") + def compileAsmMethod(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): MethodNode = { + val List(m) = compileAsmMethods(code, allowMessage) + m } - def singleMethodInstructions(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[Instruction] = { + def compileMethods(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[Method] = + compileAsmMethods(code, allowMessage).map(convertMethod) + + def compileMethod(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): Method = {
val List(m) = compileMethods(code, allowMessage = allowMessage) - instructionsFromMethod(m) + m } - def singleMethod(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): Method = { + def compileInstructions(code: String, allowMessage: StoreReporter#Info => Boolean = _ => false): List[Instruction] = { val List(m) = compileMethods(code, allowMessage = allowMessage) - convertMethod(m) + m.instructions } } @@ -145,7 +162,7 @@ object BytecodeTesting { * The output directory is a physical directory, I have not figured out if / how it's possible to * add a VirtualDirectory to the classpath of a compiler. */ - def compileSeparately(codes: List[String], extraArgs: String = "", allowMessage: StoreReporter#Info => Boolean = _ => false, afterEach: AbstractFile => Unit = _ => ()): List[(String, Array[Byte])] = { + def compileToBytesSeparately(codes: List[String], extraArgs: String = "", allowMessage: StoreReporter#Info => Boolean = _ => false, afterEach: AbstractFile => Unit = _ => ()): List[(String, Array[Byte])] = { val outDir = AbstractFile.getDirectory(TempDir.createTempDir()) val outDirPath = outDir.canonicalPath val argsWithOutDir = extraArgs + s" -d $outDirPath -cp $outDirPath" @@ -162,13 +179,11 @@ object BytecodeTesting { classfiles } - def compileClassesSeparately(codes: List[String], extraArgs: String = "", allowMessage: StoreReporter#Info => Boolean = _ => false, afterEach: AbstractFile => Unit = _ => ()) = { - readAsmClasses(compileSeparately(codes, extraArgs, allowMessage, afterEach)) + def compileClassesSeparately(codes: List[String], extraArgs: String = "", allowMessage: StoreReporter#Info => Boolean = _ => false, afterEach: AbstractFile => Unit = _ => ()): List[ClassNode] = { + readAsmClasses(compileToBytesSeparately(codes, extraArgs, allowMessage, afterEach)) } - def readAsmClasses(classfiles: List[(String, Array[Byte])]) = { - classfiles.map(p => AsmUtils.readClass(p._2)).sortBy(_.name) - } + def readAsmClasses(classfiles: List[(String, Array[Byte])]) = classfiles.map(p => AsmUtils.readClass(p._2)).sortBy(_.name) def assertSameCode(method: Method, expected: List[Instruction]): Unit = assertSameCode(method.instructions.dropNonOp, expected) def assertSameCode(actual: List[Instruction], expected: List[Instruction]): Unit = { @@ -218,23 +233,51 @@ object BytecodeTesting { assert(indy.isEmpty, indy) } - def getSingleMethod(classNode: ClassNode, name: String): Method = - convertMethod(classNode.methods.asScala.toList.find(_.name == name).get) + def findClass(cs: List[ClassNode], name: String): ClassNode = { + val List(c) = cs.filter(_.name == name) + c + } + + def getAsmMethods(c: ClassNode, p: String => Boolean): List[MethodNode] = + c.methods.iterator.asScala.filter(m => p(m.name)).toList.sortBy(_.name) - def findAsmMethods(c: ClassNode, p: String => Boolean) = c.methods.iterator.asScala.filter(m => p(m.name)).toList.sortBy(_.name) - def findAsmMethod(c: ClassNode, name: String) = findAsmMethods(c, _ == name).head + def getAsmMethods(c: ClassNode, name: String): List[MethodNode] = + getAsmMethods(c, _ == name) + + def getAsmMethod(c: ClassNode, name: String): MethodNode = { + val List(m) = getAsmMethods(c, name) + m + } + + def getMethods(c: ClassNode, name: String): List[Method] = + getAsmMethods(c, name).map(convertMethod) + + def getMethod(c: ClassNode, name: String): Method = + convertMethod(getAsmMethod(c, name)) + + def getInstructions(c: ClassNode, name: String): List[Instruction] = + getMethod(c, name).instructions /** * Instructions that match `query` when 
textified. * If `query` starts with a `+`, the next instruction is returned. */ - def findInstr(method: MethodNode, query: String): List[AbstractInsnNode] = { + def findInstrs(method: MethodNode, query: String): List[AbstractInsnNode] = { val useNext = query(0) == '+' val instrPart = if (useNext) query.drop(1) else query val insns = method.instructions.iterator.asScala.filter(i => textify(i) contains instrPart).toList if (useNext) insns.map(_.getNext) else insns } + /** + * Instruction that matches `query` when textified. + * If `query` starts with a `+`, the next instruction is returned. + */ + def findInstr(method: MethodNode, query: String): AbstractInsnNode = { + val List(i) = findInstrs(method, query) + i + } + def assertHandlerLabelPostions(h: ExceptionHandler, instructions: List[Instruction], startIndex: Int, endIndex: Int, handlerIndex: Int): Unit = { val insVec = instructions.toVector assertTrue(h.start == insVec(startIndex) && h.end == insVec(endIndex) && h.handler == insVec(handlerIndex)) From 0e7964ad9919b3ffd591deaf73f20cfb3e5e0cd0 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 20 May 2016 07:53:44 +0200 Subject: [PATCH 0060/2793] Small cleanup in JUnit test --- .../backend/jvm/opt/InlinerSeparateCompilationTest.scala | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala index e7c3bab62f6b..b196f1a9ba4d 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala @@ -8,13 +8,9 @@ import org.junit.runners.JUnit4 import scala.tools.testing.BytecodeTesting._ -object InlinerSeparateCompilationTest { - val args = "-Yopt:l:classpath" -} - @RunWith(classOf[JUnit4]) class InlinerSeparateCompilationTest { - import InlinerSeparateCompilationTest._ + val args = "-Yopt:l:classpath" @Test def inlnieMixedinMember(): Unit = { From 46d523b47ad835e4124a7d3e1f03f103917fe89d Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 20 May 2016 14:57:28 +0200 Subject: [PATCH 0061/2793] Cleanup in BytecodeTest --- test/junit/scala/issues/BytecodeTest.scala | 48 +++++++++------------- 1 file changed, 20 insertions(+), 28 deletions(-) diff --git a/test/junit/scala/issues/BytecodeTest.scala b/test/junit/scala/issues/BytecodeTest.scala index 0bb87a4ea6a2..125024f746ce 100644 --- a/test/junit/scala/issues/BytecodeTest.scala +++ b/test/junit/scala/issues/BytecodeTest.scala @@ -206,19 +206,13 @@ class BytecodeTest extends BytecodeTesting { Label(17), Op(IRETURN))) } - object forwarderTestUtils { - import language.implicitConversions - implicit def s2c(s: Symbol)(implicit classes: Map[String, ClassNode]): ClassNode = classes(s.name) - - def checkForwarder(c: ClassNode, target: String) = { - val List(f) = getMethods(c, "f") - assertSameCode(f, List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, target, "f", "()I", false), Op(IRETURN))) - } + def checkForwarder(classes: Map[String, ClassNode], clsName: Symbol, target: String) = { + val List(f) = getMethods(classes(clsName.name), "f") + assertSameCode(f, List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, target, "f", "()I", false), Op(IRETURN))) } @Test def traitMethodForwarders(): Unit = { - import forwarderTestUtils._ val code = """trait T1 { def f = 1 } |trait T2 extends T1 { override def f = 2 } @@ -268,27 +262,26 @@ class BytecodeTest extends BytecodeTesting { |class C20 
extends T8 """.stripMargin - implicit val classes = compileClasses(code).map(c => (c.name, c)).toMap + val c = compileClasses(code).map(c => (c.name, c)).toMap val noForwarder = List('C1, 'C2, 'C3, 'C4, 'C10, 'C11, 'C12, 'C13, 'C16, 'C17) - for (c <- noForwarder) assertEquals(getMethods(c, "f"), Nil) - - checkForwarder('C5, "T3") - checkForwarder('C6, "T4") - checkForwarder('C7, "T5") - checkForwarder('C8, "T4") - checkForwarder('C9, "T5") - checkForwarder('C14, "T4") - checkForwarder('C15, "T5") - assertSameSummary(getMethod('C18, "f"), List(BIPUSH, IRETURN)) - checkForwarder('C19, "T7") - assertSameCode(getMethod('C19, "T7$$super$f"), List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, "C18", "f", "()I", false), Op(IRETURN))) - assertInvoke(getMethod('C20, "clone"), "T8", "clone") // mixin forwarder + for (cn <- noForwarder) assertEquals(getMethods(c(cn.name), "f"), Nil) + + checkForwarder(c, 'C5, "T3") + checkForwarder(c, 'C6, "T4") + checkForwarder(c, 'C7, "T5") + checkForwarder(c, 'C8, "T4") + checkForwarder(c, 'C9, "T5") + checkForwarder(c, 'C14, "T4") + checkForwarder(c, 'C15, "T5") + assertSameSummary(getMethod(c("C18"), "f"), List(BIPUSH, IRETURN)) + checkForwarder(c, 'C19, "T7") + assertSameCode(getMethod(c("C19"), "T7$$super$f"), List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, "C18", "f", "()I", false), Op(IRETURN))) + assertInvoke(getMethod(c("C20"), "clone"), "T8", "clone") // mixin forwarder } @Test def noTraitMethodForwardersForOverloads(): Unit = { - import forwarderTestUtils._ val code = """trait T1 { def f(x: Int) = 0 } |trait T2 { def f(x: String) = 1 } @@ -300,7 +293,6 @@ class BytecodeTest extends BytecodeTesting { @Test def traitMethodForwardersForJavaDefaultMethods(): Unit = { - import forwarderTestUtils._ val j1 = ("interface J1 { int f(); }", "J1.java") val j2 = ("interface J2 { default int f() { return 1; } }", "J2.java") val j3 = ("interface J3 extends J1 { default int f() { return 2; } }", "J3.java") @@ -326,12 +318,12 @@ class BytecodeTest extends BytecodeTesting { | |class K12 extends J2 with T2 """.stripMargin - implicit val classes = compileClasses(code, List(j1, j2, j3, j4)).map(c => (c.name, c)).toMap + val c = compileClasses(code, List(j1, j2, j3, j4)).map(c => (c.name, c)).toMap val noForwarder = List('K1, 'K2, 'K3, 'K4, 'K5, 'K6, 'K7, 'K8, 'K9, 'K10, 'K11) - for (c <- noForwarder) assertEquals(getMethods(c, "f"), Nil) + for (cn <- noForwarder) assertEquals(getMethods(c(cn.name), "f"), Nil) - checkForwarder('K12, "T2") + checkForwarder(c, 'K12, "T2") } @Test From 2537027195fd1702bbd12ba8e9d6cb3262b03482 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 20 May 2016 15:19:50 +0200 Subject: [PATCH 0062/2793] Split RunTest and BytecodeTest into parts, put in matching packages. 
--- test/junit/scala/BoxUnboxTest.scala | 119 --------- .../PartialFunctionSerializationTest.scala | 16 +- test/junit/scala/issues/RunTest.scala | 247 ------------------ .../scala/lang/annotations/BytecodeTest.scala | 80 ++++++ .../scala/lang/annotations/RunTest.scala | 32 +++ .../scala/lang/primitives/BoxUnboxTest.scala | 222 ++++++++++++++++ .../primitives}/PredefAutoboxingTest.scala | 6 +- .../stringinterpol}/StringContextTest.scala | 9 +- .../traits}/BytecodeTest.scala | 242 ++--------------- test/junit/scala/lang/traits/RunTest.scala | 20 ++ test/junit/scala/reflect/ClassOfTest.scala | 124 +++++++++ .../tools/nsc/backend/jvm/BytecodeTest.scala | 140 ++++++++++ .../backend/jvm}/OptimizedBytecodeTest.scala | 13 +- 13 files changed, 660 insertions(+), 610 deletions(-) delete mode 100644 test/junit/scala/BoxUnboxTest.scala delete mode 100644 test/junit/scala/issues/RunTest.scala create mode 100644 test/junit/scala/lang/annotations/BytecodeTest.scala create mode 100644 test/junit/scala/lang/annotations/RunTest.scala create mode 100644 test/junit/scala/lang/primitives/BoxUnboxTest.scala rename test/junit/scala/{ => lang/primitives}/PredefAutoboxingTest.scala (93%) rename test/junit/scala/{ => lang/stringinterpol}/StringContextTest.scala (98%) rename test/junit/scala/{issues => lang/traits}/BytecodeTest.scala (52%) create mode 100644 test/junit/scala/lang/traits/RunTest.scala create mode 100644 test/junit/scala/reflect/ClassOfTest.scala create mode 100644 test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala rename test/junit/scala/{issues => tools/nsc/backend/jvm}/OptimizedBytecodeTest.scala (98%) diff --git a/test/junit/scala/BoxUnboxTest.scala b/test/junit/scala/BoxUnboxTest.scala deleted file mode 100644 index 88b3037e6990..000000000000 --- a/test/junit/scala/BoxUnboxTest.scala +++ /dev/null @@ -1,119 +0,0 @@ -package scala - -import org.junit.Test -import org.junit.Assert._ -import org.junit.runner.RunWith -import org.junit.runners.JUnit4 - -import scala.tools.testing.AssertUtil._ - -@RunWith(classOf[JUnit4]) -class BoxUnboxTest { - def genericNull[T] = null.asInstanceOf[T] // allowed, see SI-4437, point 2 - - @Test - def boxUnboxInt(): Unit = { - val b = new Integer(1) - val u = 1 - - assertEquals(1.toInt, u) - - assertEquals(Predef.int2Integer(1), b) - assertEquals(1: Integer, b) - assertEquals(Int.box(1), b) - assertEquals(1.asInstanceOf[Object], b) - - assertThrows[ClassCastException]("".asInstanceOf[Integer]) - - assertEquals(Predef.Integer2int(b), u) - assertEquals(b: Int, u) - assertEquals(Int.unbox(b), u) - assertEquals(b.asInstanceOf[Int], u) - assertEquals(b.intValue, u) - assertEquals(b.toInt, u) - intWrapper(b).toInt - - assertThrows[ClassCastException](Int.unbox("")) - assertThrows[ClassCastException]("".asInstanceOf[Int]) - - // null unboxing in various positions - - val n1 = Int.unbox(null) - assertEquals(n1, 0) - val n2 = Predef.Integer2int(null) - assertEquals(n2, 0) - val n3 = (null: Integer): Int - assertEquals(n3, 0) - val n4 = null.asInstanceOf[Int] - assertEquals(n4, 0) - val n5 = null.asInstanceOf[Int] == 0 - assertTrue(n5) - val n6 = null.asInstanceOf[Int] == null - assertFalse(n6) - val n7 = null.asInstanceOf[Int] != 0 - assertFalse(n7) - val n8 = null.asInstanceOf[Int] != null - assertTrue(n8) - - val mp = new java.util.HashMap[Int, Int] - val n9 = mp.get(0) - assertEquals(n9, 0) - val n10 = mp.get(0) == null // SI-602 - assertThrows[AssertionError](assertFalse(n10)) // should not throw - - def f(a: Any) = "" + a - val n11 = f(null.asInstanceOf[Int]) - 
assertEquals(n11, "0") - - def n12 = genericNull[Int] - assertEquals(n12, 0) - } - - @Test - def numericConversions(): Unit = { - val i1 = 1L.asInstanceOf[Int] - assertEquals(i1, 1) - assertThrows[ClassCastException] { - val i2 = (1L: Any).asInstanceOf[Int] // SI-1448, should not throw. see also SI-4437 point 1. - assertEquals(i2, 1) - } - } - - @Test - def boxUnboxBoolean(): Unit = { - val n1 = Option(null.asInstanceOf[Boolean]) - assertEquals(n1, Some(false)) - } - - @Test - def boxUnboxUnit(): Unit = { - // should not use assertEquals in this test: it takes two Object parameters. normally, Unit does - // not conform to Object, but for Java-defined methods scalac makes an exception and treats them - // as Any. passing a Unit as Any makes the compiler go through another layer of boxing, so it - // can hide some bugs (where we actually have a null, but the compiler makes it a ()). - - var v = 0 - def eff() = { v = 1 } - def chk() = { assert(v == 1); v = 0 } - - val b = runtime.BoxedUnit.UNIT - - assert(eff() == b); chk() - assert(Unit.box(eff()) == b); chk() - assert(().asInstanceOf[Object] == b) - - Unit.unbox({eff(); b}); chk() - Unit.unbox({eff(); null}); chk() - assertThrows[ClassCastException](Unit.unbox({eff(); ""})); chk() - - val n1 = null.asInstanceOf[Unit] - assert(n1 == b) - - val n2 = null.asInstanceOf[Unit] == b - assert(n2) - - def f(a: Any) = "" + a - val n3 = f(null.asInstanceOf[Unit]) - assertEquals(n3, "()") - } -} diff --git a/test/junit/scala/PartialFunctionSerializationTest.scala b/test/junit/scala/PartialFunctionSerializationTest.scala index d525b045cd9c..2019e3a4259c 100644 --- a/test/junit/scala/PartialFunctionSerializationTest.scala +++ b/test/junit/scala/PartialFunctionSerializationTest.scala @@ -7,24 +7,18 @@ import org.junit.runners.JUnit4 @RunWith(classOf[JUnit4]) class PartialFunctionSerializationTest { - val pf1: PartialFunction[Int, Int] = { - case n if n > 0 => 1 - } - - val pf2: PartialFunction[Int, Int] = { - case n if n <= 0 => 2 - } + val pf1: PartialFunction[Int, Int] = { case n if n > 0 => 1 } + val pf2: PartialFunction[Int, Int] = { case n if n <= 0 => 2 } - private def assertSerializable[A,B](fn: A => B) = { + private def assertSerializable[A,B](fn: A => B): Unit = { import java.io._ - new ObjectOutputStream(new ByteArrayOutputStream()).writeObject(fn) } - @Test def canSerializeLiteral= assertSerializable(pf1) + @Test def canSerializeLiteral = assertSerializable(pf1) - @Test def canSerializeLifted= assertSerializable(pf1.lift) + @Test def canSerializeLifted = assertSerializable(pf1.lift) @Test def canSerializeOrElse = assertSerializable(pf1 orElse pf2) diff --git a/test/junit/scala/issues/RunTest.scala b/test/junit/scala/issues/RunTest.scala deleted file mode 100644 index 0686d73d9bdf..000000000000 --- a/test/junit/scala/issues/RunTest.scala +++ /dev/null @@ -1,247 +0,0 @@ -package scala.issues - -import org.junit.Assert._ -import org.junit.Test -import org.junit.runner.RunWith -import org.junit.runners.JUnit4 - -import scala.tools.testing.RunTesting - -object RunTest { - class VC(val x: Any) extends AnyVal - class VCI(val x: Int) extends AnyVal { override def toString = "" + x } -} - -@RunWith(classOf[JUnit4]) -class RunTest extends RunTesting { - import runner._ - - @Test - def classOfValueClassAlias(): Unit = { - val code = - """import scala.issues.RunTest.VC - |type aVC = VC - |type aInt = Int - |type aInteger = Integer - |classOf[VC] == classOf[aVC] && - | classOf[aInt] == classOf[Int] && - | classOf[aInteger] == classOf[Integer] && - | 
classOf[aInt] != classOf[aInteger] - """.stripMargin - assertTrue(run[Boolean](code)) - } - - @Test - def classOfFinalVal(): Unit = { - val code = - """class C { - | final val a1 = classOf[Int] - | final val b1 = classOf[List[_]] - | final val c1 = classOf[List[String]] - | final val d1 = classOf[Array[Int]] - | final val e1 = classOf[Array[List[_]]] - | final val f1 = classOf[Array[_]] - | - | val a2 = classOf[Int] - | val b2 = classOf[List[_]] - | val c2 = classOf[List[String]] - | val d2 = classOf[Array[Int]] - | val e2 = classOf[Array[List[_]]] - | val f2 = classOf[Array[_]] - | - | val listC = Class.forName("scala.collection.immutable.List") - | - | val compare = List( - | (a1, a2, Integer.TYPE), - | (b1, b2, listC), - | (c1, c2, listC), - | (d1, d2, Array(1).getClass), - | (e1, e2, Array(List()).getClass), - | (f1, f2, new Object().getClass)) - |} - |(new C).compare - """.stripMargin - type K = Class[_] - val cs = run[List[(K, K, K)]](code) - for ((x, y, z) <- cs) { - assertEquals(x, y) - assertEquals(x, z) - } - } - - @Test - def t9702(): Unit = { - val code = - """import javax.annotation.Resource - |import scala.issues.RunTest.VC - |class C { - | type aList[K] = List[K] - | type aVC = VC - | type aInt = Int - | type aInteger = Integer - | @Resource(`type` = classOf[List[Int]]) def a = 0 - | @Resource(`type` = classOf[List[_]]) def b = 0 - | @Resource(`type` = classOf[aList[_]]) def c = 0 - | @Resource(`type` = classOf[Int]) def d = 0 - | @Resource(`type` = classOf[aInt]) def e = 0 - | @Resource(`type` = classOf[Integer]) def f = 0 - | @Resource(`type` = classOf[aInteger]) def g = 0 - | @Resource(`type` = classOf[VC]) def h = 0 - | @Resource(`type` = classOf[aVC]) def i = 0 - | @Resource(`type` = classOf[Array[Int]]) def j = 0 - | @Resource(`type` = classOf[Array[List[_]]]) def k = 0 - |} - |val c = classOf[C] - |def typeArg(meth: String) = c.getDeclaredMethod(meth).getDeclaredAnnotation(classOf[Resource]).`type` - |('a' to 'k').toList.map(_.toString).map(typeArg) - """.stripMargin - - val l = Class.forName("scala.collection.immutable.List") - val i = Integer.TYPE - val ig = new Integer(1).getClass - val v = new RunTest.VC(1).getClass - val ai = Array(1).getClass - val al = Array(List()).getClass - - // sanity checks - assertEquals(i, classOf[Int]) - assertNotEquals(i, ig) - - assertEquals(run[List[Class[_]]](code), - List(l, l, l, i, i, ig, ig, v, v, ai, al)) - } - - @Test - def annotationInfoNotErased(): Unit = { - val code = - """import javax.annotation.Resource - |import scala.annotation.meta.getter - |class C { - | type Rg = Resource @getter - | @(Resource @getter)(`type` = classOf[Int]) def a = 0 - | @Rg(`type` = classOf[Int]) def b = 0 - |} - |val c = classOf[C] - |def typeArg(meth: String) = c.getDeclaredMethod(meth).getDeclaredAnnotation(classOf[Resource]).`type` - |List("a", "b") map typeArg - |""".stripMargin - - val i = Integer.TYPE - assertEquals(run[List[Class[_]]](code), List(i, i)) - } - - @Test - def invocationReceivers(): Unit = { - import invocationReceiversTestCode._ - assertEquals(run[String](definitions("Object") + runCode), "hi" * 9) - assertEquals(run[String](definitions("String") + runCode), "hi" * 9) // bridge method for clone generated - } - - @Test - def classOfUnitConstant(): Unit = { - val code = - """abstract class A { def f: Class[_] } - |class C extends A { final val f = classOf[Unit] } - |val c = new C - |(c.f, (c: A).f) - """.stripMargin - val u = Void.TYPE - assertEquals(run[(Class[_], Class[_])](code), (u, u)) - } - - @Test - def t9671(): Unit = 
{ - val code = - """import scala.issues.RunTest.VCI - | - |def f1(a: Any) = "" + a - |def f2(a: AnyVal) = "" + a - |def f3[T](a: T) = "" + a - |def f4(a: Int) = "" + a - |def f5(a: VCI) = "" + a - |def f6(u: Unit) = "" + u - | - |def n1: AnyRef = null - |def n2: Null = null - |def n3: Any = null - |def n4[T]: T = null.asInstanceOf[T] - | - |def npe(s: => String) = try { s; throw new Error() } catch { case _: NullPointerException => "npe" } - | - | f1(null.asInstanceOf[Int]) + - | f1( n1.asInstanceOf[Int]) + - | f1( n2.asInstanceOf[Int]) + - | f1( n3.asInstanceOf[Int]) + - | f1( n4[Int]) + // "null" - |"-" + - | f1(null.asInstanceOf[VCI]) + - |npe(f1( n1.asInstanceOf[VCI])) + // SI-8097 - | f1( n2.asInstanceOf[VCI]) + - |npe(f1( n3.asInstanceOf[VCI])) + // SI-8097 - | f1( n4[VCI]) + // "null" - |"-" + - | f1(null.asInstanceOf[Unit]) + - | f1( n1.asInstanceOf[Unit]) + - | f1( n2.asInstanceOf[Unit]) + - | f1( n3.asInstanceOf[Unit]) + - | f1( n4[Unit]) + // "null" - |"-" + - | f2(null.asInstanceOf[Int]) + - | f2( n1.asInstanceOf[Int]) + - | f2( n2.asInstanceOf[Int]) + - | f2( n3.asInstanceOf[Int]) + - | f2( n4[Int]) + // "null" - |"-" + - | f2(null.asInstanceOf[VCI]) + - |npe(f2( n1.asInstanceOf[VCI])) + // SI-8097 - | f2( n2.asInstanceOf[VCI]) + - |npe(f2( n3.asInstanceOf[VCI])) + // SI-8097 - | f2( n4[VCI]) + // "null" - |"-" + - | f2(null.asInstanceOf[Unit]) + - | f2( n1.asInstanceOf[Unit]) + - | f2( n2.asInstanceOf[Unit]) + - | f2( n3.asInstanceOf[Unit]) + - | f2( n4[Unit]) + // "null" - |"-" + - | f3(null.asInstanceOf[Int]) + - | f3( n1.asInstanceOf[Int]) + - | f3( n2.asInstanceOf[Int]) + - | f3( n3.asInstanceOf[Int]) + - | f3( n4[Int]) + // "null" - |"-" + - | f3(null.asInstanceOf[VCI]) + - |npe(f3( n1.asInstanceOf[VCI])) + // SI-8097 - | f3( n2.asInstanceOf[VCI]) + - |npe(f3( n3.asInstanceOf[VCI])) + // SI-8097 - | f3( n4[VCI]) + // "null" - |"-" + - | f3(null.asInstanceOf[Unit]) + - | f3( n1.asInstanceOf[Unit]) + - | f3( n2.asInstanceOf[Unit]) + - | f3( n3.asInstanceOf[Unit]) + - | f3( n4[Unit]) + // "null" - |"-" + - | f4(null.asInstanceOf[Int]) + - | f4( n1.asInstanceOf[Int]) + - | f4( n2.asInstanceOf[Int]) + - | f4( n3.asInstanceOf[Int]) + - | f4( n4[Int]) + - |"-" + - | f5(null.asInstanceOf[VCI]) + - |npe(f5( n1.asInstanceOf[VCI])) + // SI-8097 - | f5( n2.asInstanceOf[VCI]) + - |npe(f5( n3.asInstanceOf[VCI])) + // SI-8097 - |npe(f5( n4[VCI])) + // SI-8097 - |"-" + - | f6(null.asInstanceOf[Unit]) + - | f6( n1.asInstanceOf[Unit]) + - | f6( n2.asInstanceOf[Unit]) + - | f6( n3.asInstanceOf[Unit]) + - | f6( n4[Unit]) // "null" - """.stripMargin - - assertEquals(run[String](code), - "0000null-0npe0npenull-()()()()null-0000null-0npe0npenull-()()()()null-0000null-0npe0npenull-()()()()null-00000-0npe0npenpe-()()()()null") - } -} diff --git a/test/junit/scala/lang/annotations/BytecodeTest.scala b/test/junit/scala/lang/annotations/BytecodeTest.scala new file mode 100644 index 000000000000..09fc1d35724f --- /dev/null +++ b/test/junit/scala/lang/annotations/BytecodeTest.scala @@ -0,0 +1,80 @@ +package scala.lang.annotations + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.collection.JavaConverters._ +import scala.tools.nsc.backend.jvm.AsmUtils +import scala.tools.partest.ASMConverters._ +import scala.tools.testing.BytecodeTesting +import scala.tools.testing.BytecodeTesting._ + +@RunWith(classOf[JUnit4]) +class BytecodeTest extends BytecodeTesting { + import compiler._ + + @Test + def t8731(): Unit = { + val 
code = + """class C { + | def f(x: Int) = (x: @annotation.switch) match { + | case 1 => 0 + | case 2 => 1 + | case 3 => 2 + | } + | final val K = 10 + | def g(x: Int) = (x: @annotation.switch) match { + | case K => 0 + | case 1 => 10 + | case 2 => 20 + | } + |} + """.stripMargin + + val c = compileClass(code) + + assertTrue(getInstructions(c, "f").count(_.isInstanceOf[TableSwitch]) == 1) + assertTrue(getInstructions(c, "g").count(_.isInstanceOf[LookupSwitch]) == 1) + } + + @Test + def t8926(): Unit = { + import scala.reflect.internal.util.BatchSourceFile + + // this test cannot be implemented using partest because of its mixed-mode compilation strategy: + // partest first compiles all files with scalac, then the java files, and then again the scala + // using the output classpath. this shadows the bug SI-8926. + + val annotA = + """import java.lang.annotation.Retention; + |import java.lang.annotation.RetentionPolicy; + |@Retention(RetentionPolicy.RUNTIME) + |public @interface AnnotA { } + """.stripMargin + val annotB = "public @interface AnnotB { }" + + val scalaSrc = + """@AnnotA class A + |@AnnotB class B + """.stripMargin + + val run = new global.Run() + run.compileSources(List(new BatchSourceFile("AnnotA.java", annotA), new BatchSourceFile("AnnotB.java", annotB), new BatchSourceFile("Test.scala", scalaSrc))) + val outDir = global.settings.outputDirs.getSingleOutput.get + val outfiles = (for (f <- outDir.iterator if !f.isDirectory) yield (f.name, f.toByteArray)).toList + + def check(classfile: String, annotName: String) = { + val f = (outfiles collect { case (`classfile`, bytes) => AsmUtils.readClass(bytes) }).head + val descs = f.visibleAnnotations.asScala.map(_.desc).toList + assertTrue(descs.toString, descs exists (_ contains annotName)) + } + + check("A.class", "AnnotA") + + // known issue SI-8928: the visibility of AnnotB should be CLASS, but annotation classes without + // a @Retention annotation are currently emitted as RUNTIME. 
+ check("B.class", "AnnotB") + } +} \ No newline at end of file diff --git a/test/junit/scala/lang/annotations/RunTest.scala b/test/junit/scala/lang/annotations/RunTest.scala new file mode 100644 index 000000000000..0d9c0c471362 --- /dev/null +++ b/test/junit/scala/lang/annotations/RunTest.scala @@ -0,0 +1,32 @@ +package scala.lang.annotations + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.testing.RunTesting + +@RunWith(classOf[JUnit4]) +class RunTest extends RunTesting { + import runner._ + + @Test + def annotationInfoNotErased(): Unit = { + val code = + """import javax.annotation.Resource + |import scala.annotation.meta.getter + |class C { + | type Rg = Resource @getter + | @(Resource @getter)(`type` = classOf[Int]) def a = 0 + | @Rg(`type` = classOf[Int]) def b = 0 + |} + |val c = classOf[C] + |def typeArg(meth: String) = c.getDeclaredMethod(meth).getDeclaredAnnotation(classOf[Resource]).`type` + |List("a", "b") map typeArg + |""".stripMargin + + val i = Integer.TYPE + assertEquals(run[List[Class[_]]](code), List(i, i)) + } +} diff --git a/test/junit/scala/lang/primitives/BoxUnboxTest.scala b/test/junit/scala/lang/primitives/BoxUnboxTest.scala new file mode 100644 index 000000000000..23c9326989ee --- /dev/null +++ b/test/junit/scala/lang/primitives/BoxUnboxTest.scala @@ -0,0 +1,222 @@ +package scala.lang.primitives + +import org.junit.Test +import org.junit.Assert._ +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.testing.AssertUtil._ +import scala.tools.testing.RunTesting + +object BoxUnboxTest { + class VCI(val x: Int) extends AnyVal { override def toString = "" + x } +} + +@RunWith(classOf[JUnit4]) +class BoxUnboxTest extends RunTesting { + import runner._ + + def genericNull[T] = null.asInstanceOf[T] // allowed, see SI-4437, point 2 + + @Test + def boxUnboxInt(): Unit = { + val b = new Integer(1) + val u = 1 + + assertEquals(1.toInt, u) + + assertEquals(Predef.int2Integer(1), b) + assertEquals(1: Integer, b) + assertEquals(Int.box(1), b) + assertEquals(1.asInstanceOf[Object], b) + + assertThrows[ClassCastException]("".asInstanceOf[Integer]) + + assertEquals(Predef.Integer2int(b), u) + assertEquals(b: Int, u) + assertEquals(Int.unbox(b), u) + assertEquals(b.asInstanceOf[Int], u) + assertEquals(b.intValue, u) + assertEquals(b.toInt, u) + intWrapper(b).toInt + + assertThrows[ClassCastException](Int.unbox("")) + assertThrows[ClassCastException]("".asInstanceOf[Int]) + + // null unboxing in various positions + + val n1 = Int.unbox(null) + assertEquals(n1, 0) + val n2 = Predef.Integer2int(null) + assertEquals(n2, 0) + val n3 = (null: Integer): Int + assertEquals(n3, 0) + val n4 = null.asInstanceOf[Int] + assertEquals(n4, 0) + val n5 = null.asInstanceOf[Int] == 0 + assertTrue(n5) + val n6 = null.asInstanceOf[Int] == null + assertFalse(n6) + val n7 = null.asInstanceOf[Int] != 0 + assertFalse(n7) + val n8 = null.asInstanceOf[Int] != null + assertTrue(n8) + + val mp = new java.util.HashMap[Int, Int] + val n9 = mp.get(0) + assertEquals(n9, 0) + val n10 = mp.get(0) == null // SI-602 + assertThrows[AssertionError](assertFalse(n10)) // should not throw + + def f(a: Any) = "" + a + val n11 = f(null.asInstanceOf[Int]) + assertEquals(n11, "0") + + def n12 = genericNull[Int] + assertEquals(n12, 0) + } + + @Test + def numericConversions(): Unit = { + val i1 = 1L.asInstanceOf[Int] + assertEquals(i1, 1) + assertThrows[ClassCastException] { + val i2 = (1L: 
Any).asInstanceOf[Int] // SI-1448, should not throw. see also SI-4437 point 1. + assertEquals(i2, 1) + } + } + + @Test + def boxUnboxBoolean(): Unit = { + val n1 = Option(null.asInstanceOf[Boolean]) + assertEquals(n1, Some(false)) + } + + @Test + def boxUnboxUnit(): Unit = { + // should not use assertEquals in this test: it takes two Object parameters. normally, Unit does + // not conform to Object, but for Java-defined methods scalac makes an exception and treats them + // as Any. passing a Unit as Any makes the compiler go through another layer of boxing, so it + // can hide some bugs (where we actually have a null, but the compiler makes it a ()). + + var v = 0 + def eff() = { v = 1 } + def chk() = { assert(v == 1); v = 0 } + + val b = runtime.BoxedUnit.UNIT + + assert(eff() == b); chk() + assert(Unit.box(eff()) == b); chk() + assert(().asInstanceOf[Object] == b) + + Unit.unbox({eff(); b}); chk() + Unit.unbox({eff(); null}); chk() + assertThrows[ClassCastException](Unit.unbox({eff(); ""})); chk() + + val n1 = null.asInstanceOf[Unit] + assert(n1 == b) + + val n2 = null.asInstanceOf[Unit] == b + assert(n2) + + def f(a: Any) = "" + a + val n3 = f(null.asInstanceOf[Unit]) + assertEquals(n3, "()") + } + + @Test + def t9671(): Unit = { + val code = + """import scala.lang.primitives.BoxUnboxTest.VCI + | + |def f1(a: Any) = "" + a + |def f2(a: AnyVal) = "" + a + |def f3[T](a: T) = "" + a + |def f4(a: Int) = "" + a + |def f5(a: VCI) = "" + a + |def f6(u: Unit) = "" + u + | + |def n1: AnyRef = null + |def n2: Null = null + |def n3: Any = null + |def n4[T]: T = null.asInstanceOf[T] + | + |def npe(s: => String) = try { s; throw new Error() } catch { case _: NullPointerException => "npe" } + | + | f1(null.asInstanceOf[Int]) + + | f1( n1.asInstanceOf[Int]) + + | f1( n2.asInstanceOf[Int]) + + | f1( n3.asInstanceOf[Int]) + + | f1( n4[Int]) + // "null" + |"-" + + | f1(null.asInstanceOf[VCI]) + + |npe(f1( n1.asInstanceOf[VCI])) + // SI-8097 + | f1( n2.asInstanceOf[VCI]) + + |npe(f1( n3.asInstanceOf[VCI])) + // SI-8097 + | f1( n4[VCI]) + // "null" + |"-" + + | f1(null.asInstanceOf[Unit]) + + | f1( n1.asInstanceOf[Unit]) + + | f1( n2.asInstanceOf[Unit]) + + | f1( n3.asInstanceOf[Unit]) + + | f1( n4[Unit]) + // "null" + |"-" + + | f2(null.asInstanceOf[Int]) + + | f2( n1.asInstanceOf[Int]) + + | f2( n2.asInstanceOf[Int]) + + | f2( n3.asInstanceOf[Int]) + + | f2( n4[Int]) + // "null" + |"-" + + | f2(null.asInstanceOf[VCI]) + + |npe(f2( n1.asInstanceOf[VCI])) + // SI-8097 + | f2( n2.asInstanceOf[VCI]) + + |npe(f2( n3.asInstanceOf[VCI])) + // SI-8097 + | f2( n4[VCI]) + // "null" + |"-" + + | f2(null.asInstanceOf[Unit]) + + | f2( n1.asInstanceOf[Unit]) + + | f2( n2.asInstanceOf[Unit]) + + | f2( n3.asInstanceOf[Unit]) + + | f2( n4[Unit]) + // "null" + |"-" + + | f3(null.asInstanceOf[Int]) + + | f3( n1.asInstanceOf[Int]) + + | f3( n2.asInstanceOf[Int]) + + | f3( n3.asInstanceOf[Int]) + + | f3( n4[Int]) + // "null" + |"-" + + | f3(null.asInstanceOf[VCI]) + + |npe(f3( n1.asInstanceOf[VCI])) + // SI-8097 + | f3( n2.asInstanceOf[VCI]) + + |npe(f3( n3.asInstanceOf[VCI])) + // SI-8097 + | f3( n4[VCI]) + // "null" + |"-" + + | f3(null.asInstanceOf[Unit]) + + | f3( n1.asInstanceOf[Unit]) + + | f3( n2.asInstanceOf[Unit]) + + | f3( n3.asInstanceOf[Unit]) + + | f3( n4[Unit]) + // "null" + |"-" + + | f4(null.asInstanceOf[Int]) + + | f4( n1.asInstanceOf[Int]) + + | f4( n2.asInstanceOf[Int]) + + | f4( n3.asInstanceOf[Int]) + + | f4( n4[Int]) + + |"-" + + | f5(null.asInstanceOf[VCI]) + + |npe(f5( n1.asInstanceOf[VCI])) + // 
SI-8097 + | f5( n2.asInstanceOf[VCI]) + + |npe(f5( n3.asInstanceOf[VCI])) + // SI-8097 + |npe(f5( n4[VCI])) + // SI-8097 + |"-" + + | f6(null.asInstanceOf[Unit]) + + | f6( n1.asInstanceOf[Unit]) + + | f6( n2.asInstanceOf[Unit]) + + | f6( n3.asInstanceOf[Unit]) + + | f6( n4[Unit]) // "null" + """.stripMargin + + assertEquals(run[String](code), + "0000null-0npe0npenull-()()()()null-0000null-0npe0npenull-()()()()null-0000null-0npe0npenull-()()()()null-00000-0npe0npenpe-()()()()null") + } +} diff --git a/test/junit/scala/PredefAutoboxingTest.scala b/test/junit/scala/lang/primitives/PredefAutoboxingTest.scala similarity index 93% rename from test/junit/scala/PredefAutoboxingTest.scala rename to test/junit/scala/lang/primitives/PredefAutoboxingTest.scala index e5d8ded5d4e9..ab31a9e8f13a 100644 --- a/test/junit/scala/PredefAutoboxingTest.scala +++ b/test/junit/scala/lang/primitives/PredefAutoboxingTest.scala @@ -1,12 +1,10 @@ -package scala +package scala.lang.primitives -import org.junit.Test import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import scala.tools.testing.AssertUtil._ - @RunWith(classOf[JUnit4]) class PredefAutoboxingTest { @Test def unboxNullByte() = diff --git a/test/junit/scala/StringContextTest.scala b/test/junit/scala/lang/stringinterpol/StringContextTest.scala similarity index 98% rename from test/junit/scala/StringContextTest.scala rename to test/junit/scala/lang/stringinterpol/StringContextTest.scala index b5af6de7eb23..d2cb8149d7e4 100644 --- a/test/junit/scala/StringContextTest.scala +++ b/test/junit/scala/lang/stringinterpol/StringContextTest.scala @@ -1,15 +1,14 @@ -package scala +package scala.lang.stringinterpol import java.text.DecimalFormat -import language.implicitConversions - -import org.junit.Test import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 +import scala.language.implicitConversions import scala.tools.testing.AssertUtil._ object StringContextTestUtils { @@ -128,7 +127,7 @@ class StringContextTest { val fff = new java.util.Formattable { def formatTo(f: java.util.Formatter, g: Int, w: Int, p: Int) = f.format("4") } - import java.util.{ Calendar, Locale } + import java.util.{Calendar, Locale} val c = Calendar.getInstance(Locale.US) c.set(2012, Calendar.MAY, 26) implicit def strToDate(x: String): Calendar = c diff --git a/test/junit/scala/issues/BytecodeTest.scala b/test/junit/scala/lang/traits/BytecodeTest.scala similarity index 52% rename from test/junit/scala/issues/BytecodeTest.scala rename to test/junit/scala/lang/traits/BytecodeTest.scala index 125024f746ce..f47fc9c12724 100644 --- a/test/junit/scala/issues/BytecodeTest.scala +++ b/test/junit/scala/lang/traits/BytecodeTest.scala @@ -1,4 +1,4 @@ -package scala.issues +package scala.lang.traits import org.junit.Assert._ import org.junit.Test @@ -9,7 +9,6 @@ import scala.collection.JavaConverters._ import scala.tools.asm.Opcodes import scala.tools.asm.Opcodes._ import scala.tools.asm.tree.ClassNode -import scala.tools.nsc.backend.jvm.AsmUtils import scala.tools.partest.ASMConverters._ import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ @@ -18,194 +17,6 @@ import scala.tools.testing.BytecodeTesting._ class BytecodeTest extends BytecodeTesting { import compiler._ - @Test - def t8731(): Unit = { - val code = - """class C { - | def f(x: Int) = (x: @annotation.switch) match { - | case 1 => 0 - | case 2 => 1 - | case 3 => 2 - | } - | final val K = 10 - | 
def g(x: Int) = (x: @annotation.switch) match { - | case K => 0 - | case 1 => 10 - | case 2 => 20 - | } - |} - """.stripMargin - - val c = compileClass(code) - - assertTrue(getInstructions(c, "f").count(_.isInstanceOf[TableSwitch]) == 1) - assertTrue(getInstructions(c, "g").count(_.isInstanceOf[LookupSwitch]) == 1) - } - - @Test - def t8926(): Unit = { - import scala.reflect.internal.util.BatchSourceFile - - // this test cannot be implemented using partest because of its mixed-mode compilation strategy: - // partest first compiles all files with scalac, then the java files, and then again the scala - // using the output classpath. this shadows the bug SI-8926. - - val annotA = - """import java.lang.annotation.Retention; - |import java.lang.annotation.RetentionPolicy; - |@Retention(RetentionPolicy.RUNTIME) - |public @interface AnnotA { } - """.stripMargin - val annotB = "public @interface AnnotB { }" - - val scalaSrc = - """@AnnotA class A - |@AnnotB class B - """.stripMargin - - val run = new global.Run() - run.compileSources(List(new BatchSourceFile("AnnotA.java", annotA), new BatchSourceFile("AnnotB.java", annotB), new BatchSourceFile("Test.scala", scalaSrc))) - val outDir = global.settings.outputDirs.getSingleOutput.get - val outfiles = (for (f <- outDir.iterator if !f.isDirectory) yield (f.name, f.toByteArray)).toList - - def check(classfile: String, annotName: String) = { - val f = (outfiles collect { case (`classfile`, bytes) => AsmUtils.readClass(bytes) }).head - val descs = f.visibleAnnotations.asScala.map(_.desc).toList - assertTrue(descs.toString, descs exists (_ contains annotName)) - } - - check("A.class", "AnnotA") - - // known issue SI-8928: the visibility of AnnotB should be CLASS, but annotation classes without - // a @Retention annotation are currently emitted as RUNTIME. 
- check("B.class", "AnnotB") - } - - @Test - def t6288bJumpPosition(): Unit = { - val code = - """object Case3 { // 01 - | def unapply(z: Any): Option[Int] = Some(-1) // 02 - | def main(args: Array[String]) { // 03 - | ("": Any) match { // 04 - | case x : String => // 05 - | println("case 0") // 06 println and jump at 6 - | case _ => // 07 - | println("default") // 08 println and jump at 8 - | } // 09 - | println("done") // 10 - | } - |} - """.stripMargin - val List(mirror, module) = compileClasses(code) - - val unapplyLineNumbers = getInstructions(module, "unapply").filter(_.isInstanceOf[LineNumber]) - assert(unapplyLineNumbers == List(LineNumber(2, Label(0))), unapplyLineNumbers) - - val expected = List( - LineNumber(4, Label(0)), - LineNumber(5, Label(5)), - Jump(IFEQ, Label(20)), - - LineNumber(6, Label(11)), - Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false), - Jump(GOTO, Label(33)), - - LineNumber(5, Label(20)), - Jump(GOTO, Label(24)), - - LineNumber(8, Label(24)), - Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false), - Jump(GOTO, Label(33)), - - LineNumber(10, Label(33)), - Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false) - ) - - val mainIns = getInstructions(module, "main") filter { - case _: LineNumber | _: Invoke | _: Jump => true - case _ => false - } - assertSameCode(mainIns, expected) - } - - @Test - def bytecodeForBranches(): Unit = { - val code = - """class C { - | def t1(b: Boolean) = if (b) 1 else 2 - | def t2(x: Int) = if (x == 393) 1 else 2 - | def t3(a: Array[String], b: AnyRef) = a != b && b == a - | def t4(a: AnyRef) = a == null || null != a - | def t5(a: AnyRef) = (a eq null) || (null ne a) - | def t6(a: Int, b: Boolean) = if ((a == 10) && b || a != 1) 1 else 2 - | def t7(a: AnyRef, b: AnyRef) = a == b - | def t8(a: AnyRef) = Nil == a || "" != a - |} - """.stripMargin - - val c = compileClass(code) - - // t1: no unnecessary GOTOs - assertSameCode(getMethod(c, "t1"), List( - VarOp(ILOAD, 1), Jump(IFEQ, Label(6)), - Op(ICONST_1), Jump(GOTO, Label(9)), - Label(6), Op(ICONST_2), - Label(9), Op(IRETURN))) - - // t2: no unnecessary GOTOs - assertSameCode(getMethod(c, "t2"), List( - VarOp(ILOAD, 1), IntOp(SIPUSH, 393), Jump(IF_ICMPNE, Label(7)), - Op(ICONST_1), Jump(GOTO, Label(10)), - Label(7), Op(ICONST_2), - Label(10), Op(IRETURN))) - - // t3: Array == is translated to reference equality, AnyRef == to null checks and equals - assertSameCode(getMethod(c, "t3"), List( - // Array == - VarOp(ALOAD, 1), VarOp(ALOAD, 2), Jump(IF_ACMPEQ, Label(23)), - // AnyRef == - VarOp(ALOAD, 2), VarOp(ALOAD, 1), VarOp(ASTORE, 3), Op(DUP), Jump(IFNONNULL, Label(14)), - Op(POP), VarOp(ALOAD, 3), Jump(IFNULL, Label(19)), Jump(GOTO, Label(23)), - Label(14), VarOp(ALOAD, 3), Invoke(INVOKEVIRTUAL, "java/lang/Object", "equals", "(Ljava/lang/Object;)Z", false), Jump(IFEQ, Label(23)), - Label(19), Op(ICONST_1), Jump(GOTO, Label(26)), - Label(23), Op(ICONST_0), - Label(26), Op(IRETURN))) - - val t4t5 = List( - VarOp(ALOAD, 1), Jump(IFNULL, Label(6)), - VarOp(ALOAD, 1), Jump(IFNULL, Label(10)), - Label(6), Op(ICONST_1), Jump(GOTO, Label(13)), - Label(10), Op(ICONST_0), - Label(13), Op(IRETURN)) - - // t4: one side is known null, so just a null check on the other - assertSameCode(getMethod(c, "t4"), t4t5) - - // t5: one side known null, so just a null check on the other - assertSameCode(getMethod(c, "t5"), t4t5) - - // t6: no unnecessary GOTOs - assertSameCode(getMethod(c, "t6"), List( - VarOp(ILOAD, 1), 
IntOp(BIPUSH, 10), Jump(IF_ICMPNE, Label(7)), - VarOp(ILOAD, 2), Jump(IFNE, Label(12)), - Label(7), VarOp(ILOAD, 1), Op(ICONST_1), Jump(IF_ICMPEQ, Label(16)), - Label(12), Op(ICONST_1), Jump(GOTO, Label(19)), - Label(16), Op(ICONST_2), - Label(19), Op(IRETURN))) - - // t7: universal equality - assertInvoke(getMethod(c, "t7"), "scala/runtime/BoxesRunTime", "equals") - - // t8: no null checks invoking equals on modules and constants - assertSameCode(getMethod(c, "t8"), List( - Field(GETSTATIC, "scala/collection/immutable/Nil$", "MODULE$", "Lscala/collection/immutable/Nil$;"), VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "java/lang/Object", "equals", "(Ljava/lang/Object;)Z", false), Jump(IFNE, Label(10)), - Ldc(LDC, ""), VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "java/lang/Object", "equals", "(Ljava/lang/Object;)Z", false), Jump(IFNE, Label(14)), - Label(10), Op(ICONST_1), Jump(GOTO, Label(17)), - Label(14), Op(ICONST_0), - Label(17), Op(IRETURN))) - } - def checkForwarder(classes: Map[String, ClassNode], clsName: Symbol, target: String) = { val List(f) = getMethods(classes(clsName.name), "f") assertSameCode(f, List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, target, "f", "()I", false), Op(IRETURN))) @@ -417,36 +228,37 @@ class BytecodeTest extends BytecodeTesting { // pass `null` to super constructor, no box-unbox, no Integer created List(ALOAD, ILOAD, PUTFIELD, ALOAD, ACONST_NULL, "", RETURN)) } + } object invocationReceiversTestCode { // if cloneType is more specific than Object (e.g., String), a bridge method is generated. def definitions(cloneType: String) = - s"""trait T { override def clone(): $cloneType = "hi" } - |trait U extends T - |class C1 extends U with Cloneable { - | // The comments below are true when $cloneType is Object. - | // C1 gets a forwarder for clone that invokes T.clone. this is needed because JVM method - | // resolution always prefers class members, so it would resolve to Object.clone, even if - | // C1 is a subtype of the interface T which has an overriding default method for clone. - | - | // invokeinterface T.clone - | def f1 = (this: T).clone() - | - | // cannot invokeinterface U.clone (NoSuchMethodError). Object.clone would work here, but - | // not in the example in C2 (illegal access to protected). T.clone works in all cases and - | // resolves correctly. - | def f2 = (this: U).clone() - | - | // invokevirtual C1.clone() - | def f3 = (this: C1).clone() - |} - | - |class C2 { - | def f1(t: T) = t.clone() // invokeinterface T.clone - | def f2(t: U) = t.clone() // invokeinterface T.clone -- Object.clone would be illegal (protected, explained in C1) - | def f3(t: C1) = t.clone() // invokevirtual C1.clone -- Object.clone would be illegal - |} + s"""trait T { override def clone(): $cloneType = "hi" } + |trait U extends T + |class C1 extends U with Cloneable { + | // The comments below are true when $cloneType is Object. + | // C1 gets a forwarder for clone that invokes T.clone. this is needed because JVM method + | // resolution always prefers class members, so it would resolve to Object.clone, even if + | // C1 is a subtype of the interface T which has an overriding default method for clone. + | + | // invokeinterface T.clone + | def f1 = (this: T).clone() + | + | // cannot invokeinterface U.clone (NoSuchMethodError). Object.clone would work here, but + | // not in the example in C2 (illegal access to protected). T.clone works in all cases and + | // resolves correctly. 
+ | def f2 = (this: U).clone() + | + | // invokevirtual C1.clone() + | def f3 = (this: C1).clone() + |} + | + |class C2 { + | def f1(t: T) = t.clone() // invokeinterface T.clone + | def f2(t: U) = t.clone() // invokeinterface T.clone -- Object.clone would be illegal (protected, explained in C1) + | def f3(t: C1) = t.clone() // invokevirtual C1.clone -- Object.clone would be illegal + |} """.stripMargin val runCode = diff --git a/test/junit/scala/lang/traits/RunTest.scala b/test/junit/scala/lang/traits/RunTest.scala new file mode 100644 index 000000000000..d27dc15e202d --- /dev/null +++ b/test/junit/scala/lang/traits/RunTest.scala @@ -0,0 +1,20 @@ +package scala.lang.traits + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.testing.RunTesting + +@RunWith(classOf[JUnit4]) +class RunTest extends RunTesting { + import runner._ + + @Test + def invocationReceivers(): Unit = { + import invocationReceiversTestCode._ + assertEquals(run[String](definitions("Object") + runCode), "hi" * 9) + assertEquals(run[String](definitions("String") + runCode), "hi" * 9) // bridge method for clone generated + } +} diff --git a/test/junit/scala/reflect/ClassOfTest.scala b/test/junit/scala/reflect/ClassOfTest.scala new file mode 100644 index 000000000000..520b14ccd467 --- /dev/null +++ b/test/junit/scala/reflect/ClassOfTest.scala @@ -0,0 +1,124 @@ +package scala.reflect + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.testing.RunTesting + +object ClassOfTest { + class VC(val x: Any) extends AnyVal +} + +@RunWith(classOf[JUnit4]) +class ClassOfTest extends RunTesting { + import runner._ + + @Test + def classOfValueClassAlias(): Unit = { + val code = + """import scala.reflect.ClassOfTest.VC + |type aVC = VC + |type aInt = Int + |type aInteger = Integer + |classOf[VC] == classOf[aVC] && + | classOf[aInt] == classOf[Int] && + | classOf[aInteger] == classOf[Integer] && + | classOf[aInt] != classOf[aInteger] + """.stripMargin + assertTrue(run[Boolean](code)) + } + + @Test + def classOfFinalVal(): Unit = { + val code = + """class C { + | final val a1 = classOf[Int] + | final val b1 = classOf[List[_]] + | final val c1 = classOf[List[String]] + | final val d1 = classOf[Array[Int]] + | final val e1 = classOf[Array[List[_]]] + | final val f1 = classOf[Array[_]] + | + | val a2 = classOf[Int] + | val b2 = classOf[List[_]] + | val c2 = classOf[List[String]] + | val d2 = classOf[Array[Int]] + | val e2 = classOf[Array[List[_]]] + | val f2 = classOf[Array[_]] + | + | val listC = Class.forName("scala.collection.immutable.List") + | + | val compare = List( + | (a1, a2, Integer.TYPE), + | (b1, b2, listC), + | (c1, c2, listC), + | (d1, d2, Array(1).getClass), + | (e1, e2, Array(List()).getClass), + | (f1, f2, new Object().getClass)) + |} + |(new C).compare + """.stripMargin + type K = Class[_] + val cs = run[List[(K, K, K)]](code) + for ((x, y, z) <- cs) { + assertEquals(x, y) + assertEquals(x, z) + } + } + + @Test + def t9702(): Unit = { + val code = + """import javax.annotation.Resource + |import scala.reflect.ClassOfTest.VC + |class C { + | type aList[K] = List[K] + | type aVC = VC + | type aInt = Int + | type aInteger = Integer + | @Resource(`type` = classOf[List[Int]]) def a = 0 + | @Resource(`type` = classOf[List[_]]) def b = 0 + | @Resource(`type` = classOf[aList[_]]) def c = 0 + | @Resource(`type` = classOf[Int]) def d = 0 + | @Resource(`type` = 
classOf[aInt]) def e = 0 + | @Resource(`type` = classOf[Integer]) def f = 0 + | @Resource(`type` = classOf[aInteger]) def g = 0 + | @Resource(`type` = classOf[VC]) def h = 0 + | @Resource(`type` = classOf[aVC]) def i = 0 + | @Resource(`type` = classOf[Array[Int]]) def j = 0 + | @Resource(`type` = classOf[Array[List[_]]]) def k = 0 + |} + |val c = classOf[C] + |def typeArg(meth: String) = c.getDeclaredMethod(meth).getDeclaredAnnotation(classOf[Resource]).`type` + |('a' to 'k').toList.map(_.toString).map(typeArg) + """.stripMargin + + val l = Class.forName("scala.collection.immutable.List") + val i = Integer.TYPE + val ig = new Integer(1).getClass + val v = new ClassOfTest.VC(1).getClass + val ai = Array(1).getClass + val al = Array(List()).getClass + + // sanity checks + assertEquals(i, classOf[Int]) + assertNotEquals(i, ig) + + assertEquals(run[List[Class[_]]](code), + List(l, l, l, i, i, ig, ig, v, v, ai, al)) + } + + @Test + def classOfUnitConstant(): Unit = { + val code = + """abstract class A { def f: Class[_] } + |class C extends A { final val f = classOf[Unit] } + |val c = new C + |(c.f, (c: A).f) + """.stripMargin + val u = Void.TYPE + assertEquals(run[(Class[_], Class[_])](code), (u, u)) + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala new file mode 100644 index 000000000000..7954fe2360d9 --- /dev/null +++ b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala @@ -0,0 +1,140 @@ +package scala.tools.nsc.backend.jvm + +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.asm.Opcodes._ +import scala.tools.partest.ASMConverters._ +import scala.tools.testing.BytecodeTesting +import scala.tools.testing.BytecodeTesting._ + +@RunWith(classOf[JUnit4]) +class BytecodeTest extends BytecodeTesting { + import compiler._ + + @Test + def t6288bJumpPosition(): Unit = { + val code = + """object Case3 { // 01 + | def unapply(z: Any): Option[Int] = Some(-1) // 02 + | def main(args: Array[String]) { // 03 + | ("": Any) match { // 04 + | case x : String => // 05 + | println("case 0") // 06 println and jump at 6 + | case _ => // 07 + | println("default") // 08 println and jump at 8 + | } // 09 + | println("done") // 10 + | } + |} + """.stripMargin + val List(mirror, module) = compileClasses(code) + + val unapplyLineNumbers = getInstructions(module, "unapply").filter(_.isInstanceOf[LineNumber]) + assert(unapplyLineNumbers == List(LineNumber(2, Label(0))), unapplyLineNumbers) + + val expected = List( + LineNumber(4, Label(0)), + LineNumber(5, Label(5)), + Jump(IFEQ, Label(20)), + + LineNumber(6, Label(11)), + Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false), + Jump(GOTO, Label(33)), + + LineNumber(5, Label(20)), + Jump(GOTO, Label(24)), + + LineNumber(8, Label(24)), + Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false), + Jump(GOTO, Label(33)), + + LineNumber(10, Label(33)), + Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false) + ) + + val mainIns = getInstructions(module, "main") filter { + case _: LineNumber | _: Invoke | _: Jump => true + case _ => false + } + assertSameCode(mainIns, expected) + } + + @Test + def bytecodeForBranches(): Unit = { + val code = + """class C { + | def t1(b: Boolean) = if (b) 1 else 2 + | def t2(x: Int) = if (x == 393) 1 else 2 + | def t3(a: Array[String], b: AnyRef) = a != b && b == a + | def t4(a: AnyRef) = a == null || null != a + | 
def t5(a: AnyRef) = (a eq null) || (null ne a) + | def t6(a: Int, b: Boolean) = if ((a == 10) && b || a != 1) 1 else 2 + | def t7(a: AnyRef, b: AnyRef) = a == b + | def t8(a: AnyRef) = Nil == a || "" != a + |} + """.stripMargin + + val c = compileClass(code) + + // t1: no unnecessary GOTOs + assertSameCode(getMethod(c, "t1"), List( + VarOp(ILOAD, 1), Jump(IFEQ, Label(6)), + Op(ICONST_1), Jump(GOTO, Label(9)), + Label(6), Op(ICONST_2), + Label(9), Op(IRETURN))) + + // t2: no unnecessary GOTOs + assertSameCode(getMethod(c, "t2"), List( + VarOp(ILOAD, 1), IntOp(SIPUSH, 393), Jump(IF_ICMPNE, Label(7)), + Op(ICONST_1), Jump(GOTO, Label(10)), + Label(7), Op(ICONST_2), + Label(10), Op(IRETURN))) + + // t3: Array == is translated to reference equality, AnyRef == to null checks and equals + assertSameCode(getMethod(c, "t3"), List( + // Array == + VarOp(ALOAD, 1), VarOp(ALOAD, 2), Jump(IF_ACMPEQ, Label(23)), + // AnyRef == + VarOp(ALOAD, 2), VarOp(ALOAD, 1), VarOp(ASTORE, 3), Op(DUP), Jump(IFNONNULL, Label(14)), + Op(POP), VarOp(ALOAD, 3), Jump(IFNULL, Label(19)), Jump(GOTO, Label(23)), + Label(14), VarOp(ALOAD, 3), Invoke(INVOKEVIRTUAL, "java/lang/Object", "equals", "(Ljava/lang/Object;)Z", false), Jump(IFEQ, Label(23)), + Label(19), Op(ICONST_1), Jump(GOTO, Label(26)), + Label(23), Op(ICONST_0), + Label(26), Op(IRETURN))) + + val t4t5 = List( + VarOp(ALOAD, 1), Jump(IFNULL, Label(6)), + VarOp(ALOAD, 1), Jump(IFNULL, Label(10)), + Label(6), Op(ICONST_1), Jump(GOTO, Label(13)), + Label(10), Op(ICONST_0), + Label(13), Op(IRETURN)) + + // t4: one side is known null, so just a null check on the other + assertSameCode(getMethod(c, "t4"), t4t5) + + // t5: one side known null, so just a null check on the other + assertSameCode(getMethod(c, "t5"), t4t5) + + // t6: no unnecessary GOTOs + assertSameCode(getMethod(c, "t6"), List( + VarOp(ILOAD, 1), IntOp(BIPUSH, 10), Jump(IF_ICMPNE, Label(7)), + VarOp(ILOAD, 2), Jump(IFNE, Label(12)), + Label(7), VarOp(ILOAD, 1), Op(ICONST_1), Jump(IF_ICMPEQ, Label(16)), + Label(12), Op(ICONST_1), Jump(GOTO, Label(19)), + Label(16), Op(ICONST_2), + Label(19), Op(IRETURN))) + + // t7: universal equality + assertInvoke(getMethod(c, "t7"), "scala/runtime/BoxesRunTime", "equals") + + // t8: no null checks invoking equals on modules and constants + assertSameCode(getMethod(c, "t8"), List( + Field(GETSTATIC, "scala/collection/immutable/Nil$", "MODULE$", "Lscala/collection/immutable/Nil$;"), VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "java/lang/Object", "equals", "(Ljava/lang/Object;)Z", false), Jump(IFNE, Label(10)), + Ldc(LDC, ""), VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "java/lang/Object", "equals", "(Ljava/lang/Object;)Z", false), Jump(IFNE, Label(14)), + Label(10), Op(ICONST_1), Jump(GOTO, Label(17)), + Label(14), Op(ICONST_0), + Label(17), Op(IRETURN))) + } +} diff --git a/test/junit/scala/issues/OptimizedBytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala similarity index 98% rename from test/junit/scala/issues/OptimizedBytecodeTest.scala rename to test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala index af1c50acac37..003162c1ad41 100644 --- a/test/junit/scala/issues/OptimizedBytecodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala @@ -1,18 +1,13 @@ -package scala.issues +package scala.tools.nsc.backend.jvm +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test import scala.tools.asm.Opcodes._ -import org.junit.Assert._ - -import 
scala.tools.nsc.backend.jvm.AsmUtils +import scala.tools.partest.ASMConverters._ +import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ -import scala.tools.partest.ASMConverters -import ASMConverters._ -import AsmUtils._ -import scala.tools.testing.{BytecodeTesting, ClearAfterClass} @RunWith(classOf[JUnit4]) class OptimizedBytecodeTest extends BytecodeTesting { From 64fdae87975f019d3821ca943852df37d50db801 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 20 May 2016 16:07:25 +0200 Subject: [PATCH 0063/2793] All JUnit tests pass without bootstrap (when run in intellij, sbt) --- .../scala/lang/primitives/BoxUnboxTest.scala | 203 ++++++++++-------- .../jvm/opt/BTypesFromClassfileTest.scala | 6 +- .../nsc/backend/jvm/opt/InlinerTest.scala | 6 +- 3 files changed, 123 insertions(+), 92 deletions(-) diff --git a/test/junit/scala/lang/primitives/BoxUnboxTest.scala b/test/junit/scala/lang/primitives/BoxUnboxTest.scala index 23c9326989ee..e4911f1af530 100644 --- a/test/junit/scala/lang/primitives/BoxUnboxTest.scala +++ b/test/junit/scala/lang/primitives/BoxUnboxTest.scala @@ -1,11 +1,10 @@ package scala.lang.primitives -import org.junit.Test import org.junit.Assert._ +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import scala.tools.testing.AssertUtil._ import scala.tools.testing.RunTesting object BoxUnboxTest { @@ -16,80 +15,100 @@ object BoxUnboxTest { class BoxUnboxTest extends RunTesting { import runner._ - def genericNull[T] = null.asInstanceOf[T] // allowed, see SI-4437, point 2 - @Test def boxUnboxInt(): Unit = { - val b = new Integer(1) - val u = 1 - - assertEquals(1.toInt, u) - - assertEquals(Predef.int2Integer(1), b) - assertEquals(1: Integer, b) - assertEquals(Int.box(1), b) - assertEquals(1.asInstanceOf[Object], b) - - assertThrows[ClassCastException]("".asInstanceOf[Integer]) - - assertEquals(Predef.Integer2int(b), u) - assertEquals(b: Int, u) - assertEquals(Int.unbox(b), u) - assertEquals(b.asInstanceOf[Int], u) - assertEquals(b.intValue, u) - assertEquals(b.toInt, u) - intWrapper(b).toInt - - assertThrows[ClassCastException](Int.unbox("")) - assertThrows[ClassCastException]("".asInstanceOf[Int]) - - // null unboxing in various positions - - val n1 = Int.unbox(null) - assertEquals(n1, 0) - val n2 = Predef.Integer2int(null) - assertEquals(n2, 0) - val n3 = (null: Integer): Int - assertEquals(n3, 0) - val n4 = null.asInstanceOf[Int] - assertEquals(n4, 0) - val n5 = null.asInstanceOf[Int] == 0 - assertTrue(n5) - val n6 = null.asInstanceOf[Int] == null - assertFalse(n6) - val n7 = null.asInstanceOf[Int] != 0 - assertFalse(n7) - val n8 = null.asInstanceOf[Int] != null - assertTrue(n8) - - val mp = new java.util.HashMap[Int, Int] - val n9 = mp.get(0) - assertEquals(n9, 0) - val n10 = mp.get(0) == null // SI-602 - assertThrows[AssertionError](assertFalse(n10)) // should not throw - - def f(a: Any) = "" + a - val n11 = f(null.asInstanceOf[Int]) - assertEquals(n11, "0") + // Once we use 2.12.0-M5 as starr, this code can be run directly in the JUnit test. + // Some fixes not yet available in M4 make the test fail when compiled with M4. 
+ val code = + """import scala.tools.testing.AssertUtil._ + |import org.junit.Assert._ + | + |def genericNull[T] = null.asInstanceOf[T] // allowed, see SI-4437, point 2 + | + |val b = new Integer(1) + |val u = 1 + | + |assertEquals(1.toInt, u) + | + |assertEquals(Predef.int2Integer(1), b) + |assertEquals(1: Integer, b) + |assertEquals(Int.box(1), b) + |assertEquals(1.asInstanceOf[Object], b) + | + |assertThrows[ClassCastException]("".asInstanceOf[Integer]) + | + |assertEquals(Predef.Integer2int(b), u) + |assertEquals(b: Int, u) + |assertEquals(Int.unbox(b), u) + |assertEquals(b.asInstanceOf[Int], u) + |assertEquals(b.intValue, u) + |assertEquals(b.toInt, u) + |intWrapper(b).toInt + | + |assertThrows[ClassCastException](Int.unbox("")) + |assertThrows[ClassCastException]("".asInstanceOf[Int]) + | + |// null unboxing in various positions + | + |val n1 = Int.unbox(null) + |assertEquals(n1, 0) + |val n2 = Predef.Integer2int(null) + |assertEquals(n2, 0) + |val n3 = (null: Integer): Int + |assertEquals(n3, 0) + |val n4 = null.asInstanceOf[Int] + |assertEquals(n4, 0) + |val n5 = null.asInstanceOf[Int] == 0 + |assertTrue(n5) + |val n6 = null.asInstanceOf[Int] == null + |assertFalse(n6) + |val n7 = null.asInstanceOf[Int] != 0 + |assertFalse(n7) + |val n8 = null.asInstanceOf[Int] != null + |assertTrue(n8) + | + |val mp = new java.util.HashMap[Int, Int] + |val n9 = mp.get(0) + |assertEquals(n9, 0) + |val n10 = mp.get(0) == null // SI-602 + |assertThrows[AssertionError](assertFalse(n10)) // should not throw + | + |def f(a: Any) = "" + a + |val n11 = f(null.asInstanceOf[Int]) + |assertEquals(n11, "0") + | + |def n12 = genericNull[Int] + |assertEquals(n12, 0) + """.stripMargin - def n12 = genericNull[Int] - assertEquals(n12, 0) + run[Unit](code) } @Test def numericConversions(): Unit = { - val i1 = 1L.asInstanceOf[Int] - assertEquals(i1, 1) - assertThrows[ClassCastException] { - val i2 = (1L: Any).asInstanceOf[Int] // SI-1448, should not throw. see also SI-4437 point 1. - assertEquals(i2, 1) - } + // Once we use 2.12.0-M5 as starr, this code can be run directly in the JUnit test. + val code = + """import scala.tools.testing.AssertUtil._ + |import org.junit.Assert._ + | + |val i1 = 1L.asInstanceOf[Int] + |assertEquals(i1, 1) + |assertThrows[ClassCastException] { + | val i2 = (1L: Any).asInstanceOf[Int] // SI-1448, should not throw. see also SI-4437 point 1. + | assertEquals(i2, 1) + |} + """.stripMargin + run[Unit](code) } @Test def boxUnboxBoolean(): Unit = { - val n1 = Option(null.asInstanceOf[Boolean]) - assertEquals(n1, Some(false)) + // Once we use 2.12.0-M5 as starr, this code can be run directly in the JUnit test. + val code = + """val n1 = Option(null.asInstanceOf[Boolean]) + |n1 + """.stripMargin + assertEquals(run[Option[Boolean]](code), Some(false)) } @Test @@ -99,33 +118,41 @@ class BoxUnboxTest extends RunTesting { // as Any. passing a Unit as Any makes the compiler go through another layer of boxing, so it // can hide some bugs (where we actually have a null, but the compiler makes it a ()). 
- var v = 0 - def eff() = { v = 1 } - def chk() = { assert(v == 1); v = 0 } - - val b = runtime.BoxedUnit.UNIT - - assert(eff() == b); chk() - assert(Unit.box(eff()) == b); chk() - assert(().asInstanceOf[Object] == b) - - Unit.unbox({eff(); b}); chk() - Unit.unbox({eff(); null}); chk() - assertThrows[ClassCastException](Unit.unbox({eff(); ""})); chk() - - val n1 = null.asInstanceOf[Unit] - assert(n1 == b) - - val n2 = null.asInstanceOf[Unit] == b - assert(n2) - - def f(a: Any) = "" + a - val n3 = f(null.asInstanceOf[Unit]) - assertEquals(n3, "()") + // Once we use 2.12.0-M5 as starr, this code can be run directly in the JUnit test. + val code = + """import scala.tools.testing.AssertUtil._ + |import org.junit.Assert._ + | + |var v = 0 + |def eff() = { v = 1 } + |def chk() = { assert(v == 1); v = 0 } + | + |val b = runtime.BoxedUnit.UNIT + | + |assert(eff() == b); chk() + |assert(Unit.box(eff()) == b); chk() + |assert(().asInstanceOf[Object] == b) + | + |Unit.unbox({eff(); b}); chk() + |Unit.unbox({eff(); null}); chk() + |assertThrows[ClassCastException](Unit.unbox({eff(); ""})); chk() + | + |val n1 = null.asInstanceOf[Unit] + |assert(n1 == b) + | + |val n2 = null.asInstanceOf[Unit] == b + |assert(n2) + | + |def f(a: Any) = "" + a + |val n3 = f(null.asInstanceOf[Unit]) + |assertEquals(n3, "()") + """.stripMargin + run[Unit](code) } @Test def t9671(): Unit = { + // Once we use 2.12.0-M5 as starr, this code can be run directly in the JUnit test. val code = """import scala.lang.primitives.BoxUnboxTest.VCI | diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala index 1169871ecda0..e7aea71e7215 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala @@ -2,7 +2,7 @@ package scala.tools.nsc package backend.jvm package opt -import org.junit.Test +import org.junit.{Ignore, Test} import org.junit.runner.RunWith import org.junit.runners.JUnit4 @@ -83,7 +83,9 @@ class BTypesFromClassfileTest extends BytecodeTesting { sameBType(fromSymbol, fromClassfile) } - @Test + // Can be enabled when using 2.12.0-M5 as starr. This test works under a full boostrap, but not + // when compiled with M4. + @Test @Ignore def compareClassBTypes(): Unit = { // Note that not only these classes are tested, but also all their parents and all nested // classes in their InnerClass attributes. diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index 24e889cf186e..4e014d452983 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -3,7 +3,7 @@ package backend.jvm package opt import org.junit.Assert._ -import org.junit.Test +import org.junit.{Ignore, Test} import org.junit.runner.RunWith import org.junit.runners.JUnit4 @@ -1488,7 +1488,9 @@ class InlinerTest extends BytecodeTesting { assertSameSummary(getMethod(c, "t"), List(NEW, "", ICONST_1, IRETURN)) // ICONST_1, U.f is inlined (not T.f) } - @Test + // Can be enabled when using 2.12.0-M5 as starr. This test works under a full boostrap, but not + // when compiled with M4. 
+ @Test @Ignore def inlineArrayForeach(): Unit = { val code = """class C { From e26835c01254ecb4d9b4fa3a8bbe9c835f808a38 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 20 May 2016 16:26:20 +0200 Subject: [PATCH 0064/2793] Adapt naming convention for collection.convert null safety test Test classes not ending in "Test" are not executed in sbt. IntelliJ runs them. After this patch: 803 tests executed both in sbt and junit. --- .../collection/convert/NullSafetyTest.scala | 279 ------------------ .../convert/NullSafetyToJavaTest.scala | 138 +++++++++ .../convert/NullSafetyToScalaTest.scala | 148 ++++++++++ 3 files changed, 286 insertions(+), 279 deletions(-) delete mode 100644 test/junit/scala/collection/convert/NullSafetyTest.scala create mode 100644 test/junit/scala/collection/convert/NullSafetyToJavaTest.scala create mode 100644 test/junit/scala/collection/convert/NullSafetyToScalaTest.scala diff --git a/test/junit/scala/collection/convert/NullSafetyTest.scala b/test/junit/scala/collection/convert/NullSafetyTest.scala deleted file mode 100644 index 173568408c19..000000000000 --- a/test/junit/scala/collection/convert/NullSafetyTest.scala +++ /dev/null @@ -1,279 +0,0 @@ -package scala.collection.convert - -import java.{util => ju, lang => jl} -import ju.{concurrent => juc} - -import org.junit.Test -import org.junit.experimental.runners.Enclosed -import org.junit.runner.RunWith - -import collection.convert.ImplicitConversions._ -import scala.collection.JavaConverters._ -import scala.collection.{mutable, concurrent} - -@RunWith(classOf[Enclosed]) -object NullSafetyTest { - - /* - * Pertinent: SI-9113 - * Tests to insure that wrappers return null instead of wrapping it as a collection - */ - - class ToScala { - - @Test def testIteratorWrapping(): Unit = { - val nullJIterator: ju.Iterator[AnyRef] = null - val iterator: Iterator[AnyRef] = nullJIterator - - assert(iterator == null) - } - - @Test def testEnumerationWrapping(): Unit = { - val nullJEnumeration: ju.Enumeration[AnyRef] = null - val enumeration: Iterator[AnyRef] = nullJEnumeration - - assert(enumeration == null) - } - - @Test def testIterableWrapping(): Unit = { - val nullJIterable: jl.Iterable[AnyRef] = null - val iterable: Iterable[AnyRef] = nullJIterable - - assert(iterable == null) - } - - @Test def testCollectionWrapping(): Unit = { - val nullJCollection: ju.Collection[AnyRef] = null - val collection: Iterable[AnyRef] = nullJCollection - - assert(collection == null) - } - - @Test def testBufferWrapping(): Unit = { - val nullJList: ju.List[AnyRef] = null - val buffer: mutable.Buffer[AnyRef] = nullJList - - assert(buffer == null) - } - - @Test def testSetWrapping(): Unit = { - val nullJSet: ju.Set[AnyRef] = null - val set: mutable.Set[AnyRef] = nullJSet - - assert(set == null) - } - - @Test def testMapWrapping(): Unit = { - val nullJMap: ju.Map[AnyRef, AnyRef] = null - val map: mutable.Map[AnyRef, AnyRef] = nullJMap - - assert(map == null) - } - - @Test def testConcurrentMapWrapping(): Unit = { - val nullJConMap: juc.ConcurrentMap[AnyRef, AnyRef] = null - val conMap: concurrent.Map[AnyRef, AnyRef] = nullJConMap - - assert(conMap == null) - } - - @Test def testDictionaryWrapping(): Unit = { - val nullJDict: ju.Dictionary[AnyRef, AnyRef] = null - val dict: mutable.Map[AnyRef, AnyRef] = nullJDict - - assert(dict == null) - } - - - @Test def testPropertyWrapping(): Unit = { - val nullJProps: ju.Properties = null - val props: mutable.Map[String, String] = nullJProps - - assert(props == null) - } - - @Test def 
testIteratorDecoration(): Unit = { - val nullJIterator: ju.Iterator[AnyRef] = null - - assert(nullJIterator.asScala == null) - } - - @Test def testEnumerationDecoration(): Unit = { - val nullJEnumeration: ju.Enumeration[AnyRef] = null - - assert(nullJEnumeration.asScala == null) - } - - @Test def testIterableDecoration(): Unit = { - val nullJIterable: jl.Iterable[AnyRef] = null - - assert(nullJIterable.asScala == null) - } - - @Test def testCollectionDecoration(): Unit = { - val nullJCollection: ju.Collection[AnyRef] = null - - assert(nullJCollection.asScala == null) - } - - @Test def testBufferDecoration(): Unit = { - val nullJBuffer: ju.List[AnyRef] = null - - assert(nullJBuffer.asScala == null) - } - - @Test def testSetDecoration(): Unit = { - val nullJSet: ju.Set[AnyRef] = null - - assert(nullJSet.asScala == null) - } - - @Test def testMapDecoration(): Unit = { - val nullJMap: ju.Map[AnyRef, AnyRef] = null - - assert(nullJMap.asScala == null) - } - - @Test def testConcurrentMapDecoration(): Unit = { - val nullJConMap: juc.ConcurrentMap[AnyRef, AnyRef] = null - - assert(nullJConMap.asScala == null) - } - - @Test def testDictionaryDecoration(): Unit = { - val nullJDict: ju.Dictionary[AnyRef, AnyRef] = null - - assert(nullJDict.asScala == null) - } - - @Test def testPropertiesDecoration(): Unit = { - val nullJProperties: ju.Properties = null - - assert(nullJProperties.asScala == null) - } - } - - class ToJava { - - @Test def testIteratorWrapping(): Unit = { - val nullIterator: Iterator[AnyRef] = null - val jIterator: ju.Iterator[AnyRef] = nullIterator - - assert(jIterator == null) - } - - @Test def testEnumerationWrapping(): Unit = { - val nullEnumeration: Iterator[AnyRef] = null - val enumeration: ju.Iterator[AnyRef] = nullEnumeration - - assert(enumeration == null) - } - - @Test def testIterableWrapping(): Unit = { - val nullIterable: Iterable[AnyRef] = null - val iterable: jl.Iterable[AnyRef] = asJavaIterable(nullIterable) - - assert(iterable == null) - } - - @Test def testCollectionWrapping(): Unit = { - val nullCollection: Iterable[AnyRef] = null - val collection: ju.Collection[AnyRef] = nullCollection - - assert(collection == null) - } - - @Test def testBufferWrapping(): Unit = { - val nullList: mutable.Buffer[AnyRef] = null - val buffer: ju.List[AnyRef] = nullList - - assert(buffer == null) - } - - @Test def testSetWrapping(): Unit = { - val nullSet: mutable.Set[AnyRef] = null - val set: ju.Set[AnyRef] = nullSet - - assert(set == null) - } - - @Test def testMapWrapping(): Unit = { - val nullMap: mutable.Map[AnyRef, AnyRef] = null - val map: ju.Map[AnyRef, AnyRef] = nullMap - - assert(map == null) - } - - @Test def testConcurrentMapWrapping(): Unit = { - val nullConMap: concurrent.Map[AnyRef, AnyRef] = null - val conMap: juc.ConcurrentMap[AnyRef, AnyRef] = nullConMap - - assert(conMap == null) - } - - @Test def testDictionaryWrapping(): Unit = { - val nullDict: mutable.Map[AnyRef, AnyRef] = null - val dict: ju.Dictionary[AnyRef, AnyRef] = nullDict - - assert(dict == null) - } - - // Implicit conversion to ju.Properties is not available - - @Test def testIteratorDecoration(): Unit = { - val nullIterator: Iterator[AnyRef] = null - - assert(nullIterator.asJava == null) - } - - @Test def testEnumerationDecoration(): Unit = { - val nullEnumeration: Iterator[AnyRef] = null - - assert(nullEnumeration.asJavaEnumeration == null) - } - - @Test def testIterableDecoration(): Unit = { - val nullIterable: Iterable[AnyRef] = null - - assert(nullIterable.asJava == null) - } - - @Test def 
testCollectionDecoration(): Unit = { - val nullCollection: Iterable[AnyRef] = null - - assert(nullCollection.asJavaCollection == null) - } - - @Test def testBufferDecoration(): Unit = { - val nullBuffer: mutable.Buffer[AnyRef] = null - - assert(nullBuffer.asJava == null) - } - - @Test def testSetDecoration(): Unit = { - val nullSet: Set[AnyRef] = null - - assert(nullSet.asJava == null) - } - - @Test def testMapDecoration(): Unit = { - val nullMap: mutable.Map[AnyRef, AnyRef] = null - - assert(nullMap.asJava == null) - } - - @Test def testConcurrentMapDecoration(): Unit = { - val nullConMap: concurrent.Map[AnyRef, AnyRef] = null - - assert(nullConMap.asJava == null) - } - - @Test def testDictionaryDecoration(): Unit = { - val nullDict: mutable.Map[AnyRef, AnyRef] = null - - assert(nullDict.asJavaDictionary == null) - } - - // Decorator conversion to ju.Properties is not available - } -} diff --git a/test/junit/scala/collection/convert/NullSafetyToJavaTest.scala b/test/junit/scala/collection/convert/NullSafetyToJavaTest.scala new file mode 100644 index 000000000000..da0513ed8ae7 --- /dev/null +++ b/test/junit/scala/collection/convert/NullSafetyToJavaTest.scala @@ -0,0 +1,138 @@ +package scala.collection.convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.collection.JavaConverters._ +import scala.collection.convert.ImplicitConversions._ +import scala.collection.{concurrent, mutable} + +// SI-9113: tests to insure that wrappers return null instead of wrapping it as a collection + +@RunWith(classOf[JUnit4]) +class NullSafetyToJavaTest { + @Test def testIteratorWrapping(): Unit = { + val nullIterator: Iterator[AnyRef] = null + val jIterator: ju.Iterator[AnyRef] = nullIterator + + assert(jIterator == null) + } + + @Test def testEnumerationWrapping(): Unit = { + val nullEnumeration: Iterator[AnyRef] = null + val enumeration: ju.Iterator[AnyRef] = nullEnumeration + + assert(enumeration == null) + } + + @Test def testIterableWrapping(): Unit = { + val nullIterable: Iterable[AnyRef] = null + val iterable: jl.Iterable[AnyRef] = asJavaIterable(nullIterable) + + assert(iterable == null) + } + + @Test def testCollectionWrapping(): Unit = { + val nullCollection: Iterable[AnyRef] = null + val collection: ju.Collection[AnyRef] = nullCollection + + assert(collection == null) + } + + @Test def testBufferWrapping(): Unit = { + val nullList: mutable.Buffer[AnyRef] = null + val buffer: ju.List[AnyRef] = nullList + + assert(buffer == null) + } + + @Test def testSetWrapping(): Unit = { + val nullSet: mutable.Set[AnyRef] = null + val set: ju.Set[AnyRef] = nullSet + + assert(set == null) + } + + @Test def testMapWrapping(): Unit = { + val nullMap: mutable.Map[AnyRef, AnyRef] = null + val map: ju.Map[AnyRef, AnyRef] = nullMap + + assert(map == null) + } + + @Test def testConcurrentMapWrapping(): Unit = { + val nullConMap: concurrent.Map[AnyRef, AnyRef] = null + val conMap: juc.ConcurrentMap[AnyRef, AnyRef] = nullConMap + + assert(conMap == null) + } + + @Test def testDictionaryWrapping(): Unit = { + val nullDict: mutable.Map[AnyRef, AnyRef] = null + val dict: ju.Dictionary[AnyRef, AnyRef] = nullDict + + assert(dict == null) + } + + // Implicit conversion to ju.Properties is not available + + @Test def testIteratorDecoration(): Unit = { + val nullIterator: Iterator[AnyRef] = null + + assert(nullIterator.asJava == null) + } + + @Test def testEnumerationDecoration(): Unit = { + val 
nullEnumeration: Iterator[AnyRef] = null + + assert(nullEnumeration.asJavaEnumeration == null) + } + + @Test def testIterableDecoration(): Unit = { + val nullIterable: Iterable[AnyRef] = null + + assert(nullIterable.asJava == null) + } + + @Test def testCollectionDecoration(): Unit = { + val nullCollection: Iterable[AnyRef] = null + + assert(nullCollection.asJavaCollection == null) + } + + @Test def testBufferDecoration(): Unit = { + val nullBuffer: mutable.Buffer[AnyRef] = null + + assert(nullBuffer.asJava == null) + } + + @Test def testSetDecoration(): Unit = { + val nullSet: Set[AnyRef] = null + + assert(nullSet.asJava == null) + } + + @Test def testMapDecoration(): Unit = { + val nullMap: mutable.Map[AnyRef, AnyRef] = null + + assert(nullMap.asJava == null) + } + + @Test def testConcurrentMapDecoration(): Unit = { + val nullConMap: concurrent.Map[AnyRef, AnyRef] = null + + assert(nullConMap.asJava == null) + } + + @Test def testDictionaryDecoration(): Unit = { + val nullDict: mutable.Map[AnyRef, AnyRef] = null + + assert(nullDict.asJavaDictionary == null) + } + + // Decorator conversion to ju.Properties is not available +} diff --git a/test/junit/scala/collection/convert/NullSafetyToScalaTest.scala b/test/junit/scala/collection/convert/NullSafetyToScalaTest.scala new file mode 100644 index 000000000000..9b6d366fafcf --- /dev/null +++ b/test/junit/scala/collection/convert/NullSafetyToScalaTest.scala @@ -0,0 +1,148 @@ +package scala.collection.convert + +import java.util.{concurrent => juc} +import java.{lang => jl, util => ju} + +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.collection.JavaConverters._ +import scala.collection.convert.ImplicitConversions._ +import scala.collection.{concurrent, mutable} + +// SI-9113: tests to insure that wrappers return null instead of wrapping it as a collection + +@RunWith(classOf[JUnit4]) +class NullSafetyToScalaTest { + @Test def testIteratorWrapping(): Unit = { + val nullJIterator: ju.Iterator[AnyRef] = null + val iterator: Iterator[AnyRef] = nullJIterator + + assert(iterator == null) + } + + @Test def testEnumerationWrapping(): Unit = { + val nullJEnumeration: ju.Enumeration[AnyRef] = null + val enumeration: Iterator[AnyRef] = nullJEnumeration + + assert(enumeration == null) + } + + @Test def testIterableWrapping(): Unit = { + val nullJIterable: jl.Iterable[AnyRef] = null + val iterable: Iterable[AnyRef] = nullJIterable + + assert(iterable == null) + } + + @Test def testCollectionWrapping(): Unit = { + val nullJCollection: ju.Collection[AnyRef] = null + val collection: Iterable[AnyRef] = nullJCollection + + assert(collection == null) + } + + @Test def testBufferWrapping(): Unit = { + val nullJList: ju.List[AnyRef] = null + val buffer: mutable.Buffer[AnyRef] = nullJList + + assert(buffer == null) + } + + @Test def testSetWrapping(): Unit = { + val nullJSet: ju.Set[AnyRef] = null + val set: mutable.Set[AnyRef] = nullJSet + + assert(set == null) + } + + @Test def testMapWrapping(): Unit = { + val nullJMap: ju.Map[AnyRef, AnyRef] = null + val map: mutable.Map[AnyRef, AnyRef] = nullJMap + + assert(map == null) + } + + @Test def testConcurrentMapWrapping(): Unit = { + val nullJConMap: juc.ConcurrentMap[AnyRef, AnyRef] = null + val conMap: concurrent.Map[AnyRef, AnyRef] = nullJConMap + + assert(conMap == null) + } + + @Test def testDictionaryWrapping(): Unit = { + val nullJDict: ju.Dictionary[AnyRef, AnyRef] = null + val dict: mutable.Map[AnyRef, AnyRef] = nullJDict + + assert(dict == null) + } + 
+ + @Test def testPropertyWrapping(): Unit = { + val nullJProps: ju.Properties = null + val props: mutable.Map[String, String] = nullJProps + + assert(props == null) + } + + @Test def testIteratorDecoration(): Unit = { + val nullJIterator: ju.Iterator[AnyRef] = null + + assert(nullJIterator.asScala == null) + } + + @Test def testEnumerationDecoration(): Unit = { + val nullJEnumeration: ju.Enumeration[AnyRef] = null + + assert(nullJEnumeration.asScala == null) + } + + @Test def testIterableDecoration(): Unit = { + val nullJIterable: jl.Iterable[AnyRef] = null + + assert(nullJIterable.asScala == null) + } + + @Test def testCollectionDecoration(): Unit = { + val nullJCollection: ju.Collection[AnyRef] = null + + assert(nullJCollection.asScala == null) + } + + @Test def testBufferDecoration(): Unit = { + val nullJBuffer: ju.List[AnyRef] = null + + assert(nullJBuffer.asScala == null) + } + + @Test def testSetDecoration(): Unit = { + val nullJSet: ju.Set[AnyRef] = null + + assert(nullJSet.asScala == null) + } + + @Test def testMapDecoration(): Unit = { + val nullJMap: ju.Map[AnyRef, AnyRef] = null + + assert(nullJMap.asScala == null) + } + + @Test def testConcurrentMapDecoration(): Unit = { + val nullJConMap: juc.ConcurrentMap[AnyRef, AnyRef] = null + + assert(nullJConMap.asScala == null) + } + + @Test def testDictionaryDecoration(): Unit = { + val nullJDict: ju.Dictionary[AnyRef, AnyRef] = null + + assert(nullJDict.asScala == null) + } + + @Test def testPropertiesDecoration(): Unit = { + val nullJProperties: ju.Properties = null + + assert(nullJProperties.asScala == null) + } +} From eeef2602dc97f84c798713d7a2c924ea2b0d6012 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 30 Jan 2016 21:53:27 -0800 Subject: [PATCH 0065/2793] SI-8044 Allow binding backquoted varid in patterns Previously, a varid could not be backquoted, so that it was not possible to introduce variables with names such as `type` in a match expression. This commit allows backquoted varids in `case x @ _` and `case x: Int`. In neither position is a stable id accepted, that is, an id with leading uppercase. Therefore, this commit merely relaxes the backquoted varid to be taken as a normal varid in these contexts. 
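For illustration, here is a small sketch of the forms this admits; it mirrors the `test/files/pos/t8044.scala` test added by this patch, and `type` below is just a backquoted varid, not a stable identifier:

```
trait Example {
  // backquoted varid bound via @
  def g = 42 match { case `type` @ _ => `type` }
  // backquoted varid in a typed pattern
  def h = 42 match { case `type` : Int => `type` }
}
```

A stable id (leading uppercase) is still not accepted in these positions, as noted above.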
--- spec/01-lexical-syntax.md | 2 ++ spec/08-pattern-matching.md | 4 ++-- .../scala/tools/nsc/ast/parser/Parsers.scala | 15 ++++++++------- test/files/neg/t8044.check | 4 ++++ test/files/neg/t8044.scala | 4 ++++ test/files/pos/t8044.scala | 7 +++++++ 6 files changed, 27 insertions(+), 9 deletions(-) create mode 100644 test/files/neg/t8044.check create mode 100644 test/files/neg/t8044.scala create mode 100644 test/files/pos/t8044.scala diff --git a/spec/01-lexical-syntax.md b/spec/01-lexical-syntax.md index 0232ed9a3411..4e92c7cf7b48 100644 --- a/spec/01-lexical-syntax.md +++ b/spec/01-lexical-syntax.md @@ -49,6 +49,8 @@ classes (Unicode general category given in parentheses): ```ebnf op ::= opchar {opchar} varid ::= lower idrest +boundvarid ::= varid + | ‘`’ varid ‘`’ plainid ::= upper idrest | varid | op diff --git a/spec/08-pattern-matching.md b/spec/08-pattern-matching.md index 3b481eea86c3..6753fa3ec783 100644 --- a/spec/08-pattern-matching.md +++ b/spec/08-pattern-matching.md @@ -10,10 +10,10 @@ chapter: 8 ```ebnf Pattern ::= Pattern1 { ‘|’ Pattern1 } - Pattern1 ::= varid ‘:’ TypePat + Pattern1 ::= boundvarid ‘:’ TypePat | ‘_’ ‘:’ TypePat | Pattern2 - Pattern2 ::= varid [‘@’ Pattern3] + Pattern2 ::= boundvarid [‘@’ Pattern3] | Pattern3 Pattern3 ::= SimplePattern | SimplePattern {id [nl] SimplePattern} diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 9c0174d89b99..1e239f91a63f 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1901,19 +1901,20 @@ self => } /** {{{ - * Pattern1 ::= varid `:' TypePat + * Pattern1 ::= boundvarid `:' TypePat * | `_' `:' TypePat * | Pattern2 - * SeqPattern1 ::= varid `:' TypePat + * SeqPattern1 ::= boundvarid `:' TypePat * | `_' `:' TypePat * | [SeqPattern2] * }}} */ def pattern1(): Tree = pattern2() match { case p @ Ident(name) if in.token == COLON => - if (treeInfo.isVarPattern(p)) + if (nme.isVariableName(name)) { + p.removeAttachment[BackquotedIdentifierAttachment.type] atPos(p.pos.start, in.skipToken())(Typed(p, compoundType())) - else { + } else { syntaxError(in.offset, "Pattern variables must start with a lower-case letter. 
(SLS 8.1.1.)") p } @@ -1921,9 +1922,9 @@ self => } /** {{{ - * Pattern2 ::= varid [ @ Pattern3 ] + * Pattern2 ::= boundvarid [ @ Pattern3 ] * | Pattern3 - * SeqPattern2 ::= varid [ @ SeqPattern3 ] + * SeqPattern2 ::= boundvarid [ @ SeqPattern3 ] * | SeqPattern3 * }}} */ @@ -1935,7 +1936,7 @@ self => case Ident(nme.WILDCARD) => in.nextToken() pattern3() - case Ident(name) if treeInfo.isVarPattern(p) => + case Ident(name) if nme.isVariableName(name) => in.nextToken() atPos(p.pos.start) { Bind(name, pattern3()) } case _ => p diff --git a/test/files/neg/t8044.check b/test/files/neg/t8044.check new file mode 100644 index 000000000000..678bf8c7007b --- /dev/null +++ b/test/files/neg/t8044.check @@ -0,0 +1,4 @@ +t8044.scala:3: error: not found: value _ + def f = 42 match { case `_` : Int => `_` } // doesn't leak quoted underscore + ^ +one error found diff --git a/test/files/neg/t8044.scala b/test/files/neg/t8044.scala new file mode 100644 index 000000000000..930c30c5a5a0 --- /dev/null +++ b/test/files/neg/t8044.scala @@ -0,0 +1,4 @@ + +trait T { + def f = 42 match { case `_` : Int => `_` } // doesn't leak quoted underscore +} diff --git a/test/files/pos/t8044.scala b/test/files/pos/t8044.scala new file mode 100644 index 000000000000..8259f06a8a0e --- /dev/null +++ b/test/files/pos/t8044.scala @@ -0,0 +1,7 @@ + +trait T { + def f = 42 match { case `x` @ _ => x } + def g = 42 match { case `type` @ _ => `type` } + def h = 42 match { case `type` : Int => `type` } + def i = (null: Any) match { case _: Int | _: String => 17 } +} From 2eb1cc2e3df1627cde35afa1237cb10f508fe2f2 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 30 Jan 2016 22:15:34 -0800 Subject: [PATCH 0066/2793] SI-8044 Test for quoted not-a-varid --- test/files/neg/t8044-b.check | 4 ++++ test/files/neg/t8044-b.scala | 4 ++++ 2 files changed, 8 insertions(+) create mode 100644 test/files/neg/t8044-b.check create mode 100644 test/files/neg/t8044-b.scala diff --git a/test/files/neg/t8044-b.check b/test/files/neg/t8044-b.check new file mode 100644 index 000000000000..4a93e9a77238 --- /dev/null +++ b/test/files/neg/t8044-b.check @@ -0,0 +1,4 @@ +t8044-b.scala:3: error: Pattern variables must start with a lower-case letter. (SLS 8.1.1.) + def g = 42 match { case `Oops` : Int => } // must be varish + ^ +one error found diff --git a/test/files/neg/t8044-b.scala b/test/files/neg/t8044-b.scala new file mode 100644 index 000000000000..fb2e921ac95e --- /dev/null +++ b/test/files/neg/t8044-b.scala @@ -0,0 +1,4 @@ + +trait T { + def g = 42 match { case `Oops` : Int => } // must be varish +} From 1e565d879360709758950332c19a77fffee073d1 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 15 Mar 2016 13:24:55 -0700 Subject: [PATCH 0067/2793] SI-8044 Allow any id in explicit pattern binding Allows arbitrary identifier in `X @ pat`, including non-varids. This goes to regularity. Users of this syntax are not likely to be confused by the "backquoted var id is stable" rule. Also for sequence pattern, `X @ _*`. 
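As a quick illustration (taken from the additions to `test/files/pos/t8044.scala` in the diff below), the following now parse, with the uppercase and symbolic ids acting as bindings rather than stable-identifier patterns:

```
trait Example {
  // arbitrary idents allowed in @ syntax
  def j = "Fred" match { case Name @ (_: String) => Name }
  def k = "Fred" match { case * @ (_: String) => * }

  // also in a sequence pattern
  def m = List(1, 2, 3, 4, 5) match { case List(1, `Rest of them` @ _*) => `Rest of them` }
}
```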
--- spec/08-pattern-matching.md | 4 ++-- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 7 +++---- test/files/pos/t8044.scala | 8 ++++++++ 3 files changed, 13 insertions(+), 6 deletions(-) diff --git a/spec/08-pattern-matching.md b/spec/08-pattern-matching.md index 6753fa3ec783..35eb97b948a0 100644 --- a/spec/08-pattern-matching.md +++ b/spec/08-pattern-matching.md @@ -13,7 +13,7 @@ chapter: 8 Pattern1 ::= boundvarid ‘:’ TypePat | ‘_’ ‘:’ TypePat | Pattern2 - Pattern2 ::= boundvarid [‘@’ Pattern3] + Pattern2 ::= id [‘@’ Pattern3] | Pattern3 Pattern3 ::= SimplePattern | SimplePattern {id [nl] SimplePattern} @@ -22,7 +22,7 @@ chapter: 8 | Literal | StableId | StableId ‘(’ [Patterns] ‘)’ - | StableId ‘(’ [Patterns ‘,’] [varid ‘@’] ‘_’ ‘*’ ‘)’ + | StableId ‘(’ [Patterns ‘,’] [id ‘@’] ‘_’ ‘*’ ‘)’ | ‘(’ [Patterns] ‘)’ | XmlPattern Patterns ::= Pattern {‘,’ Patterns} diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 1e239f91a63f..abfb6ae67928 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1922,10 +1922,9 @@ self => } /** {{{ - * Pattern2 ::= boundvarid [ @ Pattern3 ] + * Pattern2 ::= id @ Pattern3 + * | `_' @ Pattern3 * | Pattern3 - * SeqPattern2 ::= boundvarid [ @ SeqPattern3 ] - * | SeqPattern3 * }}} */ def pattern2(): Tree = { @@ -1936,7 +1935,7 @@ self => case Ident(nme.WILDCARD) => in.nextToken() pattern3() - case Ident(name) if nme.isVariableName(name) => + case Ident(name) => in.nextToken() atPos(p.pos.start) { Bind(name, pattern3()) } case _ => p diff --git a/test/files/pos/t8044.scala b/test/files/pos/t8044.scala index 8259f06a8a0e..2519a8306b26 100644 --- a/test/files/pos/t8044.scala +++ b/test/files/pos/t8044.scala @@ -4,4 +4,12 @@ trait T { def g = 42 match { case `type` @ _ => `type` } def h = 42 match { case `type` : Int => `type` } def i = (null: Any) match { case _: Int | _: String => 17 } + + // arbitrary idents allowed in @ syntax + def j = "Fred" match { case Name @ (_: String) => Name } + def k = "Fred" match { case * @ (_: String) => * } + + // also in sequence pattern + def m = List(1,2,3,4,5) match { case List(1, `Rest of them` @ _*) => `Rest of them` } + } From 99dad60d984d3f72338f3bad4c4fe905090edd51 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 25 Feb 2016 13:41:20 -0800 Subject: [PATCH 0068/2793] SI-7898 Read user input during REPL warmup The compiler is created on main thread and user input is read on an aux thread (opposite to currently). Fixes completion when `-i` is supplied. Now `-i` means pasted and new option `-I` means line-by-line. The temporary reader uses postInit to swap in the underlying reader. Completion is disabled for the temporary reader, rather than blocking while it waits for a compiler. But manically hitting tab is one way of knowing exactly when completion is live. 
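The handoff can be pictured with a minimal, self-contained sketch (not the REPL code itself; `initializeCompiler` and `interpret` are stand-ins for the real warmup and interpretation steps). An aux thread blocks on the prompt while the main thread does the heavy initialization, and the first line is handed back through a `SynchronousQueue`, the same rendezvous the new `SplashLoop` below uses:

```
import java.util.concurrent.SynchronousQueue

object SplashSketch {
  private val firstLine = new SynchronousQueue[Option[String]]

  def main(args: Array[String]): Unit = {
    // aux thread: collect the first line of user input while the compiler warms up
    val aux = new Thread(new Runnable {
      def run(): Unit = firstLine.put(Option(scala.io.StdIn.readLine("scala> ")))
    })
    aux.start()
    initializeCompiler()                 // main thread: heavy work proceeds meanwhile
    firstLine.take().foreach(interpret)  // then interpret whatever was typed (nothing on ctrl-D)
  }

  private def initializeCompiler(): Unit = Thread.sleep(1000) // placeholder for the real warmup
  private def interpret(line: String): Unit = println(s"interpreting: $line")
}
```

The real `SplashLoop` additionally recognizes `:paste` while collecting input, as the diff shows.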
--- .../tools/nsc/GenericRunnerCommand.scala | 1 + .../tools/nsc/GenericRunnerSettings.scala | 8 +- .../scala/tools/nsc/interpreter/ILoop.scala | 165 ++++++++++++------ .../nsc/interpreter/InteractiveReader.scala | 95 ++++++++++ test/files/run/t7805-repl-i.check | 3 - 5 files changed, 212 insertions(+), 60 deletions(-) diff --git a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala index 24496fa013d4..bab612bad5ef 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala @@ -79,6 +79,7 @@ Other startup options: -howtorun what to run (default: guess) -i preload before starting the repl + -I preload , enforcing line-by-line interpretation -e execute as if entered in the repl -save save the compiled script in a jar for future use -nc no compilation daemon: do not use the fsc offline compiler diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala index 1289d55c3726..d1f8db048bad 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala @@ -20,10 +20,16 @@ class GenericRunnerSettings(error: String => Unit) extends Settings(error) { "guess") val loadfiles = + MultiStringSetting( + "-I", + "file", + "load a file line-by-line") + + val pastefiles = MultiStringSetting( "-i", "file", - "load a file (assumes the code is given interactively)") + "paste a file") val execute = StringSetting( diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index adac438b3765..adaf3a5d252b 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -175,10 +175,19 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) echo("\n" + msg) in.redrawLine() } - protected def echo(msg: String) = { + protected var mum = false + protected def echo(msg: String) = if (!mum) { out println msg out.flush() } + // turn off intp reporter and our echo + def mumly[A](op: =>A): A = + if (isReplDebug) op + else intp beSilentDuring { + val saved = mum + mum = true + try op finally mum = saved + } /** Search the history */ def searchHistory(_cmdline: String) { @@ -408,12 +417,13 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) * command() for each line of input, and stops when * command() returns false. 
*/ - @tailrec final def loop(): LineResult = { + final def loop(): LineResult = loop(readOneLine()) + + @tailrec final def loop(line: String): LineResult = { import LineResults._ - readOneLine() match { - case null => EOF - case line => if (try processLine(line) catch crashRecovery) loop() else ERR - } + if (line == null) EOF + else if (try processLine(line) catch crashRecovery) loop(readOneLine()) + else ERR } /** interpret all lines from a specified file */ @@ -829,19 +839,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } } - // runs :load `file` on any files passed via -i - def loadFiles(settings: Settings) = settings match { - case settings: GenericRunnerSettings => - for (filename <- settings.loadfiles.value) { - val cmd = ":load " + filename - command(cmd) - addReplay(cmd) - echo("") - } - case _ => - } - - /** Tries to create a JLineReader, falling back to SimpleReader, + /** Tries to create a jline.InteractiveReader, falling back to SimpleReader, * unless settings or properties are such that it should start with SimpleReader. * The constructor of the InteractiveReader must take a Completion strategy, * supplied as a `() => Completion`; the Completion object provides a concrete Completer. @@ -885,49 +883,104 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } } - private def loopPostInit() { - // Bind intp somewhere out of the regular namespace where - // we can get at it in generated code. - intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain])) - // Auto-run code via some setting. - ( replProps.replAutorunCode.option - flatMap (f => io.File(f).safeSlurp()) - foreach (intp quietRun _) - ) - // classloader and power mode setup - intp.setContextClassLoader() - if (isReplPower) { - replProps.power setValue true - unleashAndSetPhase() - asyncMessage(power.banner) - } - // SI-7418 Now, and only now, can we enable TAB completion. - in.postInit() - } - - // start an interpreter with the given settings + /** Start an interpreter with the given settings. + * @return true if successful + */ def process(settings: Settings): Boolean = savingContextLoader { - this.settings = settings - createInterpreter() - // sets in to some kind of reader depending on environmental cues - in = in0.fold(chooseReader(settings))(r => SimpleReader(r, out, interactive = true)) - globalFuture = future { - intp.initializeSynchronous() - loopPostInit() - !intp.reporter.hasErrors + def newReader = in0.fold(chooseReader(settings))(r => SimpleReader(r, out, interactive = true)) + + /** Reader to use before interpreter is online. */ + def preLoop = { + val sr = SplashReader(newReader) { r => + in = r + in.postInit() + } + in = sr + SplashLoop(sr, prompt) } - loadFiles(settings) - printWelcome() - try loop() match { - case LineResults.EOF => out print Properties.shellInterruptedString - case _ => + /* Actions to cram in parallel while collecting first user input at prompt. + * Run with output muted both from ILoop and from the intp reporter. + */ + def loopPostInit(): Unit = mumly { + // Bind intp somewhere out of the regular namespace where + // we can get at it in generated code. + intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain])) + + // add a help function for anyone who types "help" instead of ":help". Easily shadowed. + //addHelp() + + // Auto-run code via some setting. 
+ ( replProps.replAutorunCode.option + flatMap (f => File(f).safeSlurp()) + foreach (intp quietRun _) + ) + // power mode setup + if (isReplPower) { + replProps.power setValue true + unleashAndSetPhase() + asyncMessage(power.banner) + } + loadInitFiles() + // SI-7418 Now, and only now, can we enable TAB completion. + in.postInit() + } + def loadInitFiles(): Unit = settings match { + case settings: GenericRunnerSettings => + for (f <- settings.loadfiles.value) { + loadCommand(f) + addReplay(s":load $f") + } + for (f <- settings.pastefiles.value) { + pasteCommand(f) + addReplay(s":paste $f") + } + case _ => + } + // TODO: wait until after startup to enable obnoxious settings + def withSuppressedSettings[A](body: =>A): A = { + body } - catch AbstractOrMissingHandler() - finally closeInterpreter() + def startup(): String = withSuppressedSettings { + // starting + printWelcome() - true + // let them start typing + val splash = preLoop + splash.start() + + // while we go fire up the REPL + try { + createInterpreter() + intp.initializeSynchronous() + globalFuture = Future successful true + if (intp.reporter.hasErrors) { + echo("Interpreter encountered errors during initialization!") + null + } else { + loopPostInit() + val line = splash.line // what they typed in while they were waiting + if (line == null) { // they ^D + try out print Properties.shellInterruptedString + finally closeInterpreter() + } + line + } + } finally splash.stop() + } + this.settings = settings + startup() match { + case null => false + case line => + try loop(line) match { + case LineResults.EOF => out print Properties.shellInterruptedString + case _ => + } + catch AbstractOrMissingHandler() + finally closeInterpreter() + true + } } @deprecated("Use `process` instead", "2.9.0") diff --git a/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala index 71753a3e3924..1f81d9965c67 100644 --- a/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala +++ b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala @@ -50,3 +50,98 @@ object InteractiveReader { def createDefault(): InteractiveReader = apply() // used by sbt } +/** Collect one line of user input from the supplied reader. + * Runs on a new thread while the REPL is initializing on the main thread. + * + * The user can enter text or a `:paste` command. + */ +class SplashLoop(reader: InteractiveReader, prompt: String) extends Runnable { + import java.util.concurrent.SynchronousQueue + import scala.compat.Platform.EOL + + private val result = new SynchronousQueue[Option[String]] + @volatile private var running: Boolean = _ + private var thread: Thread = _ + + /** Read one line of input which can be retrieved with `line`. */ + def run(): Unit = { + var line = "" + try + do { + line = reader.readLine(prompt) + if (line != null) { + line = process(line.trim) + } + } while (line != null && line.isEmpty && running) + finally { + result.put(Option(line)) + } + } + + /** Check for `:paste` command. 
*/ + private def process(line: String): String = { + def isPrefix(s: String, p: String, n: Int) = ( + //s != null && p.inits.takeWhile(_.length >= n).exists(s == _) + s != null && s.length >= n && s.length <= p.length && s == p.take(s.length) + ) + if (isPrefix(line, ":paste", 3)) { + // while collecting lines, check running flag + var help = f"// Entering paste mode (ctrl-D to finish)%n%n" + def readWhile(cond: String => Boolean) = { + Iterator continually reader.readLine(help) takeWhile { x => + help = "" + x != null && cond(x) + } + } + val text = (readWhile(_ => running) mkString EOL).trim + val next = + if (text.isEmpty) "// Nothing pasted, nothing gained." + else "// Exiting paste mode, now interpreting." + Console println f"%n${next}%n" + text + } else { + line + } + } + + def start(): Unit = result.synchronized { + require(thread == null, "Already started") + thread = new Thread(this) + running = true + thread.start() + } + + def stop(): Unit = result.synchronized { + running = false + if (thread != null) thread.interrupt() + thread = null + } + + /** Block for the result line, or null on ctl-D. */ + def line: String = result.take getOrElse null +} +object SplashLoop { + def apply(reader: SplashReader, prompt: String): SplashLoop = new SplashLoop(reader, prompt) +} + +/** Reader during splash. Handles splash-completion with a stub, otherwise delegates. */ +class SplashReader(reader: InteractiveReader, postIniter: InteractiveReader => Unit) extends InteractiveReader { + /** Invoke the postInit action with the underlying reader. */ + override def postInit(): Unit = postIniter(reader) + + override val interactive: Boolean = reader.interactive + + override def reset(): Unit = reader.reset() + override def history: History = reader.history + override val completion: Completion = NoCompletion + override def redrawLine(): Unit = reader.redrawLine() + + override protected[interpreter] def readOneLine(prompt: String): String = ??? // unused + override protected[interpreter] def readOneKey(prompt: String): Int = ??? // unused + + override def readLine(prompt: String): String = reader.readLine(prompt) +} +object SplashReader { + def apply(reader: InteractiveReader)(postIniter: InteractiveReader => Unit) = + new SplashReader(reader, postIniter) +} diff --git a/test/files/run/t7805-repl-i.check b/test/files/run/t7805-repl-i.check index 24512c006733..70f024605caa 100644 --- a/test/files/run/t7805-repl-i.check +++ b/test/files/run/t7805-repl-i.check @@ -1,6 +1,3 @@ -Loading t7805-repl-i.script... -import util._ - Welcome to Scala Type in expressions for evaluation. Or try :help. 
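
The splash machinery above amounts to showing the prompt and reading one line on a worker thread while the main thread finishes expensive initialization, handing the line across through a `SynchronousQueue`. A condensed, stand-alone sketch of that handoff (the `initInterpreter` stand-in and the object name are illustrative, not part of the patch):

```
import java.util.concurrent.SynchronousQueue

// Minimal sketch of the splash handoff: a worker thread shows the prompt and reads one
// line while the main thread finishes expensive setup; the line crosses threads through
// a SynchronousQueue, with None standing for EOF (ctrl-D).
object SplashSketch {
  def main(args: Array[String]): Unit = {
    val result = new SynchronousQueue[Option[String]]

    val reader = new Thread(new Runnable {
      def run(): Unit = {
        print("scala> ")
        result.put(Option(scala.io.StdIn.readLine()))
      }
    })
    reader.start()

    initInterpreter()        // stand-in for compiler/REPL initialization

    result.take() match {
      case Some(line) => println(s"first input, read while initializing: $line")
      case None       => println("EOF before any input")
    }
  }

  private def initInterpreter(): Unit = Thread.sleep(500)  // placeholder work
}
```
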
From c89c36597f922fe29cbb3cec8095611f86ba4976 Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Mon, 16 May 2016 18:52:06 +0200 Subject: [PATCH 0069/2793] SI-9766 - allow ++ on empty ConcatIterator --- src/library/scala/collection/Iterator.scala | 3 ++- test/junit/scala/collection/IteratorTest.scala | 18 ++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 8d88b1c6b14f..9ba16976bd49 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -200,7 +200,8 @@ object Iterator { } else Iterator.empty.next() override def ++[B >: A](that: => GenTraversableOnce[B]): Iterator[B] = - new ConcatIterator(current, queue :+ (() => that.toIterator)) + if(current eq null) new JoinIterator(Iterator.empty, that) + else new ConcatIterator(current, queue :+ (() => that.toIterator)) } private[scala] final class JoinIterator[+A](lhs: Iterator[A], that: => GenTraversableOnce[A]) extends Iterator[A] { diff --git a/test/junit/scala/collection/IteratorTest.scala b/test/junit/scala/collection/IteratorTest.scala index 329c85127a5d..d980cadeb3c4 100644 --- a/test/junit/scala/collection/IteratorTest.scala +++ b/test/junit/scala/collection/IteratorTest.scala @@ -192,4 +192,22 @@ class IteratorTest { assertSameElements(exp, res) assertEquals(8, counter) // was 14 } + + // SI-9766 + @Test def exhaustedConcatIteratorConcat: Unit = { + def consume[A](i: Iterator[A]) = { + while(i.hasNext) i.next() + } + val joiniter = Iterator.empty ++ Seq(1, 2, 3) + assertTrue(joiniter.hasNext) + consume(joiniter) + val concatiter = joiniter ++ Seq(4, 5, 6) + assertTrue(concatiter.hasNext) + consume(concatiter) + assertFalse(concatiter.hasNext) + val concatFromEmpty = concatiter ++ Seq(7, 8, 9) + assertTrue(concatFromEmpty.hasNext) + consume(concatFromEmpty) + assertFalse(concatFromEmpty.hasNext) + } } From a412ea04ba0ba41dbac469b89e1412f0c56fce3f Mon Sep 17 00:00:00 2001 From: Raphael Jolly Date: Tue, 9 Feb 2016 22:16:22 +0100 Subject: [PATCH 0070/2793] Use jarlister in build The goal of this change is to exercize the "manifest classpath" mechanism, meant to bring the compiler its needed classes as resources, listed in jar manifests, as opposed to files, thus enabling to use the compiler in sandboxed environments (and also the scripting engine for that matter). 
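
For context, a consumer of such a "manifest classpath" can enumerate the per-entry sections of a jar manifest and fetch each listed class purely as a classloader resource, with no file-system access. A rough sketch under that assumption (the object name and the entry cap are illustrative):

```
import java.util.jar.Manifest
import scala.collection.JavaConverters._

// Sketch of the "manifest classpath" idea: list the .class entries recorded as
// per-entry manifest sections and check each is reachable as a resource only.
object ManifestClasspathSketch {
  def main(args: Array[String]): Unit = {
    val loader    = getClass.getClassLoader
    val manifests = loader.getResources("META-INF/MANIFEST.MF").asScala

    for (url <- manifests) {
      val in = url.openStream()
      try {
        val mf = new Manifest(in)
        // per-entry sections are keyed by name; a jarlister adds one per class file
        val classEntries = mf.getEntries.asScala.keys.filter(_.endsWith(".class"))
        classEntries.take(5).foreach { name =>
          val res = loader.getResourceAsStream(name)
          println(s"$name -> resource available: ${res != null}")
          if (res != null) res.close()
        }
      } finally in.close()
    }
  }
}
```
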
--- build.xml | 14 +++++++++++++- test/files/run/t7843-jsr223-service.scala | 6 ++---- test/files/run/t7933.scala | 6 ++---- 3 files changed, 17 insertions(+), 9 deletions(-) diff --git a/build.xml b/build.xml index 7b49544447c7..e0b2f353e107 100644 --- a/build.xml +++ b/build.xml @@ -279,6 +279,10 @@ TODO: + + + + @@ -867,6 +871,11 @@ TODO: + + + + + @@ -1230,7 +1239,10 @@ TODO: - + + + + diff --git a/test/files/run/t7843-jsr223-service.scala b/test/files/run/t7843-jsr223-service.scala index 31112212eaf4..3c853878ba3b 100644 --- a/test/files/run/t7843-jsr223-service.scala +++ b/test/files/run/t7843-jsr223-service.scala @@ -1,8 +1,6 @@ -import scala.tools.nsc.interpreter.IMain - object Test extends App { - val engine = new IMain.Factory getScriptEngine() - engine.asInstanceOf[IMain].settings.usejavacp.value = true + val m = new javax.script.ScriptEngineManager() + val engine = m.getEngineByName("scala") engine put ("n", 10) engine eval "1 to n.asInstanceOf[Int] foreach print" } diff --git a/test/files/run/t7933.scala b/test/files/run/t7933.scala index b06dffcd80a9..58e39dd3843a 100644 --- a/test/files/run/t7933.scala +++ b/test/files/run/t7933.scala @@ -1,8 +1,6 @@ -import scala.tools.nsc.interpreter.IMain - object Test extends App { - val engine = new IMain.Factory getScriptEngine() - engine.asInstanceOf[IMain].settings.usejavacp.value = true + val m = new javax.script.ScriptEngineManager() + val engine = m.getEngineByName("scala") val res2 = engine.asInstanceOf[javax.script.Compilable] res2 compile "8" eval() val res5 = res2 compile """println("hello") ; 8""" From 206a657225f16c0fa847f0b2abaddc17b8d945d7 Mon Sep 17 00:00:00 2001 From: Raphael Jolly Date: Fri, 20 May 2016 20:09:53 +0200 Subject: [PATCH 0071/2793] Jarlist scala-library in build.sbt --- build.sbt | 5 ++++- project/Osgi.scala | 13 +++++++++++-- 2 files changed, 15 insertions(+), 3 deletions(-) diff --git a/build.sbt b/build.sbt index 984ac0e91cc6..da86f6274b79 100644 --- a/build.sbt +++ b/build.sbt @@ -351,6 +351,7 @@ lazy val library = configureAsSubproject(project) products in Compile in packageBin ++= (products in Compile in packageBin in forkjoin).value, Osgi.headers += "Import-Package" -> "sun.misc;resolution:=optional, *", + Osgi.jarlist := true, fixPom( "/project/name" -> Scala Library, "/project/description" -> Standard library for the Scala Programming Language, @@ -420,13 +421,15 @@ lazy val compiler = configureAsSubproject(project) scalacOptions in Compile in doc ++= Seq( "-doc-root-content", (sourceDirectory in Compile).value + "/rootdoc.txt" ), - Osgi.headers += + Osgi.headers ++= Seq( "Import-Package" -> ("jline.*;resolution:=optional," + "org.apache.tools.ant.*;resolution:=optional," + "scala.util.parsing.*;version=\"${range;[====,====];"+versionNumber("scala-parser-combinators")+"}\";resolution:=optional," + "scala.xml.*;version=\"${range;[====,====];"+versionNumber("scala-xml")+"}\";resolution:=optional," + "scala.*;version=\"${range;[==,=+);${ver}}\"," + "*"), + "Class-Path" -> "scala-reflect.jar scala-library.jar" + ), // Generate the ScriptEngineFactory service definition. 
The ant build does this when building // the JAR but sbt has no support for it and it is easier to do as a resource generator: generateServiceProviderResources("javax.script.ScriptEngineFactory" -> "scala.tools.nsc.interpreter.IMain$Factory"), diff --git a/project/Osgi.scala b/project/Osgi.scala index 4456c9419051..d780be2f78ed 100644 --- a/project/Osgi.scala +++ b/project/Osgi.scala @@ -1,6 +1,7 @@ import aQute.lib.osgi.Builder import aQute.lib.osgi.Constants._ import java.util.Properties +import java.util.jar.Attributes import sbt._ import sbt.Keys._ import scala.collection.JavaConversions._ @@ -16,6 +17,7 @@ object Osgi { val bundleName = SettingKey[String]("osgiBundleName", "The Bundle-Name for the manifest.") val bundleSymbolicName = SettingKey[String]("osgiBundleSymbolicName", "The Bundle-SymbolicName for the manifest.") val headers = SettingKey[Seq[(String, String)]]("osgiHeaders", "Headers and processing instructions for BND.") + val jarlist = SettingKey[Boolean]("osgiJarlist", "List classes in manifest.") def settings: Seq[Setting[_]] = Seq( bundleName := description.value, @@ -33,8 +35,9 @@ object Osgi { "-eclipse" -> "false" ) }, + jarlist := false, bundle <<= Def.task { - bundleTask(headers.value.toMap, (products in Compile in packageBin).value, + bundleTask(headers.value.toMap, jarlist.value, (products in Compile in packageBin).value, (artifactPath in (Compile, packageBin)).value, Nil, streams.value) }, packagedArtifact in (Compile, packageBin) <<= (artifact in (Compile, packageBin), bundle).identityMap, @@ -47,7 +50,7 @@ object Osgi { ) ) - def bundleTask(headers: Map[String, String], fullClasspath: Seq[File], artifactPath: File, + def bundleTask(headers: Map[String, String], jarlist: Boolean, fullClasspath: Seq[File], artifactPath: File, resourceDirectories: Seq[File], streams: TaskStreams): File = { val log = streams.log val builder = new Builder @@ -62,6 +65,12 @@ object Osgi { builder.getWarnings.foreach(s => log.warn(s"bnd: $s")) builder.getErrors.foreach(s => log.error(s"bnd: $s")) IO.createDirectory(artifactPath.getParentFile) + if (jarlist) { + val entries = jar.getManifest.getEntries + for ((name, resource) <- jar.getResources if name.endsWith(".class")) { + entries.put(name, new Attributes) + } + } jar.write(artifactPath) artifactPath } From 093c9346dec958c46d0ae2e0473eaca463a5f922 Mon Sep 17 00:00:00 2001 From: chrisokasaki Date: Sun, 22 May 2016 21:54:06 -0400 Subject: [PATCH 0072/2793] SI-9776: Fix type of PriorityQueue.newBuilder and improve performance - Fix the return type of mutable.PriorityQueue.newBuilder to be Builder[A, PriorityQueue[A]] rather than PriorityQueue[A]. - Improve performance of bulk inserts from O(N log N) to O(N), primarily in the builder, ++=, and reverse. These changes indirectly benefit the many other methods that use the builder or ++=. - Improve performance of clone. - Fix SI-9757 space leak in dequeue. 
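
The linear bulk insert relies on the standard fact that building a heap bottom-up with sift-down is O(N), whereas N repeated insertions cost O(N log N). A small, self-contained illustration of that idea on a plain array (0-based max-heap; this is a sketch, not the patch's code):

```
// Bottom-up ("textbook") heapify: sift every internal node down, starting from the
// last parent. This is the fixDown-based strategy, shown on Array[Int] for clarity.
object HeapifySketch {
  def heapify(a: Array[Int]): Unit = {
    val n = a.length
    def siftDown(start: Int): Unit = {
      var k = start
      var done = false
      while (!done && 2 * k + 1 < n) {
        var j = 2 * k + 1                     // left child
        if (j + 1 < n && a(j + 1) > a(j)) j += 1
        if (a(k) >= a(j)) done = true         // heap property already holds here
        else { val t = a(k); a(k) = a(j); a(j) = t; k = j }
      }
    }
    var i = n / 2 - 1
    while (i >= 0) { siftDown(i); i -= 1 }    // O(n) overall
  }

  def main(args: Array[String]): Unit = {
    val xs = Array(3, 9, 1, 7, 5, 8, 2)
    heapify(xs)
    println(xs.mkString(" "))                 // parent >= children throughout, 9 at the root
  }
}
```
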
--- .../collection/mutable/PriorityQueue.scala | 124 +++++++++++++++--- .../scalacheck/MutablePriorityQueue.scala | 102 ++++++++++++++ 2 files changed, 208 insertions(+), 18 deletions(-) create mode 100644 test/files/scalacheck/MutablePriorityQueue.scala diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala index d5b7673c373c..a6c0fc207791 100644 --- a/src/library/scala/collection/mutable/PriorityQueue.scala +++ b/src/library/scala/collection/mutable/PriorityQueue.scala @@ -66,7 +66,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) def p_swap(a: Int, b: Int) = super.swap(a, b) } - protected[this] override def newBuilder = new PriorityQueue[A] + protected[this] override def newBuilder = PriorityQueue.newBuilder[A] private val resarr = new ResizableArrayAccess[A] @@ -89,14 +89,15 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) } } - protected def fixDown(as: Array[AnyRef], m: Int, n: Int): Unit = { + protected def fixDown(as: Array[AnyRef], m: Int, n: Int): Boolean = { + // returns true if any swaps were done (used in heapify) var k: Int = m while (n >= 2 * k) { var j = 2 * k if (j < n && toA(as(j)) < toA(as(j + 1))) j += 1 if (toA(as(k)) >= toA(as(j))) - return + return k != m else { val h = as(k) as(k) = as(j) @@ -104,6 +105,7 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) k = j } } + k != m } /** Inserts a single element into the priority queue. @@ -119,6 +121,66 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) this } + override def ++=(xs: TraversableOnce[A]): this.type = { + val from = resarr.p_size0 + for (x <- xs) unsafeAdd(x) + heapify(from) + this + } + + private def unsafeAdd(elem: A): Unit = { + // like += but skips fixUp, which breaks the ordering invariant + // a series of unsafeAdds MUST be followed by heapify + resarr.p_ensureSize(resarr.p_size0 + 1) + resarr.p_array(resarr.p_size0) = elem.asInstanceOf[AnyRef] + resarr.p_size0 += 1 + } + + private def heapify(from: Int): Unit = { + // elements at indices 1..from-1 were already in heap order before any adds + // elements at indices from..n are newly added, their order must be fixed + val n = length + + if (from <= 2) { + // no pre-existing order to maintain, do the textbook heapify algorithm + for (i <- n/2 to 1 by -1) fixDown(resarr.p_array, i, n) + } + else if (n - from < 4) { + // for very small adds, doing the simplest fix is faster + for (i <- from to n) fixUp(resarr.p_array, i) + } + else { + var min = from/2 // tracks the minimum element in the queue + val queue = scala.collection.mutable.Queue[Int](min) + + // do fixDown on the parents of all the new elements + // except the parent of the first new element, which is in the queue + // (that parent is treated specially because it might be the root) + for (i <- n/2 until min by -1) { + if (fixDown(resarr.p_array, i, n)) { + // there was a swap, so also need to fixDown i's parent + val parent = i/2 + if (parent < min) { // make sure same parent isn't added twice + min = parent + queue += parent + } + } + } + + while (queue.nonEmpty) { + val i = queue.dequeue() + if (fixDown(resarr.p_array, i, n)) { + val parent = i/2 + if (parent < min && parent > 0) { + // the "parent > 0" is to avoid adding the parent of the root + min = parent + queue += parent + } + } + } + } + } + /** Adds all elements provided by a `TraversableOnce` object * into the priority queue. 
* @@ -142,9 +204,11 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) def dequeue(): A = if (resarr.p_size0 > 1) { resarr.p_size0 = resarr.p_size0 - 1 - resarr.p_swap(1, resarr.p_size0) + val result = resarr.p_array(1) + resarr.p_array(1) = resarr.p_array(resarr.p_size0) + resarr.p_array(resarr.p_size0) = null // erase reference from array fixDown(resarr.p_array, 1, resarr.p_size0 - 1) - toA(resarr.p_array(resarr.p_size0)) + toA(result) } else throw new NoSuchElementException("no element to remove from heap") @@ -186,25 +250,34 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) } } - /** Returns the reverse of this queue. The priority queue that gets - * returned will have an inversed ordering - if for some elements - * `x` and `y` the original queue's ordering - * had `compare` returning an integer ''w'', the new one will return ''-w'', - * assuming the original ordering abides its contract. + /** Returns the reverse of this priority queue. The new priority queue has + * the same elements as the original, but the opposite ordering. * - * Note that the order of the elements will be reversed unless the - * `compare` method returns 0. In this case, such elements - * will be subsequent, but their corresponding subinterval may be inappropriately - * reversed. However, due to the compare-equals contract, they will also be equal. + * For example, the element with the highest priority in `pq` has the lowest + * priority in `pq.reverse`, and vice versa. * - * @return A reversed priority queue. + * Ties are handled arbitrarily. Elements with equal priority may or + * may not be reversed with respect to each other. + * + * @return the reversed priority queue. */ def reverse = { val revq = new PriorityQueue[A]()(ord.reverse) - for (i <- 1 until resarr.length) revq += resarr(i) + // copy the existing data into the new array backwards + // this won't put it exactly into the correct order, + // but will require less fixing than copying it in + // the original order + val n = resarr.p_size0 + revq.resarr.p_ensureSize(n) + revq.resarr.p_size0 = n + val from = resarr.p_array + val to = revq.resarr.p_array + for (i <- 1 until n) to(i) = from(n-i) + revq.heapify(1) revq } + /** Returns an iterator which yields all the elements in the reverse order * than that returned by the method `iterator`. * @@ -254,12 +327,27 @@ sealed class PriorityQueue[A](implicit val ord: Ordering[A]) * * @return a priority queue with the same elements. 
*/ - override def clone(): PriorityQueue[A] = new PriorityQueue[A] ++= this.iterator + override def clone(): PriorityQueue[A] = { + val pq = new PriorityQueue[A] + val n = resarr.p_size0 + pq.resarr.p_ensureSize(n) + pq.resarr.p_size0 = n + scala.compat.Platform.arraycopy(resarr.p_array, 1, pq.resarr.p_array, 1, n-1) + pq + } } object PriorityQueue extends OrderedTraversableFactory[PriorityQueue] { - def newBuilder[A](implicit ord: Ordering[A]) = new PriorityQueue[A] + def newBuilder[A](implicit ord: Ordering[A]): Builder[A, PriorityQueue[A]] = { + new Builder[A, PriorityQueue[A]] { + val pq = new PriorityQueue[A] + def +=(elem: A): this.type = { pq.unsafeAdd(elem); this } + def result(): PriorityQueue[A] = { pq.heapify(1); pq } + def clear(): Unit = pq.clear() + } + } + implicit def canBuildFrom[A](implicit ord: Ordering[A]): CanBuildFrom[Coll, A, PriorityQueue[A]] = new GenericCanBuildFrom[A] } diff --git a/test/files/scalacheck/MutablePriorityQueue.scala b/test/files/scalacheck/MutablePriorityQueue.scala new file mode 100644 index 000000000000..687e2e7c623b --- /dev/null +++ b/test/files/scalacheck/MutablePriorityQueue.scala @@ -0,0 +1,102 @@ +import scala.collection.mutable.PriorityQueue +import org.scalacheck._ +import Prop._ +import Arbitrary._ + +object Test extends Properties("PriorityQueue") { + type E = Int // the element type used for most/all of the tests + + def checkInvariant[A](pq: PriorityQueue[A])(implicit ord: Ordering[A]): Boolean = { + // The ordering invariant in the heap is that parent >= child. + // A child at index i has a parent at index i/2 in the priority + // queue's internal array. However, that array is padded with + // an extra slot in front so that the first real element is at + // index 1. The vector below is not padded, so subtract 1 from + // every index. 
+ import ord._ + val vec = pq.toVector // elements in same order as pq's internal array + 2 until pq.size forall { i => vec(i/2-1) >= vec(i-1) } + } + + property("newBuilder (in companion)") = forAll { list: List[E] => + val builder = PriorityQueue.newBuilder[E] + for (x <- list) builder += x + val pq = builder.result() + checkInvariant(pq) && + pq.dequeueAll == list.sorted.reverse + } + + property("to[PriorityQueue]") = forAll { list: List[E] => + val pq = list.to[PriorityQueue] + checkInvariant(pq) && + pq.dequeueAll == list.sorted.reverse + } + + property("apply (in companion)") = forAll { list: List[E] => + val pq = PriorityQueue.apply(list : _*) + checkInvariant(pq) && + pq.dequeueAll == list.sorted.reverse + } + + property("size, isEmpty") = forAll { list: List[E] => + val pq = PriorityQueue(list : _*) + pq.size == list.size && pq.isEmpty == list.isEmpty + } + + property("+=") = forAll { (x: E, list: List[E]) => + val pq = PriorityQueue(list : _*) + pq += x + checkInvariant(pq) && + pq.dequeueAll == (x :: list).sorted.reverse + } + + property("++= on empty") = forAll { list: List[E] => + val pq = PriorityQueue.empty[E] + pq ++= list + checkInvariant(pq) && + pq.dequeueAll == list.sorted.reverse + } + + property("++=") = forAll { (list1: List[E], list2: List[E]) => + val pq = PriorityQueue(list1 : _*) + pq ++= list2 + checkInvariant(pq) && + pq.dequeueAll == (list1 ++ list2).sorted.reverse + } + + property("reverse") = forAll { list: List[E] => + val pq = PriorityQueue(list : _*).reverse + checkInvariant(pq)(implicitly[Ordering[E]].reverse) && + pq.dequeueAll == list.sorted + } + + property("reverse then ++=") = forAll { list: List[E] => + val pq = PriorityQueue.empty[E].reverse ++= list + checkInvariant(pq)(implicitly[Ordering[E]].reverse) && + pq.dequeueAll == list.sorted + } + + property("reverse then +=") = forAll { (x: E, list: List[E]) => + val pq = PriorityQueue(list : _*).reverse += x + checkInvariant(pq)(implicitly[Ordering[E]].reverse) && + pq.dequeueAll == (x +: list).sorted + } + + property("clone") = forAll { list: List[E] => + val pq = PriorityQueue(list : _*) + val c = pq.clone() + (pq ne c) && + checkInvariant(c) && + c.dequeueAll == pq.dequeueAll + } + + property("dequeue") = forAll { list: List[E] => + list.nonEmpty ==> { + val pq = PriorityQueue(list : _*) + val x = pq.dequeue() + checkInvariant(pq) && + x == list.max && pq.dequeueAll == list.sorted.reverse.tail + } + } + +} From 869df338617f2210217827c83d3ef9dc6d810e65 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 20 May 2016 18:19:08 -0700 Subject: [PATCH 0073/2793] SI-7898 Quiet REPL at startup Enable noisy modes only when interpreting user input. --- .../scala/tools/nsc/interpreter/ILoop.scala | 24 ++++++++++++++----- 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index adaf3a5d252b..4e0f60cf2b68 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -908,9 +908,6 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) // we can get at it in generated code. intp.quietBind(NamedParam[IMain]("$intp", intp)(tagOfIMain, classTag[IMain])) - // add a help function for anyone who types "help" instead of ":help". Easily shadowed. - //addHelp() - // Auto-run code via some setting. 
( replProps.replAutorunCode.option flatMap (f => File(f).safeSlurp()) @@ -938,9 +935,24 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } case _ => } - // TODO: wait until after startup to enable obnoxious settings - def withSuppressedSettings[A](body: =>A): A = { - body + // wait until after startup to enable noisy settings + def withSuppressedSettings[A](body: => A): A = { + val ss = this.settings + import ss._ + val noisy = List(Xprint, Ytyperdebug) + val noisesome = noisy.exists(!_.isDefault) + val current = (Xprint.value, Ytyperdebug.value) + if (isReplDebug || !noisesome) body + else { + this.settings.Xprint.value = List.empty + this.settings.Ytyperdebug.value = false + try body + finally { + Xprint.value = current._1 + Ytyperdebug.value = current._2 + intp.global.printTypings = current._2 + } + } } def startup(): String = withSuppressedSettings { // starting From 03d2de20047ad853d80c5c1aae68298082af27d3 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 25 Apr 2016 13:51:02 +0200 Subject: [PATCH 0074/2793] SI-9121 test case (fixed in new optimizer), SI-9179 test case Also adds a mising phase travel in the backend. A comment already points out why it's necessary, but it was actually forgotten. --- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 2 +- .../tools/nsc/backend/jvm/BytecodeTest.scala | 31 +++++++++++++++++++ .../nsc/backend/jvm/opt/InlinerTest.scala | 23 ++++++++++++++ 3 files changed, 55 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index a32c21795dbd..a5744983b280 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -76,7 +76,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { val origOwner = sym.originalOwner // phase travel necessary: after flatten, the name includes the name of outer classes. // if some outer name contains $anon, a non-anon class is considered anon. - if (delambdafyInline() && sym.rawowner.isAnonymousFunction) { + if (delambdafyInline() && exitingPickler(sym.rawowner.isAnonymousFunction)) { // SI-9105: special handling for anonymous functions under delambdafy:inline. 
// // class C { def t = () => { def f { class Z } } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala index 7954fe2360d9..b2ee8b3a4570 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala @@ -1,5 +1,6 @@ package scala.tools.nsc.backend.jvm +import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 @@ -137,4 +138,34 @@ class BytecodeTest extends BytecodeTesting { Label(14), Op(ICONST_0), Label(17), Op(IRETURN))) } + + @Test // wrong local variable table for methods containing while loops + def t9179(): Unit = { + val code = + """class C { + | def t(): Unit = { + | var x = "" + | while (x != null) { + | foo() + | x = null + | } + | bar() + | } + | def foo(): Unit = () + | def bar(): Unit = () + |} + """.stripMargin + val c = compileClass(code) + val t = getMethod(c, "t") + val isFrameLine = (x: Instruction) => x.isInstanceOf[FrameEntry] || x.isInstanceOf[LineNumber] + assertSameCode(t.instructions.filterNot(isFrameLine), List( + Label(0), Ldc(LDC, ""), Label(3), VarOp(ASTORE, 1), + Label(5), VarOp(ALOAD, 1), Jump(IFNULL, Label(21)), + Label(10), VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "C", "foo", "()V", false), Label(14), Op(ACONST_NULL), VarOp(ASTORE, 1), Label(18), Jump(GOTO, Label(5)), + Label(21), VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "C", "bar", "()V", false), Label(26), Op(RETURN), Label(28))) + val labels = t.instructions collect { case l: Label => l } + val x = t.localVars.find(_.name == "x").get + assertEquals(x.start, labels(1)) + assertEquals(x.end, labels(7)) + } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index 4e014d452983..fd020c7d9390 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -1503,4 +1503,27 @@ class InlinerTest extends BytecodeTesting { assertNoIndy(t) assertInvoke(t, "C", "C$$$anonfun$1") } + + @Test + def t9121(): Unit = { + val codes = List( + """package p1 + |object Implicits { + | class ScalaObservable(val underlying: Any) extends AnyVal { + | @inline def scMap[R](f: String): Any = f.toRx + | } + | implicit class RichFunction1[T1, R](val f: String) extends AnyVal { + | def toRx: Any = "" + | } + |} + """.stripMargin, + """ + |import p1.Implicits._ + |class C { + | def t(): Unit = new ScalaObservable("").scMap("") + |} + """.stripMargin) + val c :: _ = compileClassesSeparately(codes, extraArgs = compilerArgs) + assertInvoke(getMethod(c, "t"), "p1/Implicits$RichFunction1$", "toRx$extension") + } } From 41965695b71bc00ea60003c39c72a0e10bfd621f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Giero=C5=84?= Date: Mon, 23 May 2016 15:17:24 +0200 Subject: [PATCH 0075/2793] SI-9688 Make merge in immutable HashMap1 work with null kv. The kv field of scala.collection.immutable.HashMap.HashMap1 can be null. This commit corrects the behavior of updated0 (which is on call path for merged) to work in such cases, instead of throwing NPE. Commit contains regression test. 
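
For reference, `merged` is the user-facing operation whose null-`kv` corner case is being fixed; a minimal usage sketch (the maps and values are illustrative):

```
import scala.collection.immutable.HashMap

// Usage sketch of HashMap#merged: the merge function decides the result
// when both maps contain the same key.
object MergedSketch {
  def main(args: Array[String]): Unit = {
    val counts1 = HashMap("a" -> 1, "b" -> 2)
    val counts2 = HashMap("b" -> 10, "c" -> 3)

    // keep the key, sum the values on collision
    val merged = counts1.merged(counts2) { case ((k, v1), (_, v2)) => (k, v1 + v2) }

    println(merged)  // contains a -> 1, b -> 12, c -> 3 (iteration order unspecified)
  }
}
```
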
--- .../scala/collection/immutable/HashMap.scala | 2 +- .../collection/immutable/HashMapTest.scala | 48 +++++++++++++++++++ 2 files changed, 49 insertions(+), 1 deletion(-) create mode 100644 test/junit/scala/collection/immutable/HashMapTest.scala diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 92d915fe8b04..3e482f1369d2 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -197,7 +197,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { if (this.value.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this else new HashMap1(key, hash, value, kv) } else { - val nkv = merger(this.kv, kv) + val nkv = merger(this.ensurePair, if(kv != null) kv else (key, value)) new HashMap1(nkv._1, hash, nkv._2, nkv) } } else { diff --git a/test/junit/scala/collection/immutable/HashMapTest.scala b/test/junit/scala/collection/immutable/HashMapTest.scala new file mode 100644 index 000000000000..a970786455e4 --- /dev/null +++ b/test/junit/scala/collection/immutable/HashMapTest.scala @@ -0,0 +1,48 @@ +package scala.collection.immutable + +import org.junit.Assert.assertEquals +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +@RunWith(classOf[JUnit4]) +class HashMapTest { + + private val computeHashF = { + HashMap.empty.computeHash _ + } + + @Test + def canMergeIdenticalHashMap1sWithNullKvs() { + def m = new HashMap.HashMap1(1, computeHashF(1), 1, null) + val merged = m.merged(m)(null) + assertEquals(m, merged) + } + + @Test + def canMergeIdenticalHashMap1sWithNullKvsCustomMerge() { + def m = new HashMap.HashMap1(1, computeHashF(1), 1, null) + val merged = m.merged(m) { + case ((k1, v1), (k2, v2)) => + (k1, v1 + v2) + } + assertEquals(new HashMap.HashMap1(1, computeHashF(1), 2, null), merged) + } + + @Test + def canMergeHashMap1sWithNullKvsHashCollision() { + val key1 = 1000L * 1000 * 1000 * 10 + val key2 = key1.##.toLong + assert(key1.## == key2.##) + + val m1 = new HashMap.HashMap1(key1, computeHashF(key1.##), 1, null) + val m2 = new HashMap.HashMap1(key2, computeHashF(key2.##), 1, null) + val expected = HashMap(key1 -> 1, key2 -> 1) + val merged = m1.merged(m2)(null) + assertEquals(expected, merged) + val mergedWithMergeFunction = m1.merged(m2) { (kv1, kv2) => + throw new RuntimeException("Should not be reached.") + } + assertEquals(expected, mergedWithMergeFunction) + } +} \ No newline at end of file From 90ca3fd6aeddd2aa54537cc940f4b69f2592b447 Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Mon, 23 May 2016 18:58:07 +0100 Subject: [PATCH 0076/2793] Group Console and AnsiColor entities and add usage examples - Group AnsiColor and Console members - Add Console IO redefinition examples - Add Console control code examples - Add color swatches to AnsiColor - Add AnsiColor control code examples - Link to StdIn for reading - Fix link syntax for java.util.Formatter - Fix withErr method example --- src/library/scala/Console.scala | 146 ++++++++++++++++++++++--- src/library/scala/io/AnsiColor.scala | 155 +++++++++++++++++++++++---- 2 files changed, 265 insertions(+), 36 deletions(-) diff --git a/src/library/scala/Console.scala b/src/library/scala/Console.scala index 37127a93d5ee..0b079aae1590 100644 --- a/src/library/scala/Console.scala +++ b/src/library/scala/Console.scala @@ -1,6 +1,6 @@ /* __ *\ ** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** +** 
/ __/ __// _ | / / / _ | (c) 2003-2016, LAMP/EPFL ** ** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** ** /____/\___/_/ |_/____/_/ | | ** ** |/ ** @@ -12,12 +12,115 @@ import java.io.{ BufferedReader, InputStream, InputStreamReader, OutputStream, P import scala.io.{ AnsiColor, StdIn } import scala.util.DynamicVariable -/** Implements functionality for - * printing Scala values on the terminal as well as reading specific values. +/** Implements functionality for printing Scala values on the terminal. For reading values + * use [[scala.io.StdIn$ StdIn]]. * Also defines constants for marking up text on ANSI terminals. * + * == Console Output == + * + * Use the print methods to output text. + * {{{ + * scala> Console.printf( + * "Today the outside temperature is a balmy %.1f°C. %<.1f°C beats the previous record of %.1f°C.\n", + * -137.0, + * -135.05) + * Today the outside temperature is a balmy -137.0°C. -137.0°C beats the previous record of -135.1°C. + * }}} + * + * == ANSI escape codes == + * Use the ANSI escape codes for colorizing console output either to STDOUT or STDERR. + * {{{ + * import Console.{GREEN, RED, RESET, YELLOW_B, UNDERLINED} + * + * object PrimeTest { + * + * def isPrime(): Unit = { + * + * val candidate = io.StdIn.readInt().ensuring(_ > 1) + * + * val prime = (2 to candidate - 1).forall(candidate % _ != 0) + * + * if (prime) + * Console.println(s"${RESET}${GREEN}yes${RESET}") + * else + * Console.err.println(s"${RESET}${YELLOW_B}${RED}${UNDERLINED}NO!${RESET}") + * } + * + * def main(args: Array[String]): Unit = isPrime() + * + * } + * }}} + * + * + * + * + * + * + * + * + *
+ * $ scala PrimeTest
+ * 1234567891
+ * yes
+ *
+ * $ scala PrimeTest
+ * 56474
+ * NO!
+ * + * == IO redefinition == + * + * Use IO redefinition to temporarily swap in a different set of input and/or output streams. In this example the stream based + * method above is wrapped into a function. + * + * {{{ + * import java.io.{ByteArrayOutputStream, StringReader} + * + * object FunctionalPrimeTest { + * + * def isPrime(candidate: Int): Boolean = { + * + * val input = new StringReader(s"$candidate\n") + * val outCapture = new ByteArrayOutputStream + * val errCapture = new ByteArrayOutputStream + * + * Console.withIn(input) { + * Console.withOut(outCapture) { + * Console.withErr(errCapture) { + * PrimeTest.isPrime() + * } + * } + * } + * + * if (outCapture.toByteArray.nonEmpty) // "yes" + * true + * else if (errCapture.toByteArray.nonEmpty) // "NO!" + * false + * else throw new IllegalArgumentException(candidate.toString) + * } + * + * def main(args: Array[String]): Unit = { + * val primes = (2 to 50) filter (isPrime) + * println(s"First primes: $primes") + * } + * + * } + * }}} + * + * + * + * + * + *
+ * $ scala FunctionalPrimeTest
+ * First primes: Vector(2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41, 43, 47)
+ * * @author Matthias Zenger * @version 1.0, 03/09/2003 + * + * @groupname console-output Console Output + * @groupprio console-output 30 + * @groupdesc console-output These methods provide output via the console. + * + * @groupname io-default IO Defaults + * @groupprio io-default 50 + * @groupdesc io-default These values provide direct access to the standard IO channels + * + * @groupname io-redefinition IO Redefinition + * @groupprio io-redefinition 60 + * @groupdesc io-redefinition These methods allow substituting alternative streams for the duration of + * a body of code. Threadsafe by virtue of [[scala.util.DynamicVariable]]. + * */ object Console extends DeprecatedConsole with AnsiColor { private val outVar = new DynamicVariable[PrintStream](java.lang.System.out) @@ -29,11 +132,17 @@ object Console extends DeprecatedConsole with AnsiColor { protected def setErrDirect(err: PrintStream): Unit = errVar.value = err protected def setInDirect(in: BufferedReader): Unit = inVar.value = in - /** The default output, can be overridden by `setOut` */ + /** The default output, can be overridden by `withOut` + * @group io-default + */ def out = outVar.value - /** The default error, can be overridden by `setErr` */ + /** The default error, can be overridden by `withErr` + * @group io-default + */ def err = errVar.value - /** The default input, can be overridden by `setIn` */ + /** The default input, can be overridden by `withIn` + * @group io-default + */ def in = inVar.value /** Sets the default output stream for the duration @@ -48,6 +157,7 @@ object Console extends DeprecatedConsole with AnsiColor { * the new output stream active * @return the results of `thunk` * @see `withOut[T](out:OutputStream)(thunk: => T)` + * @group io-redefinition */ def withOut[T](out: PrintStream)(thunk: =>T): T = outVar.withValue(out)(thunk) @@ -60,6 +170,7 @@ object Console extends DeprecatedConsole with AnsiColor { * the new output stream active * @return the results of `thunk` * @see `withOut[T](out:PrintStream)(thunk: => T)` + * @group io-redefinition */ def withOut[T](out: OutputStream)(thunk: =>T): T = withOut(new PrintStream(out))(thunk) @@ -67,7 +178,7 @@ object Console extends DeprecatedConsole with AnsiColor { /** Set the default error stream for the duration * of execution of one thunk. * @example {{{ - * withErr(Console.out) { println("This goes to default _out_") } + * withErr(Console.out) { err.println("This goes to default _out_") } * }}} * * @param err the new error stream. 
@@ -75,6 +186,7 @@ object Console extends DeprecatedConsole with AnsiColor { * the new error stream active * @return the results of `thunk` * @see `withErr[T](err:OutputStream)(thunk: =>T)` + * @group io-redefinition */ def withErr[T](err: PrintStream)(thunk: =>T): T = errVar.withValue(err)(thunk) @@ -87,6 +199,7 @@ object Console extends DeprecatedConsole with AnsiColor { * the new error stream active * @return the results of `thunk` * @see `withErr[T](err:PrintStream)(thunk: =>T)` + * @group io-redefinition */ def withErr[T](err: OutputStream)(thunk: =>T): T = withErr(new PrintStream(err))(thunk) @@ -105,8 +218,9 @@ object Console extends DeprecatedConsole with AnsiColor { * @param thunk the code to execute with * the new input stream active * - * @return the results of `thunk` - * @see `withIn[T](in:InputStream)(thunk: =>T)` + * @return the results of `thunk` + * @see `withIn[T](in:InputStream)(thunk: =>T)` + * @group io-redefinition */ def withIn[T](reader: Reader)(thunk: =>T): T = inVar.withValue(new BufferedReader(reader))(thunk) @@ -117,8 +231,9 @@ object Console extends DeprecatedConsole with AnsiColor { * @param in the new input stream. * @param thunk the code to execute with * the new input stream active - * @return the results of `thunk` - * @see `withIn[T](reader:Reader)(thunk: =>T)` + * @return the results of `thunk` + * @see `withIn[T](reader:Reader)(thunk: =>T)` + * @group io-redefinition */ def withIn[T](in: InputStream)(thunk: =>T): T = withIn(new InputStreamReader(in))(thunk) @@ -126,6 +241,7 @@ object Console extends DeprecatedConsole with AnsiColor { /** Prints an object to `out` using its `toString` method. * * @param obj the object to print; may be null. + * @group console-output */ def print(obj: Any) { out.print(if (null == obj) "null" else obj.toString()) @@ -134,29 +250,31 @@ object Console extends DeprecatedConsole with AnsiColor { /** Flushes the output stream. This function is required when partial * output (i.e. output not terminated by a newline character) has * to be made visible on the terminal. + * @group console-output */ def flush() { out.flush() } /** Prints a newline character on the default output. + * @group console-output */ def println() { out.println() } /** Prints out an object to the default output, followed by a newline character. * * @param x the object to print. + * @group console-output */ def println(x: Any) { out.println(x) } /** Prints its arguments as a formatted string to the default output, * based on a string pattern (in a fashion similar to printf in C). * - * The interpretation of the formatting patterns is described in - * - * `java.util.Formatter`. + * The interpretation of the formatting patterns is described in [[java.util.Formatter]]. * * @param text the pattern for formatting the arguments. * @param args the arguments used to instantiating the pattern. * @throws java.lang.IllegalArgumentException if there was a problem with the format string or arguments + * @group console-output */ def printf(text: String, args: Any*) { out.print(text format (args : _*)) } } diff --git a/src/library/scala/io/AnsiColor.scala b/src/library/scala/io/AnsiColor.scala index 39e2e3b0caff..720049ba8e10 100644 --- a/src/library/scala/io/AnsiColor.scala +++ b/src/library/scala/io/AnsiColor.scala @@ -1,52 +1,163 @@ package scala package io +/** ANSI escape codes providing control over text formatting and color on supporting text terminals. + * + * ==ANSI Style and Control Codes== + * + * This group of escape codes provides control over text styling. 
For example, to turn on reverse video with bold and + * then turn off all styling embed these codes, + * + * {{{ + * import io.AnsiColor._ + * + * object ColorDemo extends App { + * + * println(s"$REVERSED${BOLD}Hello 1979!$RESET") + * } + * }}} + * + * ==Foreground and Background Colors== + * + * Embedding ANSI color codes in text output will control the text foreground and background colors. + * + * + * + * + * + * + * + * + * + * + * + *
+ * Foreground | Background
+ * -----------+-----------
+ * BLACK      | BLACK_B
+ * RED        | RED_B
+ * GREEN      | GREEN_B
+ * YELLOW     | YELLOW_B
+ * BLUE       | BLUE_B
+ * MAGENTA    | MAGENTA_B
+ * CYAN       | CYAN_B
+ * WHITE      | WHITE_B
+ * + * @groupname style-control ANSI Style and Control Codes + * @groupprio style-control 101 + * + * @groupname color-black ANSI Black + * @groupdesc color-black
 
+ * @groupprio color-black 110
+ *
+ * @groupname color-red ANSI Red
+ * @groupdesc color-red &nbsp;
+ * @groupprio color-red 120
+ *
+ * @groupname color-green ANSI Green
+ * @groupdesc color-green &nbsp;
+ * @groupprio color-green 130
+ *
+ * @groupname color-yellow ANSI Yellow
+ * @groupdesc color-yellow &nbsp;
+ * @groupprio color-yellow 140
+ *
+ * @groupname color-blue ANSI Blue
+ * @groupdesc color-blue &nbsp;
+ * @groupprio color-blue 150
+ *
+ * @groupname color-magenta ANSI Magenta
+ * @groupdesc color-magenta &nbsp;
+ * @groupprio color-magenta 160
+ *
+ * @groupname color-cyan ANSI Cyan
+ * @groupdesc color-cyan &nbsp;
+ * @groupprio color-cyan 170
+ *
+ * @groupname color-white ANSI White
+ * @groupdesc color-white &nbsp;
+ * @groupprio color-white 180 + */ trait AnsiColor { - /** Foreground color for ANSI black */ + /** Foreground color for ANSI black + * @group color-black + */ final val BLACK = "\u001b[30m" - /** Foreground color for ANSI red */ + /** Foreground color for ANSI red + * @group color-red + */ final val RED = "\u001b[31m" - /** Foreground color for ANSI green */ + /** Foreground color for ANSI green + * @group color-green + */ final val GREEN = "\u001b[32m" - /** Foreground color for ANSI yellow */ + /** Foreground color for ANSI yellow + * @group color-yellow + */ final val YELLOW = "\u001b[33m" - /** Foreground color for ANSI blue */ + /** Foreground color for ANSI blue + * @group color-blue + */ final val BLUE = "\u001b[34m" - /** Foreground color for ANSI magenta */ + /** Foreground color for ANSI magenta + * @group color-magenta + */ final val MAGENTA = "\u001b[35m" - /** Foreground color for ANSI cyan */ + /** Foreground color for ANSI cyan + * @group color-cyan + */ final val CYAN = "\u001b[36m" - /** Foreground color for ANSI white */ + /** Foreground color for ANSI white + * @group color-white + */ final val WHITE = "\u001b[37m" - /** Background color for ANSI black */ + /** Background color for ANSI black + * @group color-black + */ final val BLACK_B = "\u001b[40m" - /** Background color for ANSI red */ + /** Background color for ANSI red + * @group color-red + */ final val RED_B = "\u001b[41m" - /** Background color for ANSI green */ + /** Background color for ANSI green + * @group color-green + */ final val GREEN_B = "\u001b[42m" - /** Background color for ANSI yellow */ + /** Background color for ANSI yellow + * @group color-yellow + */ final val YELLOW_B = "\u001b[43m" - /** Background color for ANSI blue */ + /** Background color for ANSI blue + * @group color-blue + */ final val BLUE_B = "\u001b[44m" - /** Background color for ANSI magenta */ + /** Background color for ANSI magenta + * @group color-magenta + */ final val MAGENTA_B = "\u001b[45m" - /** Background color for ANSI cyan */ + /** Background color for ANSI cyan + * @group color-cyan + */ final val CYAN_B = "\u001b[46m" - /** Background color for ANSI white */ + /** Background color for ANSI white + * @group color-white + */ final val WHITE_B = "\u001b[47m" - /** Reset ANSI styles */ + /** Reset ANSI styles + * @group style-control + */ final val RESET = "\u001b[0m" - /** ANSI bold */ + /** ANSI bold + * @group style-control + */ final val BOLD = "\u001b[1m" - /** ANSI underlines */ + /** ANSI underlines + * @group style-control + */ final val UNDERLINED = "\u001b[4m" - /** ANSI blink */ + /** ANSI blink + * @group style-control + */ final val BLINK = "\u001b[5m" - /** ANSI reversed */ + /** ANSI reversed + * @group style-control + */ final val REVERSED = "\u001b[7m" - /** ANSI invisible */ + /** ANSI invisible + * @group style-control + */ final val INVISIBLE = "\u001b[8m" } From 5450ae6102eaeb8ec0f9b524bf43ac5f604b5074 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 23 May 2016 10:12:41 -0700 Subject: [PATCH 0077/2793] SI-7898 Report paste errors improvedly Use a "label" for errors, so that script names are shown. Position is still wrong for scripts in REPL. Initial scripts are run with `ILoop.echo` and results printing turned off, but reporter still enabled. 
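
The labelling uses the usual save/set/restore-in-finally scoping of a mutable field, as the `withLabel` helper in this patch does. A generic stand-alone sketch of that shape (names here are illustrative):

```
// Save/set/restore-in-finally scoping: the override is visible only inside the body
// and is restored even when the body throws.
object ScopedLabelSketch {
  private var label = ""                      // "" means: no script name known

  def withLabel[A](temp: String)(body: => A): A = {
    val saved = label
    label = temp
    try body finally label = saved
  }

  def report(msg: String): String =
    if (label.isEmpty) msg else s"$label: $msg"

  def main(args: Array[String]): Unit = {
    println(withLabel("init.script")(report("error: something went wrong")))
    println(report("error: reported without a label"))   // label is back to ""
  }
}
```
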
--- .../scala/tools/nsc/interpreter/ILoop.scala | 20 ++++++++------- .../scala/tools/nsc/interpreter/IMain.scala | 25 ++++++++++++------- test/files/run/t9170.scala | 2 +- 3 files changed, 28 insertions(+), 19 deletions(-) diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index 4e0f60cf2b68..7dab371cafd6 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -1,5 +1,5 @@ /* NSC -- new Scala compiler - * Copyright 2005-2015 LAMP/EPFL + * Copyright 2005-2016 LAMP/EPFL * @author Alexander Spoon */ package scala @@ -15,7 +15,7 @@ import scala.tools.asm.ClassReader import scala.util.Properties.{ jdkHome, javaVersion, versionString, javaVmName } import scala.tools.nsc.util.{ ClassPath, Exceptional, stringFromWriter, stringFromStream } import scala.reflect.classTag -import scala.reflect.internal.util.{ BatchSourceFile, ScalaClassLoader } +import scala.reflect.internal.util.{ BatchSourceFile, ScalaClassLoader, NoPosition } import ScalaClassLoader._ import scala.reflect.io.{ File, Directory } import scala.tools.util._ @@ -181,9 +181,9 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) out.flush() } // turn off intp reporter and our echo - def mumly[A](op: =>A): A = + def mumly[A](op: => A): A = if (isReplDebug) op - else intp beSilentDuring { + else intp beQuietDuring { val saved = mum mum = true try op finally mum = saved @@ -576,9 +576,9 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } } - def withFile[A](filename: String)(action: File => A): Option[A] = { + def withFile[A](filename: String)(action: File => A): Option[A] = intp.withLabel(filename) { val res = Some(File(filename)) filter (_.exists) map action - if (res.isEmpty) echo("That file does not exist") // courtesy side-effect + if (res.isEmpty) intp.reporter.warning(NoPosition, s"File `$filename' does not exist.") // courtesy side-effect res } @@ -715,6 +715,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) */ def pasteCommand(arg: String): Result = { var shouldReplay: Option[String] = None + var label = "" def result = Result(keepRunning = true, shouldReplay) val (raw, file, margin) = if (arg.isEmpty) (false, None, None) @@ -735,6 +736,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } val code = (file, margin) match { case (Some(name), None) => + label = name withFile(name) { f => shouldReplay = Some(s":paste $arg") val s = f.slurp.trim @@ -757,17 +759,17 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) text } def interpretCode() = { - val res = intp interpret code + val res = intp.withLabel(label)(intp interpret code) // if input is incomplete, let the compiler try to say why if (res == IR.Incomplete) { echo("The pasted code is incomplete!\n") // Remembrance of Things Pasted in an object - val errless = intp compileSources new BatchSourceFile("", s"object pastel {\n$code\n}") + val errless = intp compileSources new BatchSourceFile(label, s"object pastel {\n$code\n}") if (errless) echo("...but compilation found no error? 
Good luck with that.") } } def compileCode() = { - val errless = intp compileSources new BatchSourceFile("", code) + val errless = intp compileSources new BatchSourceFile(label, code) if (!errless) echo("There were compilation errors!") } if (code.nonEmpty) { diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index ef6ab4063a39..1e7a9cefed04 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -1,5 +1,5 @@ /* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL + * Copyright 2005-2016 LAMP/EPFL * @author Martin Odersky */ @@ -74,13 +74,14 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set lazy val isClassBased: Boolean = settings.Yreplclassbased.value - private[nsc] var printResults = true // whether to print result lines - private[nsc] var totalSilence = false // whether to print anything - private var _initializeComplete = false // compiler is initialized - private var _isInitialized: Future[Boolean] = null // set up initialization future - private var bindExceptions = true // whether to bind the lastException variable - private var _executionWrapper = "" // code to be wrapped around all lines - var partialInput: String = "" // code accumulated in multi-line REPL input + private[nsc] var printResults = true // whether to print result lines + private[nsc] var totalSilence = false // whether to print anything + private var _initializeComplete = false // compiler is initialized + private var _isInitialized: Future[Boolean] = null // set up initialization future + private var bindExceptions = true // whether to bind the lastException variable + private var _executionWrapper = "" // code to be wrapped around all lines + var partialInput: String = "" // code accumulated in multi-line REPL input + private var label = "" // compilation unit name for reporting /** We're going to go to some trouble to initialize the compiler asynchronously. * It's critical that nothing call into it until it's been initialized or we will @@ -108,6 +109,12 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set try body finally if (!saved) settings.nowarn.value = false } + // Apply a temporary label for compilation (for example, script name) + def withLabel[A](temp: String)(body: => A): A = { + val saved = label + label = temp + try body finally label = saved + } /** construct an interpreter that reports to Console */ def this(settings: Settings, out: JPrintWriter) = this(null, settings, out) @@ -810,7 +817,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set case Right(result) => Right(result) } - def compile(source: String): Boolean = compileAndSaveRun("", source) + def compile(source: String): Boolean = compileAndSaveRun(label, source) /** The innermost object inside the wrapper, found by * following accessPath into the outer one. diff --git a/test/files/run/t9170.scala b/test/files/run/t9170.scala index f39467bc250b..87471fb1294d 100644 --- a/test/files/run/t9170.scala +++ b/test/files/run/t9170.scala @@ -44,7 +44,7 @@ object Y { // Exiting paste mode, now interpreting. 
-:13: error: double definition: +:13: error: double definition: def f[A](a: => A): Int at line 12 and def f[A](a: => Either[Exception,A]): Int at line 13 have same type after erasure: (a: Function0)Int From 892a6d6878accb67e3fe68aefaa256396db05a90 Mon Sep 17 00:00:00 2001 From: Miles Sabin Date: Fri, 20 May 2016 12:49:25 +0100 Subject: [PATCH 0078/2793] SI-2712 Add support for higher order unification --- bincompat-forward.whitelist.conf | 4 + project/ScalaOptionParser.scala | 2 +- .../tools/nsc/settings/ScalaSettings.scala | 1 + .../scala/reflect/internal/Types.scala | 40 +++++- .../internal/settings/MutableSettings.scala | 1 + .../scala/reflect/runtime/Settings.scala | 1 + test/files/neg/t2712-1.check | 13 ++ test/files/neg/t2712-1.scala | 8 ++ test/files/neg/t2712-2.check | 13 ++ test/files/neg/t2712-2.flags | 1 + test/files/neg/t2712-2.scala | 18 +++ test/files/neg/t2712-3.check | 6 + test/files/neg/t2712-3.scala | 18 +++ test/files/neg/t2712.flags | 1 + test/files/pos/t2712-1.flags | 1 + test/files/pos/t2712-1.scala | 9 ++ test/files/pos/t2712-2.flags | 2 + test/files/pos/t2712-2.scala | 25 ++++ test/files/pos/t2712-3.flags | 2 + test/files/pos/t2712-3.scala | 24 ++++ test/files/pos/t2712-4.flags | 2 + test/files/pos/t2712-4.scala | 17 +++ test/files/pos/t2712-5.flags | 1 + test/files/pos/t2712-5.scala | 29 ++++ test/files/pos/t2712-6.flags | 1 + test/files/pos/t2712-6.scala | 12 ++ test/files/pos/t2712-7.flags | 1 + test/files/pos/t2712-7.scala | 15 +++ test/files/pos/t5683.flags | 1 + test/files/pos/t5683.scala | 23 ++++ test/files/pos/t6895b.flags | 2 + test/files/pos/t6895b.scala | 39 ++++++ .../run/inferred-type-constructors-hou.check | 56 ++++++++ .../run/inferred-type-constructors-hou.flags | 1 + .../run/inferred-type-constructors-hou.scala | 125 ++++++++++++++++++ 35 files changed, 509 insertions(+), 6 deletions(-) create mode 100644 test/files/neg/t2712-1.check create mode 100644 test/files/neg/t2712-1.scala create mode 100644 test/files/neg/t2712-2.check create mode 100644 test/files/neg/t2712-2.flags create mode 100644 test/files/neg/t2712-2.scala create mode 100644 test/files/neg/t2712-3.check create mode 100644 test/files/neg/t2712-3.scala create mode 100644 test/files/neg/t2712.flags create mode 100644 test/files/pos/t2712-1.flags create mode 100644 test/files/pos/t2712-1.scala create mode 100644 test/files/pos/t2712-2.flags create mode 100644 test/files/pos/t2712-2.scala create mode 100644 test/files/pos/t2712-3.flags create mode 100644 test/files/pos/t2712-3.scala create mode 100644 test/files/pos/t2712-4.flags create mode 100644 test/files/pos/t2712-4.scala create mode 100644 test/files/pos/t2712-5.flags create mode 100644 test/files/pos/t2712-5.scala create mode 100644 test/files/pos/t2712-6.flags create mode 100644 test/files/pos/t2712-6.scala create mode 100644 test/files/pos/t2712-7.flags create mode 100644 test/files/pos/t2712-7.scala create mode 100644 test/files/pos/t5683.flags create mode 100644 test/files/pos/t5683.scala create mode 100644 test/files/pos/t6895b.flags create mode 100644 test/files/pos/t6895b.scala create mode 100644 test/files/run/inferred-type-constructors-hou.check create mode 100644 test/files/run/inferred-type-constructors-hou.flags create mode 100644 test/files/run/inferred-type-constructors-hou.scala diff --git a/bincompat-forward.whitelist.conf b/bincompat-forward.whitelist.conf index 2d32e3e9da8c..be4a44c4da21 100644 --- a/bincompat-forward.whitelist.conf +++ b/bincompat-forward.whitelist.conf @@ -478,6 +478,10 @@ filter { { 
matchName="scala.concurrent.impl.Promise$DefaultPromise" problemName=MissingTypesProblem + }, + { + matchName="scala.reflect.runtime.Settings.YpartialUnification" + problemName=MissingMethodProblem } ] } diff --git a/project/ScalaOptionParser.scala b/project/ScalaOptionParser.scala index f2fd4d86d7c0..6658b6efc62d 100644 --- a/project/ScalaOptionParser.scala +++ b/project/ScalaOptionParser.scala @@ -86,7 +86,7 @@ object ScalaOptionParser { "-Yeta-expand-keeps-star", "-Yide-debug", "-Yinfer-argument-types", "-Yinfer-by-name", "-Yissue-debug", "-Ylog-classpath", "-Ymacro-debug-lite", "-Ymacro-debug-verbose", "-Ymacro-no-expand", "-Yno-completion", "-Yno-generic-signatures", "-Yno-imports", "-Yno-predef", - "-Yoverride-objects", "-Yoverride-vars", "-Ypatmat-debug", "-Yno-adapted-args", "-Ypos-debug", "-Ypresentation-debug", + "-Yoverride-objects", "-Yoverride-vars", "-Ypatmat-debug", "-Yno-adapted-args", "-Ypartial-unification", "-Ypos-debug", "-Ypresentation-debug", "-Ypresentation-strict", "-Ypresentation-verbose", "-Yquasiquote-debug", "-Yrangepos", "-Yreify-copypaste", "-Yreify-debug", "-Yrepl-class-based", "-Yrepl-sync", "-Yshow-member-pos", "-Yshow-symkinds", "-Yshow-symowners", "-Yshow-syms", "-Yshow-trees", "-Yshow-trees-compact", "-Yshow-trees-stringified", "-Ytyper-debug", "-Ywarn-adapted-args", "-Ywarn-dead-code", "-Ywarn-inaccessible", "-Ywarn-infer-any", "-Ywarn-nullary-override", "-Ywarn-nullary-unit", "-Ywarn-numeric-widen", "-Ywarn-unused", "-Ywarn-unused-import", "-Ywarn-value-discard", diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 9a0d86a94df1..dcbff2426507 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -201,6 +201,7 @@ trait ScalaSettings extends AbsScalaSettings val etaExpandKeepsStar = BooleanSetting ("-Yeta-expand-keeps-star", "Eta-expand varargs methods to T* rather than Seq[T]. This is a temporary option to ease transition.").withDeprecationMessage(removalIn212) val inferByName = BooleanSetting ("-Yinfer-by-name", "Allow inference of by-name types. This is a temporary option to ease transition. 
See SI-7899.").withDeprecationMessage(removalIn212) val YdisableFlatCpCaching = BooleanSetting ("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.") + val YpartialUnification = BooleanSetting ("-Ypartial-unification", "Enable partial unification in type constructor inference") val exposeEmptyPackage = BooleanSetting ("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly() val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "method") diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index de82a6a0b26c..a649f6f92658 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -3131,13 +3131,43 @@ trait Types */ def unifyFull(tpe: Type): Boolean = { def unifySpecific(tp: Type) = { - sameLength(typeArgs, tp.typeArgs) && { - val lhs = if (isLowerBound) tp.typeArgs else typeArgs - val rhs = if (isLowerBound) typeArgs else tp.typeArgs + val tpTypeArgs = tp.typeArgs + val arityDelta = compareLengths(typeArgs, tpTypeArgs) + if (arityDelta == 0) { + val lhs = if (isLowerBound) tpTypeArgs else typeArgs + val rhs = if (isLowerBound) typeArgs else tpTypeArgs // This is a higher-kinded type var with same arity as tp. // If so (see SI-7517), side effect: adds the type constructor itself as a bound. - isSubArgs(lhs, rhs, params, AnyDepth) && { addBound(tp.typeConstructor); true } - } + isSubArgs(lhs, rhs, params, AnyDepth) && {addBound(tp.typeConstructor); true} + } else if (settings.YpartialUnification && arityDelta < 0 && typeArgs.nonEmpty) { + // Simple algorithm as suggested by Paul Chiusano in the comments on SI-2712 + // + // https://issues.scala-lang.org/browse/SI-2712?focusedCommentId=61270 + // + // Treat the type constructor as curried and partially applied, we treat a prefix + // as constants and solve for the suffix. For the example in the ticket, unifying + // M[A] with Int => Int this unifies as, + // + // M[t] = [t][Int => t] --> abstract on the right to match the expected arity + // A = Int --> capture the remainder on the left + // + // A more "natural" unifier might be M[t] = [t][t => t]. There's lots of scope for + // experimenting with alternatives here. + val numCaptured = tpTypeArgs.length - typeArgs.length + val (captured, abstractedArgs) = tpTypeArgs.splitAt(numCaptured) + + val (lhs, rhs) = + if (isLowerBound) (abstractedArgs, typeArgs) + else (typeArgs, abstractedArgs) + + isSubArgs(lhs, rhs, params, AnyDepth) && { + val tpSym = tp.typeSymbolDirect + val abstractedTypeParams = tpSym.typeParams.drop(numCaptured).map(_.cloneSymbol(tpSym)) + + addBound(PolyType(abstractedTypeParams, appliedType(tp.typeConstructor, captured ++ abstractedTypeParams.map(_.tpeHK)))) + true + } + } else false } // The type with which we can successfully unify can be hidden // behind singleton types and type aliases. 
diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index e75b3dff3d5d..5a2c8024768a 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -53,6 +53,7 @@ abstract class MutableSettings extends AbsSettings { def printtypes: BooleanSetting def uniqid: BooleanSetting def verbose: BooleanSetting + def YpartialUnification: BooleanSetting def Yrecursion: IntSetting def maxClassfileName: IntSetting diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala index b1d7fde1b446..3b33f089e1ae 100644 --- a/src/reflect/scala/reflect/runtime/Settings.scala +++ b/src/reflect/scala/reflect/runtime/Settings.scala @@ -47,6 +47,7 @@ private[reflect] class Settings extends MutableSettings { val printtypes = new BooleanSetting(false) val uniqid = new BooleanSetting(false) val verbose = new BooleanSetting(false) + val YpartialUnification = new BooleanSetting(false) val Yrecursion = new IntSetting(0) val maxClassfileName = new IntSetting(255) diff --git a/test/files/neg/t2712-1.check b/test/files/neg/t2712-1.check new file mode 100644 index 000000000000..61e4b6b1499c --- /dev/null +++ b/test/files/neg/t2712-1.check @@ -0,0 +1,13 @@ +t2712-1.scala:7: error: no type parameters for method foo: (m: M[A])Unit exist so that it can be applied to arguments (test.Two[Int,String]) + --- because --- +argument expression's type is not compatible with formal parameter type; + found : test.Two[Int,String] + required: ?M[?A] + def test(ma: Two[Int, String]) = foo(ma) // should fail with -Ypartial-unification *disabled* + ^ +t2712-1.scala:7: error: type mismatch; + found : test.Two[Int,String] + required: M[A] + def test(ma: Two[Int, String]) = foo(ma) // should fail with -Ypartial-unification *disabled* + ^ +two errors found diff --git a/test/files/neg/t2712-1.scala b/test/files/neg/t2712-1.scala new file mode 100644 index 000000000000..f7967d71b689 --- /dev/null +++ b/test/files/neg/t2712-1.scala @@ -0,0 +1,8 @@ +package test + +trait Two[A, B] + +object Test { + def foo[M[_], A](m: M[A]) = () + def test(ma: Two[Int, String]) = foo(ma) // should fail with -Ypartial-unification *disabled* +} diff --git a/test/files/neg/t2712-2.check b/test/files/neg/t2712-2.check new file mode 100644 index 000000000000..ea19e33e2c55 --- /dev/null +++ b/test/files/neg/t2712-2.check @@ -0,0 +1,13 @@ +t2712-2.scala:16: error: type mismatch; + found : test.Foo + required: test.Two[test.X1,Object] +Note: test.X2 <: Object (and test.Foo <: test.Two[test.X1,test.X2]), but trait Two is invariant in type B. +You may wish to define B as +B instead. 
(SLS 4.5) + test1(foo): One[X3] // fails with -Ypartial-unification enabled + ^ +t2712-2.scala:16: error: type mismatch; + found : test.Two[test.X1,Object] + required: test.One[test.X3] + test1(foo): One[X3] // fails with -Ypartial-unification enabled + ^ +two errors found diff --git a/test/files/neg/t2712-2.flags b/test/files/neg/t2712-2.flags new file mode 100644 index 000000000000..41565c7e32bd --- /dev/null +++ b/test/files/neg/t2712-2.flags @@ -0,0 +1 @@ +-Ypartial-unification diff --git a/test/files/neg/t2712-2.scala b/test/files/neg/t2712-2.scala new file mode 100644 index 000000000000..85ed52348903 --- /dev/null +++ b/test/files/neg/t2712-2.scala @@ -0,0 +1,18 @@ +package test + +class X1 +class X2 +class X3 + +trait One[A] +trait Two[A, B] + +class Foo extends Two[X1, X2] with One[X3] +object Test { + def test1[M[_], A](x: M[A]): M[A] = x + + val foo = new Foo + + test1(foo): One[X3] // fails with -Ypartial-unification enabled + test1(foo): Two[X1, X2] // fails without -Ypartial-unification +} diff --git a/test/files/neg/t2712-3.check b/test/files/neg/t2712-3.check new file mode 100644 index 000000000000..a84d96bf09c9 --- /dev/null +++ b/test/files/neg/t2712-3.check @@ -0,0 +1,6 @@ +t2712-3.scala:17: error: type mismatch; + found : test.One[test.X3] + required: test.Two[test.X1,test.X2] + test1(foo): Two[X1, X2] // fails without -Ypartial-unification + ^ +one error found diff --git a/test/files/neg/t2712-3.scala b/test/files/neg/t2712-3.scala new file mode 100644 index 000000000000..85ed52348903 --- /dev/null +++ b/test/files/neg/t2712-3.scala @@ -0,0 +1,18 @@ +package test + +class X1 +class X2 +class X3 + +trait One[A] +trait Two[A, B] + +class Foo extends Two[X1, X2] with One[X3] +object Test { + def test1[M[_], A](x: M[A]): M[A] = x + + val foo = new Foo + + test1(foo): One[X3] // fails with -Ypartial-unification enabled + test1(foo): Two[X1, X2] // fails without -Ypartial-unification +} diff --git a/test/files/neg/t2712.flags b/test/files/neg/t2712.flags new file mode 100644 index 000000000000..41565c7e32bd --- /dev/null +++ b/test/files/neg/t2712.flags @@ -0,0 +1 @@ +-Ypartial-unification diff --git a/test/files/pos/t2712-1.flags b/test/files/pos/t2712-1.flags new file mode 100644 index 000000000000..41565c7e32bd --- /dev/null +++ b/test/files/pos/t2712-1.flags @@ -0,0 +1 @@ +-Ypartial-unification diff --git a/test/files/pos/t2712-1.scala b/test/files/pos/t2712-1.scala new file mode 100644 index 000000000000..4f84c9df5edd --- /dev/null +++ b/test/files/pos/t2712-1.scala @@ -0,0 +1,9 @@ +package test + +// Original test case from, +// +// https://issues.scala-lang.org/browse/SI-2712 +object Test { + def meh[M[_], A](x: M[A]): M[A] = x + meh{(x: Int) => x} // solves ?M = [X] Int => X and ?A = Int ... 
+} diff --git a/test/files/pos/t2712-2.flags b/test/files/pos/t2712-2.flags new file mode 100644 index 000000000000..7d49efbb8e6c --- /dev/null +++ b/test/files/pos/t2712-2.flags @@ -0,0 +1,2 @@ +-Ypartial-unification + diff --git a/test/files/pos/t2712-2.scala b/test/files/pos/t2712-2.scala new file mode 100644 index 000000000000..39f22dd92a79 --- /dev/null +++ b/test/files/pos/t2712-2.scala @@ -0,0 +1,25 @@ +package test + +// See: https://github.com/milessabin/si2712fix-demo/issues/3 +object Test { + trait A[T1, T2] { } + trait B[T1, T2] { } + class C[T] extends A[T, Long] with B[T, Double] + class CB extends A[Boolean, Long] with B[Boolean, Double] + + trait A2[T] + trait B2[T] + class C2[T] extends A2[T] with B2[T] + class CB2 extends A2[Boolean] with B2[Boolean] + + def meh[M[_], A](x: M[A]): M[A] = x + + val m0 = meh(new C[Boolean]) + m0: C[Boolean] + val m1 = meh(new CB) + m1: A[Boolean, Long] + val m2 = meh(new C2[Boolean]) + m2: C2[Boolean] + val m3 = meh(new CB2) + m3: A2[Boolean] +} diff --git a/test/files/pos/t2712-3.flags b/test/files/pos/t2712-3.flags new file mode 100644 index 000000000000..7d49efbb8e6c --- /dev/null +++ b/test/files/pos/t2712-3.flags @@ -0,0 +1,2 @@ +-Ypartial-unification + diff --git a/test/files/pos/t2712-3.scala b/test/files/pos/t2712-3.scala new file mode 100644 index 000000000000..46445f9289f7 --- /dev/null +++ b/test/files/pos/t2712-3.scala @@ -0,0 +1,24 @@ +package test + +object Test1 { + class Foo[T, F[_]] + def meh[M[_[_]], F[_]](x: M[F]): M[F] = x + meh(new Foo[Int, List]) // solves ?M = [X[_]]Foo[Int, X[_]] ?A = List ... +} + +object Test2 { + trait TC[T] + class Foo[F[_], G[_]] + def meh[G[_[_]]](g: G[TC]) = ??? + meh(new Foo[TC, TC]) // solves ?G = [X[_]]Foo[TC, X] +} + +object Test3 { + trait TC[F[_]] + trait TC2[F[_]] + class Foo[F[_[_]], G[_[_]]] + new Foo[TC, TC2] + + def meh[G[_[_[_]]]](g: G[TC2]) = ??? 
+ meh(new Foo[TC, TC2]) // solves ?G = [X[_[_]]]Foo[TC, X] +} diff --git a/test/files/pos/t2712-4.flags b/test/files/pos/t2712-4.flags new file mode 100644 index 000000000000..7d49efbb8e6c --- /dev/null +++ b/test/files/pos/t2712-4.flags @@ -0,0 +1,2 @@ +-Ypartial-unification + diff --git a/test/files/pos/t2712-4.scala b/test/files/pos/t2712-4.scala new file mode 100644 index 000000000000..3e2e5cddaedf --- /dev/null +++ b/test/files/pos/t2712-4.scala @@ -0,0 +1,17 @@ +package test + +object Test1 { + trait X + trait Y extends X + class Foo[T, U <: X] + def meh[M[_ <: A], A](x: M[A]): M[A] = x + meh(new Foo[Int, Y]) +} + +object Test2 { + trait X + trait Y extends X + class Foo[T, U >: Y] + def meh[M[_ >: A], A](x: M[A]): M[A] = x + meh(new Foo[Int, X]) +} diff --git a/test/files/pos/t2712-5.flags b/test/files/pos/t2712-5.flags new file mode 100644 index 000000000000..41565c7e32bd --- /dev/null +++ b/test/files/pos/t2712-5.flags @@ -0,0 +1 @@ +-Ypartial-unification diff --git a/test/files/pos/t2712-5.scala b/test/files/pos/t2712-5.scala new file mode 100644 index 000000000000..ed96d4c06fcc --- /dev/null +++ b/test/files/pos/t2712-5.scala @@ -0,0 +1,29 @@ +package test + +import scala.language.higherKinds + +trait Functor[F[_]] { + def map[A, B](f: A => B, fa: F[A]): F[B] +} + +object Functor { + implicit def function[A]: Functor[({ type l[B] = A => B })#l] = + new Functor[({ type l[B] = A => B })#l] { + def map[C, B](cb: C => B, ac: A => C): A => B = cb compose ac + } +} + +object FunctorSyntax { + implicit class FunctorOps[F[_], A](fa: F[A])(implicit F: Functor[F]) { + def map[B](f: A => B): F[B] = F.map(f, fa) + } +} + +object Test { + + val f: Int => String = _.toString + + import FunctorSyntax._ + + f.map((s: String) => s.reverse) +} diff --git a/test/files/pos/t2712-6.flags b/test/files/pos/t2712-6.flags new file mode 100644 index 000000000000..41565c7e32bd --- /dev/null +++ b/test/files/pos/t2712-6.flags @@ -0,0 +1 @@ +-Ypartial-unification diff --git a/test/files/pos/t2712-6.scala b/test/files/pos/t2712-6.scala new file mode 100644 index 000000000000..eefe769ad652 --- /dev/null +++ b/test/files/pos/t2712-6.scala @@ -0,0 +1,12 @@ +package test + +object Tags { + type Tagged[A, T] = {type Tag = T; type Self = A} + + type @@[T, Tag] = Tagged[T, Tag] + + trait Disjunction + + def meh[M[_], A](ma: M[A]): M[A] = ma + meh(null.asInstanceOf[Int @@ Disjunction]) +} diff --git a/test/files/pos/t2712-7.flags b/test/files/pos/t2712-7.flags new file mode 100644 index 000000000000..41565c7e32bd --- /dev/null +++ b/test/files/pos/t2712-7.flags @@ -0,0 +1 @@ +-Ypartial-unification diff --git a/test/files/pos/t2712-7.scala b/test/files/pos/t2712-7.scala new file mode 100644 index 000000000000..d9c5243f132d --- /dev/null +++ b/test/files/pos/t2712-7.scala @@ -0,0 +1,15 @@ +package test + +// Cats Xor, Scalaz \/, scala.util.Either +sealed abstract class Xor[+A, +B] extends Product with Serializable +object Xor { + final case class Left[+A](a: A) extends (A Xor Nothing) + final case class Right[+B](b: B) extends (Nothing Xor B) +} + +object TestXor { + import Xor._ + def meh[F[_], A, B](fa: F[A])(f: A => B): F[B] = ??? 
+ meh(new Right(23): Xor[Boolean, Int])(_ < 13) + meh(new Left(true): Xor[Boolean, Int])(_ < 13) +} diff --git a/test/files/pos/t5683.flags b/test/files/pos/t5683.flags new file mode 100644 index 000000000000..41565c7e32bd --- /dev/null +++ b/test/files/pos/t5683.flags @@ -0,0 +1 @@ +-Ypartial-unification diff --git a/test/files/pos/t5683.scala b/test/files/pos/t5683.scala new file mode 100644 index 000000000000..05ab03579274 --- /dev/null +++ b/test/files/pos/t5683.scala @@ -0,0 +1,23 @@ +object Test { + trait NT[X] + trait W[W, A] extends NT[Int] + type StringW[T] = W[String, T] + trait K[M[_], A, B] + + def k[M[_], B](f: Int => M[B]): K[M, Int, B] = null + + val okay1: K[StringW,Int,Int] = k{ (y: Int) => null: StringW[Int] } + val okay2 = k[StringW,Int]{ (y: Int) => null: W[String, Int] } + + val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] } + + // remove `extends NT[Int]`, and the last line gives an inference error + // rather than a crash. + // test/files/pos/t5683.scala:12: error: no type parameters for method k: (f: Int => M[B])Test.K[M,Int,B] exist so that it can be applied to arguments (Int => Test.W[String,Int]) + // --- because --- + // argument expression's type is not compatible with formal parameter type; + // found : Int => Test.W[String,Int] + // required: Int => ?M[?B] + // val crash: K[StringW,Int,Int] = k{ (y: Int) => null: W[String, Int] } + // ^ +} diff --git a/test/files/pos/t6895b.flags b/test/files/pos/t6895b.flags new file mode 100644 index 000000000000..7d49efbb8e6c --- /dev/null +++ b/test/files/pos/t6895b.flags @@ -0,0 +1,2 @@ +-Ypartial-unification + diff --git a/test/files/pos/t6895b.scala b/test/files/pos/t6895b.scala new file mode 100644 index 000000000000..c46506501108 --- /dev/null +++ b/test/files/pos/t6895b.scala @@ -0,0 +1,39 @@ +trait Foo[F[_]] +trait Bar[F[_], A] + +trait Or[A, B] + +class Test { + implicit def orFoo[A]: Foo[({type L[X] = Or[A, X]})#L] = ??? + implicit def barFoo[F[_]](implicit f: Foo[F]): Foo[({type L[X] = Bar[F, X]})#L] = ??? + + // Now we can define a couple of type aliases: + type StringOr[X] = Or[String, X] + type BarStringOr[X] = Bar[StringOr, X] + + // ok + implicitly[Foo[BarStringOr]] + barFoo[StringOr](null) : Foo[BarStringOr] + barFoo(null) : Foo[BarStringOr] + + // nok + implicitly[Foo[({type L[X] = Bar[StringOr, X]})#L]] + // Let's write the application explicitly, and then + // compile with just this line enabled and -explaintypes. + barFoo(null) : Foo[({type L[X] = Bar[StringOr, X]})#L] + + // Foo[[X]Bar[F,X]] <: Foo[[X]Bar[[X]Or[String,X],X]]? + // Bar[[X]Or[String,X],X] <: Bar[F,X]? + // F[_] <: Or[String,_]? + // false + // false + // false + + // Note that the type annotation above is typechecked as + // Foo[[X]Bar[[X]Or[String,X],X]], ie the type alias `L` + // is eta expanded. + // + // This is done so that it does not escape its defining scope. 
+ // However, one this is done, higher kinded inference + // no longer is able to unify F with `StringOr` (SI-2712) +} diff --git a/test/files/run/inferred-type-constructors-hou.check b/test/files/run/inferred-type-constructors-hou.check new file mode 100644 index 000000000000..6b0982334189 --- /dev/null +++ b/test/files/run/inferred-type-constructors-hou.check @@ -0,0 +1,56 @@ +warning: there were two feature warnings; re-run with -feature for details + p.Iterable[Int] + p.Set[Int] + p.Seq[Int] + p.m.Set[Int] + p.m.Seq[Int] + private[m] p.m.ASet[Int] + p.i.Seq[Int] + private[i] p.i.ASet[Int] + private[i] p.i.ASeq[Int] + p.Iterable[Int] + p.Iterable[Int] + p.Iterable[Int] + p.Iterable[Int] + p.Iterable[Int] + p.Iterable[Int] + p.Iterable[Int] + p.Iterable[Int] + p.Iterable[Int] + p.Set[Int] + p.Iterable[Int] + p.Set[Int] + p.Iterable[Int] + p.Set[Int] + p.Iterable[Int] + p.Iterable[Int] + p.Seq[Int] + p.Iterable[Int] + p.Seq[Int] + p.Iterable[Int] + p.Seq[Int] + p.Iterable[Int] + p.m.Set[Int] + p.Iterable[Int] + p.Set[Int] + p.Iterable[Int] + p.Iterable[Int] + p.Seq[Int] + p.Iterable[Int] + p.Seq[Int] + p.Iterable[Int] + private[p] p.ASet[Int] + private[p] p.AIterable[Int] + p.Iterable[Int] + p.i.Seq[Int] + private[p] p.AIterable[Int] + List[Nothing] + scala.collection.immutable.Vector[Nothing] + scala.collection.immutable.Map[Int,Int] + scala.collection.immutable.Set[Int] + Seq[Int] + Array[Int] + scala.collection.AbstractSet[Int] + Comparable[java.lang.String] + scala.collection.immutable.LinearSeq[Int] + Iterable[Int] diff --git a/test/files/run/inferred-type-constructors-hou.flags b/test/files/run/inferred-type-constructors-hou.flags new file mode 100644 index 000000000000..41565c7e32bd --- /dev/null +++ b/test/files/run/inferred-type-constructors-hou.flags @@ -0,0 +1 @@ +-Ypartial-unification diff --git a/test/files/run/inferred-type-constructors-hou.scala b/test/files/run/inferred-type-constructors-hou.scala new file mode 100644 index 000000000000..79a8653f686b --- /dev/null +++ b/test/files/run/inferred-type-constructors-hou.scala @@ -0,0 +1,125 @@ +package p { + trait TCon[+CC[X]] { + def fPublic: CC[Int] = ??? + private[p] def fPackagePrivate: CC[Int] = ??? + protected[p] def fPackageProtected: CC[Int] = ??? + } + trait Iterable[+A] extends TCon[Iterable] + trait Set[A] extends Iterable[A] with TCon[Set] + trait Seq[+A] extends Iterable[A] with TCon[Seq] + + private[p] abstract class AIterable[+A] extends Iterable[A] + private[p] abstract class ASeq[+A] extends AIterable[A] with Seq[A] + private[p] abstract class ASet[A] extends AIterable[A] with Set[A] + + package m { + private[m] abstract class ASeq[A] extends p.ASeq[A] with Seq[A] + private[m] abstract class ASet[A] extends p.ASet[A] with Set[A] + trait Set[A] extends p.Set[A] with TCon[Set] + trait Seq[A] extends p.Seq[A] with TCon[Seq] + trait BitSet extends ASet[Int] + trait IntSeq extends ASeq[Int] + } + + package i { + private[i] abstract class ASeq[+A] extends p.ASeq[A] with Seq[A] + private[i] abstract class ASet[A] extends p.ASet[A] with Set[A] + trait Set[A] extends p.Set[A] with TCon[Set] + trait Seq[+A] extends p.Seq[A] with TCon[Seq] + trait BitSet extends ASet[Int] + trait IntSeq extends ASeq[Int] + } +} + +object Test { + import scala.reflect.runtime.universe._ + // Complicated by the absence of usable type constructor type tags. 
+ def extract[A, CC[X]](xs: CC[A]): CC[A] = xs + def whatis[T: TypeTag](x: T): Unit = { + val tpe = typeOf[T] + val access = tpe.typeSymbol.asInstanceOf[scala.reflect.internal.HasFlags].accessString.replaceAllLiterally("package ", "") + println(f"$access%15s $tpe") + } + + trait IntIterable extends p.Iterable[Int] + trait IntSet extends p.Set[Int] + trait IntSeq extends p.Seq[Int] + + trait MutableIntSet extends p.m.Set[Int] + trait MutableIntSeq extends p.m.Seq[Int] + + trait ImmutableIntSet extends p.i.Set[Int] + trait ImmutableIntSeq extends p.i.Seq[Int] + + def f1: IntIterable = null + def f2: IntSet = null + def f3: IntSeq = null + + def g1: MutableIntSet = null + def g2: MutableIntSeq = null + def g3: p.m.BitSet = null + + def h1: ImmutableIntSeq = null + def h2: p.i.BitSet = null + def h3: p.i.IntSeq = null + + def main(args: Array[String]): Unit = { + whatis(extract(f1)) + whatis(extract(f2)) + whatis(extract(f3)) + whatis(extract(g1)) + whatis(extract(g2)) + whatis(extract(g3)) + whatis(extract(h1)) + whatis(extract(h2)) + whatis(extract(h3)) + + whatis(extract(if (true) f1 else f2)) + whatis(extract(if (true) f1 else f3)) + whatis(extract(if (true) f1 else g1)) + whatis(extract(if (true) f1 else g2)) + whatis(extract(if (true) f1 else g3)) + whatis(extract(if (true) f1 else h1)) + whatis(extract(if (true) f1 else h2)) + whatis(extract(if (true) f1 else h3)) + whatis(extract(if (true) f2 else f3)) + whatis(extract(if (true) f2 else g1)) + whatis(extract(if (true) f2 else g2)) + whatis(extract(if (true) f2 else g3)) + whatis(extract(if (true) f2 else h1)) + whatis(extract(if (true) f2 else h2)) + whatis(extract(if (true) f2 else h3)) + whatis(extract(if (true) f3 else g1)) + whatis(extract(if (true) f3 else g2)) + whatis(extract(if (true) f3 else g3)) + whatis(extract(if (true) f3 else h1)) + whatis(extract(if (true) f3 else h2)) + whatis(extract(if (true) f3 else h3)) + whatis(extract(if (true) g1 else g2)) + whatis(extract(if (true) g1 else g3)) + whatis(extract(if (true) g1 else h1)) + whatis(extract(if (true) g1 else h2)) + whatis(extract(if (true) g1 else h3)) + whatis(extract(if (true) g2 else g3)) + whatis(extract(if (true) g2 else h1)) + whatis(extract(if (true) g2 else h2)) + whatis(extract(if (true) g2 else h3)) + whatis(extract(if (true) g3 else h1)) + whatis(extract(if (true) g3 else h2)) + whatis(extract(if (true) g3 else h3)) + whatis(extract(if (true) h1 else h2)) + whatis(extract(if (true) h1 else h3)) + whatis(extract(if (true) h2 else h3)) + + whatis(extract(Nil)) + whatis(extract(Vector())) + whatis(extract(Map[Int,Int]())) + whatis(extract(Set[Int]())) + whatis(extract(Seq[Int]())) + whatis(extract(Array[Int]())) + whatis(extract(scala.collection.immutable.BitSet(1))) + whatis(extract("abc")) + whatis(extract(if (true) Stream(1) else List(1))) + whatis(extract(if (true) Seq(1) else Set(1))) + } +} From bf478084fdf65c7bd156e2e6f04a96bfadf6986b Mon Sep 17 00:00:00 2001 From: Miles Sabin Date: Fri, 20 May 2016 12:49:42 +0100 Subject: [PATCH 0079/2793] -Xexperimental mode now only includes -Ypartial-unification --- src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 2 +- test/files/run/t5676.flags | 2 +- test/junit/scala/tools/nsc/settings/SettingsTest.scala | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index dcbff2426507..be34b1545380 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ 
b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -30,7 +30,7 @@ trait ScalaSettings extends AbsScalaSettings protected def defaultClasspath = sys.env.getOrElse("CLASSPATH", ".") /** Enabled under -Xexperimental. */ - protected def experimentalSettings = List[BooleanSetting](YmethodInfer, overrideObjects, overrideVars) + protected def experimentalSettings = List[BooleanSetting](YpartialUnification) /** Enabled under -Xfuture. */ protected def futureSettings = List[BooleanSetting]() diff --git a/test/files/run/t5676.flags b/test/files/run/t5676.flags index e1b37447c953..73f1330c31df 100644 --- a/test/files/run/t5676.flags +++ b/test/files/run/t5676.flags @@ -1 +1 @@ --Xexperimental \ No newline at end of file +-Yoverride-objects diff --git a/test/junit/scala/tools/nsc/settings/SettingsTest.scala b/test/junit/scala/tools/nsc/settings/SettingsTest.scala index 01a235101144..0f2d206273b0 100644 --- a/test/junit/scala/tools/nsc/settings/SettingsTest.scala +++ b/test/junit/scala/tools/nsc/settings/SettingsTest.scala @@ -31,11 +31,11 @@ class SettingsTest { val s = new MutableSettings(msg => throw new IllegalArgumentException(msg)) val (ok, residual) = s.processArguments(args.toList, processAll = true) assert(residual.isEmpty) - s.YmethodInfer // among -Xexperimental + s.YpartialUnification // among -Xexperimental } assertTrue(check("-Xexperimental").value) - assertFalse(check("-Xexperimental", "-Yinfer-argument-types:false").value) - assertFalse(check("-Yinfer-argument-types:false", "-Xexperimental").value) + assertFalse(check("-Xexperimental", "-Ypartial-unification:false").value) + assertFalse(check("-Ypartial-unification:false", "-Xexperimental").value) } // for the given args, select the desired setting From b462e5a97b499bc91222014e45ec2439f56b46b7 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 24 May 2016 08:21:56 +0200 Subject: [PATCH 0080/2793] SI-7898 Label for parsing -i sources Text-based REPL pre-parses, so use the current label for errors. --- .../scala/tools/nsc/interpreter/IMain.scala | 2 +- test/files/run/repl-paste-parse.check | 6 +++++ test/files/run/repl-paste-parse.scala | 27 +++++++++++++++++++ test/files/run/repl-paste-parse.script | 1 + 4 files changed, 35 insertions(+), 1 deletion(-) create mode 100755 test/files/run/repl-paste-parse.check create mode 100644 test/files/run/repl-paste-parse.scala create mode 100644 test/files/run/repl-paste-parse.script diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index 1e7a9cefed04..dc8b6204c01e 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -1191,7 +1191,7 @@ class IMain(@BeanProperty val factory: ScriptEngineFactory, initialSettings: Set var isIncomplete = false def parse = { reporter.reset() - val trees = newUnitParser(line).parseStats() + val trees = newUnitParser(line, label).parseStats() if (reporter.hasErrors) Error(trees) else if (isIncomplete) Incomplete(trees) else Success(trees) diff --git a/test/files/run/repl-paste-parse.check b/test/files/run/repl-paste-parse.check new file mode 100755 index 000000000000..7b2148dc747f --- /dev/null +++ b/test/files/run/repl-paste-parse.check @@ -0,0 +1,6 @@ +Type in expressions for evaluation. Or try :help. 
+ +scala> repl-paste-parse.script:1: error: illegal start of simple pattern +val case = 9 + ^ +:quit diff --git a/test/files/run/repl-paste-parse.scala b/test/files/run/repl-paste-parse.scala new file mode 100644 index 000000000000..e93ad4d02bb3 --- /dev/null +++ b/test/files/run/repl-paste-parse.scala @@ -0,0 +1,27 @@ + +import java.io.{ BufferedReader, StringReader, StringWriter, PrintWriter } + +import scala.tools.partest.DirectTest +import scala.tools.nsc.interpreter.ILoop +import scala.tools.nsc.GenericRunnerSettings + +object Test extends DirectTest { + override def extraSettings = s"-usejavacp -i $scriptPath" + def scriptPath = testPath.changeExtension("script") + override def newSettings(args: List[String]) = { + val ss = new GenericRunnerSettings(Console.println) + ss.processArguments(args, true) + ss + } + def code = "" + def show() = { + val r = new BufferedReader(new StringReader("")) + val w = new StringWriter + val p = new PrintWriter(w, true) + new ILoop(r, p).process(settings) + w.toString.lines foreach { s => + if (!s.startsWith("Welcome to Scala")) println(s) + } + } +} + diff --git a/test/files/run/repl-paste-parse.script b/test/files/run/repl-paste-parse.script new file mode 100644 index 000000000000..903f6e7b0c07 --- /dev/null +++ b/test/files/run/repl-paste-parse.script @@ -0,0 +1 @@ +val case = 9 From 60f28f9e6a330e91a0f1204917301d401a6fce72 Mon Sep 17 00:00:00 2001 From: Performant Data LLC Date: Mon, 25 Apr 2016 16:33:04 -0700 Subject: [PATCH 0081/2793] SI-9522 release key reference when deleting from OpenHashMap This sets the key field in the hash table entry to its default value when an entry is deleted, so as not to unexpectedly retain an object reference, leading to a memory leak. Also includes incidental changes to the slot location algorithm that reduce the number of deleted entries. 
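As a rough, self-contained sketch of the idea (this is not the OpenHashMap code, which
follows in the diff below; names are illustrative and collision handling is omitted):
clearing the key slot on removal is what lets the garbage collector reclaim the key
object even though the entry object itself stays in the table as a tombstone.

```
// Sketch only: keeps tombstoned entries for probing, but clears the key
// reference on removal so the key object can be collected.
final class Entry[K, V](var key: K, var value: Option[V])

final class TinyOpenMap[K <: AnyRef, V] {
  private[this] val table = new Array[Entry[K, V]](8)

  private[this] def slot(key: K): Int = (key.## & Int.MaxValue) % table.length

  def put(key: K, value: V): Unit = {
    val i = slot(key)
    val e = table(i)
    if (e == null) table(i) = new Entry(key, Some(value))
    else { e.key = key; e.value = Some(value) }
  }

  def remove(key: K): Unit = {
    val e = table(slot(key))
    if (e != null && e.value.isDefined) {
      e.key = null.asInstanceOf[K]  // release the key reference (the fix)
      e.value = None                // tombstone: slot remains for probing
    }
  }
}
```

Without the `e.key = null.asInstanceOf[K]` assignment, a removed key stays strongly
reachable from the table until its slot is reused, which is the retention that the
JOL-based test added below checks for.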
--- build.sbt | 3 +- build.xml | 1 + src/eclipse/repl/.classpath | 2 +- src/eclipse/test-junit/.classpath | 1 + .../collection/mutable/OpenHashMap.scala | 83 ++++++++++++------- .../collection/mutable/OpenHashMapTest.scala | 58 ++++++++++++- 6 files changed, 116 insertions(+), 32 deletions(-) diff --git a/build.sbt b/build.sbt index d592b86aff5d..2eb629f9235c 100644 --- a/build.sbt +++ b/build.sbt @@ -66,6 +66,7 @@ val scalaXmlDep = withoutScalaLang("org.scala-lang.modules" %% "scala-xml" % ver val partestDep = withoutScalaLang("org.scala-lang.modules" %% "scala-partest" % versionNumber("partest")) val junitDep = "junit" % "junit" % "4.11" val junitIntefaceDep = "com.novocode" % "junit-interface" % "0.11" % "test" +val jolDep = "org.openjdk.jol" % "jol-core" % "0.5" val asmDep = "org.scala-lang.modules" % "scala-asm" % versionProps("scala-asm.version") val jlineDep = "jline" % "jline" % versionProps("jline.version") val antDep = "org.apache.ant" % "ant" % "1.9.4" @@ -544,7 +545,7 @@ lazy val junit = project.in(file("test") / "junit") .settings(disablePublishing: _*) .settings( fork in Test := true, - libraryDependencies ++= Seq(junitDep, junitIntefaceDep), + libraryDependencies ++= Seq(junitDep, junitIntefaceDep, jolDep), testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"), unmanagedSourceDirectories in Test := List(baseDirectory.value) ) diff --git a/build.xml b/build.xml index 778bcc561b14..c1b0b228a1f9 100644 --- a/build.xml +++ b/build.xml @@ -277,6 +277,7 @@ TODO: + diff --git a/src/eclipse/repl/.classpath b/src/eclipse/repl/.classpath index 682377adc98b..141f84e6bb21 100644 --- a/src/eclipse/repl/.classpath +++ b/src/eclipse/repl/.classpath @@ -2,7 +2,7 @@ - + diff --git a/src/eclipse/test-junit/.classpath b/src/eclipse/test-junit/.classpath index 3635c8511272..1e1b51066312 100644 --- a/src/eclipse/test-junit/.classpath +++ b/src/eclipse/test-junit/.classpath @@ -11,6 +11,7 @@ + diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala index 5f8f5b9a0a32..5bea1634c4bf 100644 --- a/src/library/scala/collection/mutable/OpenHashMap.scala +++ b/src/library/scala/collection/mutable/OpenHashMap.scala @@ -21,10 +21,16 @@ object OpenHashMap { def apply[K, V](elems : (K, V)*) = new OpenHashMap[K, V] ++= elems def empty[K, V] = new OpenHashMap[K, V] - final private class OpenEntry[Key, Value](val key: Key, - val hash: Int, + /** A hash table entry. + * + * The entry is occupied if and only if its `value` is a `Some`; + * deleted if and only if its `value` is `None`. + * If its `key` is not the default value of type `Key`, the entry is occupied. + * If the entry is occupied, `hash` contains the hash value of `key`. + */ + final private class OpenEntry[Key, Value](var key: Key, + var hash: Int, var value: Option[Value]) - extends HashEntry[Key, OpenEntry[Key, Value]] private[mutable] def nextPositivePowerOfTwo(i : Int) = 1 << (32 - Integer.numberOfLeadingZeros(i - 1)) } @@ -64,7 +70,14 @@ extends AbstractMap[Key, Value] private[this] val actualInitialSize = OpenHashMap.nextPositivePowerOfTwo(initialSize) private var mask = actualInitialSize - 1 - private var table : Array[Entry] = new Array[Entry](actualInitialSize) + + /** The hash table. + * + * The table's entries are initialized to `null`, indication of an empty slot. + * A slot is either deleted or occupied if and only if the entry is non-`null`. 
+ */ + private[this] var table = new Array[Entry](actualInitialSize) + private var _size = 0 private var deleted = 0 @@ -91,42 +104,43 @@ extends AbstractMap[Key, Value] table = new Array[Entry](newSize) mask = newSize - 1 oldTable.foreach( entry => - if (entry != null && entry.value != None) addEntry(entry)) + if (entry != null && entry.value != None) + table(findIndex(entry.key, entry.hash)) = entry ) deleted = 0 } /** Return the index of the first slot in the hash table (in probe order) - * that either is empty, or is or was last occupied by the given key. - */ - private[this] def findIndex(key: Key) : Int = findIndex(key, hashOf(key)) - - /** Return the index of the first slot in the hash table (in probe order) - * that either is empty, or is or was last occupied by the given key. - * - * This method is an optimization for when the hash value is in hand. + * that is, in order of preference, either occupied by the given key, deleted, or empty. * * @param hash hash value for `key` */ private[this] def findIndex(key: Key, hash: Int): Int = { var j = hash - var index = hash & mask var perturb = index - while(table(index) != null && - !(table(index).hash == hash && - table(index).key == key)){ + + /** Index of the first slot containing a deleted entry, or -1 if none found yet. */ + var firstDeletedIndex = -1 + + var entry = table(index) + while (entry != null) { + if (entry.hash == hash && entry.key == key && entry.value != None) + return index + + if (firstDeletedIndex == -1 && entry.value == None) + firstDeletedIndex = index + j = 5 * j + 1 + perturb perturb >>= 5 index = j & mask + entry = table(index) } - index - } - private[this] def addEntry(entry: Entry) = - if (entry != null) table(findIndex(entry.key, entry.hash)) = entry + if (firstDeletedIndex == -1) index else firstDeletedIndex + } override def update(key: Key, value: Value) { - put(key, hashOf(key), value) + put(key, value) } @deprecatedOverriding("+= should not be overridden in order to maintain consistency with put.", "2.11.0") @@ -150,6 +164,8 @@ extends AbstractMap[Key, Value] } else { val res = entry.value if (entry.value == None) { + entry.key = key + entry.hash = hash size += 1 deleted -= 1 modCount += 1 @@ -159,13 +175,22 @@ extends AbstractMap[Key, Value] } } + /** Delete the hash table slot contained in the given entry. 
*/ + @inline + private[this] def deleteSlot(entry: Entry) = { + entry.key = null.asInstanceOf[Key] + entry.hash = 0 + entry.value = None + + size -= 1 + deleted += 1 + } + override def remove(key : Key): Option[Value] = { - val index = findIndex(key) - if (table(index) != null && table(index).value != None){ - val res = table(index).value - table(index).value = None - size -= 1 - deleted += 1 + val entry = table(findIndex(key, hashOf(key))) + if (entry != null && entry.value != None) { + val res = entry.value + deleteSlot(entry) res } else None } @@ -249,7 +274,7 @@ extends AbstractMap[Key, Value] } override def retain(f : (Key, Value) => Boolean) = { - foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) {entry.value = None; size -= 1; deleted += 1} ) + foreachUndeletedEntry(entry => if (!f(entry.key, entry.value.get)) deleteSlot(entry)) this } diff --git a/test/junit/scala/collection/mutable/OpenHashMapTest.scala b/test/junit/scala/collection/mutable/OpenHashMapTest.scala index 9b5c20e01a78..b6cddf210111 100644 --- a/test/junit/scala/collection/mutable/OpenHashMapTest.scala +++ b/test/junit/scala/collection/mutable/OpenHashMapTest.scala @@ -4,6 +4,10 @@ import org.junit.Test import org.junit.Assert._ import org.junit.runner.RunWith import org.junit.runners.JUnit4 +import org.openjdk.jol.info.GraphLayout +import org.openjdk.jol.info.GraphWalker +import org.openjdk.jol.info.GraphVisitor +import org.openjdk.jol.info.GraphPathRecord /** Tests for [[OpenHashMap]]. */ @RunWith(classOf[JUnit4]) @@ -28,7 +32,13 @@ class OpenHashMapTest { val fieldMirror = mirror.reflect(m).reflectField(termSym) */ // Use Java reflection instead for now. - val field = m.getClass.getDeclaredField("deleted") + val field = + try { // Name may or not be mangled, depending on what the compiler authors are doing. + m.getClass.getDeclaredField("scala$collection$mutable$OpenHashMap$$deleted") + } catch { + case _: NoSuchFieldException => + m.getClass.getDeclaredField("deleted") + } field.setAccessible(true) m.put(0, 0) @@ -39,4 +49,50 @@ class OpenHashMapTest { // TODO assertEquals(0, fieldMirror.get.asInstanceOf[Int]) assertEquals(0, field.getInt(m)) } + + /** Test that an [[OpenHashMap]] frees references to a deleted key (SI-9522). 
*/ + @Test + def freesDeletedKey { + import scala.language.reflectiveCalls + + class MyClass { + override def hashCode() = 42 + } + + val counter = new GraphVisitor() { + private[this] var instanceCount: Int = _ + + def countInstances(obj: AnyRef) = { + instanceCount = 0 + val walker = new GraphWalker(obj) + walker.addVisitor(this) + walker.walk + instanceCount + } + + override def visit(record: GraphPathRecord) { + if (record.klass() == classOf[MyClass]) instanceCount += 1 + } + } + + val m = OpenHashMap.empty[MyClass, Int] + val obj = new MyClass + assertEquals("Found a key instance in the map before adding one!?", 0, counter.countInstances(m)) + m.put(obj, 0) + assertEquals("There should be only one key instance in the map.", 1, counter.countInstances(m)) + m.put(obj, 1) + assertEquals("There should still be only one key instance in the map.", 1, counter.countInstances(m)) + m.remove(obj) + assertEquals("There should be no key instance in the map.", 0, counter.countInstances(m)) + + val obj2 = new MyClass + assertEquals("The hash codes of the test objects need to match.", obj.##, obj2.##) + m.put(obj, 0) + m.put(obj2, 0) + assertEquals("There should be two key instances in the map.", 2, counter.countInstances(m)) + m.remove(obj) + assertEquals("There should be one key instance in the map.", 1, counter.countInstances(m)) + m.remove(obj2) + assertEquals("There should be no key instance in the map.", 0, counter.countInstances(m)) + } } From dd3a90e939fef264292b3dbc75d0cdbce4eeae6f Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Tue, 24 May 2016 21:12:20 +0100 Subject: [PATCH 0082/2793] Fix Scaladoc link syntax on java.util.Formatter references --- src/library/scala/Predef.scala | 3 +-- src/library/scala/collection/immutable/StringLike.scala | 6 ++---- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index 58d43f866616..216f6663b5d5 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -388,8 +388,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { * based on a string pattern (in a fashion similar to printf in C). * * The interpretation of the formatting patterns is described in - * - * `java.util.Formatter`. + * [[java.util.Formatter]]. * * Consider using the [[scala.StringContext.f f interpolator]] as more type safe and idiomatic. * diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala index 8a9df0e8625d..3c7507f480da 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -336,8 +336,7 @@ self => * holes. * * The interpretation of the formatting patterns is described in - * - * `java.util.Formatter`, with the addition that + * [[java.util.Formatter]], with the addition that * classes deriving from `ScalaNumber` (such as [[scala.BigInt]] and * [[scala.BigDecimal]]) are unwrapped to pass a type which `Formatter` * understands. @@ -352,8 +351,7 @@ self => * which influences formatting as in `java.lang.String`'s format. * * The interpretation of the formatting patterns is described in - * - * `java.util.Formatter`, with the addition that + * [[java.util.Formatter]], with the addition that * classes deriving from `ScalaNumber` (such as `scala.BigInt` and * `scala.BigDecimal`) are unwrapped to pass a type which `Formatter` * understands. 
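For reference, a minimal sketch of the Scaladoc link style the change above adopts
(the object and method here are hypothetical; only the `[[...]]` link syntax is taken
from the patch):

```
object FormatterDocExample {
  /** Formats `pattern` with `args`, as described in [[java.util.Formatter]].
   *
   *  Consider the [[scala.StringContext.f f interpolator]] as a more
   *  type-safe alternative.
   */
  def fmt(pattern: String, args: Any*): String = pattern.format(args: _*)
}
```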
From 1db58b52e064579e857260de93e1a706a783a7e5 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 24 May 2016 22:13:20 +0200 Subject: [PATCH 0083/2793] Debug flag to print a summary of the inliner's work Example output below. Note that inlining List.map fails because the trait forwarder uses `INVOKESPECIAL` for now, will change with pr 5177. $ cat Test.scala class C { def foo = Map(1 -> 'a', 2 -> 'b') def bar(l: List[Int]) = l.map(_ + 1) } $ qsc -Yopt-log-inline _ -Yopt:l:classpath Test.scala Inlining into C.foo (initially 36 instructions, ultimately 72): - Inlined scala/Predef$ArrowAssoc$.$minus$greater$extension (8 instructions) 2 times: the callee is annotated `@inline` Inlining into C.bar (initially 12 instructions, ultimately 12): - Failed to inline scala/collection/immutable/List.map (the callee is a higher-order method, the argument for parameter (bf: Function1) is a function literal): The callee scala/collection/immutable/List::map(Lscala/Function1;Lscala/collection/generic/CanBuildFrom;)Ljava/lang/Object; contains the instruction INVOKESPECIAL scala/collection/TraversableLike.map (Lscala/Function1;Lscala/collection/generic/CanBuildFrom;)Ljava/lang/Object; that would cause an IllegalAccessError when inlined into class C. --- .../tools/nsc/backend/jvm/opt/Inliner.scala | 38 ++++++++++++++- .../backend/jvm/opt/InlinerHeuristics.scala | 48 +++++++++++++++---- .../tools/nsc/backend/jvm/opt/LocalOpt.scala | 3 +- .../tools/nsc/settings/ScalaSettings.scala | 4 +- .../scala/tools/nsc/util/StackTracing.scala | 2 +- .../nsc/backend/jvm/opt/InlinerTest.scala | 10 ++-- 6 files changed, 87 insertions(+), 18 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala index f35eaa45e927..4b65a566d3ec 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala @@ -25,6 +25,9 @@ class Inliner[BT <: BTypes](val btypes: BT) { import inlinerHeuristics._ import backendUtils._ + case class InlineLog(request: InlineRequest, sizeBefore: Int, sizeAfter: Int, sizeInlined: Int, warning: Option[CannotInlineWarning]) + var inlineLog: List[InlineLog] = Nil + def runInliner(): Unit = { for (request <- collectAndOrderInlineRequests) { val Right(callee) = request.callsite.callee // collectAndOrderInlineRequests returns callsites with a known callee @@ -42,6 +45,29 @@ class Inliner[BT <: BTypes](val btypes: BT) { } } } + + if (compilerSettings.YoptLogInline.isSetByUser) { + val methodPrefix = { val p = compilerSettings.YoptLogInline.value; if (p == "_") "" else p } + val byCallsiteMethod = inlineLog.groupBy(_.request.callsite.callsiteMethod).toList.sortBy(_._2.head.request.callsite.callsiteClass.internalName) + for ((m, mLogs) <- byCallsiteMethod) { + val initialSize = mLogs.minBy(_.sizeBefore).sizeBefore + val firstLog = mLogs.head + val methodName = s"${firstLog.request.callsite.callsiteClass.internalName}.${m.name}" + if (methodName.startsWith(methodPrefix)) { + println(s"Inlining into $methodName (initially $initialSize instructions, ultimately ${m.instructions.size}):") + val byCallee = mLogs.groupBy(_.request.callsite.callee.get).toList.sortBy(_._2.length).reverse + for ((c, cLogs) <- byCallee) { + val first = cLogs.head + if (first.warning.isEmpty) { + val num = if (cLogs.tail.isEmpty) "" else s" ${cLogs.length} times" + println(s" - Inlined ${c.calleeDeclarationClass.internalName}.${c.callee.name} (${first.sizeInlined} instructions)$num: 
${first.request.reason}") + } else + println(s" - Failed to inline ${c.calleeDeclarationClass.internalName}.${c.callee.name} (${first.request.reason}): ${first.warning.get}") + } + println() + } + } + } } /** @@ -184,7 +210,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { def impl(post: InlineRequest, at: Callsite): List[InlineRequest] = { post.callsite.inlinedClones.find(_.clonedWhenInlining == at) match { case Some(clonedCallsite) => - List(InlineRequest(clonedCallsite.callsite, post.post)) + List(InlineRequest(clonedCallsite.callsite, post.post, post.reason)) case None => post.post.flatMap(impl(_, post.callsite)).flatMap(impl(_, at)) } @@ -199,9 +225,17 @@ class Inliner[BT <: BTypes](val btypes: BT) { * @return An inliner warning for each callsite that could not be inlined. */ def inline(request: InlineRequest): List[CannotInlineWarning] = canInlineBody(request.callsite) match { - case Some(w) => List(w) + case Some(w) => + if (compilerSettings.YoptLogInline.isSetByUser) { + val size = request.callsite.callsiteMethod.instructions.size + inlineLog ::= InlineLog(request, size, size, 0, Some(w)) + } + List(w) case None => + val sizeBefore = request.callsite.callsiteMethod.instructions.size inlineCallsite(request.callsite) + if (compilerSettings.YoptLogInline.isSetByUser) + inlineLog ::= InlineLog(request, sizeBefore, request.callsite.callsiteMethod.instructions.size, request.callsite.callee.get.callee.instructions.size, None) val postRequests = request.post.flatMap(adaptPostRequestForMainCallsite(_, request.callsite)) postRequests flatMap inline } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala index 6aaf9734d381..fd65b7176299 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala @@ -17,7 +17,7 @@ class InlinerHeuristics[BT <: BTypes](val bTypes: BT) { import inliner._ import callGraph._ - case class InlineRequest(callsite: Callsite, post: List[InlineRequest]) { + case class InlineRequest(callsite: Callsite, post: List[InlineRequest], reason: String) { // invariant: all post inline requests denote callsites in the callee of the main callsite for (pr <- post) assert(pr.callsite.callsiteMethod == callsite.callee.get.callee, s"Callsite method mismatch: main $callsite - post ${pr.callsite}") } @@ -40,7 +40,7 @@ class InlinerHeuristics[BT <: BTypes](val bTypes: BT) { var requests = Set.empty[InlineRequest] callGraph.callsites(methodNode).valuesIterator foreach { case callsite @ Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, canInlineFromSource, calleeAnnotatedInline, _, _, callsiteWarning)), _, _, _, pos, _, _) => - inlineRequest(callsite) match { + inlineRequest(callsite, requests) match { case Some(Right(req)) => requests += req case Some(Left(w)) => if ((calleeAnnotatedInline && bTypes.compilerSettings.YoptWarningEmitAtInlineFailed) || w.emitWarning(compilerSettings)) { @@ -87,20 +87,29 @@ class InlinerHeuristics[BT <: BTypes](val bTypes: BT) { * InlineRequest for the original callsite? new subclass of OptimizerWarning. 
* `Some(Right)` if the callsite should be and can be inlined */ - def inlineRequest(callsite: Callsite): Option[Either[OptimizerWarning, InlineRequest]] = { + def inlineRequest(callsite: Callsite, selectedRequestsForCallee: Set[InlineRequest]): Option[Either[OptimizerWarning, InlineRequest]] = { val callee = callsite.callee.get - def requestIfCanInline(callsite: Callsite): Either[OptimizerWarning, InlineRequest] = inliner.earlyCanInlineCheck(callsite) match { + def requestIfCanInline(callsite: Callsite, reason: String): Either[OptimizerWarning, InlineRequest] = inliner.earlyCanInlineCheck(callsite) match { case Some(w) => Left(w) - case None => Right(InlineRequest(callsite, Nil)) + case None => Right(InlineRequest(callsite, Nil, reason)) } compilerSettings.YoptInlineHeuristics.value match { case "everything" => - if (callee.safeToInline) Some(requestIfCanInline(callsite)) + if (callee.safeToInline) { + val reason = if (compilerSettings.YoptLogInline.isSetByUser) "the inline strategy is \"everything\"" else null + Some(requestIfCanInline(callsite, reason)) + } else None case "at-inline-annotated" => - if (callee.safeToInline && callee.annotatedInline) Some(requestIfCanInline(callsite)) + if (callee.safeToInline && callee.annotatedInline) { + val reason = if (compilerSettings.YoptLogInline.isSetByUser) { + val what = if (callee.safeToInline) "callee" else "callsite" + s"the $what is annotated `@inline`" + } else null + Some(requestIfCanInline(callsite, reason)) + } else None case "default" => @@ -108,7 +117,30 @@ class InlinerHeuristics[BT <: BTypes](val bTypes: BT) { def shouldInlineHO = callee.samParamTypes.nonEmpty && (callee.samParamTypes exists { case (index, _) => callsite.argInfos.contains(index) }) - if (callee.annotatedInline || callsite.annotatedInline || shouldInlineHO) Some(requestIfCanInline(callsite)) + if (callee.annotatedInline || callsite.annotatedInline || shouldInlineHO) { + val reason = if (compilerSettings.YoptLogInline.isSetByUser) { + if (callee.annotatedInline || callsite.annotatedInline) { + val what = if (callee.safeToInline) "callee" else "callsite" + s"the $what is annotated `@inline`" + } else { + val paramNames = Option(callee.callee.parameters).map(_.asScala.map(_.name).toVector) + def param(i: Int) = { + def syn = s"" + paramNames.fold(syn)(v => v.applyOrElse(i, (_: Int) => syn)) + } + def samInfo(i: Int, sam: String, arg: String) = s"the argument for parameter (${param(i)}: $sam) is a $arg" + val argInfos = for ((i, sam) <- callee.samParamTypes; info <- callsite.argInfos.get(i)) yield { + val argKind = info match { + case FunctionLiteral => "function literal" + case ForwardedParam(_) => "parameter of the callsite method" + } + samInfo(i, sam.internalName.split('/').last, argKind) + } + s"the callee is a higher-order method, ${argInfos.mkString(", ")}" + } + } else null + Some(requestIfCanInline(callsite, reason)) + } else None } else None } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala index 4e1349257e85..4972a49bb467 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala @@ -231,7 +231,8 @@ class LocalOpt[BT <: BTypes](val btypes: BT) { // for local variables in dead blocks. Maybe that's a bug in the ASM framework. var currentTrace: String = null - val doTrace = compilerSettings.YoptTrace.isSetByUser && compilerSettings.YoptTrace.value == ownerClassName + "." 
+ method.name + val methodPrefix = {val p = compilerSettings.YoptTrace.value; if (p == "_") "" else p } + val doTrace = compilerSettings.YoptTrace.isSetByUser && s"$ownerClassName.${method.name}".startsWith(methodPrefix) def traceIfChanged(optName: String): Unit = if (doTrace) { val after = AsmUtils.textify(method) if (currentTrace != after) { diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 9a0d86a94df1..aa43772dd774 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -302,7 +302,9 @@ trait ScalaSettings extends AbsScalaSettings def YoptWarningNoInlineMissingBytecode = YoptWarnings.contains(YoptWarningsChoices.noInlineMissingBytecode) def YoptWarningNoInlineMissingScalaInlineInfoAttr = YoptWarnings.contains(YoptWarningsChoices.noInlineMissingScalaInlineInfoAttr) - val YoptTrace = StringSetting("-Yopt-trace", "package/Class.method", "Trace the optimizer progress for a specific method.", "") + val YoptTrace = StringSetting("-Yopt-trace", "package/Class.method", "Trace the optimizer progress for methods; `_` to print all, prefix match to select.", "") + + val YoptLogInline = StringSetting("-Yopt-log-inline", "package/Class.method", "Print a summary of inliner activity; `_` to print all, prefix match to select.", "") private def removalIn212 = "This flag is scheduled for removal in 2.12. If you have a case where you need this flag then please report a bug." diff --git a/src/compiler/scala/tools/nsc/util/StackTracing.scala b/src/compiler/scala/tools/nsc/util/StackTracing.scala index fa4fe29f2822..0765bb923f0d 100644 --- a/src/compiler/scala/tools/nsc/util/StackTracing.scala +++ b/src/compiler/scala/tools/nsc/util/StackTracing.scala @@ -19,7 +19,7 @@ private[util] trait StackTracing extends Any { def stackTracePrefixString(e: Throwable)(p: StackTraceElement => Boolean): String = { import collection.mutable.{ ArrayBuffer, ListBuffer } import compat.Platform.EOL - import util.Properties.isJavaAtLeast + import scala.util.Properties.isJavaAtLeast val sb = ListBuffer.empty[String] diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index fd020c7d9390..fb708c4f293b 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -72,7 +72,7 @@ class InlinerTest extends BytecodeTesting { def inlineTest(code: String, mod: ClassNode => Unit = _ => ()): MethodNode = { val (gMethod, fCall) = gMethAndFCallsite(code, mod) - inliner.inline(InlineRequest(fCall, Nil)) + inliner.inline(InlineRequest(fCall, Nil, null)) gMethod } @@ -343,7 +343,7 @@ class InlinerTest extends BytecodeTesting { val warning = inliner.canInlineBody(call) assert(warning.isEmpty, warning) - inliner.inline(InlineRequest(call, Nil)) + inliner.inline(InlineRequest(call, Nil, null)) val ins = instructionsFromMethod(fMeth) // no invocations, lowestOneBit is inlined @@ -976,7 +976,7 @@ class InlinerTest extends BytecodeTesting { inliner.inline(InlineRequest(hCall, post = List(InlineRequest(gCall, - post = List(InlineRequest(fCall, Nil)))))) + post = List(InlineRequest(fCall, Nil, null)), null)), null)) assertNoInvoke(convertMethod(iMeth)) // no invoke in i: first h is inlined, then the inlined call to g is also inlined, etc for f assertInvoke(convertMethod(gMeth), "C", "f") // g itself still has the call to f 
} @@ -998,11 +998,11 @@ class InlinerTest extends BytecodeTesting { val bCall = getCallsite(c, "b") val cCall = getCallsite(d, "c") - inliner.inline(InlineRequest(bCall, Nil)) + inliner.inline(InlineRequest(bCall, Nil, null)) val req = InlineRequest(cCall, List(InlineRequest(bCall, - List(InlineRequest(aCall, Nil))))) + List(InlineRequest(aCall, Nil, null)), null)), null) inliner.inline(req) assertNoInvoke(convertMethod(d)) From 450df0e4225da325b1a68461d8eb852bc1e61cbc Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Wed, 25 May 2016 11:03:47 +0100 Subject: [PATCH 0084/2793] Use full braces style in AnsiColor example This matches the use of ${..} in Console. --- src/library/scala/io/AnsiColor.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/io/AnsiColor.scala b/src/library/scala/io/AnsiColor.scala index 720049ba8e10..df589bc66c0d 100644 --- a/src/library/scala/io/AnsiColor.scala +++ b/src/library/scala/io/AnsiColor.scala @@ -13,7 +13,7 @@ package io * * object ColorDemo extends App { * - * println(s"$REVERSED${BOLD}Hello 1979!$RESET") + * println(s"${REVERSED}${BOLD}Hello 1979!${RESET}") * } * }}} * From b85c9a722ce56ebfb6e41cea7a7387126286b187 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 25 May 2016 16:42:19 +0200 Subject: [PATCH 0085/2793] Rename -Yopt to -opt, -Yopt-warnings to -opt-warnings Keep -Yopt-inline-heuristics and -Yopt-trace unchanged --- build-ant-macros.xml | 2 +- build.xml | 8 +- project/ScalaOptionParser.scala | 2 +- scripts/jobs/integrate/bootstrap | 6 +- scripts/jobs/validate/test | 2 +- src/compiler/scala/tools/nsc/Reporting.scala | 2 +- .../nsc/backend/jvm/BCodeIdiomatic.scala | 2 +- .../scala/tools/nsc/backend/jvm/BTypes.scala | 2 +- .../nsc/backend/jvm/BTypesFromSymbols.scala | 2 +- .../nsc/backend/jvm/BackendReporting.scala | 24 +++--- .../tools/nsc/backend/jvm/GenBCode.scala | 8 +- .../tools/nsc/backend/jvm/opt/CallGraph.scala | 4 +- .../backend/jvm/opt/ClosureOptimizer.scala | 2 +- .../tools/nsc/backend/jvm/opt/Inliner.scala | 2 +- .../backend/jvm/opt/InlinerHeuristics.scala | 6 +- .../tools/nsc/backend/jvm/opt/LocalOpt.scala | 32 +++---- .../tools/nsc/settings/ScalaSettings.scala | 84 +++++++++---------- .../instrumented/inline-in-constructors.flags | 2 +- test/files/jvm/bytecode-test-example.flags | 2 +- test/files/jvm/unreachable/Foo_1.flags | 2 +- test/files/neg/inlineIndyLambdaPrivate.flags | 2 +- test/files/neg/inlineMaxSize.flags | 2 +- test/files/neg/optimiseDeprecated.check | 2 +- test/files/neg/partestInvalidFlag.check | 4 +- test/files/neg/partestInvalidFlag.flags | 2 +- test/files/neg/sealed-final-neg.flags | 2 +- test/files/pos/inline-access-levels.flags | 2 +- test/files/pos/t3234.flags | 2 +- test/files/pos/t3420.flags | 2 +- test/files/pos/t4840.flags | 2 +- test/files/pos/t8410.flags | 2 +- test/files/pos/t9111-inliner-workaround.flags | 2 +- test/files/run/bcodeInlinerMixed.flags | 2 +- test/files/run/classfile-format-51.scala | 2 +- test/files/run/classfile-format-52.scala | 2 +- test/files/run/finalvar.flags | 2 +- test/files/run/icode-reader-dead-code.scala | 2 +- test/files/run/noInlineUnknownIndy/Test.scala | 2 +- test/files/run/nothingTypeDce.flags | 2 +- test/files/run/nothingTypeDce.scala | 2 +- test/files/run/nothingTypeNoOpt.flags | 2 +- test/files/run/nothingTypeNoOpt.scala | 2 +- test/files/run/synchronized.flags | 2 +- test/files/run/t2106.flags | 2 +- test/files/run/t3509.flags | 2 +- test/files/run/t3569.flags | 2 +- test/files/run/t4285.flags | 2 +- test/files/run/t4935.flags | 2 
+- test/files/run/t5789.scala | 2 +- test/files/run/t6102.flags | 2 +- test/files/run/t6188.flags | 2 +- test/files/run/t7407.flags | 2 +- test/files/run/t7459b-optimize.flags | 2 +- test/files/run/t7582.flags | 2 +- test/files/run/t7582b.flags | 2 +- test/files/run/t7852.flags | 2 +- test/files/run/t8601-closure-elim.flags | 2 +- test/files/run/t8601.flags | 2 +- test/files/run/t8601b.flags | 2 +- test/files/run/t8601c.flags | 2 +- test/files/run/t8601d.flags | 2 +- test/files/run/t8601e.flags | 2 +- test/files/run/t8925.flags | 2 +- test/files/run/t9003.flags | 2 +- test/files/run/t9403.flags | 2 +- .../tools/nsc/backend/jvm/BTypesTest.scala | 2 +- .../nsc/backend/jvm/DirectCompileTest.scala | 2 +- .../backend/jvm/OptimizedBytecodeTest.scala | 2 +- .../jvm/analysis/NullnessAnalyzerTest.scala | 2 +- .../jvm/analysis/ProdConsAnalyzerTest.scala | 2 +- .../nsc/backend/jvm/opt/AnalyzerTest.scala | 2 +- .../jvm/opt/BTypesFromClassfileTest.scala | 2 +- .../nsc/backend/jvm/opt/CallGraphTest.scala | 2 +- .../jvm/opt/ClosureOptimizerTest.scala | 2 +- .../jvm/opt/CompactLocalVariablesTest.scala | 4 +- .../jvm/opt/EmptyExceptionHandlersTest.scala | 4 +- .../nsc/backend/jvm/opt/InlineInfoTest.scala | 2 +- .../backend/jvm/opt/InlineWarningTest.scala | 10 +-- .../jvm/opt/InlinerIllegalAccessTest.scala | 2 +- .../opt/InlinerSeparateCompilationTest.scala | 4 +- .../nsc/backend/jvm/opt/InlinerTest.scala | 8 +- .../backend/jvm/opt/MethodLevelOptsTest.scala | 2 +- .../backend/jvm/opt/ScalaInlineInfoTest.scala | 2 +- .../backend/jvm/opt/UnreachableCodeTest.scala | 6 +- .../jvm/opt/UnusedLocalVariablesTest.scala | 2 +- .../transform/patmat/PatmatBytecodeTest.scala | 2 +- .../jvm/constant-optimization/Foo_1.flags | 2 +- 87 files changed, 178 insertions(+), 178 deletions(-) diff --git a/build-ant-macros.xml b/build-ant-macros.xml index 19429cdaa3f5..e077cfbb4c88 100644 --- a/build-ant-macros.xml +++ b/build-ant-macros.xml @@ -6,7 +6,7 @@ - + diff --git a/build.xml b/build.xml index c1b0b228a1f9..50ced2424718 100644 --- a/build.xml +++ b/build.xml @@ -19,7 +19,7 @@ ant $antArgs $scalacArgs $targets antArgs tend to be: -Darchives.skipxz=true - -Dscalac.args.optimise=-Yopt:l:classpath + -Dscalac.args.optimise=-opt:l:classpath scalacArgs examples: "-Dscalac.args=\"-Yrangepos\" -Dpartest.scalac_opts=\"-Yrangepos\"" @@ -79,13 +79,13 @@ TODO: - + - + @@ -1043,7 +1043,7 @@ TODO: - + diff --git a/project/ScalaOptionParser.scala b/project/ScalaOptionParser.scala index f2fd4d86d7c0..82b8f5762c57 100644 --- a/project/ScalaOptionParser.scala +++ b/project/ScalaOptionParser.scala @@ -108,7 +108,7 @@ object ScalaOptionParser { private def multiChoiceSettingNames = Map[String, List[String]]( "-Xlint" -> List("adapted-args", "nullary-unit", "inaccessible", "nullary-override", "infer-any", "missing-interpolator", "doc-detached", "private-shadow", "type-parameter-shadow", "poly-implicit-overload", "option-implicit", "delayedinit-select", "by-name-right-associative", "package-object-classes", "unsound-match", "stars-align"), "-language" -> List("help", "_", "dynamics", "postfixOps", "reflectiveCalls", "implicitConversions", "higherKinds", "existentials", "experimental.macros"), - "-Yopt" -> List("l:none", "l:default", "l:method", "l:project", "l:classpath", "unreachable-code", "simplify-jumps", "empty-line-numbers", "empty-labels", "compact-locals", "nullness-tracking", "closure-elimination", "inline-project", "inline-global"), + "-opt" -> List("l:none", "l:default", "l:method", "l:project", "l:classpath", "unreachable-code", 
"simplify-jumps", "empty-line-numbers", "empty-labels", "compact-locals", "nullness-tracking", "closure-elimination", "inline-project", "inline-global"), "-Ystatistics" -> List("parser", "typer", "patmat", "erasure", "cleanup", "jvm") ) private def scalaVersionSettings = List("-Xmigration", "-Xsource") diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index 65758ea528c4..05c7d22b26b4 100644 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -494,7 +494,7 @@ bootstrap() { -Dremote.snapshot.repository=NOPE\ -Dremote.release.repository=$releaseTempRepoUrl\ -Drepository.credentials.id=$releaseTempRepoCred\ - -Dscalac.args.optimise=-Yopt:l:classpath\ + -Dscalac.args.optimise=-opt:l:classpath\ -Ddocs.skip=1\ -Dlocker.skip=1\ $publishStarrPrivateTask >> $baseDir/logs/builds 2>&1 @@ -516,7 +516,7 @@ bootstrap() { $SET_STARR\ -Dremote.release.repository=$releaseTempRepoUrl\ -Drepository.credentials.id=$releaseTempRepoCred\ - -Dscalac.args.optimise=-Yopt:l:classpath\ + -Dscalac.args.optimise=-opt:l:classpath\ -Ddocs.skip=1\ -Dlocker.skip=1\ $publishLockerPrivateTask >> $baseDir/logs/builds 2>&1 @@ -555,7 +555,7 @@ bootstrap() { -Dremote.snapshot.repository=NOPE\ -Dremote.release.repository=$releaseTempRepoUrl\ -Drepository.credentials.id=$releaseTempRepoCred\ - -Dscalac.args.optimise=-Yopt:l:classpath\ + -Dscalac.args.optimise=-opt:l:classpath\ $antBuildTask $publishPrivateTask # clear ivy cache (and to be sure, local as well), so the next round of sbt builds sees the fresh scala diff --git a/scripts/jobs/validate/test b/scripts/jobs/validate/test index d63d39c65d35..7c7024e87c9b 100755 --- a/scripts/jobs/validate/test +++ b/scripts/jobs/validate/test @@ -10,7 +10,7 @@ case $prDryRun in # build quick using STARR built upstream, as specified by scalaVersion # (in that sense it's locker, since it was built with starr by that upstream job) ant -Dstarr.version=$scalaVersion \ - -Dscalac.args.optimise=-Yopt:l:classpath \ + -Dscalac.args.optimise=-opt:l:classpath \ -Dlocker.skip=1 -Dextra.repo.url=$prRepoUrl \ $testExtraArgs ${testTarget-test.core docs.done} ;; diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala index 5bdbf4bb6a63..8d0aedc76db8 100644 --- a/src/compiler/scala/tools/nsc/Reporting.scala +++ b/src/compiler/scala/tools/nsc/Reporting.scala @@ -49,7 +49,7 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w private val _deprecationWarnings = new ConditionalWarning("deprecation", settings.deprecation) private val _uncheckedWarnings = new ConditionalWarning("unchecked", settings.unchecked) private val _featureWarnings = new ConditionalWarning("feature", settings.feature) - private val _inlinerWarnings = new ConditionalWarning("inliner", () => !settings.YoptWarningsSummaryOnly, settings.YoptWarnings) + private val _inlinerWarnings = new ConditionalWarning("inliner", () => !settings.optWarningsSummaryOnly, settings.optWarnings) private val _allConditionalWarnings = List(_deprecationWarnings, _uncheckedWarnings, _featureWarnings, _inlinerWarnings) // TODO: remove in favor of the overload that takes a Symbol, give that argument a default (NoSymbol) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala index 0a95bc5e3998..ed1b4ec3255e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala 
@@ -392,7 +392,7 @@ abstract class BCodeIdiomatic extends SubComponent { private def addInvoke(opcode: Int, owner: String, name: String, desc: String, itf: Boolean, pos: Position) = { val node = new MethodInsnNode(opcode, owner, name, desc, itf) jmethod.instructions.add(node) - if (settings.YoptInlinerEnabled) callsitePositions(node) = pos + if (settings.optInlinerEnabled) callsitePositions(node) = pos } final def invokedynamic(owner: String, name: String, desc: String) { jmethod.visitMethodInsn(Opcodes.INVOKEDYNAMIC, owner, name, desc) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index 2637d2105032..a708feb0a7b8 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -271,7 +271,7 @@ abstract class BTypes { // The InlineInfo is built from the classfile (not from the symbol) for all classes that are NOT // being compiled. For those classes, the info is only needed if the inliner is enabled, othewise // we can save the memory. - if (!compilerSettings.YoptInlinerEnabled) BTypes.EmptyInlineInfo + if (!compilerSettings.optInlinerEnabled) BTypes.EmptyInlineInfo else fromClassfileAttribute getOrElse fromClassfileWithoutAttribute } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index d10b6c8dbaf6..d83b4a1d856e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -514,7 +514,7 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { // enclosingTopLevelClass is being compiled. after flatten, all classes are considered top-level, // so `compiles` would return `false`. if (exitingPickler(currentRun.compiles(classSym))) buildFromSymbol // InlineInfo required for classes being compiled, we have to create the classfile attribute - else if (!compilerSettings.YoptInlinerEnabled) BTypes.EmptyInlineInfo // For other classes, we need the InlineInfo only inf the inliner is enabled. + else if (!compilerSettings.optInlinerEnabled) BTypes.EmptyInlineInfo // For other classes, we need the InlineInfo only inf the inliner is enabled. else { // For classes not being compiled, the InlineInfo is read from the classfile attribute. 
This // fixes an issue with mixed-in methods: the mixin phase enters mixin methods only to class diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala index 4287c24dc806..4ad4a9572844 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala @@ -117,15 +117,15 @@ object BackendReporting { def emitWarning(settings: ScalaSettings): Boolean = this match { case ClassNotFound(_, javaDefined) => - if (javaDefined) settings.YoptWarningNoInlineMixed - else settings.YoptWarningNoInlineMissingBytecode + if (javaDefined) settings.optWarningNoInlineMixed + else settings.optWarningNoInlineMissingBytecode case m @ MethodNotFound(_, _, _, missing) => if (m.isArrayMethod) false - else settings.YoptWarningNoInlineMissingBytecode || missing.exists(_.emitWarning(settings)) + else settings.optWarningNoInlineMissingBytecode || missing.exists(_.emitWarning(settings)) case FieldNotFound(_, _, _, missing) => - settings.YoptWarningNoInlineMissingBytecode || missing.exists(_.emitWarning(settings)) + settings.optWarningNoInlineMissingBytecode || missing.exists(_.emitWarning(settings)) } } @@ -146,7 +146,7 @@ object BackendReporting { def emitWarning(settings: ScalaSettings): Boolean = this match { case NoClassBTypeInfoMissingBytecode(cause) => cause.emitWarning(settings) - case NoClassBTypeInfoClassSymbolInfoFailedSI9111(_) => settings.YoptWarningNoInlineMissingBytecode + case NoClassBTypeInfoClassSymbolInfoFailedSI9111(_) => settings.optWarningNoInlineMissingBytecode } } @@ -179,7 +179,7 @@ object BackendReporting { case MethodInlineInfoIncomplete(_, _, _, cause) => cause.emitWarning(settings) case MethodInlineInfoMissing(_, _, _, Some(cause)) => cause.emitWarning(settings) - case MethodInlineInfoMissing(_, _, _, None) => settings.YoptWarningNoInlineMissingBytecode + case MethodInlineInfoMissing(_, _, _, None) => settings.optWarningNoInlineMissingBytecode case MethodInlineInfoError(_, _, _, cause) => cause.emitWarning(settings) } @@ -225,7 +225,7 @@ object BackendReporting { def emitWarning(settings: ScalaSettings): Boolean = this match { case _: IllegalAccessInstruction | _: MethodWithHandlerCalledOnNonEmptyStack | _: SynchronizedMethod | _: StrictfpMismatch | _: ResultingMethodTooLarge => - settings.YoptWarnings.contains(settings.YoptWarningsChoices.anyInlineFailed) + settings.optWarnings.contains(settings.optWarningsChoices.anyInlineFailed) case IllegalAccessCheckFailed(_, _, _, _, _, cause) => cause.emitWarning(settings) @@ -247,7 +247,7 @@ object BackendReporting { // but at the place where it's created (in findIllegalAccess) we don't have the necessary data (calleeName, calleeDescriptor). case object UnknownInvokeDynamicInstruction extends OptimizerWarning { override def toString = "The callee contains an InvokeDynamic instruction with an unknown bootstrap method (not a LambdaMetaFactory)." 
- def emitWarning(settings: ScalaSettings): Boolean = settings.YoptWarnings.contains(settings.YoptWarningsChoices.anyInlineFailed) + def emitWarning(settings: ScalaSettings): Boolean = settings.optWarnings.contains(settings.optWarningsChoices.anyInlineFailed) } /** @@ -259,7 +259,7 @@ object BackendReporting { override def emitWarning(settings: ScalaSettings): Boolean = this match { case RewriteClosureAccessCheckFailed(_, cause) => cause.emitWarning(settings) - case RewriteClosureIllegalAccess(_, _) => settings.YoptWarnings.contains(settings.YoptWarningsChoices.anyInlineFailed) + case RewriteClosureIllegalAccess(_, _) => settings.optWarnings.contains(settings.optWarningsChoices.anyInlineFailed) } override def toString: String = this match { @@ -291,10 +291,10 @@ object BackendReporting { } def emitWarning(settings: ScalaSettings): Boolean = this match { - case NoInlineInfoAttribute(_) => settings.YoptWarningNoInlineMissingScalaInlineInfoAttr + case NoInlineInfoAttribute(_) => settings.optWarningNoInlineMissingScalaInlineInfoAttr case ClassNotFoundWhenBuildingInlineInfoFromSymbol(cause) => cause.emitWarning(settings) - case ClassSymbolInfoFailureSI9111(_) => settings.YoptWarningNoInlineMissingBytecode - case UnknownScalaInlineInfoVersion(_, _) => settings.YoptWarningNoInlineMissingScalaInlineInfoAttr + case ClassSymbolInfoFailureSI9111(_) => settings.optWarningNoInlineMissingBytecode + case UnknownScalaInlineInfoVersion(_, _) => settings.optWarningNoInlineMissingScalaInlineInfoAttr } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala index 3520d5759941..02dc2b8edea2 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala @@ -225,19 +225,19 @@ abstract class GenBCode extends BCodeSyncAndTry { // add classes to the bytecode repo before building the call graph: the latter needs to // look up classes and methods in the code repo. - if (settings.YoptAddToBytecodeRepository) q2.asScala foreach { + if (settings.optAddToBytecodeRepository) q2.asScala foreach { case Item2(_, mirror, plain, bean, _) => if (mirror != null) byteCodeRepository.add(mirror, ByteCodeRepository.CompilationUnit) if (plain != null) byteCodeRepository.add(plain, ByteCodeRepository.CompilationUnit) if (bean != null) byteCodeRepository.add(bean, ByteCodeRepository.CompilationUnit) } - if (settings.YoptBuildCallGraph) q2.asScala foreach { item => + if (settings.optBuildCallGraph) q2.asScala foreach { item => // skip call graph for mirror / bean: wd don't inline into tem, and they are not used in the plain class if (item.plain != null) callGraph.addClass(item.plain) } - if (settings.YoptInlinerEnabled) + if (settings.optInlinerEnabled) bTypes.inliner.runInliner() - if (settings.YoptClosureInvocations) + if (settings.optClosureInvocations) closureOptimizer.rewriteClosureApplyInvocations() } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala index d241acf7b1cc..e8d1bf203a42 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala @@ -102,7 +102,7 @@ class CallGraph[BT <: BTypes](val btypes: BT) { // It is also used to get the stack height at the call site. 
val analyzer = { - if (compilerSettings.YoptNullnessTracking && AsmAnalyzer.sizeOKForNullness(methodNode)) { + if (compilerSettings.optNullnessTracking && AsmAnalyzer.sizeOKForNullness(methodNode)) { Some(new AsmAnalyzer(methodNode, definingClass.internalName, new NullnessAnalyzer(btypes))) } else if (AsmAnalyzer.sizeOKForBasicValue(methodNode)) { Some(new AsmAnalyzer(methodNode, definingClass.internalName)) @@ -273,7 +273,7 @@ class CallGraph[BT <: BTypes](val btypes: BT) { // callee, we only check there for the methodInlineInfo, we should find it there. calleeDeclarationClassBType.info.orThrow.inlineInfo.methodInfos.get(methodSignature) match { case Some(methodInlineInfo) => - val canInlineFromSource = compilerSettings.YoptInlineGlobal || calleeSource == CompilationUnit + val canInlineFromSource = compilerSettings.optInlineGlobal || calleeSource == CompilationUnit val isAbstract = BytecodeUtils.isAbstractMethod(calleeMethodNode) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala index 93dc40f318c0..7f9858286eae 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala @@ -358,7 +358,7 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) { val callee = bodyMethod.map({ case (bodyMethodNode, bodyMethodDeclClass) => val bodyDeclClassType = classBTypeFromParsedClassfile(bodyMethodDeclClass) - val canInlineFromSource = compilerSettings.YoptInlineGlobal || bodyMethodIsBeingCompiled + val canInlineFromSource = compilerSettings.optInlineGlobal || bodyMethodIsBeingCompiled Callee( callee = bodyMethodNode, calleeDeclarationClass = bodyDeclClassType, diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala index f35eaa45e927..9660d878eb26 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala @@ -35,7 +35,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { val warnings = inline(request) for (warning <- warnings) { - if ((callee.annotatedInline && btypes.compilerSettings.YoptWarningEmitAtInlineFailed) || warning.emitWarning(compilerSettings)) { + if ((callee.annotatedInline && btypes.compilerSettings.optWarningEmitAtInlineFailed) || warning.emitWarning(compilerSettings)) { val annotWarn = if (callee.annotatedInline) " is annotated @inline but" else "" val msg = s"${BackendReporting.methodSignature(callee.calleeDeclarationClass.internalName, callee.callee)}$annotWarn could not be inlined:\n$warning" backendReporting.inlinerWarning(request.callsite.callsitePosition, msg) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala index 6aaf9734d381..d39db4d3b121 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala @@ -43,16 +43,16 @@ class InlinerHeuristics[BT <: BTypes](val bTypes: BT) { inlineRequest(callsite) match { case Some(Right(req)) => requests += req case Some(Left(w)) => - if ((calleeAnnotatedInline && bTypes.compilerSettings.YoptWarningEmitAtInlineFailed) || w.emitWarning(compilerSettings)) { + if ((calleeAnnotatedInline && bTypes.compilerSettings.optWarningEmitAtInlineFailed) || w.emitWarning(compilerSettings)) { val annotWarn = if 
(calleeAnnotatedInline) " is annotated @inline but" else "" val msg = s"${BackendReporting.methodSignature(calleeDeclClass.internalName, callee)}$annotWarn could not be inlined:\n$w" backendReporting.inlinerWarning(callsite.callsitePosition, msg) } case None => - if (canInlineFromSource && calleeAnnotatedInline && !callsite.annotatedNoInline && bTypes.compilerSettings.YoptWarningEmitAtInlineFailed) { + if (canInlineFromSource && calleeAnnotatedInline && !callsite.annotatedNoInline && bTypes.compilerSettings.optWarningEmitAtInlineFailed) { // if the callsite is annotated @inline, we report an inline warning even if the underlying - // reason is, for example, mixed compilation (which has a separate -Yopt-warning flag). + // reason is, for example, mixed compilation (which has a separate -opt-warning flag). def initMsg = s"${BackendReporting.methodSignature(calleeDeclClass.internalName, callee)} is annotated @inline but cannot be inlined" def warnMsg = callsiteWarning.map(" Possible reason:\n" + _).getOrElse("") if (!safeToInline) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala index 4e1349257e85..17490862c82e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala @@ -191,7 +191,7 @@ class LocalOpt[BT <: BTypes](val btypes: BT) { * @return `true` if unreachable code was eliminated in some method, `false` otherwise. */ def methodOptimizations(clazz: ClassNode): Boolean = { - !compilerSettings.YoptNone && clazz.methods.asScala.foldLeft(false) { + !compilerSettings.optNone && clazz.methods.asScala.foldLeft(false) { case (changed, method) => methodOptimizations(method, clazz.name) || changed } } @@ -261,46 +261,46 @@ class LocalOpt[BT <: BTypes](val btypes: BT) { traceIfChanged("beforeMethodOpt") // NULLNESS OPTIMIZATIONS - val runNullness = compilerSettings.YoptNullnessTracking && requestNullness + val runNullness = compilerSettings.optNullnessTracking && requestNullness val nullnessOptChanged = runNullness && nullnessOptimizations(method, ownerClassName) traceIfChanged("nullness") // UNREACHABLE CODE // Both AliasingAnalyzer (used in copyProp) and ProdConsAnalyzer (used in eliminateStaleStores, // boxUnboxElimination) require not having unreachable instructions (null frames). 
- val runDCE = (compilerSettings.YoptUnreachableCode && (requestDCE || nullnessOptChanged)) || - compilerSettings.YoptBoxUnbox || - compilerSettings.YoptCopyPropagation + val runDCE = (compilerSettings.optUnreachableCode && (requestDCE || nullnessOptChanged)) || + compilerSettings.optBoxUnbox || + compilerSettings.optCopyPropagation val (codeRemoved, liveLabels) = if (runDCE) removeUnreachableCodeImpl(method, ownerClassName) else (false, Set.empty[LabelNode]) traceIfChanged("dce") // BOX-UNBOX - val runBoxUnbox = compilerSettings.YoptBoxUnbox && (requestBoxUnbox || nullnessOptChanged) + val runBoxUnbox = compilerSettings.optBoxUnbox && (requestBoxUnbox || nullnessOptChanged) val boxUnboxChanged = runBoxUnbox && boxUnboxElimination(method, ownerClassName) traceIfChanged("boxUnbox") // COPY PROPAGATION - val runCopyProp = compilerSettings.YoptCopyPropagation && (firstIteration || boxUnboxChanged) + val runCopyProp = compilerSettings.optCopyPropagation && (firstIteration || boxUnboxChanged) val copyPropChanged = runCopyProp && copyPropagation(method, ownerClassName) traceIfChanged("copyProp") // STALE STORES - val runStaleStores = compilerSettings.YoptCopyPropagation && (requestStaleStores || nullnessOptChanged || codeRemoved || boxUnboxChanged || copyPropChanged) + val runStaleStores = compilerSettings.optCopyPropagation && (requestStaleStores || nullnessOptChanged || codeRemoved || boxUnboxChanged || copyPropChanged) val storesRemoved = runStaleStores && eliminateStaleStores(method, ownerClassName) traceIfChanged("staleStores") // REDUNDANT CASTS - val runRedundantCasts = compilerSettings.YoptRedundantCasts && (firstIteration || boxUnboxChanged) + val runRedundantCasts = compilerSettings.optRedundantCasts && (firstIteration || boxUnboxChanged) val castRemoved = runRedundantCasts && eliminateRedundantCasts(method, ownerClassName) traceIfChanged("redundantCasts") // PUSH-POP - val runPushPop = compilerSettings.YoptCopyPropagation && (requestPushPop || firstIteration || storesRemoved || castRemoved) + val runPushPop = compilerSettings.optCopyPropagation && (requestPushPop || firstIteration || storesRemoved || castRemoved) val pushPopRemoved = runPushPop && eliminatePushPop(method, ownerClassName) traceIfChanged("pushPop") // STORE-LOAD PAIRS - val runStoreLoad = compilerSettings.YoptCopyPropagation && (requestStoreLoad || boxUnboxChanged || copyPropChanged || pushPopRemoved) + val runStoreLoad = compilerSettings.optCopyPropagation && (requestStoreLoad || boxUnboxChanged || copyPropChanged || pushPopRemoved) val storeLoadRemoved = runStoreLoad && eliminateStoreLoad(method) traceIfChanged("storeLoadPairs") @@ -312,7 +312,7 @@ class LocalOpt[BT <: BTypes](val btypes: BT) { // SIMPLIFY JUMPS // almost all of the above optimizations enable simplifying more jumps, so we just run it in every iteration - val runSimplifyJumps = compilerSettings.YoptSimplifyJumps + val runSimplifyJumps = compilerSettings.optSimplifyJumps val jumpsChanged = runSimplifyJumps && simplifyJumps(method) traceIfChanged("simplifyJumps") @@ -358,21 +358,21 @@ class LocalOpt[BT <: BTypes](val btypes: BT) { requestPushPop = true, requestStoreLoad = true, firstIteration = true) - if (compilerSettings.YoptUnreachableCode) unreachableCodeEliminated += method + if (compilerSettings.optUnreachableCode) unreachableCodeEliminated += method r } else (false, false) // (*) Removing stale local variable descriptors is required for correctness, see comment in `methodOptimizations` val localsRemoved = - if (compilerSettings.YoptCompactLocals) 
compactLocalVariables(method) // also removes unused + if (compilerSettings.optCompactLocals) compactLocalVariables(method) // also removes unused else if (requireEliminateUnusedLocals) removeUnusedLocalVariableNodes(method)() // (*) else false traceIfChanged("localVariables") - val lineNumbersRemoved = if (compilerSettings.YoptUnreachableCode) removeEmptyLineNumbers(method) else false + val lineNumbersRemoved = if (compilerSettings.optUnreachableCode) removeEmptyLineNumbers(method) else false traceIfChanged("lineNumbers") - val labelsRemoved = if (compilerSettings.YoptUnreachableCode) removeEmptyLabelNodes(method) else false + val labelsRemoved = if (compilerSettings.optUnreachableCode) removeEmptyLabelNodes(method) else false traceIfChanged("labels") // assert that local variable annotations are empty (we don't emit them) - otherwise we'd have diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 9a0d86a94df1..21a6850a43de 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -205,7 +205,7 @@ trait ScalaSettings extends AbsScalaSettings val exposeEmptyPackage = BooleanSetting ("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly() val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "method") - object YoptChoices extends MultiChoiceEnumeration { + object optChoices extends MultiChoiceEnumeration { val unreachableCode = Choice("unreachable-code", "Eliminate unreachable code, exception handlers guarding no instructions, redundant metadata (debug information, line numbers).") val simplifyJumps = Choice("simplify-jumps", "Simplify branching instructions, eliminate unnecessary ones.") val compactLocals = Choice("compact-locals", "Eliminate empty slots in the sequence of local variables.") @@ -217,8 +217,8 @@ trait ScalaSettings extends AbsScalaSettings val inlineProject = Choice("inline-project", "Inline only methods defined in the files being compiled. Enables unreachable-code.") val inlineGlobal = Choice("inline-global", "Inline methods from any source, including classfiles on the compile classpath. Enables unreachable-code.") - // note: unlike the other optimizer levels, "l:none" appears up in the `Yopt.value` set because it's not an expanding option (expandsTo is empty) - val lNone = Choice("l:none", "Disable optimizations. Takes precedence: `-Yopt:l:none,+box-unbox` / `-Yopt:l:none -Yopt:box-unbox` don't enable box-unbox.") + // note: unlike the other optimizer levels, "l:none" appears up in the `opt.value` set because it's not an expanding option (expandsTo is empty) + val lNone = Choice("l:none", "Disable optimizations. Takes precedence: `-opt:l:none,+box-unbox` / `-opt:l:none -opt:box-unbox` don't enable box-unbox.") private val defaultChoices = List(unreachableCode) val lDefault = Choice("l:default", "Enable default optimizations: "+ defaultChoices.mkString("", ",", "."), expandsTo = defaultChoices) @@ -234,37 +234,37 @@ trait ScalaSettings extends AbsScalaSettings } // We don't use the `default` parameter of `MultiChoiceSetting`: it specifies the default values - // when `-Yopt` is passed without explicit choices. When `-Yopt` is not explicitly specified, the - // set `Yopt.value` is empty. - val Yopt = MultiChoiceSetting( - name = "-Yopt", + // when `-opt` is passed without explicit choices. 
When `-opt` is not explicitly specified, the + // set `opt.value` is empty. + val opt = MultiChoiceSetting( + name = "-opt", helpArg = "optimization", descr = "Enable optimizations", - domain = YoptChoices) + domain = optChoices) - private def optEnabled(choice: YoptChoices.Choice) = { - !Yopt.contains(YoptChoices.lNone) && { - Yopt.contains(choice) || - !Yopt.isSetByUser && YoptChoices.lDefault.expandsTo.contains(choice) + private def optEnabled(choice: optChoices.Choice) = { + !opt.contains(optChoices.lNone) && { + opt.contains(choice) || + !opt.isSetByUser && optChoices.lDefault.expandsTo.contains(choice) } } - def YoptNone = Yopt.contains(YoptChoices.lNone) - def YoptUnreachableCode = optEnabled(YoptChoices.unreachableCode) - def YoptSimplifyJumps = optEnabled(YoptChoices.simplifyJumps) - def YoptCompactLocals = optEnabled(YoptChoices.compactLocals) - def YoptCopyPropagation = optEnabled(YoptChoices.copyPropagation) - def YoptRedundantCasts = optEnabled(YoptChoices.redundantCasts) - def YoptBoxUnbox = optEnabled(YoptChoices.boxUnbox) - def YoptNullnessTracking = optEnabled(YoptChoices.nullnessTracking) - def YoptClosureInvocations = optEnabled(YoptChoices.closureInvocations) + def optNone = opt.contains(optChoices.lNone) + def optUnreachableCode = optEnabled(optChoices.unreachableCode) + def optSimplifyJumps = optEnabled(optChoices.simplifyJumps) + def optCompactLocals = optEnabled(optChoices.compactLocals) + def optCopyPropagation = optEnabled(optChoices.copyPropagation) + def optRedundantCasts = optEnabled(optChoices.redundantCasts) + def optBoxUnbox = optEnabled(optChoices.boxUnbox) + def optNullnessTracking = optEnabled(optChoices.nullnessTracking) + def optClosureInvocations = optEnabled(optChoices.closureInvocations) - def YoptInlineProject = optEnabled(YoptChoices.inlineProject) - def YoptInlineGlobal = optEnabled(YoptChoices.inlineGlobal) - def YoptInlinerEnabled = YoptInlineProject || YoptInlineGlobal + def optInlineProject = optEnabled(optChoices.inlineProject) + def optInlineGlobal = optEnabled(optChoices.inlineGlobal) + def optInlinerEnabled = optInlineProject || optInlineGlobal - def YoptBuildCallGraph = YoptInlinerEnabled || YoptClosureInvocations - def YoptAddToBytecodeRepository = YoptBuildCallGraph || YoptInlinerEnabled || YoptClosureInvocations + def optBuildCallGraph = optInlinerEnabled || optClosureInvocations + def optAddToBytecodeRepository = optBuildCallGraph || optInlinerEnabled || optClosureInvocations val YoptInlineHeuristics = ChoiceSetting( name = "-Yopt-inline-heuristics", @@ -273,7 +273,7 @@ trait ScalaSettings extends AbsScalaSettings choices = List("at-inline-annotated", "everything", "default"), default = "default") - object YoptWarningsChoices extends MultiChoiceEnumeration { + object optWarningsChoices extends MultiChoiceEnumeration { val none = Choice("none" , "No optimizer warnings.") val atInlineFailedSummary = Choice("at-inline-failed-summary" , "One-line summary if there were @inline method calls that could not be inlined.") val atInlineFailed = Choice("at-inline-failed" , "A detailed warning for each @inline method call that could not be inlined.") @@ -283,24 +283,24 @@ trait ScalaSettings extends AbsScalaSettings val noInlineMissingScalaInlineInfoAttr = Choice("no-inline-missing-attribute", "Warn if an inlining decision cannot be made because a Scala classfile does not have a ScalaInlineInfo attribute.") } - val YoptWarnings = MultiChoiceSetting( - name = "-Yopt-warnings", + val optWarnings = MultiChoiceSetting( + name = "-opt-warnings", 
helpArg = "warning", descr = "Enable optimizer warnings", - domain = YoptWarningsChoices, - default = Some(List(YoptWarningsChoices.atInlineFailed.name))) + domain = optWarningsChoices, + default = Some(List(optWarningsChoices.atInlineFailed.name))) - def YoptWarningsSummaryOnly = YoptWarnings.value subsetOf Set(YoptWarningsChoices.none, YoptWarningsChoices.atInlineFailedSummary) + def optWarningsSummaryOnly = optWarnings.value subsetOf Set(optWarningsChoices.none, optWarningsChoices.atInlineFailedSummary) - def YoptWarningEmitAtInlineFailed = - !YoptWarnings.isSetByUser || - YoptWarnings.contains(YoptWarningsChoices.atInlineFailedSummary) || - YoptWarnings.contains(YoptWarningsChoices.atInlineFailed) || - YoptWarnings.contains(YoptWarningsChoices.anyInlineFailed) + def optWarningEmitAtInlineFailed = + !optWarnings.isSetByUser || + optWarnings.contains(optWarningsChoices.atInlineFailedSummary) || + optWarnings.contains(optWarningsChoices.atInlineFailed) || + optWarnings.contains(optWarningsChoices.anyInlineFailed) - def YoptWarningNoInlineMixed = YoptWarnings.contains(YoptWarningsChoices.noInlineMixed) - def YoptWarningNoInlineMissingBytecode = YoptWarnings.contains(YoptWarningsChoices.noInlineMissingBytecode) - def YoptWarningNoInlineMissingScalaInlineInfoAttr = YoptWarnings.contains(YoptWarningsChoices.noInlineMissingScalaInlineInfoAttr) + def optWarningNoInlineMixed = optWarnings.contains(optWarningsChoices.noInlineMixed) + def optWarningNoInlineMissingBytecode = optWarnings.contains(optWarningsChoices.noInlineMissingBytecode) + def optWarningNoInlineMissingScalaInlineInfoAttr = optWarnings.contains(optWarningsChoices.noInlineMissingScalaInlineInfoAttr) val YoptTrace = StringSetting("-Yopt-trace", "package/Class.method", "Trace the optimizer progress for a specific method.", "") @@ -340,8 +340,8 @@ trait ScalaSettings extends AbsScalaSettings val future = BooleanSetting("-Xfuture", "Turn on future language features.") enablingIfNotSetByUser futureSettings val optimise = BooleanSetting("-optimise", "Compiler flag for the optimizer in Scala 2.11") .withAbbreviation("-optimize") - .withDeprecationMessage("In 2.12, -optimise enables -Yopt:l:classpath. Check -Yopt:help for using the Scala 2.12 optimizer.") - .withPostSetHook(_ => Yopt.tryToSet(List(YoptChoices.lClasspath.name))) + .withDeprecationMessage("In 2.12, -optimise enables -opt:l:classpath. 
Check -opt:help for using the Scala 2.12 optimizer.") + .withPostSetHook(_ => opt.tryToSet(List(optChoices.lClasspath.name))) val Xexperimental = BooleanSetting("-Xexperimental", "Enable experimental extensions.") enablingIfNotSetByUser experimentalSettings // Feature extensions diff --git a/test/files/instrumented/inline-in-constructors.flags b/test/files/instrumented/inline-in-constructors.flags index 65caa3736e84..63535a7f4fcc 100644 --- a/test/files/instrumented/inline-in-constructors.flags +++ b/test/files/instrumented/inline-in-constructors.flags @@ -1 +1 @@ --Yopt:l:classpath +-opt:l:classpath diff --git a/test/files/jvm/bytecode-test-example.flags b/test/files/jvm/bytecode-test-example.flags index bc22511cffca..213d7425d189 100644 --- a/test/files/jvm/bytecode-test-example.flags +++ b/test/files/jvm/bytecode-test-example.flags @@ -1 +1 @@ --Yopt:l:none +-opt:l:none diff --git a/test/files/jvm/unreachable/Foo_1.flags b/test/files/jvm/unreachable/Foo_1.flags index ac9438e8d0ca..d0a417b3c888 100644 --- a/test/files/jvm/unreachable/Foo_1.flags +++ b/test/files/jvm/unreachable/Foo_1.flags @@ -1 +1 @@ --Yopt:l:default \ No newline at end of file +-opt:l:default \ No newline at end of file diff --git a/test/files/neg/inlineIndyLambdaPrivate.flags b/test/files/neg/inlineIndyLambdaPrivate.flags index 01b466bd8c56..b38f5b8411ac 100644 --- a/test/files/neg/inlineIndyLambdaPrivate.flags +++ b/test/files/neg/inlineIndyLambdaPrivate.flags @@ -1 +1 @@ --Yopt:l:classpath -Yopt-inline-heuristics:everything -Yopt-warnings:_ -Xfatal-warnings \ No newline at end of file +-opt:l:classpath -Yopt-inline-heuristics:everything -opt-warnings:_ -Xfatal-warnings \ No newline at end of file diff --git a/test/files/neg/inlineMaxSize.flags b/test/files/neg/inlineMaxSize.flags index 18b474e7970e..e765b66af2db 100644 --- a/test/files/neg/inlineMaxSize.flags +++ b/test/files/neg/inlineMaxSize.flags @@ -1 +1 @@ --Ydelambdafy:method -Yopt:l:classpath -Yopt-warnings -Xfatal-warnings \ No newline at end of file +-Ydelambdafy:method -opt:l:classpath -opt-warnings -Xfatal-warnings \ No newline at end of file diff --git a/test/files/neg/optimiseDeprecated.check b/test/files/neg/optimiseDeprecated.check index d51d48f02334..16ab3bbf1a91 100644 --- a/test/files/neg/optimiseDeprecated.check +++ b/test/files/neg/optimiseDeprecated.check @@ -1,4 +1,4 @@ -warning: -optimise is deprecated: In 2.12, -optimise enables -Yopt:l:classpath. Check -Yopt:help for using the Scala 2.12 optimizer. +warning: -optimise is deprecated: In 2.12, -optimise enables -opt:l:classpath. Check -opt:help for using the Scala 2.12 optimizer. error: No warnings can be incurred under -Xfatal-warnings. 
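The `optEnabled` helper in the ScalaSettings diff above encodes the precedence spelled out in the `l:none` description. A small standalone model of that resolution logic (illustrative only, not the compiler's actual types):

```scala
// Standalone sketch of the optEnabled rules: l:none wins over everything else;
// otherwise a choice is enabled if it was selected explicitly, or if -opt was
// not given at all and the choice belongs to the l:default expansion.
object OptResolution extends App {
  val defaultExpansion = Set("unreachable-code")

  def enabled(selected: Set[String], optSetByUser: Boolean, choice: String): Boolean =
    !selected("l:none") &&
      (selected(choice) || (!optSetByUser && defaultExpansion(choice)))

  // no -opt on the command line: only the default optimizations are active
  assert(enabled(Set.empty, optSetByUser = false, "unreachable-code"))
  // -opt:l:none,+box-unbox: l:none takes precedence, box-unbox stays off
  assert(!enabled(Set("l:none", "box-unbox"), optSetByUser = true, "box-unbox"))
  println("precedence rules hold")
}
```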
one warning found one error found diff --git a/test/files/neg/partestInvalidFlag.check b/test/files/neg/partestInvalidFlag.check index 812191dc2254..7a54e3aa43ad 100644 --- a/test/files/neg/partestInvalidFlag.check +++ b/test/files/neg/partestInvalidFlag.check @@ -1,4 +1,4 @@ error: bad option: '-badCompilerFlag' -error: bad options: -badCompilerFlag notAFlag -Yopt:badChoice -error: flags file may only contain compiler options, found: -badCompilerFlag notAFlag -Yopt:badChoice +error: bad options: -badCompilerFlag notAFlag -opt:badChoice +error: flags file may only contain compiler options, found: -badCompilerFlag notAFlag -opt:badChoice three errors found diff --git a/test/files/neg/partestInvalidFlag.flags b/test/files/neg/partestInvalidFlag.flags index 68884532b9f8..d45fd3180904 100644 --- a/test/files/neg/partestInvalidFlag.flags +++ b/test/files/neg/partestInvalidFlag.flags @@ -1 +1 @@ --badCompilerFlag notAFlag -Yopt:badChoice +-badCompilerFlag notAFlag -opt:badChoice diff --git a/test/files/neg/sealed-final-neg.flags b/test/files/neg/sealed-final-neg.flags index 673aca893135..f2f36c1771c5 100644 --- a/test/files/neg/sealed-final-neg.flags +++ b/test/files/neg/sealed-final-neg.flags @@ -1 +1 @@ --Xfatal-warnings -Yopt:l:project -Yopt-warnings \ No newline at end of file +-Xfatal-warnings -opt:l:project -opt-warnings \ No newline at end of file diff --git a/test/files/pos/inline-access-levels.flags b/test/files/pos/inline-access-levels.flags index 9af9168a20e7..faa7d2b186f5 100644 --- a/test/files/pos/inline-access-levels.flags +++ b/test/files/pos/inline-access-levels.flags @@ -1 +1 @@ --Yopt:l:classpath -Xfatal-warnings -Yopt-warnings +-opt:l:classpath -Xfatal-warnings -opt-warnings diff --git a/test/files/pos/t3234.flags b/test/files/pos/t3234.flags index 13878e00a93e..b88ec8709d6e 100644 --- a/test/files/pos/t3234.flags +++ b/test/files/pos/t3234.flags @@ -1 +1 @@ --Yopt:l:project -Yopt-warnings -Xfatal-warnings +-opt:l:project -opt-warnings -Xfatal-warnings diff --git a/test/files/pos/t3420.flags b/test/files/pos/t3420.flags index 397969bb1d57..5eea92d94acb 100644 --- a/test/files/pos/t3420.flags +++ b/test/files/pos/t3420.flags @@ -1 +1 @@ --Yopt-warnings -Yopt:l:classpath -Xfatal-warnings \ No newline at end of file +-opt-warnings -opt:l:classpath -Xfatal-warnings \ No newline at end of file diff --git a/test/files/pos/t4840.flags b/test/files/pos/t4840.flags index 422d6be43108..768ca4f13b95 100644 --- a/test/files/pos/t4840.flags +++ b/test/files/pos/t4840.flags @@ -1 +1 @@ --Yopt:l:classpath \ No newline at end of file +-opt:l:classpath \ No newline at end of file diff --git a/test/files/pos/t8410.flags b/test/files/pos/t8410.flags index c3065096cfbd..85e425754141 100644 --- a/test/files/pos/t8410.flags +++ b/test/files/pos/t8410.flags @@ -1 +1 @@ --Yopt:l:project -Xfatal-warnings -deprecation:false -Yopt-warnings:none +-opt:l:project -Xfatal-warnings -deprecation:false -opt-warnings:none diff --git a/test/files/pos/t9111-inliner-workaround.flags b/test/files/pos/t9111-inliner-workaround.flags index 422d6be43108..768ca4f13b95 100644 --- a/test/files/pos/t9111-inliner-workaround.flags +++ b/test/files/pos/t9111-inliner-workaround.flags @@ -1 +1 @@ --Yopt:l:classpath \ No newline at end of file +-opt:l:classpath \ No newline at end of file diff --git a/test/files/run/bcodeInlinerMixed.flags b/test/files/run/bcodeInlinerMixed.flags index 422d6be43108..768ca4f13b95 100644 --- a/test/files/run/bcodeInlinerMixed.flags +++ b/test/files/run/bcodeInlinerMixed.flags @@ -1 +1 @@ 
--Yopt:l:classpath \ No newline at end of file +-opt:l:classpath \ No newline at end of file diff --git a/test/files/run/classfile-format-51.scala b/test/files/run/classfile-format-51.scala index 3ef0640b840e..3a6c4861f193 100644 --- a/test/files/run/classfile-format-51.scala +++ b/test/files/run/classfile-format-51.scala @@ -16,7 +16,7 @@ import Opcodes._ // verify. So the test includes a version check that short-circuits the whole test // on JDK 6 object Test extends DirectTest { - override def extraSettings: String = "-Yopt:l:classpath -usejavacp -d " + testOutput.path + " -cp " + testOutput.path + override def extraSettings: String = "-opt:l:classpath -usejavacp -d " + testOutput.path + " -cp " + testOutput.path def generateClass() { val invokerClassName = "DynamicInvoker" diff --git a/test/files/run/classfile-format-52.scala b/test/files/run/classfile-format-52.scala index ebd0826303a4..03ceeb074fc6 100644 --- a/test/files/run/classfile-format-52.scala +++ b/test/files/run/classfile-format-52.scala @@ -13,7 +13,7 @@ import Opcodes._ // By its nature the test can only work on JDK 8+ because under JDK 7- the // interface won't verify. object Test extends DirectTest { - override def extraSettings: String = "-Yopt:l:classpath -usejavacp -d " + testOutput.path + " -cp " + testOutput.path + override def extraSettings: String = "-opt:l:classpath -usejavacp -d " + testOutput.path + " -cp " + testOutput.path def generateInterface() { val interfaceName = "HasDefaultMethod" diff --git a/test/files/run/finalvar.flags b/test/files/run/finalvar.flags index a8c7600a0322..c74d0cd32789 100644 --- a/test/files/run/finalvar.flags +++ b/test/files/run/finalvar.flags @@ -1 +1 @@ --Yoverride-vars -Yopt:l:project \ No newline at end of file +-Yoverride-vars -opt:l:project \ No newline at end of file diff --git a/test/files/run/icode-reader-dead-code.scala b/test/files/run/icode-reader-dead-code.scala index df31219dd500..f646455c8950 100644 --- a/test/files/run/icode-reader-dead-code.scala +++ b/test/files/run/icode-reader-dead-code.scala @@ -36,7 +36,7 @@ object Test extends DirectTest { // If inlining fails, the compiler will issue an inliner warning that is not present in the // check file - compileString(newCompiler("-usejavacp", "-Yopt:l:classpath"))(bCode) + compileString(newCompiler("-usejavacp", "-opt:l:classpath"))(bCode) } def readClass(file: String) = { diff --git a/test/files/run/noInlineUnknownIndy/Test.scala b/test/files/run/noInlineUnknownIndy/Test.scala index 8d2d20a3cd52..c6d227b6f2c2 100644 --- a/test/files/run/noInlineUnknownIndy/Test.scala +++ b/test/files/run/noInlineUnknownIndy/Test.scala @@ -11,7 +11,7 @@ object Test extends DirectTest { def compileCode(code: String) = { val classpath = List(sys.props("partest.lib"), testOutput.path) mkString sys.props("path.separator") - compileString(newCompiler("-cp", classpath, "-d", testOutput.path, "-Yopt:l:classpath", "-Yopt-inline-heuristics:everything", "-Yopt-warnings:_"))(code) + compileString(newCompiler("-cp", classpath, "-d", testOutput.path, "-opt:l:classpath", "-Yopt-inline-heuristics:everything", "-opt-warnings:_"))(code) } def show(): Unit = { diff --git a/test/files/run/nothingTypeDce.flags b/test/files/run/nothingTypeDce.flags index 8785c036f63d..475f6db67c5f 100644 --- a/test/files/run/nothingTypeDce.flags +++ b/test/files/run/nothingTypeDce.flags @@ -1 +1 @@ --Yopt:unreachable-code +-opt:unreachable-code diff --git a/test/files/run/nothingTypeDce.scala b/test/files/run/nothingTypeDce.scala index 5c3a0731fd47..cb1e59e45c27 100644 
--- a/test/files/run/nothingTypeDce.scala +++ b/test/files/run/nothingTypeDce.scala @@ -1,6 +1,6 @@ // See comment in BCodeBodyBuilder -// -Yopt:unreachable-code +// -opt:unreachable-code class C { // can't just emit a call to ???, that returns value of type Nothing$ (not Int). diff --git a/test/files/run/nothingTypeNoOpt.flags b/test/files/run/nothingTypeNoOpt.flags index bc22511cffca..213d7425d189 100644 --- a/test/files/run/nothingTypeNoOpt.flags +++ b/test/files/run/nothingTypeNoOpt.flags @@ -1 +1 @@ --Yopt:l:none +-opt:l:none diff --git a/test/files/run/nothingTypeNoOpt.scala b/test/files/run/nothingTypeNoOpt.scala index 33b20ba851a4..cc68364bf988 100644 --- a/test/files/run/nothingTypeNoOpt.scala +++ b/test/files/run/nothingTypeNoOpt.scala @@ -1,6 +1,6 @@ // See comment in BCodeBodyBuilder -// -target:jvm-1.6 -Yopt:l:none +// -target:jvm-1.6 -opt:l:none // target enables stack map frame generation class C { diff --git a/test/files/run/synchronized.flags b/test/files/run/synchronized.flags index 19c578e4ad0b..82eb1b9bdd6f 100644 --- a/test/files/run/synchronized.flags +++ b/test/files/run/synchronized.flags @@ -1 +1 @@ --Yopt:l:project +-opt:l:project diff --git a/test/files/run/t2106.flags b/test/files/run/t2106.flags index b0139685fa17..cde9a0c4e625 100644 --- a/test/files/run/t2106.flags +++ b/test/files/run/t2106.flags @@ -1 +1 @@ --Yopt-warnings -Yopt:l:classpath +-opt-warnings -opt:l:classpath diff --git a/test/files/run/t3509.flags b/test/files/run/t3509.flags index 422d6be43108..768ca4f13b95 100644 --- a/test/files/run/t3509.flags +++ b/test/files/run/t3509.flags @@ -1 +1 @@ --Yopt:l:classpath \ No newline at end of file +-opt:l:classpath \ No newline at end of file diff --git a/test/files/run/t3569.flags b/test/files/run/t3569.flags index 422d6be43108..768ca4f13b95 100644 --- a/test/files/run/t3569.flags +++ b/test/files/run/t3569.flags @@ -1 +1 @@ --Yopt:l:classpath \ No newline at end of file +-opt:l:classpath \ No newline at end of file diff --git a/test/files/run/t4285.flags b/test/files/run/t4285.flags index 422d6be43108..768ca4f13b95 100644 --- a/test/files/run/t4285.flags +++ b/test/files/run/t4285.flags @@ -1 +1 @@ --Yopt:l:classpath \ No newline at end of file +-opt:l:classpath \ No newline at end of file diff --git a/test/files/run/t4935.flags b/test/files/run/t4935.flags index 65caa3736e84..63535a7f4fcc 100644 --- a/test/files/run/t4935.flags +++ b/test/files/run/t4935.flags @@ -1 +1 @@ --Yopt:l:classpath +-opt:l:classpath diff --git a/test/files/run/t5789.scala b/test/files/run/t5789.scala index 677c9ca229b7..893294b56b1b 100644 --- a/test/files/run/t5789.scala +++ b/test/files/run/t5789.scala @@ -5,7 +5,7 @@ import scala.tools.partest.ReplTest object Test extends ReplTest { - override def extraSettings = "-Yopt:l:classpath" + override def extraSettings = "-opt:l:classpath" def code = """ val n = 2 () => n diff --git a/test/files/run/t6102.flags b/test/files/run/t6102.flags index db58cf3b4c45..7f938c550fea 100644 --- a/test/files/run/t6102.flags +++ b/test/files/run/t6102.flags @@ -1 +1 @@ --Yopt:l:classpath -Xfatal-warnings +-opt:l:classpath -Xfatal-warnings diff --git a/test/files/run/t6188.flags b/test/files/run/t6188.flags index 422d6be43108..768ca4f13b95 100644 --- a/test/files/run/t6188.flags +++ b/test/files/run/t6188.flags @@ -1 +1 @@ --Yopt:l:classpath \ No newline at end of file +-opt:l:classpath \ No newline at end of file diff --git a/test/files/run/t7407.flags b/test/files/run/t7407.flags index bc22511cffca..213d7425d189 100644 --- 
a/test/files/run/t7407.flags +++ b/test/files/run/t7407.flags @@ -1 +1 @@ --Yopt:l:none +-opt:l:none diff --git a/test/files/run/t7459b-optimize.flags b/test/files/run/t7459b-optimize.flags index 65caa3736e84..63535a7f4fcc 100644 --- a/test/files/run/t7459b-optimize.flags +++ b/test/files/run/t7459b-optimize.flags @@ -1 +1 @@ --Yopt:l:classpath +-opt:l:classpath diff --git a/test/files/run/t7582.flags b/test/files/run/t7582.flags index 1f45833effde..7e64669429c1 100644 --- a/test/files/run/t7582.flags +++ b/test/files/run/t7582.flags @@ -1 +1 @@ --Yopt:l:classpath -Yopt-warnings \ No newline at end of file +-opt:l:classpath -opt-warnings \ No newline at end of file diff --git a/test/files/run/t7582b.flags b/test/files/run/t7582b.flags index 1f45833effde..7e64669429c1 100644 --- a/test/files/run/t7582b.flags +++ b/test/files/run/t7582b.flags @@ -1 +1 @@ --Yopt:l:classpath -Yopt-warnings \ No newline at end of file +-opt:l:classpath -opt-warnings \ No newline at end of file diff --git a/test/files/run/t7852.flags b/test/files/run/t7852.flags index bc22511cffca..213d7425d189 100644 --- a/test/files/run/t7852.flags +++ b/test/files/run/t7852.flags @@ -1 +1 @@ --Yopt:l:none +-opt:l:none diff --git a/test/files/run/t8601-closure-elim.flags b/test/files/run/t8601-closure-elim.flags index 642187ff4c17..24396d4d02bf 100644 --- a/test/files/run/t8601-closure-elim.flags +++ b/test/files/run/t8601-closure-elim.flags @@ -1 +1 @@ --Ydelambdafy:method -Yopt:l:classpath +-Ydelambdafy:method -opt:l:classpath diff --git a/test/files/run/t8601.flags b/test/files/run/t8601.flags index 65caa3736e84..63535a7f4fcc 100644 --- a/test/files/run/t8601.flags +++ b/test/files/run/t8601.flags @@ -1 +1 @@ --Yopt:l:classpath +-opt:l:classpath diff --git a/test/files/run/t8601b.flags b/test/files/run/t8601b.flags index 65caa3736e84..63535a7f4fcc 100644 --- a/test/files/run/t8601b.flags +++ b/test/files/run/t8601b.flags @@ -1 +1 @@ --Yopt:l:classpath +-opt:l:classpath diff --git a/test/files/run/t8601c.flags b/test/files/run/t8601c.flags index 65caa3736e84..63535a7f4fcc 100644 --- a/test/files/run/t8601c.flags +++ b/test/files/run/t8601c.flags @@ -1 +1 @@ --Yopt:l:classpath +-opt:l:classpath diff --git a/test/files/run/t8601d.flags b/test/files/run/t8601d.flags index 65caa3736e84..63535a7f4fcc 100644 --- a/test/files/run/t8601d.flags +++ b/test/files/run/t8601d.flags @@ -1 +1 @@ --Yopt:l:classpath +-opt:l:classpath diff --git a/test/files/run/t8601e.flags b/test/files/run/t8601e.flags index 65caa3736e84..63535a7f4fcc 100644 --- a/test/files/run/t8601e.flags +++ b/test/files/run/t8601e.flags @@ -1 +1 @@ --Yopt:l:classpath +-opt:l:classpath diff --git a/test/files/run/t8925.flags b/test/files/run/t8925.flags index bc22511cffca..213d7425d189 100644 --- a/test/files/run/t8925.flags +++ b/test/files/run/t8925.flags @@ -1 +1 @@ --Yopt:l:none +-opt:l:none diff --git a/test/files/run/t9003.flags b/test/files/run/t9003.flags index 65caa3736e84..63535a7f4fcc 100644 --- a/test/files/run/t9003.flags +++ b/test/files/run/t9003.flags @@ -1 +1 @@ --Yopt:l:classpath +-opt:l:classpath diff --git a/test/files/run/t9403.flags b/test/files/run/t9403.flags index 65caa3736e84..63535a7f4fcc 100644 --- a/test/files/run/t9403.flags +++ b/test/files/run/t9403.flags @@ -1 +1 @@ --Yopt:l:classpath +-opt:l:classpath diff --git a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala index 0144fa7366a3..58df4691e473 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala +++ 
b/test/junit/scala/tools/nsc/backend/jvm/BTypesTest.scala @@ -11,7 +11,7 @@ import scala.tools.testing.BytecodeTesting @RunWith(classOf[JUnit4]) class BTypesTest extends BytecodeTesting { - override def compilerArgs = "-Yopt:l:none" + override def compilerArgs = "-opt:l:none" import compiler.global locally { new global.Run() // initializes some of the compiler diff --git a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala index 7fdfb315779d..f835e9b14010 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala @@ -12,7 +12,7 @@ import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class DirectCompileTest extends BytecodeTesting { - override def compilerArgs = "-Yopt:l:method" + override def compilerArgs = "-opt:l:method" import compiler._ @Test diff --git a/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala index 003162c1ad41..8cf6a655d232 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala @@ -11,7 +11,7 @@ import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class OptimizedBytecodeTest extends BytecodeTesting { - override def compilerArgs = "-Yopt:l:classpath -Yopt-warnings" + override def compilerArgs = "-opt:l:classpath -opt-warnings" import compiler._ @Test diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala index b0a86dfd28b3..1de5aa28ca3d 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala @@ -17,7 +17,7 @@ import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class NullnessAnalyzerTest extends BytecodeTesting { - override def compilerArgs = "-Yopt:l:none" + override def compilerArgs = "-opt:l:none" import compiler._ import global.genBCode.bTypes.backendUtils._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala index fc2678523764..8cb04822de68 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerTest.scala @@ -16,7 +16,7 @@ import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class ProdConsAnalyzerTest extends BytecodeTesting { - override def compilerArgs = "-Yopt:l:none" + override def compilerArgs = "-opt:l:none" import compiler._ import global.genBCode.bTypes.backendUtils._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala index 025248ac2880..33ca6a5fd25a 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/AnalyzerTest.scala @@ -15,7 +15,7 @@ import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class AnalyzerTest extends BytecodeTesting { - override def compilerArgs = "-Yopt:l:none" + override def compilerArgs = "-opt:l:none" import compiler._ @Test diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala 
b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala index e7aea71e7215..c23c60f7ad05 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala @@ -14,7 +14,7 @@ import scala.tools.testing.BytecodeTesting @RunWith(classOf[JUnit4]) class BTypesFromClassfileTest extends BytecodeTesting { // inliner enabled -> inlineInfos are collected (and compared) in ClassBTypes - override def compilerArgs = "-Yopt:inline-global" + override def compilerArgs = "-opt:inline-global" import compiler.global._ import definitions._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala index 630416a92582..80fbba133e7e 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala @@ -18,7 +18,7 @@ import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class CallGraphTest extends BytecodeTesting { - override def compilerArgs = "-Yopt:inline-global -Yopt-warnings" + override def compilerArgs = "-opt:inline-global -opt-warnings" import compiler._ import global.genBCode.bTypes val notPerRun: List[Clearable] = List( diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala index 218b02f82259..2da2ecdb723a 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala @@ -13,7 +13,7 @@ import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class ClosureOptimizerTest extends BytecodeTesting { - override def compilerArgs = "-Yopt:l:classpath -Yopt-warnings:_" + override def compilerArgs = "-opt:l:classpath -opt-warnings:_" import compiler._ @Test diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala index c3748a05bd46..6f54f170b59a 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala @@ -15,8 +15,8 @@ import scala.tools.testing.ClearAfterClass class CompactLocalVariablesTest extends ClearAfterClass { // recurse-unreachable-jumps is required for eliminating catch blocks, in the first dce round they // are still live.only after eliminating the empty handler the catch blocks become unreachable. 
- val methodOptCompiler = cached("methodOptCompiler", () => newCompiler(extraArgs = "-Yopt:unreachable-code,compact-locals")) - val noCompactVarsCompiler = cached("noCompactVarsCompiler", () => newCompiler(extraArgs = "-Yopt:unreachable-code")) + val methodOptCompiler = cached("methodOptCompiler", () => newCompiler(extraArgs = "-opt:unreachable-code,compact-locals")) + val noCompactVarsCompiler = cached("noCompactVarsCompiler", () => newCompiler(extraArgs = "-opt:unreachable-code")) @Test def compactUnused(): Unit = { diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala index 3324058cb762..77215304fdd6 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyExceptionHandlersTest.scala @@ -15,10 +15,10 @@ import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class EmptyExceptionHandlersTest extends BytecodeTesting { - override def compilerArgs = "-Yopt:unreachable-code" + override def compilerArgs = "-opt:unreachable-code" def dceCompiler = compiler - val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:none")) + val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-opt:l:none")) val exceptionDescriptor = "java/lang/Exception" diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala index e45d7139a3be..a691d634718f 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala @@ -16,7 +16,7 @@ class InlineInfoTest extends BytecodeTesting { import compiler.global import global.genBCode.bTypes - override def compilerArgs = "-Yopt:l:classpath" + override def compilerArgs = "-opt:l:classpath" def notPerRun: List[Clearable] = List( bTypes.classBTypeFromInternalName, diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala index f0913f36318d..6161dc7b7324 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala @@ -11,12 +11,12 @@ import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class InlineWarningTest extends BytecodeTesting { - def optCp = "-Yopt:l:classpath" - override def compilerArgs = s"$optCp -Yopt-warnings" + def optCp = "-opt:l:classpath" + override def compilerArgs = s"$optCp -opt-warnings" import compiler._ - val compilerWarnAll = cached("compilerWarnAll", () => newCompiler(extraArgs = s"$optCp -Yopt-warnings:_")) + val compilerWarnAll = cached("compilerWarnAll", () => newCompiler(extraArgs = s"$optCp -opt-warnings:_")) @Test def nonFinal(): Unit = { @@ -87,10 +87,10 @@ class InlineWarningTest extends BytecodeTesting { assert(c == 1, c) // no warnings here - newCompiler(extraArgs = s"$optCp -Yopt-warnings:none").compileToBytes(scalaCode, List((javaCode, "A.java"))) + newCompiler(extraArgs = s"$optCp -opt-warnings:none").compileToBytes(scalaCode, List((javaCode, "A.java"))) c = 0 - newCompiler(extraArgs = s"$optCp -Yopt-warnings:no-inline-mixed").compileToBytes(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.exists(i.msg contains _)}) + newCompiler(extraArgs = s"$optCp 
-opt-warnings:no-inline-mixed").compileToBytes(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.exists(i.msg contains _)}) assert(c == 2, c) } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala index c2ada8afec1f..3cb1fbdae6ff 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala @@ -14,7 +14,7 @@ import scala.tools.testing.BytecodeTesting @RunWith(classOf[JUnit4]) class InlinerIllegalAccessTest extends BytecodeTesting { - override def compilerArgs = "-Yopt:l:none" + override def compilerArgs = "-opt:l:none" import compiler._ import global.genBCode.bTypes._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala index b196f1a9ba4d..a2513cacdc47 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala @@ -10,7 +10,7 @@ import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class InlinerSeparateCompilationTest { - val args = "-Yopt:l:classpath" + val args = "-opt:l:classpath" @Test def inlnieMixedinMember(): Unit = { @@ -32,7 +32,7 @@ class InlinerSeparateCompilationTest { """.stripMargin val warn = "T::f()I is annotated @inline but cannot be inlined: the method is not final and may be overridden" - val List(c, o, oMod, t) = compileClassesSeparately(List(codeA, codeB), args + " -Yopt-warnings", _.msg contains warn) + val List(c, o, oMod, t) = compileClassesSeparately(List(codeA, codeB), args + " -opt-warnings", _.msg contains warn) assertInvoke(getMethod(c, "t1"), "T", "f") assertNoInvoke(getMethod(c, "t2")) assertNoInvoke(getMethod(c, "t3")) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index fd020c7d9390..f1e3e9b9e145 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -19,9 +19,9 @@ import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class InlinerTest extends BytecodeTesting { - override def compilerArgs = "-Yopt:l:classpath -Yopt-warnings" + override def compilerArgs = "-opt:l:classpath -opt-warnings" - val inlineOnlyCompiler = cached("inlineOnlyCompiler", () => newCompiler(extraArgs = "-Yopt:inline-project")) + val inlineOnlyCompiler = cached("inlineOnlyCompiler", () => newCompiler(extraArgs = "-opt:inline-project")) import compiler._ import global.genBCode.bTypes @@ -825,7 +825,7 @@ class InlinerTest extends BytecodeTesting { var c = 0 - newCompiler(extraArgs = compilerArgs + " -Yopt-warnings:_").compileClasses( + newCompiler(extraArgs = compilerArgs + " -opt-warnings:_").compileClasses( scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; i.msg contains warn}) @@ -1459,7 +1459,7 @@ class InlinerTest extends BytecodeTesting { val codeA = "final class A { @inline def f = 1 }" val codeB = "class B { def t(a: A) = a.f }" // tests that no warning is emitted - val List(a, b) = compileClassesSeparately(List(codeA, codeB), extraArgs = "-Yopt:l:project -Yopt-warnings") + val List(a, b) = compileClassesSeparately(List(codeA, codeB), extraArgs = "-opt:l:project -opt-warnings") 
assertInvoke(getMethod(b, "t"), "A", "f") } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala index fa76c0d93086..9675e2e4456f 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala @@ -17,7 +17,7 @@ import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class MethodLevelOptsTest extends BytecodeTesting { - override def compilerArgs = "-Yopt:l:method" + override def compilerArgs = "-opt:l:method" import compiler._ def wrapInDefault(code: Instruction*) = List(Label(0), LineNumber(1, Label(0))) ::: code.toList ::: List(Label(1)) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala index 5bd285f97f7d..4791a29bfbb7 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala @@ -14,7 +14,7 @@ import scala.tools.testing.BytecodeTesting @RunWith(classOf[JUnit4]) class ScalaInlineInfoTest extends BytecodeTesting { - override def compilerArgs = "-Yopt:l:none" + override def compilerArgs = "-opt:l:none" import compiler._ def inlineInfo(c: ClassNode): InlineInfo = c.attrs.asScala.collect({ case a: InlineInfoAttribute => a.inlineInfo }).head diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala index 63bbcc396bb6..68ce61b48aa5 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala @@ -17,9 +17,9 @@ import scala.tools.testing.ClearAfterClass class UnreachableCodeTest extends ClearAfterClass { // jvm-1.6 enables emitting stack map frames, which impacts the code generation wrt dead basic blocks, // see comment in BCodeBodyBuilder - val methodOptCompiler = cached("methodOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:method")) - val dceCompiler = cached("dceCompiler", () => newCompiler(extraArgs = "-Yopt:unreachable-code")) - val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-Yopt:l:none")) + val methodOptCompiler = cached("methodOptCompiler", () => newCompiler(extraArgs = "-opt:l:method")) + val dceCompiler = cached("dceCompiler", () => newCompiler(extraArgs = "-opt:unreachable-code")) + val noOptCompiler = cached("noOptCompiler", () => newCompiler(extraArgs = "-opt:l:none")) def assertEliminateDead(code: (Instruction, Boolean)*): Unit = { val method = genMethod()(code.map(_._1): _*) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala index c9c98b403b56..7ca09ff41d4b 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnusedLocalVariablesTest.scala @@ -14,7 +14,7 @@ import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class UnusedLocalVariablesTest extends BytecodeTesting { - override def compilerArgs = "-Yopt:unreachable-code" + override def compilerArgs = "-opt:unreachable-code" import compiler._ @Test diff --git a/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala b/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala 
index b6e8d4fbf2d7..de18dec34434 100644 --- a/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala +++ b/test/junit/scala/tools/nsc/transform/patmat/PatmatBytecodeTest.scala @@ -12,7 +12,7 @@ import scala.tools.testing.BytecodeTesting._ @RunWith(classOf[JUnit4]) class PatmatBytecodeTest extends BytecodeTesting { - val optCompiler = cached("optCompiler", () => newCompiler(extraArgs = "-Yopt:l:project")) + val optCompiler = cached("optCompiler", () => newCompiler(extraArgs = "-opt:l:project")) import compiler._ diff --git a/test/pending/jvm/constant-optimization/Foo_1.flags b/test/pending/jvm/constant-optimization/Foo_1.flags index 9691c0985d52..432f01c02d30 100644 --- a/test/pending/jvm/constant-optimization/Foo_1.flags +++ b/test/pending/jvm/constant-optimization/Foo_1.flags @@ -1 +1 @@ -// constant otimization not there yet, -Yopt:nullness-tracking not enough. +// constant otimization not there yet, -opt:nullness-tracking not enough. From 54dff8120786501f0e273166869b73b5f3accd17 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 25 May 2016 11:55:47 -0700 Subject: [PATCH 0086/2793] SI-9382 Privatize enhanced x in Tuple2Zipped.Ops Probably there should be an Abide rule to avoid leaking the "underlying value" of a value class. The spec or SIP defines "underlying type" but doesn't mention the underlying value. The argument for concealing the member is that it is redundant and makes autocompletion results harder to read. Also, possibly an additional implicit might want to add a member so-named. --- src/library/scala/runtime/Tuple2Zipped.scala | 2 +- src/library/scala/runtime/Tuple3Zipped.scala | 2 +- test/files/neg/t9382.check | 10 ++++++++++ test/files/neg/t9382.scala | 6 ++++++ 4 files changed, 18 insertions(+), 2 deletions(-) create mode 100644 test/files/neg/t9382.check create mode 100644 test/files/neg/t9382.scala diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala index 15331d416043..41ed9644aa28 100644 --- a/src/library/scala/runtime/Tuple2Zipped.scala +++ b/src/library/scala/runtime/Tuple2Zipped.scala @@ -115,7 +115,7 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 } object Tuple2Zipped { - final class Ops[T1, T2](val x: (T1, T2)) extends AnyVal { + final class Ops[T1, T2](private val x: (T1, T2)) extends AnyVal { def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], That] (implicit w1: T1 <:< CC1[El1], w2: T2 <:< CC2[El2], diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala index 62bee5ff0e66..89f401ea8000 100644 --- a/src/library/scala/runtime/Tuple3Zipped.scala +++ b/src/library/scala/runtime/Tuple3Zipped.scala @@ -123,7 +123,7 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers } object Tuple3Zipped { - final class Ops[T1, T2, T3](val x: (T1, T2, T3)) extends AnyVal { + final class Ops[T1, T2, T3](private val x: (T1, T2, T3)) extends AnyVal { def invert[El1, CC1[X] <: TraversableOnce[X], El2, CC2[X] <: TraversableOnce[X], El3, CC3[X] <: TraversableOnce[X], That] (implicit w1: T1 <:< CC1[El1], w2: T2 <:< CC2[El2], diff --git a/test/files/neg/t9382.check b/test/files/neg/t9382.check new file mode 100644 index 000000000000..93bf48926ab0 --- /dev/null +++ b/test/files/neg/t9382.check @@ -0,0 +1,10 @@ +t9382.scala:3: error: value x is not a member of (List[Int], List[Int]) + def f = (List(1,2,3), List(4,5,6)).x + ^ +t9382.scala:4: error: value x is not a member of 
(List[Int], List[Int], List[Int]) + def g = (List(1,2,3), List(4,5,6), List(7,8,9)).x + ^ +t9382.scala:5: error: value x is not a member of (Int, Int) + def huh = (1,2).x + ^ +three errors found diff --git a/test/files/neg/t9382.scala b/test/files/neg/t9382.scala new file mode 100644 index 000000000000..19703525e4a7 --- /dev/null +++ b/test/files/neg/t9382.scala @@ -0,0 +1,6 @@ + +trait T { + def f = (List(1,2,3), List(4,5,6)).x + def g = (List(1,2,3), List(4,5,6), List(7,8,9)).x + def huh = (1,2).x +} From 3873fcfcbcc6c7f0c1660c18e45b265170580546 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 24 May 2016 14:09:48 +0100 Subject: [PATCH 0087/2793] Fully qualify types in REPL generated code --- .../scala/tools/nsc/interpreter/IMain.scala | 6 +- .../tools/nsc/interpreter/ReplStrings.scala | 2 +- .../tools/nsc/interpreter/Scripted.scala | 26 +- ...repl-no-imports-no-predef-classbased.check | 23 ++ ...repl-no-imports-no-predef-classbased.scala | 19 + .../run/repl-no-imports-no-predef-power.check | 29 ++ .../run/repl-no-imports-no-predef-power.scala | 21 + .../files/run/repl-no-imports-no-predef.check | 360 ++++++++++++++++++ .../files/run/repl-no-imports-no-predef.scala | 108 ++++++ test/files/run/repl-parens.scala | 10 +- test/files/run/t7747-repl.check | 6 +- test/files/run/t7747-repl.scala | 6 +- .../tools/nsc/interpreter/ScriptedTest.scala | 19 + 13 files changed, 604 insertions(+), 31 deletions(-) create mode 100644 test/files/run/repl-no-imports-no-predef-classbased.check create mode 100644 test/files/run/repl-no-imports-no-predef-classbased.scala create mode 100644 test/files/run/repl-no-imports-no-predef-power.check create mode 100644 test/files/run/repl-no-imports-no-predef-power.scala create mode 100644 test/files/run/repl-no-imports-no-predef.check create mode 100644 test/files/run/repl-no-imports-no-predef.scala diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index a42a12a6fc5c..a77e6f45f8fb 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -614,7 +614,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends bindRep.compile(s""" |object ${bindRep.evalName} { | var value: $boundType = _ - | def set(x: Any) = value = x.asInstanceOf[$boundType] + | def set(x: _root_.scala.Any) = value = x.asInstanceOf[$boundType] |} """.stripMargin ) @@ -882,7 +882,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } class ClassBasedWrapper extends Wrapper { - def preambleHeader = "class %s extends Serializable { " + def preambleHeader = "class %s extends _root_.java.io.Serializable { " /** Adds an object that instantiates the outer wrapping class. 
*/ def postamble = s""" @@ -915,7 +915,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends val preamble = """ |object %s { | %s - | lazy val %s: String = %s { + | lazy val %s: _root_.java.lang.String = %s { | %s | ("" """.stripMargin.format( diff --git a/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala index bf7508cb4e72..87ca05600c1f 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala @@ -34,7 +34,7 @@ trait ReplStrings { "\"" + string2code(str) + "\"" def any2stringOf(x: Any, maxlen: Int) = - "scala.runtime.ScalaRunTime.replStringOf(%s, %s)".format(x, maxlen) + "_root_.scala.runtime.ScalaRunTime.replStringOf(%s, %s)".format(x, maxlen) // no escaped or nested quotes private[this] val inquotes = """(['"])(.*?)\1""".r diff --git a/src/repl/scala/tools/nsc/interpreter/Scripted.scala b/src/repl/scala/tools/nsc/interpreter/Scripted.scala index 25d359bc0e1a..6aef486957d5 100644 --- a/src/repl/scala/tools/nsc/interpreter/Scripted.scala +++ b/src/repl/scala/tools/nsc/interpreter/Scripted.scala @@ -42,7 +42,7 @@ class Scripted(@BeanProperty val factory: ScriptEngineFactory, settings: Setting val adjusted = contextual.map { n => val valname = n.decodedName s"""def `$valname` = $ctx.`$valname` - def `${valname}_=`(x: Object) = $ctx.`$valname` = x""" + def `${valname}_=`(x: _root_.java.lang.Object) = $ctx.`$valname` = x""" }.mkString(preamble, "\n", "\n") ComputedImports(header, adjusted, trailer, path) } @@ -87,30 +87,32 @@ class Scripted(@BeanProperty val factory: ScriptEngineFactory, settings: Setting if (intp.isInitializeComplete) { // compile the dynamic ScriptContext object holder - scriptContextRep compile s""" - |import javax.script._ + val ctxRes = scriptContextRep compile s""" + |import _root_.javax.script._ |object ${scriptContextRep.evalName} { | var value: ScriptContext = _ - | def set(x: Any) = value = x.asInstanceOf[ScriptContext] + | def set(x: _root_.scala.Any) = value = x.asInstanceOf[ScriptContext] |} """.stripMargin + if (!ctxRes) throw new ScriptException("Failed to compile ctx") dynamicContext = getContext // Bridge dynamic references and script context - intp compileString s""" + val dynRes = intp compileString s""" |package scala.tools.nsc.interpreter - |import language.dynamics - |import javax.script._, ScriptContext.ENGINE_SCOPE - |object dynamicBindings extends Dynamic { + |import _root_.scala.language.dynamics + |import _root_.javax.script._, ScriptContext.ENGINE_SCOPE + |object dynamicBindings extends _root_.scala.Dynamic { | def context: ScriptContext = ${ scriptContextRep.evalPath }.value | // $ctx.x retrieves the attribute x - | def selectDynamic(field: String): Object = context.getAttribute(field) + | def selectDynamic(field: _root_.java.lang.String): _root_.java.lang.Object = context.getAttribute(field) | // $ctx.x = v - | def updateDynamic(field: String)(value: Object) = context.setAttribute(field, value, ENGINE_SCOPE) + | def updateDynamic(field: _root_.java.lang.String)(value: _root_.java.lang.Object) = context.setAttribute(field, value, ENGINE_SCOPE) |} |""".stripMargin + if (!dynRes) throw new ScriptException("Failed to compile dynamicBindings") intp beQuietDuring { - intp interpret s"val $ctx: scala.tools.nsc.interpreter.dynamicBindings.type = scala.tools.nsc.interpreter.dynamicBindings" + intp interpret s"val $ctx: _root_.scala.tools.nsc.interpreter.dynamicBindings.type = 
_root_.scala.tools.nsc.interpreter.dynamicBindings" intp bind ("$engine" -> (this: ScriptEngine with Compilable)) } } @@ -292,7 +294,7 @@ object Scripted { case _ => null } - def getProgram(statements: String*): String = statements.mkString("object Main extends App {\n\t", "\n\t", "\n}") + def getProgram(statements: String*): String = statements.mkString("object Main extends _root_.scala.App {\n\t", "\n\t", "\n}") def getScriptEngine: ScriptEngine = { val settings = new Settings() diff --git a/test/files/run/repl-no-imports-no-predef-classbased.check b/test/files/run/repl-no-imports-no-predef-classbased.check new file mode 100644 index 000000000000..a796600061c4 --- /dev/null +++ b/test/files/run/repl-no-imports-no-predef-classbased.check @@ -0,0 +1,23 @@ + +scala> case class K(s: java.lang.String) +defined class K + +scala> class C { implicit val k: K = K("OK?"); override def toString = "C(" + k.toString + ")" } +defined class C + +scala> val c = new C +c: C = C(K(OK?)) + +scala> import c.k +import c.k + +scala> scala.Predef.implicitly[K] +res0: K = K(OK?) + +scala> val k = 42 +k: Int = 42 + +scala> k // was K(OK?) +res1: Int = 42 + +scala> :quit diff --git a/test/files/run/repl-no-imports-no-predef-classbased.scala b/test/files/run/repl-no-imports-no-predef-classbased.scala new file mode 100644 index 000000000000..86bd07b2f2ed --- /dev/null +++ b/test/files/run/repl-no-imports-no-predef-classbased.scala @@ -0,0 +1,19 @@ +object Test extends scala.tools.partest.ReplTest { + + override def transformSettings(settings: scala.tools.nsc.Settings) = { + settings.noimports.value = true + settings.nopredef.value = true + settings.Yreplclassbased.value = true + settings + } + + def code = """ +case class K(s: java.lang.String) +class C { implicit val k: K = K("OK?"); override def toString = "C(" + k.toString + ")" } +val c = new C +import c.k +scala.Predef.implicitly[K] +val k = 42 +k // was K(OK?) +""" +} diff --git a/test/files/run/repl-no-imports-no-predef-power.check b/test/files/run/repl-no-imports-no-predef-power.check new file mode 100644 index 000000000000..0d4a30b8e3b3 --- /dev/null +++ b/test/files/run/repl-no-imports-no-predef-power.check @@ -0,0 +1,29 @@ + +scala> :power +Power mode enabled. :phase is at typer. +import scala.tools.nsc._, intp.global._, definitions._ +Try :help or completions for vals._ and power._ + +scala> // guarding against "error: reference to global is ambiguous" + +scala> global.emptyValDef // "it is imported twice in the same scope by ..." 
+warning: there was one deprecation warning; re-run with -deprecation for details +res0: $r.global.noSelfType.type = private val _ = _ + +scala> val tp = ArrayClass[scala.util.Random] // magic with tags +warning: there was one feature warning; re-run with -feature for details +tp: $r.global.Type = Array[scala.util.Random] + +scala> tp.memberType(Array_apply) // evidence +res1: $r.global.Type = (i: Int)scala.util.Random + +scala> val m = LIT(10) // treedsl +m: $r.treedsl.global.Literal = 10 + +scala> typed(m).tpe // typed is in scope +res2: $r.treedsl.global.Type = Int(10) + +scala> """escaping is hard, m'kah""" +res3: String = escaping is hard, m'kah + +scala> :quit diff --git a/test/files/run/repl-no-imports-no-predef-power.scala b/test/files/run/repl-no-imports-no-predef-power.scala new file mode 100644 index 000000000000..24d4dceef288 --- /dev/null +++ b/test/files/run/repl-no-imports-no-predef-power.scala @@ -0,0 +1,21 @@ +object Test extends scala.tools.partest.ReplTest { + + override def transformSettings(settings: scala.tools.nsc.Settings) = { + settings.noimports.value = true + settings.nopredef.value = true + settings + } + + def tripleQuote(s: String) = "\"\"\"" + s + "\"\"\"" + + def code = s""" +:power +// guarding against "error: reference to global is ambiguous" +global.emptyValDef // "it is imported twice in the same scope by ..." +val tp = ArrayClass[scala.util.Random] // magic with tags +tp.memberType(Array_apply) // evidence +val m = LIT(10) // treedsl +typed(m).tpe // typed is in scope +${tripleQuote("escaping is hard, m'kah")} + """.trim +} diff --git a/test/files/run/repl-no-imports-no-predef.check b/test/files/run/repl-no-imports-no-predef.check new file mode 100644 index 000000000000..c2c8d21c0a2c --- /dev/null +++ b/test/files/run/repl-no-imports-no-predef.check @@ -0,0 +1,360 @@ + +scala> 1 +res0: Int = 1 + +scala> 1.0 +res1: Double = 1.0 + +scala> () + +scala> "abc" +res3: String = abc + +scala> (1, 2) +res4: (Int, Int) = (1,2) + +scala> + +scala> { import scala.Predef.ArrowAssoc; 1 -> 2 } +res5: (Int, Int) = (1,2) + +scala> { import scala.Predef.ArrowAssoc; 1 → 2 } +res6: (Int, Int) = (1,2) + +scala> 1 -> 2 +:12: error: value -> is not a member of Int + 1 -> 2 + ^ + +scala> 1 → 2 +:12: error: value → is not a member of Int + 1 → 2 + ^ + +scala> + +scala> val answer = 42 +answer: Int = 42 + +scala> { import scala.StringContext; s"answer: $answer" } +res9: String = answer: 42 + +scala> s"answer: $answer" +:13: error: not found: value StringContext + s"answer: $answer" + ^ + +scala> + +scala> "abc" + true +res11: String = abctrue + +scala> + +scala> { import scala.Predef.any2stringadd; true + "abc" } +res12: String = trueabc + +scala> true + "abc" +:12: error: value + is not a member of Boolean + true + "abc" + ^ + +scala> + +scala> var x = 10 +x: Int = 10 + +scala> var y = 11 +y: Int = 11 + +scala> x = 12 +x: Int = 12 + +scala> y = 13 +y: Int = 13 + +scala> + +scala> 2 ; 3 +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 2 ;; + ^ +res14: Int = 3 + +scala> { 2 ; 3 } +:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + { 2 ; 3 } + ^ +res15: Int = 3 + +scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def +bippy = { + 1 + + 2 + + 3 } ; bippy+88+11 +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; 10 ; case object Cow ; 20 ; class Moo { 
override def toString = "Moooooo" } ; 30 ; def + ^ +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def + ^ +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def + ^ +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def + ^ +defined object Cow +defined class Moo +bippy: Int +res16: Int = 105 + +scala> + +scala> object Bovine { var x: scala.List[_] = null } ; case class Ruminant(x: scala.Int) ; bippy * bippy * bippy +defined object Bovine +defined class Ruminant +res17: Int = 216 + +scala> Bovine.x = scala.List(Ruminant(5), Cow, new Moo) +Bovine.x: List[Any] = List(Ruminant(5), Cow, Moooooo) + +scala> Bovine.x +res18: List[Any] = List(Ruminant(5), Cow, Moooooo) + +scala> + +scala> (2) +res19: Int = 2 + +scala> (2 + 2) +res20: Int = 4 + +scala> ((2 + 2)) +res21: Int = 4 + +scala> ((2 + 2)) +res22: Int = 4 + +scala> ( (2 + 2)) +res23: Int = 4 + +scala> ( (2 + 2 ) ) +res24: Int = 4 + +scala> 5 ; ( (2 + 2 ) ) ; ((5)) +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; ( (2 + 2 ) ) ;; + ^ +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 5 ; ( (2 + 2 ) ) ;; + ^ +res25: Int = 5 + +scala> (((2 + 2)), ((2 + 2))) +res26: (Int, Int) = (4,4) + +scala> (((2 + 2)), ((2 + 2)), 2) +res27: (Int, Int, Int) = (4,4,2) + +scala> (((((2 + 2)), ((2 + 2)), 2).productIterator ++ scala.Iterator(3)).mkString) +res28: String = 4423 + +scala> + +scala> 55 ; ((2 + 2)) ; (1, 2, 3) +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 55 ; ((2 + 2)) ;; + ^ +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 55 ; ((2 + 2)) ;; + ^ +res29: (Int, Int, Int) = (1,2,3) + +scala> 55 ; (x: scala.Int) => x + 1 ; () => ((5)) +:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 55 ; (x: scala.Int) => x + 1 ;; + ^ +res30: () => Int = + +scala> + +scala> () => 5 +res31: () => Int = + +scala> 55 ; () => 5 +:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses + 55 ;; + ^ +res32: () => Int = + +scala> () => { class X ; new X } +res33: () => AnyRef = + +scala> + +scala> def foo(x: scala.Int)(y: scala.Int)(z: scala.Int) = x+y+z +foo: (x: Int)(y: Int)(z: Int)Int + +scala> foo(5)(10)(15)+foo(5)(10)(15) +res34: Int = 60 + +scala> + +scala> scala.List(1) ++ scala.List('a') +res35: List[AnyVal] = List(1, a) + +scala> + +scala> :paste < EOF +// Entering paste mode (EOF to finish) + +class C { def c = 42 } +EOF + +// Exiting paste mode, now interpreting. + +defined class C + +scala> new C().c +res36: Int = 42 + +scala> :paste <| EOF +// Entering paste mode (EOF to finish) + +class D { def d = 42 } +EOF + +// Exiting paste mode, now interpreting. 
+ +defined class D + +scala> new D().d +res37: Int = 42 + +scala> + +scala> :paste < EOF +// Entering paste mode (EOF to finish) + +class Dingus +{ + private val x = 5 + def y = Dingus.x * 2 +} +object Dingus +{ + private val x = 55 +} +EOF + +// Exiting paste mode, now interpreting. + +defined class Dingus +defined object Dingus + +scala> val x = (new Dingus).y +x: Int = 110 + +scala> + +scala> val x1 = 1 +x1: Int = 1 + +scala> val x2 = 2 +x2: Int = 2 + +scala> val x3 = 3 +x3: Int = 3 + +scala> case class BippyBungus() +defined class BippyBungus + +scala> x1 + x2 + x3 +res38: Int = 6 + +scala> :reset +Resetting interpreter state. +Forgetting this session history: + +1 +1.0 +() +"abc" +(1, 2) +{ import scala.Predef.ArrowAssoc; 1 -> 2 } +{ import scala.Predef.ArrowAssoc; 1 → 2 } +val answer = 42 +{ import scala.StringContext; s"answer: $answer" } +"abc" + true +{ import scala.Predef.any2stringadd; true + "abc" } +var x = 10 +var y = 11 +x = 12 +y = 13 +2 ; 3 +{ 2 ; 3 } +5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def +bippy = { + 1 + + 2 + + 3 } ; bippy+88+11 +object Bovine { var x: scala.List[_] = null } ; case class Ruminant(x: scala.Int) ; bippy * bippy * bippy +Bovine.x = scala.List(Ruminant(5), Cow, new Moo) +Bovine.x +(2) +(2 + 2) +((2 + 2)) + ((2 + 2)) + ( (2 + 2)) + ( (2 + 2 ) ) +5 ; ( (2 + 2 ) ) ; ((5)) +(((2 + 2)), ((2 + 2))) +(((2 + 2)), ((2 + 2)), 2) +(((((2 + 2)), ((2 + 2)), 2).productIterator ++ scala.Iterator(3)).mkString) +55 ; ((2 + 2)) ; (1, 2, 3) +55 ; (x: scala.Int) => x + 1 ; () => ((5)) +() => 5 +55 ; () => 5 +() => { class X ; new X } +def foo(x: scala.Int)(y: scala.Int)(z: scala.Int) = x+y+z +foo(5)(10)(15)+foo(5)(10)(15) +scala.List(1) ++ scala.List('a') +new C().c +new D().d +val x = (new Dingus).y +val x1 = 1 +val x2 = 2 +val x3 = 3 +case class BippyBungus() +x1 + x2 + x3 + +Forgetting all expression results and named terms: $intp, BippyBungus, Bovine, Cow, Dingus, Ruminant, answer, bippy, foo, x, x1, x2, x3, y +Forgetting defined types: BippyBungus, C, D, Dingus, Moo, Ruminant + +scala> x1 + x2 + x3 +:12: error: not found: value x1 + x1 + x2 + x3 + ^ +:12: error: not found: value x2 + x1 + x2 + x3 + ^ +:12: error: not found: value x3 + x1 + x2 + x3 + ^ + +scala> val x1 = 4 +x1: Int = 4 + +scala> new BippyBungus +:12: error: not found: type BippyBungus + new BippyBungus + ^ + +scala> class BippyBungus() { def f = 5 } +defined class BippyBungus + +scala> { new BippyBungus ; x1 } +res2: Int = 4 + +scala> :quit diff --git a/test/files/run/repl-no-imports-no-predef.scala b/test/files/run/repl-no-imports-no-predef.scala new file mode 100644 index 000000000000..39f43c534dc6 --- /dev/null +++ b/test/files/run/repl-no-imports-no-predef.scala @@ -0,0 +1,108 @@ +object Test extends scala.tools.partest.ReplTest { + + override def transformSettings(settings: scala.tools.nsc.Settings) = { + settings.noimports.value = true + settings.nopredef.value = true + settings + } + + // replace indylambda function names by + override def normalize(s: String) = """\$\$Lambda.*""".r.replaceAllIn(s, "") + + def code = """ +1 +1.0 +() +"abc" +(1, 2) + +{ import scala.Predef.ArrowAssoc; 1 -> 2 } +{ import scala.Predef.ArrowAssoc; 1 → 2 } +1 -> 2 +1 → 2 + +val answer = 42 +{ import scala.StringContext; s"answer: $answer" } +s"answer: $answer" + +"abc" + true + +{ import scala.Predef.any2stringadd; true + "abc" } +true + "abc" + +var x = 10 +var y = 11 +x = 12 +y = 13 + +2 ; 3 +{ 2 ; 3 } +5 ; 10 ; case object Cow ; 20 ; class Moo { override def 
toString = "Moooooo" } ; 30 ; def +bippy = { + 1 + + 2 + + 3 } ; bippy+88+11 + +object Bovine { var x: scala.List[_] = null } ; case class Ruminant(x: scala.Int) ; bippy * bippy * bippy +Bovine.x = scala.List(Ruminant(5), Cow, new Moo) +Bovine.x + +(2) +(2 + 2) +((2 + 2)) + ((2 + 2)) + ( (2 + 2)) + ( (2 + 2 ) ) +5 ; ( (2 + 2 ) ) ; ((5)) +(((2 + 2)), ((2 + 2))) +(((2 + 2)), ((2 + 2)), 2) +(((((2 + 2)), ((2 + 2)), 2).productIterator ++ scala.Iterator(3)).mkString) + +55 ; ((2 + 2)) ; (1, 2, 3) +55 ; (x: scala.Int) => x + 1 ; () => ((5)) + +() => 5 +55 ; () => 5 +() => { class X ; new X } + +def foo(x: scala.Int)(y: scala.Int)(z: scala.Int) = x+y+z +foo(5)(10)(15)+foo(5)(10)(15) + +scala.List(1) ++ scala.List('a') + +:paste < EOF +class C { def c = 42 } +EOF +new C().c +:paste <| EOF +class D { def d = 42 } +EOF +new D().d + +:paste < EOF +class Dingus +{ + private val x = 5 + def y = Dingus.x * 2 +} +object Dingus +{ + private val x = 55 +} +EOF +val x = (new Dingus).y + +val x1 = 1 +val x2 = 2 +val x3 = 3 +case class BippyBungus() +x1 + x2 + x3 +:reset +x1 + x2 + x3 +val x1 = 4 +new BippyBungus +class BippyBungus() { def f = 5 } +{ new BippyBungus ; x1 } + +""" +} diff --git a/test/files/run/repl-parens.scala b/test/files/run/repl-parens.scala index 43e642a806e4..613bb6f6afb2 100644 --- a/test/files/run/repl-parens.scala +++ b/test/files/run/repl-parens.scala @@ -1,6 +1,9 @@ import scala.tools.partest.ReplTest object Test extends ReplTest { + // replace indylambda function names by + override def normalize(s: String) = """\$\$Lambda.*""".r.replaceAllIn(s, "") + def code = """ (2) (2 + 2) @@ -26,11 +29,4 @@ foo(5)(10)(15)+foo(5)(10)(15) List(1) ++ List('a') """.trim - - // replace indylambda function names by - override def eval() = { - val lines = super.eval - val r = """\$\$Lambda.*""".r - lines.map(l => r.replaceAllIn(l, "")) - } } diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check index d698ea668d5a..c5e92e9d796d 100644 --- a/test/files/run/t7747-repl.check +++ b/test/files/run/t7747-repl.check @@ -246,12 +246,12 @@ scala> case class Bingo() defined class Bingo scala> List(BippyBups(), PuppyPups(), Bingo()) // show -class $read extends Serializable { +class $read extends _root_.java.io.Serializable { def () = { super.; () }; - class $iw extends Serializable { + class $iw extends _root_.java.io.Serializable { def () = { super.; () @@ -262,7 +262,7 @@ class $read extends Serializable { import $line45.$read.INSTANCE.$iw.$iw.PuppyPups; import $line46.$read.INSTANCE.$iw.$iw.Bingo; import $line46.$read.INSTANCE.$iw.$iw.Bingo; - class $iw extends Serializable { + class $iw extends _root_.java.io.Serializable { def () = { super.; () diff --git a/test/files/run/t7747-repl.scala b/test/files/run/t7747-repl.scala index 9b2d1c40be01..c6a7e419aa14 100644 --- a/test/files/run/t7747-repl.scala +++ b/test/files/run/t7747-repl.scala @@ -9,11 +9,7 @@ object Test extends ReplTest { } // replace indylambda function names by - override def eval() = { - val lines = super.eval - val r = """\$Lambda.*""".r - lines.map(l => r.replaceAllIn(l, "")) - } + override def normalize(s: String) = """\$Lambda.*""".r.replaceAllIn(s, "") def code = """ |var x = 10 diff --git a/test/junit/scala/tools/nsc/interpreter/ScriptedTest.scala b/test/junit/scala/tools/nsc/interpreter/ScriptedTest.scala index a8dc8eb3e071..01d17110d698 100644 --- a/test/junit/scala/tools/nsc/interpreter/ScriptedTest.scala +++ b/test/junit/scala/tools/nsc/interpreter/ScriptedTest.scala @@ -13,6 +13,14 @@ class ScriptedTest { 
// same as by service discovery //new ScriptEngineManager().getEngineByName("scala").asInstanceOf[ScriptEngine with Compilable] + // scripted, but also -Yno-predef -Yno-imports + def scriptedNoNothing: ScriptEngine with Compilable = { + val settings = new Settings() + settings.noimports.value = true + settings.nopredef.value = true + Scripted(settings = settings) + } + @Test def eval() = { val engine = scripted engine.put("foo","bar") @@ -24,6 +32,17 @@ class ScriptedTest { assert("barbar" == c.eval()) assert("bazbaz" == c.eval(bindings)) } + @Test def evalNoNothing() = { + val engine = scriptedNoNothing + engine.put("foo","bar") + assert("bar" == engine.eval("foo")) + val bindings = engine.createBindings() + bindings.put("foo","baz") + assert("baz" == engine.eval("foo", bindings)) + val c = engine.compile("import scala.Predef.augmentString ; def f = foo.asInstanceOf[java.lang.String] ; f * 2") + assert("barbar" == c.eval()) + assert("bazbaz" == c.eval(bindings)) + } @Test def `SI-7933 multiple eval compiled script`() = { val engine = scripted val init = """val i = new java.util.concurrent.atomic.AtomicInteger""" From 0b79f4bf586268947b3e72750413e7106c4ad46e Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 26 May 2016 13:00:50 -0700 Subject: [PATCH 0088/2793] SI-9382 Zippy clean-up in aisle 2 & 3 Consolated JUnit tests and heeded comment about private def and code beauty. --- src/library/scala/runtime/Tuple2Zipped.scala | 39 +++++------ src/library/scala/runtime/Tuple3Zipped.scala | 46 +++++++------ test/files/run/tuple-zipped.scala | 41 ----------- .../collection/immutable/StreamTest.scala | 16 ----- test/junit/scala/runtime/ZippedTest.scala | 68 +++++++++++++++++++ 5 files changed, 113 insertions(+), 97 deletions(-) delete mode 100644 test/files/run/tuple-zipped.scala create mode 100644 test/junit/scala/runtime/ZippedTest.scala diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala index 41ed9644aa28..52dd1da09e82 100644 --- a/src/library/scala/runtime/Tuple2Zipped.scala +++ b/src/library/scala/runtime/Tuple2Zipped.scala @@ -1,6 +1,6 @@ /* __ *\ ** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** +** / __/ __// _ | / / / _ | (c) 2002-2016, LAMP/EPFL ** ** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** ** /____/\___/_/ |_/____/_/ | | ** ** |/ ** @@ -34,14 +34,15 @@ object ZippedTraversable2 { } final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1, Repr1], IterableLike[El2, Repr2])) extends AnyVal with ZippedTraversable2[El1, El2] { - // This would be better as "private def coll1 = colls._1" but - // SI-6215 precludes private methods in value classes. 
+ private def coll1 = colls._1 + private def coll2 = colls._2 + def map[B, To](f: (El1, El2) => B)(implicit cbf: CBF[Repr1, B, To]): To = { - val b = cbf(colls._1.repr) - b.sizeHint(colls._1) - val elems2 = colls._2.iterator + val b = cbf(coll1.repr) + b.sizeHint(coll1) + val elems2 = coll2.iterator - for (el1 <- colls._1) { + for (el1 <- coll1) { if (elems2.hasNext) b += f(el1, elems2.next()) else @@ -52,10 +53,10 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 } def flatMap[B, To](f: (El1, El2) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = { - val b = cbf(colls._1.repr) - val elems2 = colls._2.iterator + val b = cbf(coll1.repr) + val elems2 = coll2.iterator - for (el1 <- colls._1) { + for (el1 <- coll1) { if (elems2.hasNext) b ++= f(el1, elems2.next()) else @@ -66,11 +67,11 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 } def filter[To1, To2](f: (El1, El2) => Boolean)(implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2]): (To1, To2) = { - val b1 = cbf1(colls._1.repr) - val b2 = cbf2(colls._2.repr) - val elems2 = colls._2.iterator + val b1 = cbf1(coll1.repr) + val b2 = cbf2(coll2.repr) + val elems2 = coll2.iterator - for (el1 <- colls._1) { + for (el1 <- coll1) { if (elems2.hasNext) { val el2 = elems2.next() if (f(el1, el2)) { @@ -85,9 +86,9 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 } def exists(@deprecatedName('f) p: (El1, El2) => Boolean): Boolean = { - val elems2 = colls._2.iterator + val elems2 = coll2.iterator - for (el1 <- colls._1) { + for (el1 <- coll1) { if (elems2.hasNext) { if (p(el1, elems2.next())) return true @@ -101,9 +102,9 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 !exists((x, y) => !p(x, y)) def foreach[U](f: (El1, El2) => U): Unit = { - val elems2 = colls._2.iterator + val elems2 = coll2.iterator - for (el1 <- colls._1) { + for (el1 <- coll1) { if (elems2.hasNext) f(el1, elems2.next()) else @@ -111,7 +112,7 @@ final class Tuple2Zipped[El1, Repr1, El2, Repr2](val colls: (TraversableLike[El1 } } - override def toString = "(%s, %s).zipped".format(colls._1.toString, colls._2.toString) + override def toString = s"($coll1, $coll2).zipped" } object Tuple2Zipped { diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala index 89f401ea8000..a4a86f8e55b0 100644 --- a/src/library/scala/runtime/Tuple3Zipped.scala +++ b/src/library/scala/runtime/Tuple3Zipped.scala @@ -34,12 +34,16 @@ object ZippedTraversable3 { final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (TraversableLike[El1, Repr1], IterableLike[El2, Repr2], IterableLike[El3, Repr3])) extends AnyVal with ZippedTraversable3[El1, El2, El3] { + private def coll1 = colls._1 + private def coll2 = colls._2 + private def coll3 = colls._3 + def map[B, To](f: (El1, El2, El3) => B)(implicit cbf: CBF[Repr1, B, To]): To = { - val b = cbf(colls._1.repr) - val elems2 = colls._2.iterator - val elems3 = colls._3.iterator + val b = cbf(coll1.repr) + val elems2 = coll2.iterator + val elems3 = coll3.iterator - for (el1 <- colls._1) { + for (el1 <- coll1) { if (elems2.hasNext && elems3.hasNext) b += f(el1, elems2.next(), elems3.next()) else @@ -49,11 +53,11 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers } def flatMap[B, To](f: (El1, El2, El3) => TraversableOnce[B])(implicit cbf: CBF[Repr1, B, To]): To = { - val b = cbf(colls._1.repr) - val elems2 = 
colls._2.iterator - val elems3 = colls._3.iterator + val b = cbf(coll1.repr) + val elems2 = coll2.iterator + val elems3 = coll3.iterator - for (el1 <- colls._1) { + for (el1 <- coll1) { if (elems2.hasNext && elems3.hasNext) b ++= f(el1, elems2.next(), elems3.next()) else @@ -66,14 +70,14 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers implicit cbf1: CBF[Repr1, El1, To1], cbf2: CBF[Repr2, El2, To2], cbf3: CBF[Repr3, El3, To3]): (To1, To2, To3) = { - val b1 = cbf1(colls._1.repr) - val b2 = cbf2(colls._2.repr) - val b3 = cbf3(colls._3.repr) - val elems2 = colls._2.iterator - val elems3 = colls._3.iterator + val b1 = cbf1(coll1.repr) + val b2 = cbf2(coll2.repr) + val b3 = cbf3(coll3.repr) + val elems2 = coll2.iterator + val elems3 = coll3.iterator def result = (b1.result(), b2.result(), b3.result()) - for (el1 <- colls._1) { + for (el1 <- coll1) { if (elems2.hasNext && elems3.hasNext) { val el2 = elems2.next() val el3 = elems3.next() @@ -91,10 +95,10 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers } def exists(@deprecatedName('f) p: (El1, El2, El3) => Boolean): Boolean = { - val elems2 = colls._2.iterator - val elems3 = colls._3.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator - for (el1 <- colls._1) { + for (el1 <- coll1) { if (elems2.hasNext && elems3.hasNext) { if (p(el1, elems2.next(), elems3.next())) return true @@ -108,10 +112,10 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers !exists((x, y, z) => !p(x, y, z)) def foreach[U](f: (El1, El2, El3) => U): Unit = { - val elems2 = colls._2.iterator - val elems3 = colls._3.iterator + val elems2 = coll2.iterator + val elems3 = coll3.iterator - for (el1 <- colls._1) { + for (el1 <- coll1) { if (elems2.hasNext && elems3.hasNext) f(el1, elems2.next(), elems3.next()) else @@ -119,7 +123,7 @@ final class Tuple3Zipped[El1, Repr1, El2, Repr2, El3, Repr3](val colls: (Travers } } - override def toString: String = "(%s, %s, %s).zipped".format(colls._1.toString, colls._2.toString, colls._3.toString) + override def toString = s"($coll1, $coll2, $coll3).zipped" } object Tuple3Zipped { diff --git a/test/files/run/tuple-zipped.scala b/test/files/run/tuple-zipped.scala deleted file mode 100644 index 37ac52977f35..000000000000 --- a/test/files/run/tuple-zipped.scala +++ /dev/null @@ -1,41 +0,0 @@ - -import scala.language.postfixOps - -object Test { - val xs1 = List.range(1, 100) - val xs2 = xs1.view - val xs3 = xs1 take 10 - val ss1 = Stream from 1 - val ss2 = ss1.view - val ss3 = ss1 take 10 - val as1 = 1 to 100 toArray - val as2 = as1.view - val as3 = as1 take 10 - - def xss1 = List[Seq[Int]](xs1, xs2, xs3, ss1, ss2, ss3, as1, as2, as3) - def xss2 = List[Seq[Int]](xs1, xs2, xs3, ss3, as1, as2, as3) // no infinities - def xss3 = List[Seq[Int]](xs2, xs3, ss3, as1) // representative sampling - - def main(args: Array[String]): Unit = { - for (cc1 <- xss1 ; cc2 <- xss2) { - val sum1 = (cc1, cc2).zipped map { case (x, y) => x + y } sum - val sum2 = (cc1, cc2).zipped map (_ + _) sum - - assert(sum1 == sum2) - } - - for (cc1 <- xss1 ; cc2 <- xss2 ; cc3 <- xss3) { - val sum1 = (cc1, cc2, cc3).zipped map { case (x, y, z) => x + y + z } sum - val sum2 = (cc1, cc2, cc3).zipped map (_ + _ + _) sum - - assert(sum1 == sum2) - } - - assert((ss1, ss1).zipped exists ((x, y) => true)) - assert((ss1, ss1, ss1).zipped exists ((x, y, z) => true)) - - assert(!(ss1, ss2, 1 to 3).zipped.exists(_ + _ + _ > 100000)) - assert((1 to 3, ss1, ss2).zipped.forall(_ + 
_ + _ > 0)) - assert((ss1, 1 to 3, ss2).zipped.map(_ + _ + _).size == 3) - } -} diff --git a/test/junit/scala/collection/immutable/StreamTest.scala b/test/junit/scala/collection/immutable/StreamTest.scala index 1b257aabc452..fad4e502eba1 100644 --- a/test/junit/scala/collection/immutable/StreamTest.scala +++ b/test/junit/scala/collection/immutable/StreamTest.scala @@ -107,20 +107,4 @@ class StreamTest { def withFilter_map_properly_lazy_in_tail: Unit = { assertStreamOpLazyInTail(_.withFilter(_ % 2 == 0).map(identity), List(1, 2)) } - - @Test - def test_si9379() { - class Boom { - private var i = -1 - def inc = { - i += 1 - if (i > 1000) throw new NoSuchElementException("Boom! Too many elements!") - i - } - } - val b = new Boom - val s = Stream.continually(b.inc) - // zipped.toString must allow s to short-circuit evaluation - assertTrue((s, s).zipped.toString contains s.toString) - } } diff --git a/test/junit/scala/runtime/ZippedTest.scala b/test/junit/scala/runtime/ZippedTest.scala new file mode 100644 index 000000000000..d3ce4945aa9f --- /dev/null +++ b/test/junit/scala/runtime/ZippedTest.scala @@ -0,0 +1,68 @@ + +package scala.runtime + +import scala.language.postfixOps + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +/** Tests Tuple?Zipped */ +@RunWith(classOf[JUnit4]) +class ZippedTest { + @Test + def crossZipped() { + + val xs1 = List.range(1, 100) + val xs2 = xs1.view + val xs3 = xs1 take 10 + val ss1 = Stream from 1 + val ss2 = ss1.view + val ss3 = ss1 take 10 + val as1 = 1 to 100 toArray + val as2 = as1.view + val as3 = as1 take 10 + + def xss1 = List[Seq[Int]](xs1, xs2, xs3, ss1, ss2, ss3, as1, as2, as3) + def xss2 = List[Seq[Int]](xs1, xs2, xs3, ss3, as1, as2, as3) // no infinities + def xss3 = List[Seq[Int]](xs2, xs3, ss3, as1) // representative sampling + + for (cc1 <- xss1 ; cc2 <- xss2) { + val sum1 = (cc1, cc2).zipped map { case (x, y) => x + y } sum + val sum2 = (cc1, cc2).zipped map (_ + _) sum + + assert(sum1 == sum2) + } + + for (cc1 <- xss1 ; cc2 <- xss2 ; cc3 <- xss3) { + val sum1 = (cc1, cc2, cc3).zipped map { case (x, y, z) => x + y + z } sum + val sum2 = (cc1, cc2, cc3).zipped map (_ + _ + _) sum + + assert(sum1 == sum2) + } + + assert((ss1, ss1).zipped exists ((x, y) => true)) + assert((ss1, ss1, ss1).zipped exists ((x, y, z) => true)) + + assert(!(ss1, ss2, 1 to 3).zipped.exists(_ + _ + _ > 100000)) + assert((1 to 3, ss1, ss2).zipped.forall(_ + _ + _ > 0)) + assert((ss1, 1 to 3, ss2).zipped.map(_ + _ + _).size == 3) + } + + @Test + def test_si9379() { + class Boom { + private var i = -1 + def inc = { + i += 1 + if (i > 1000) throw new NoSuchElementException("Boom! Too many elements!") + i + } + } + val b = new Boom + val s = Stream.continually(b.inc) + // zipped.toString must allow s to short-circuit evaluation + assertTrue((s, s).zipped.toString contains s.toString) + } +} From 100d6374c282d94497ee9f0d4a206d427951c74c Mon Sep 17 00:00:00 2001 From: Performant Data LLC Date: Sun, 22 May 2016 09:40:54 -0700 Subject: [PATCH 0089/2793] SI-9789 use quadratic probing in OpenHashMap The original probe sequence, taken from Python's hash table code, is exponential, jumping around in the hash table with poor memory locality. This replaces the probe algorithm with the more conventional quadratic probing. This also adds tests to the benchmarking code using AnyRef keys, which have pseudorandom hash codes (unlike Ints, whose hash code is simply the Int itself). 
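
(Illustration only, not part of the change set: a minimal sketch of the probe step this commit adopts. The names `ProbeSketch`/`probes` and the sample values are hypothetical; `mask` plays the same role as `table.length - 1` does in `OpenHashMap`.)

```
// Minimal sketch of quadratic probing with increments 1, 2, 3, ...
// (cumulative offsets are the triangular numbers), assuming a
// power-of-two table size as OpenHashMap uses.
object ProbeSketch {
  def probes(hash: Int, tableSize: Int, count: Int): Seq[Int] = {
    val mask = tableSize - 1          // tableSize must be a power of two
    var index = hash & mask           // first slot examined
    var j = 0
    Seq.fill(count) {
      val current = index
      j += 1                          // step size grows by one each probe
      index = (index + j) & mask      // next slot: previous + j, wrapped
      current
    }
  }

  def main(args: Array[String]): Unit = {
    // First eight slots examined for a sample hash in a 64-slot table.
    println(probes(hash = 42, tableSize = 64, count = 8))
    // prints: List(42, 43, 45, 48, 52, 57, 63, 6)
  }
}
```

Because consecutive probes differ by small, slowly growing offsets, the early
probes after a collision stay near the original slot, which is where the
locality benefit over the old perturb-based sequence comes from; with a
power-of-two table the triangular-number sequence still visits every slot
before repeating.
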
The intensity of the benchmarking is reduced to make the tests complete within 9 hours, by removing unnecessary sampling. --- .../collection/mutable/OpenHashMap.scala | 18 +- .../src/main/scala/benchmark/KeySeq.scala | 24 ++ .../main/scala/benchmark/KeySeqBuilder.scala | 33 +++ .../mutable/OpenHashMapBenchmark.scala | 216 +++++++++++++----- .../mutable/OpenHashMapRunner.scala | 60 ++--- 5 files changed, 257 insertions(+), 94 deletions(-) create mode 100644 test/benchmarks/src/main/scala/benchmark/KeySeq.scala create mode 100644 test/benchmarks/src/main/scala/benchmark/KeySeqBuilder.scala diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala index 5f8f5b9a0a32..c86357efadac 100644 --- a/src/library/scala/collection/mutable/OpenHashMap.scala +++ b/src/library/scala/collection/mutable/OpenHashMap.scala @@ -108,16 +108,13 @@ extends AbstractMap[Key, Value] * @param hash hash value for `key` */ private[this] def findIndex(key: Key, hash: Int): Int = { - var j = hash - var index = hash & mask - var perturb = index + var j = 0 while(table(index) != null && !(table(index).hash == hash && table(index).key == key)){ - j = 5 * j + 1 + perturb - perturb >>= 5 - index = j & mask + j += 1 + index = (index + j) & mask } index } @@ -172,20 +169,17 @@ extends AbstractMap[Key, Value] def get(key : Key) : Option[Value] = { val hash = hashOf(key) - - var j = hash var index = hash & mask - var perturb = index var entry = table(index) + var j = 0 while(entry != null){ if (entry.hash == hash && entry.key == key){ return entry.value } - j = 5 * j + 1 + perturb - perturb >>= 5 - index = j & mask + j += 1 + index = (index + j) & mask entry = table(index) } None diff --git a/test/benchmarks/src/main/scala/benchmark/KeySeq.scala b/test/benchmarks/src/main/scala/benchmark/KeySeq.scala new file mode 100644 index 000000000000..126b92b3b662 --- /dev/null +++ b/test/benchmarks/src/main/scala/benchmark/KeySeq.scala @@ -0,0 +1,24 @@ +package benchmark + +/** A sequence of keys. + * + * Tests of maps and sets require a sequence of keys that can be used + * to add entries and possibly to find them again. + * This type provides such a sequence. + * + * Note that this needn't be a "sequence" in the full sense of [[collection.Seq]], + * particularly in that it needn't extend [[PartialFunction]]. + * + * @tparam K the type of the keys + */ +trait KeySeq[K] { + /** Selects a key by its index in the sequence. + * Repeated calls with the same index return the same key (by reference equality). + * + * @param idx The index to select. Should be non-negative and less than `size`. + */ + def apply(idx: Int): K + + /** The size of this sequence. */ + def size: Int +} diff --git a/test/benchmarks/src/main/scala/benchmark/KeySeqBuilder.scala b/test/benchmarks/src/main/scala/benchmark/KeySeqBuilder.scala new file mode 100644 index 000000000000..95f6c7afd744 --- /dev/null +++ b/test/benchmarks/src/main/scala/benchmark/KeySeqBuilder.scala @@ -0,0 +1,33 @@ +package benchmark + +/** Builder of a [[KeySeq]] + * + * @tparam K the type of the keys + */ +trait KeySeqBuilder[K] { + /** Return a [[KeySeq]] having at least the given size. */ + def build(size: Int): KeySeq[K] +} + +object KeySeqBuilder { + /** Builder of a sequence of `Int` keys. + * Simply maps the sequence index to itself. 
+ */ + implicit object IntKeySeqBuilder extends KeySeqBuilder[Int] { + def build(_size: Int) = new KeySeq[Int] { + def apply(idx: Int) = idx + def size = _size + } + } + + /** Builder of a sequence of `AnyRef` keys. */ + implicit object AnyRefKeySeqBuilder extends KeySeqBuilder[AnyRef] { + def build(_size: Int) = new KeySeq[AnyRef] { + private[this] val arr = new Array[AnyRef](size) + for (i <- 0 until size) arr(i) = new AnyRef() + + def apply(idx: Int) = arr(idx) + def size = _size + } + } +} diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala index 26e26b30654a..64e2244499a7 100644 --- a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapBenchmark.scala @@ -1,14 +1,12 @@ package scala.collection.mutable; -import java.util.concurrent.TimeUnit import org.openjdk.jmh.annotations._ -import org.openjdk.jmh.infra.Blackhole -import org.openjdk.jmh.infra.BenchmarkParams -import org.openjdk.jol.info.GraphLayout -import org.openjdk.jol.info.GraphWalker -import org.openjdk.jol.info.GraphVisitor -import org.openjdk.jmh.infra.IterationParams +import org.openjdk.jmh.infra._ import org.openjdk.jmh.runner.IterationType +import org.openjdk.jol.info.GraphLayout + +import benchmark._ +import java.util.concurrent.TimeUnit /** Utilities for the [[OpenHashMapBenchmark]]. * @@ -16,7 +14,8 @@ import org.openjdk.jmh.runner.IterationType * instead of using the JMH harness, which iterates for a fixed length of time. */ private object OpenHashMapBenchmark { - /** State container for the `put()` bulk calling tests. + + /** Abstract state container for the `put()` bulk calling tests. * * Provides an array of adequately-sized, empty maps to each invocation, * so that hash table allocation won't be done during measurement. @@ -25,10 +24,11 @@ private object OpenHashMapBenchmark { * so that only the GCs caused by the invocation contribute to the measurement. * * Records the memory used by all the maps in the last invocation of each iteration. + * + * @tparam K type of the map keys to be used in the test */ @State(Scope.Thread) - @AuxCounters - class BulkPutState { + private[this] abstract class BulkPutState[K](implicit keyBuilder: KeySeqBuilder[K]) { /** A lower-bound estimate of the number of nanoseconds per `put()` call */ private[this] val nanosPerPut: Double = 5 @@ -39,35 +39,43 @@ private object OpenHashMapBenchmark { private[this] var size: Int = _ /** Total number of entries in all of the `maps` combined. */ - var mapEntries: Int = _ + private[this] var _mapEntries: Int = _ + protected def mapEntries = _mapEntries /** Number of operations performed in the current invocation. */ - var operations: Int = _ + private[this] var _operations: Int = _ + protected def operations = _operations /** Bytes of memory used in the object graphs of all the maps. */ - var memory: Long = _ + private[this] var _memory: Long = _ + protected def memory = _memory + + /** The sequence of keys to store into a map. 
*/ + private[this] var _keys: KeySeq[K] = _ + def keys() = _keys - var maps: Array[OpenHashMap[Int,Int]] = null + var maps: Array[OpenHashMap[K,Int]] = null @Setup def threadSetup(params: BenchmarkParams) { size = params.getParam("size").toInt val n = math.ceil(minNanosPerInvocation / (nanosPerPut * size)).toInt - mapEntries = size * n + _mapEntries = size * n + _keys = keyBuilder.build(size) maps = new Array(n) } @Setup(Level.Iteration) def iterationSetup { - operations = 0 + _operations = 0 } @Setup(Level.Invocation) def setup(params: IterationParams) { - for (i <- 0 until maps.length) maps(i) = new OpenHashMap[Int,Int](size) + for (i <- 0 until maps.length) maps(i) = new OpenHashMap[K,Int](size) if (params.getType == IterationType.MEASUREMENT) { - operations += mapEntries + _operations += _mapEntries System.gc() // clean up after last invocation } } @@ -76,72 +84,124 @@ private object OpenHashMapBenchmark { def iterationTeardown(params: IterationParams) { if (params.getType == IterationType.MEASUREMENT) { // limit to smaller cases to avoid OOM - memory = if (mapEntries <= 1000000) GraphLayout.parseInstance(maps(0), maps.tail).totalSize else 0 + _memory = + if (_mapEntries <= 1000000) GraphLayout.parseInstance(maps(0), maps.tail).totalSize + else 0 } } } - /** State container for the `get()` bulk calling tests. + /** Abstract state container for the `get()` bulk calling tests. * * Provides a thread-scoped map of the expected size. * Performs a GC after loading the map. + * + * @tparam K type of the map keys to be used in the test */ @State(Scope.Thread) - class BulkGetState { - val map = new OpenHashMap[Int,Int].empty + private[this] abstract class BulkGetState[K](implicit keyBuilder: KeySeqBuilder[K]) { + /** The sequence of keys to store into a map. */ + private[this] var _keys: KeySeq[K] = _ + def keys() = _keys + + val map = new OpenHashMap[K,Int].empty /** Load the map with keys from `1` to `size`. */ @Setup def setup(params: BenchmarkParams) { val size = params.getParam("size").toInt - put_Int(map, 1, size) + _keys = keyBuilder.build(size) + put(map, keys, 0, size) System.gc() } } - /** State container for the `get()` bulk calling tests with deleted entries. + /** Abstract state container for the `get()` bulk calling tests with deleted entries. * * Provides a thread-scoped map of the expected size, from which entries have been removed. * Performs a GC after loading the map. + * + * @tparam K type of the map keys to be used in the test */ @State(Scope.Thread) - class BulkRemovedGetState { - val map = new OpenHashMap[Int,Int].empty + private[this] abstract class BulkRemovedGetState[K](implicit keyBuilder: KeySeqBuilder[K]) { + /** The sequence of keys to store into a map. */ + private[this] var _keys: KeySeq[K] = _ + def keys() = _keys + + val map = new OpenHashMap[K,Int].empty /** Load the map with keys from `1` to `size`, removing half of them. */ @Setup def setup(params: BenchmarkParams) { val size = params.getParam("size").toInt - put_remove_Int(map, size) + _keys = keyBuilder.build(size) + put_remove(map, keys) System.gc() } } - /** Put elements into the given map. */ - private def put_Int(map: OpenHashMap[Int,Int], from: Int, to: Int) { + /* In order to use `@AuxCounters` on a class hierarchy (as of JMH 1.11.3), + * it's necessary to place it on the injected (sub)class, and to make the + * counters visible as explicit public members of the that class. JMH doesn't + * scan the ancestor classes for counters. 
+ */ + + @AuxCounters + private class IntBulkPutState extends BulkPutState[Int] { + override def mapEntries = super.mapEntries + override def operations = super.operations + override def memory = super.memory + } + private class IntBulkGetState extends BulkGetState[Int] + private class IntBulkRemovedGetState extends BulkRemovedGetState[Int] + + @AuxCounters + private class AnyRefBulkPutState extends BulkPutState[AnyRef] { + override def mapEntries = super.mapEntries + override def operations = super.operations + override def memory = super.memory + } + private class AnyRefBulkGetState extends BulkGetState[AnyRef] + private class AnyRefBulkRemovedGetState extends BulkRemovedGetState[AnyRef] + + + /** Put entries into the given map. + * Adds entries using a range of keys from the given list. + * + * @param from lowest index in the range of keys to add + * @param to highest index in the range of keys to add, plus one + */ + private[this] def put[K](map: OpenHashMap[K,Int], keys: KeySeq[K], from: Int, to: Int) { var i = from - while (i <= to) { // using a `for` expression instead adds significant overhead - map.put(i, i) + while (i < to) { // using a `for` expression instead adds significant overhead + map.put(keys(i), i) i += 1 } } - /** Put elements into the given map, removing half of them as they're added. + /** Put entries into the given map. + * Adds entries using all of the keys from the given list. + */ + private def put[K](map: OpenHashMap[K,Int], keys: KeySeq[K]): Unit = + put(map, keys, 0, keys.size) + + /** Put entries into the given map, removing half of them as they're added. * - * @param size number of entries to leave in the map on return + * @param keys list of keys to use */ - def put_remove_Int(map: OpenHashMap[Int,Int], size: Int) { - val blocks = 50 // should be a factor of `size` - val totalPuts = 2 * size // add twice as many, because we remove half of them - val blockSize: Int = totalPuts / blocks + private def put_remove[K](map: OpenHashMap[K,Int], keys: KeySeq[K]) { + val blocks = 25 // should be a non-trivial factor of `size` + val size = keys.size + val blockSize: Int = size / blocks var base = 0 - while (base < totalPuts) { - put_Int(map, base + 1, base + blockSize) + while (base < size) { + put(map, keys, base, base + blockSize) // remove every other entry - var i = base + 1 - while (i <= base + blockSize) { - map.remove(i) + var i = base + while (i < base + blockSize) { + map.remove(keys(i)) i += 2 } @@ -150,55 +210,99 @@ private object OpenHashMapBenchmark { } /** Get elements from the given map. */ - def get_Int(map: OpenHashMap[Int,Int], size: Int, bh: Blackhole) { - var i = 1 - while (i <= size) { - bh.consume(map.get(i).getOrElse(0)) + private def get[K](map: OpenHashMap[K,Int], keys: KeySeq[K]) = { + val size = keys.size + var i = 0 + var sum = 0 + while (i < size) { + sum += map.get(keys(i)).getOrElse(0) i += 1 } + sum } } /** Benchmark for the library's [[OpenHashMap]]. 
*/ @BenchmarkMode(Array(Mode.AverageTime)) -@Fork(6) +@Fork(5) @Threads(1) @Warmup(iterations = 20) -@Measurement(iterations = 6) +@Measurement(iterations = 5) @OutputTimeUnit(TimeUnit.NANOSECONDS) @State(Scope.Benchmark) class OpenHashMapBenchmark { import OpenHashMapBenchmark._ - @Param(Array("25", "50", "100", "250", "1000", "2500", "10000", "25000", "100000", "250000", "1000000", "2500000", + @Param(Array("50", "100", "1000", "10000", "100000", "1000000", "2500000", "5000000", "7500000", "10000000", "25000000")) var size: Int = _ + // Tests with Int keys + /** Test putting elements to a map of `Int` to `Int`. */ @Benchmark - def put_Int(state: BulkPutState) { + def put_Int(state: IntBulkPutState) { var i = 0 while (i < state.maps.length) { - OpenHashMapBenchmark.put_Int(state.maps(i), 1, size) + put(state.maps(i), state.keys) i += 1 } } /** Test putting and removing elements to a growing map of `Int` to `Int`. */ @Benchmark - def put_remove_Int(state: BulkPutState) { + def put_remove_Int(state: IntBulkPutState) { var i = 0 while (i < state.maps.length) { - OpenHashMapBenchmark.put_remove_Int(state.maps(i), size) + put_remove(state.maps(i), state.keys) i += 1 } } /** Test getting elements from a map of `Int` to `Int`. */ @Benchmark - def put_get_Int(state: BulkGetState, bh: Blackhole) = OpenHashMapBenchmark.get_Int(state.map, size, bh) + def get_Int_after_put(state: IntBulkGetState) = + get(state.map, state.keys) + + /** Test getting elements from a map of `Int` to `Int` from which elements have been removed. + * Note that half of these queries will fail to find their keys, which have been removed. + */ + @Benchmark + def get_Int_after_put_remove(state: IntBulkRemovedGetState) = + get(state.map, state.keys) + + + // Tests with AnyRef keys + + /** Test putting elements to a map of `AnyRef` to `Int`. */ + @Benchmark + def put_AnyRef(state: AnyRefBulkPutState) { + var i = 0 + while (i < state.maps.length) { + put(state.maps(i), state.keys) + i += 1 + } + } + + /** Test putting and removing elements to a growing map of `AnyRef` to `Int`. */ + @Benchmark + def put_remove_AnyRef(state: AnyRefBulkPutState) { + var i = 0 + while (i < state.maps.length) { + put_remove(state.maps(i), state.keys) + i += 1 + } + } + + /** Test getting elements from a map of `AnyRef` to `Int`. */ + @Benchmark + def get_AnyRef_after_put(state: AnyRefBulkGetState) = + get(state.map, state.keys) - /** Test getting elements from a map of `Int` to `Int` from which elements have been removed. */ + /** Test getting elements from a map of `AnyRef` to `Int` from which elements have been removed. + * Note that half of these queries will fail to find their keys, which have been removed. 
+ */ @Benchmark - def put_remove_get_Int(state: BulkRemovedGetState, bh: Blackhole) = OpenHashMapBenchmark.get_Int(state.map, size, bh) + def get_AnyRef_after_put_remove(state: AnyRefBulkRemovedGetState) = + get(state.map, state.keys) } diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala index 1a58b18ee979..b14b733a8128 100644 --- a/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/OpenHashMapRunner.scala @@ -1,20 +1,18 @@ package scala.collection.mutable -import java.io.BufferedWriter import java.io.File -import java.io.FileOutputStream -import java.io.OutputStreamWriter import java.io.PrintWriter -import scala.collection.JavaConversions + import scala.language.existentials + +import org.openjdk.jmh.results.Result import org.openjdk.jmh.results.RunResult import org.openjdk.jmh.runner.Runner import org.openjdk.jmh.runner.options.CommandLineOptions -import org.openjdk.jmh.runner.options.Options -import benchmark.JmhRunner import org.openjdk.jmh.runner.options.OptionsBuilder import org.openjdk.jmh.runner.options.VerboseMode -import org.openjdk.jmh.results.Result + +import benchmark.JmhRunner /** Replacement JMH application that runs the [[OpenHashMap]] benchmark. * @@ -27,6 +25,7 @@ object OpenHashMapRunner extends JmhRunner { /** Qualifier to add to the name of a memory usage data set. */ private[this] val memoryDatasetQualifier = "-memory" + /** Adapter to the JMH result class that simplifies our method calls. */ private[this] implicit class MyRunResult(r: RunResult) { /** Return the dataset label. */ def label = r.getPrimaryResult.getLabel @@ -34,13 +33,13 @@ object OpenHashMapRunner extends JmhRunner { /** Return the value of the JMH parameter for the number of map entries per invocation. */ def size: String = r.getParams.getParam("size") - /** Return the operation counts. */ + /** Return the operation counts. Not every test tracks this. */ def operations = Option(r.getSecondaryResults.get("operations")) /** Return the number of map entries. */ def entries = r.getSecondaryResults.get("mapEntries") - /** Return the memory usage. */ + /** Return the memory usage. Only defined if memory usage was measured. */ def memory = Option(r.getSecondaryResults.get("memory")) } @@ -50,7 +49,6 @@ object OpenHashMapRunner extends JmhRunner { def main(args: Array[String]) { import scala.collection.JavaConversions._ - import scala.language.existentials val opts = new CommandLineOptions(args: _*) var builder = new OptionsBuilder().parent(opts).jvmArgsPrepend("-Xmx6000m") @@ -58,7 +56,12 @@ object OpenHashMapRunner extends JmhRunner { val results = new Runner(builder.build).run() - // Sort the results + /* Sort the JMH results into "data sets", each representing a complete test of one feature. + * Some results only measure CPU performance; while others also measure memory usage, and + * thus are split into two data sets. A data set is distinguished by its label, which is + * the label of the JMH result, for CPU performance, or that with an added suffix, for memory + * usage. + */ /** Map from data set name to data set. 
*/ val datasetByName = Map.empty[String, Set[RunResult]] @@ -83,23 +86,28 @@ object OpenHashMapRunner extends JmhRunner { val f = new PrintWriter(outputFile, "UTF-8") try { - datasetByName.foreach(_ match { case (label: String, dataset: Iterable[RunResult]) => { - f.println(s"# [$label]") - - val isMemoryUsageDataset = label.endsWith(memoryDatasetQualifier) - dataset.foreach { r => - f.println(r.size + " " + ( - if (isMemoryUsageDataset) - stats(r.entries) + " " + stats(r.memory.get) - else - stats(r.operations getOrElse r.getPrimaryResult) - )) - } - - f.println(); f.println() // data set separator - }}) + datasetByName.foreach(_ match { + case (label: String, dataset: Iterable[RunResult]) => + outputDataset(f, label, dataset) + }) } finally { f.close() } } + + private[this] def outputDataset(f: PrintWriter, label: String, dataset: Iterable[RunResult]) { + f.println(s"# [$label]") + + val isMemoryUsageDataset = label.endsWith(memoryDatasetQualifier) + dataset.foreach { r => + f.println(r.size + " " + ( + if (isMemoryUsageDataset && !r.memory.get.getScore.isInfinite) + stats(r.entries) + " " + stats(r.memory.get) + else + stats(r.operations getOrElse r.getPrimaryResult) + )) + } + + f.println(); f.println() // data set separator + } } From fd6386a51e9a5b97a83198a310923cd012e1aab0 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 26 May 2016 21:53:19 -0700 Subject: [PATCH 0090/2793] SI-9794 Error advice uses decoded method name So much work went into polishing this error message, it's worth buffing the method name when it's an operator. The message now says `+` instead of `$plus`. --- .../scala/tools/nsc/typechecker/ContextErrors.scala | 2 +- test/files/neg/missing-arg-list.check | 7 ++++++- test/files/neg/missing-arg-list.scala | 3 +++ 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 90ccaefe432a..d519948a11f0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -632,7 +632,7 @@ trait ContextErrors { //adapt def MissingArgsForMethodTpeError(tree: Tree, meth: Symbol) = { - val f = meth.name + val f = meth.name.decoded val paf = s"$f(${ meth.asMethod.paramLists map (_ map (_ => "_") mkString ",") mkString ")(" })" val advice = s""" |Unapplied methods are only converted to functions when a function type is expected. diff --git a/test/files/neg/missing-arg-list.check b/test/files/neg/missing-arg-list.check index 5a011c36f29b..229baac177ea 100644 --- a/test/files/neg/missing-arg-list.check +++ b/test/files/neg/missing-arg-list.check @@ -18,4 +18,9 @@ Unapplied methods are only converted to functions when a function type is expect You can make this conversion explicit by writing `h _` or `h(_,_,_)(_)` instead of `h`. val z = h ^ -four errors found +missing-arg-list.scala:15: error: missing argument list for method + in trait T +Unapplied methods are only converted to functions when a function type is expected. +You can make this conversion explicit by writing `+ _` or `+(_)` instead of `+`. 
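
The advice in the expected output above now shows the operator as written in source. For readers unfamiliar with name mangling, the decoding that turns the internal `$plus` back into `+` is the standard `NameTransformer` step, which `meth.name.decoded` in the compiler hunk above ultimately relies on. A minimal sketch (the object name is illustrative, not part of this patch):

```scala
import scala.reflect.NameTransformer

object DecodedNameDemo extends App {
  // Operator identifiers are stored in encoded form; decode reverses the mapping.
  println(NameTransformer.encode("+"))     // $plus
  println(NameTransformer.decode("$plus")) // +
}
```
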
+ val p = + + ^ +5 errors found diff --git a/test/files/neg/missing-arg-list.scala b/test/files/neg/missing-arg-list.scala index c422dd32fe6a..44b83e429d31 100644 --- a/test/files/neg/missing-arg-list.scala +++ b/test/files/neg/missing-arg-list.scala @@ -10,4 +10,7 @@ trait T { val x = f val y = g val z = h + + def +(i: Int) = i + 42 + val p = + } From 6e9faf5157132ad575c98e5a9a0919ac38b7beb8 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Wed, 27 Apr 2016 21:25:57 +0200 Subject: [PATCH 0091/2793] Right-bias Either - Add operations like map, flatMap which assume right-bias - Deprecate {Left,Right}Projection - Deprecate left and right in favor of swap - Add contains, toOption, toTry, toSeq and filterOrElse - toSeq returns collection.immutable.Seq instead of collection.Seq - Don't add get There are no incompatible changes. The only possibility of breakage that exists is when people have added extension methods named map, flatMap etc. to Either in the past doing something different than the methods added to Either now. One detail that moved the scales in favor of deprecating LeftProjection and RightProjection was the desire to have toSeq return scala.collection.immutable.Seq instead of scala.collection.Seq like LeftProjection and RightProjection do. Therefore keeping LeftProjection and RightProjection would introduce inconsistency. filter is called filterOrElse because filtering in a for-comprehension doesn't work if the method needs an explicit argument. contains was added as safer alternative to if (either.isRight && either.right.get == $something) ... While adding filter with an implicit zero value is possible, it's dangerous as it would require that developers add a "naked" implicit value of type A to their scope and it would close the door to a future in which the Scala standard library ships with Monoid and filter could exist with an implicit Monoid parameter. --- .../nsc/backend/jvm/BackendReporting.scala | 13 +- src/library/scala/util/Either.scala | 312 +++++++++++++----- test/files/presentation/doc/doc.scala | 6 +- test/files/presentation/t7678/Runner.scala | 2 +- test/files/scalacheck/CheckEither.scala | 70 +++- 5 files changed, 309 insertions(+), 94 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala index 4287c24dc806..fd558587e9e0 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala @@ -26,9 +26,7 @@ final class BackendReportingImpl(val global: Global) extends BackendReporting { /** * Utilities for error reporting. * - * Defines some tools to make error reporting with Either easier. Would be subsumed by a right-biased - * Either in the standard library (or scalaz \/) (Validation is different, it accumulates multiple - * errors). + * Defines some utility methods to make error reporting with Either easier. 
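
As a concrete illustration of the right-biasing described in the commit message above, here is a small usage sketch of the operations this patch adds directly to `Either` (the `parse` helper and the sample values are made up for the example):

```scala
object RightBiasedEitherDemo extends App {
  // Hypothetical parser used only for this sketch.
  def parse(s: String): Either[String, Int] =
    try Right(s.toInt)
    catch { case _: NumberFormatException => Left(s"not a number: $s") }

  // map and flatMap now act on the Right side directly; no .right projection needed.
  println(parse("21").map(_ * 2))                          // Right(42)
  println(parse("3").flatMap(a => parse("4").map(_ + a)))  // Right(7)

  // New helpers introduced by this commit.
  println(parse("42").contains(42))                        // true
  println(parse("oops").getOrElse(0))                      // 0
  println(parse("7").filterOrElse(_ % 2 == 0, "odd"))      // Left(odd)
  println(parse("5").toOption)                             // Some(5)
}
```

Because `map` and `flatMap` are now defined on `Either` itself, plain for-comprehensions over `Right` values also work without projections.
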
*/ object BackendReporting { def methodSignature(classInternalName: InternalName, name: String, desc: String) = { @@ -42,19 +40,12 @@ object BackendReporting { def assertionError(message: String): Nothing = throw new AssertionError(message) implicit class RightBiasedEither[A, B](val v: Either[A, B]) extends AnyVal { - def map[C](f: B => C): Either[A, C] = v.right.map(f) - def flatMap[C](f: B => Either[A, C]): Either[A, C] = v.right.flatMap(f) def withFilter(f: B => Boolean)(implicit empty: A): Either[A, B] = v match { case Left(_) => v case Right(e) => if (f(e)) v else Left(empty) // scalaz.\/ requires an implicit Monoid m to get m.empty } - def foreach[U](f: B => U): Unit = v.right.foreach(f) - def getOrElse[C >: B](alt: => C): C = v.right.getOrElse(alt) - - /** - * Get the value, fail with an assertion if this is an error. - */ + /** Get the value, fail with an assertion if this is an error. */ def get: B = { assert(v.isRight, v.left.get) v.right.get diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala index 01da0c1ef2bd..6da39692c51b 100644 --- a/src/library/scala/util/Either.scala +++ b/src/library/scala/util/Either.scala @@ -32,12 +32,21 @@ package util * Left(in) * } * - * println( result match { - * case Right(x) => "You passed me the Int: " + x + ", which I will increment. " + x + " + 1 = " + (x+1) - * case Left(x) => "You passed me the String: " + x + * println(result match { + * case Right(x) => s"You passed me the Int: $x, which I will increment. $x + 1 = ${x+1}" + * case Left(x) => s"You passed me the String: $x" * }) * }}} * + * Either is right-biased, which means that Right is assumed to be the default case to + * operate on. If it is Left, operations like map, flatMap, ... return the Left value + * unchanged: + * + * {{{ + * Right(12).map(_ * 2) // Right(24) + * Left(23).map(_ * 2) // Left(23) + * }}} + * * A ''projection'' can be used to selectively operate on a value of type Either, * depending on whether it is of type Left or Right. For example, to transform an * Either using a function, in the case where it's a Left, one can first apply @@ -70,11 +79,13 @@ sealed abstract class Either[+A, +B] extends Product with Serializable { /** * Projects this `Either` as a `Left`. */ + @deprecated("use swap instead", "2.12.0") def left = Either.LeftProjection(this) /** * Projects this `Either` as a `Right`. 
*/ + @deprecated("Either is now right-biased", "2.12.0") def right = Either.RightProjection(this) /** @@ -83,8 +94,8 @@ sealed abstract class Either[+A, +B] extends Product with Serializable { * @example {{{ * val result: Either[Exception, Value] = possiblyFailingOperation() * log(result.fold( - * ex => "Operation failed with " + ex, - * v => "Operation produced value: " + v + * ex => s"Operation failed with $ex", + * v => s"Operation produced value: $v" * )) * }}} * @@ -92,9 +103,9 @@ sealed abstract class Either[+A, +B] extends Product with Serializable { * @param fb the function to apply if this is a `Right` * @return the results of applying the function */ - def fold[X](fa: A => X, fb: B => X) = this match { - case Left(a) => fa(a) + def fold[C](fa: A => C, fb: B => C): C = this match { case Right(b) => fb(b) + case Left(a) => fa(a) } /** @@ -105,8 +116,8 @@ sealed abstract class Either[+A, +B] extends Product with Serializable { * val r: Either[Int, String] = l.swap // Result: Right("left") * }}} */ - def swap = this match { - case Left(a) => Right(a) + def swap: Either[B, A] = this match { + case Left(a) => Right(a) case Right(b) => Left(b) } @@ -130,8 +141,9 @@ sealed abstract class Either[+A, +B] extends Product with Serializable { * This method, and `joinLeft`, are analogous to `Option#flatten` */ def joinRight[A1 >: A, B1 >: B, C](implicit ev: B1 <:< Either[A1, C]): Either[A1, C] = this match { - case Left(a) => Left(a) case Right(b) => b + case Left(a) => this.asInstanceOf[Either[A1, C]] + } /** @@ -155,7 +167,155 @@ sealed abstract class Either[+A, +B] extends Product with Serializable { */ def joinLeft[A1 >: A, B1 >: B, C](implicit ev: A1 <:< Either[C, B1]): Either[C, B1] = this match { case Left(a) => a - case Right(b) => Right(b) + case Right(b) => this.asInstanceOf[Either[C, B1]] + } + + /** + * Executes the given side-effecting function if this is a `Right`. + * + * {{{ + * Right(12).foreach(x => println(x)) // prints "12" + * Left(12).foreach(x => println(x)) // doesn't print + * }}} + * @param f The side-effecting function to execute. + */ + def foreach[U](f: B => U): Unit = this match { + case Right(b) => f(b) + case Left(_) => + } + + /** + * Returns the value from this `Right` or the given argument if this is a `Left`. + * + * {{{ + * Right(12).getOrElse(17) // 12 + * Left(12).getOrElse(17) // 17 + * }}} + */ + def getOrElse[BB >: B](or: => BB): BB = this match { + case Right(b) => b + case Left(_) => or + } + + /** Returns `true` if this is a `Right` and its value is equal to `elem` (as determined by `==`), + * returns `false` otherwise. + * + * {{{ + * // Returns true because value of Right is "something" which equals "something". + * Right("something") contains "something" + * + * // Returns false because value of Right is "something" which does not equal "anything". + * Right("something") contains "anything" + * + * // Returns false because there is no value for Right. + * Left("something") contains "something" + * }}} + * + * @param elem the element to test. + * @return `true` if the option has an element that is equal (as determined by `==`) to `elem`, `false` otherwise. + */ + final def contains[AA >: A](elem: AA): Boolean = this match { + case Right(b) => b == elem + case Left(_) => false + } + + /** + * Returns `true` if `Left` or returns the result of the application of + * the given function to the `Right` value. 
+ * + * {{{ + * Right(12).forall(_ > 10) // true + * Right(7).forall(_ > 10) // false + * Left(12).forall(_ > 10) // true + * }}} + */ + def forall(f: B => Boolean): Boolean = this match { + case Right(b) => f(b) + case Left(_) => true + } + + /** + * Returns `false` if `Left` or returns the result of the application of + * the given function to the `Right` value. + * + * {{{ + * Right(12).exists(_ > 10) // true + * Right(7).exists(_ > 10) // false + * Left(12).exists(_ > 10) // false + * }}} + */ + def exists(p: B => Boolean): Boolean = this match { + case Right(b) => p(b) + case Left(_) => false + } + + /** + * Binds the given function across `Right`. + * + * @param f The function to bind across `Right`. + */ + def flatMap[AA >: A, Y](f: B => Either[AA, Y]): Either[AA, Y] = this match { + case Right(b) => f(b) + case Left(a) => this.asInstanceOf[Either[AA, Y]] + } + + /** + * The given function is applied if this is a `Right`. + * + * {{{ + * Right(12).map(x => "flower") // Result: Right("flower") + * Left(12).map(x => "flower") // Result: Left(12) + * }}} + */ + def map[Y](f: B => Y): Either[A, Y] = this match { + case Right(b) => Right(f(b)) + case Left(a) => this.asInstanceOf[Either[A, Y]] + } + + /** Returns `Right` with the existing value of `Right` if this is a `Right` and the given predicate `p` holds for the right value, + * returns `Left(zero)` if this is a `Right` and the given predicate `p` does not hold for the right value, + * returns `Left` with the existing value of `Left` if this is a `Left`. + * + * {{{ + * Right(12).filterOrElse(_ > 10, -1) // Right(12) + * Right(7).filterOrElse(_ > 10, -1) // Left(-1) + * Left(12).filterOrElse(_ > 10, -1) // Left(12) + * }}} + */ + def filterOrElse[AA >: A](p: B => Boolean, zero: => AA): Either[AA, B] = this match { + case Right(b) => if (p(b)) this else Left(zero) + case Left(a) => this + } + + /** Returns a `Seq` containing the `Right` value if + * it exists or an empty `Seq` if this is a `Left`. + * + * {{{ + * Right(12).toSeq // Seq(12) + * Left(12).toSeq // Seq() + * }}} + */ + def toSeq: collection.immutable.Seq[B] = this match { + case Right(b) => collection.immutable.Seq(b) + case Left(_) => collection.immutable.Seq.empty + } + + /** Returns a `Some` containing the `Right` value + * if it exists or a `None` if this is a `Left`. + * + * {{{ + * Right(12).toOption // Some(12) + * Left(12).toOption // None + * }}} + */ + def toOption: Option[B] = this match { + case Right(b) => Some(b) + case Left(_) => None + } + + def toTry(implicit ev: A <:< Throwable): Try[B] = this match { + case Right(b) => Success(b) + case Left(a) => Failure(a) } /** @@ -186,7 +346,7 @@ sealed abstract class Either[+A, +B] extends Product with Serializable { * @version 1.0, 11/10/2008 */ final case class Left[+A, +B](a: A) extends Either[A, B] { - def isLeft = true + def isLeft = true def isRight = false } @@ -197,12 +357,26 @@ final case class Left[+A, +B](a: A) extends Either[A, B] { * @version 1.0, 11/10/2008 */ final case class Right[+A, +B](b: B) extends Either[A, B] { - def isLeft = false + def isLeft = false def isRight = true } object Either { + /** If the condition is satisfied, return the given `B` in `Right`, + * otherwise, return the given `A` in `Left`. + * + * {{{ + * val userInput: String = ... 
+ * Either.cond( + * userInput.forall(_.isDigit) && userInput.size == 10, + * PhoneNumber(userInput), + * "The input (%s) does not look like a phone number".format(userInput) + * }}} + */ + def cond[X, Y](test: Boolean, right: => Y, left: => X): Either[X, Y] = + if (test) Right(right) else Left(left) + /** * Allows use of a `merge` method to extract values from Either instances * regardless of whether they are Left or Right. @@ -216,8 +390,8 @@ object Either { */ implicit class MergeableEither[A](private val x: Either[A, A]) extends AnyVal { def merge: A = x match { - case Left(a) => a case Right(a) => a + case Left(a) => a } } @@ -250,7 +424,7 @@ object Either { * }}} * * {{{ - * // using Either + * // using Either * def interactWithDB(x: Query): Either[Exception, Result] = * try { * Right(getResultFromDatabase(x)) @@ -270,6 +444,7 @@ object Either { * @author Tony Morris, Workingmouse * @version 1.0, 11/10/2008 */ + @deprecated("use swap instead", "2.12.0") final case class LeftProjection[+A, +B](e: Either[A, B]) { /** * Returns the value from this `Left` or throws `java.util.NoSuchElementException` @@ -282,9 +457,9 @@ object Either { * * @throws java.util.NoSuchElementException if the projection is [[scala.util.Right]] */ - def get = e match { - case Left(a) => a - case Right(_) => throw new NoSuchElementException("Either.left.value on Right") + def get: A = e match { + case Left(a) => a + case Right(_) => throw new NoSuchElementException("Either.left.get on Right") } /** @@ -296,14 +471,13 @@ object Either { * }}} * @param f The side-effecting function to execute. */ - def foreach[U](f: A => U) = e match { - case Left(a) => f(a) - case Right(_) => {} + def foreach[U](f: A => U): Unit = e match { + case Left(a) => f(a) + case Right(_) => } /** - * Returns the value from this `Left` or the given argument if this is a - * `Right`. + * Returns the value from this `Left` or the given argument if this is a `Right`. * * {{{ * Left(12).left.getOrElse(17) // 12 @@ -311,8 +485,8 @@ object Either { * }}} * */ - def getOrElse[AA >: A](or: => AA) = e match { - case Left(a) => a + def getOrElse[AA >: A](or: => AA): AA = e match { + case Left(a) => a case Right(_) => or } @@ -327,8 +501,8 @@ object Either { * }}} * */ - def forall(@deprecatedName('f) p: A => Boolean) = e match { - case Left(a) => p(a) + def forall(@deprecatedName('f) p: A => Boolean): Boolean = e match { + case Left(a) => p(a) case Right(_) => true } @@ -343,8 +517,8 @@ object Either { * }}} * */ - def exists(@deprecatedName('f) p: A => Boolean) = e match { - case Left(a) => p(a) + def exists(@deprecatedName('f) p: A => Boolean): Boolean = e match { + case Left(a) => p(a) case Right(_) => false } @@ -357,9 +531,9 @@ object Either { * }}} * @param f The function to bind across `Left`. 
*/ - def flatMap[BB >: B, X](f: A => Either[X, BB]) = e match { - case Left(a) => f(a) - case Right(b) => Right(b) + def flatMap[BB >: B, X](f: A => Either[X, BB]): Either[X, BB] = e match { + case Left(a) => f(a) + case Right(b) => e.asInstanceOf[Either[X, BB]] } /** @@ -370,9 +544,9 @@ object Either { * Right[Int, Int](12).left.map(_ + 2) // Right(12) * }}} */ - def map[X](f: A => X) = e match { - case Left(a) => Left(f(a)) - case Right(b) => Right(b) + def map[X](f: A => X): Either[X, B] = e match { + case Left(a) => Left(f(a)) + case Right(b) => e.asInstanceOf[Either[X, B]] } /** @@ -386,7 +560,7 @@ object Either { * }}} */ def filter[Y](p: A => Boolean): Option[Either[A, Y]] = e match { - case Left(a) => if(p(a)) Some(Left(a)) else None + case Left(a) => if(p(a)) Some(Left(a)) else None case Right(b) => None } @@ -399,8 +573,8 @@ object Either { * Right(12).left.toSeq // Seq() * }}} */ - def toSeq = e match { - case Left(a) => Seq(a) + def toSeq: Seq[A] = e match { + case Left(a) => Seq(a) case Right(_) => Seq.empty } @@ -413,8 +587,8 @@ object Either { * Right(12).left.toOption // None * }}} */ - def toOption = e match { - case Left(a) => Some(a) + def toOption: Option[A] = e match { + case Left(a) => Some(a) case Right(_) => None } } @@ -434,6 +608,7 @@ object Either { * @author Tony Morris, Workingmouse * @version 1.0, 11/10/2008 */ + @deprecated("Either is now right-biased", "2.12.0") final case class RightProjection[+A, +B](e: Either[A, B]) { /** @@ -447,9 +622,9 @@ object Either { * * @throws java.util.NoSuchElementException if the projection is `Left`. */ - def get = e match { - case Left(_) => throw new NoSuchElementException("Either.right.value on Left") - case Right(a) => a + def get: B = e match { + case Right(b) => b + case Left(_) => throw new NoSuchElementException("Either.right.get on Left") } /** @@ -461,23 +636,22 @@ object Either { * }}} * @param f The side-effecting function to execute. */ - def foreach[U](f: B => U) = e match { - case Left(_) => {} + def foreach[U](f: B => U): Unit = e match { case Right(b) => f(b) + case Left(_) => } /** - * Returns the value from this `Right` or the given argument if this is a - * `Left`. + * Returns the value from this `Right` or the given argument if this is a `Left`. * * {{{ * Right(12).right.getOrElse(17) // 12 * Left(12).right.getOrElse(17) // 17 * }}} */ - def getOrElse[BB >: B](or: => BB) = e match { - case Left(_) => or + def getOrElse[BB >: B](or: => BB): BB = e match { case Right(b) => b + case Left(_) => or } /** @@ -490,9 +664,9 @@ object Either { * Left(12).right.forall(_ > 10) // true * }}} */ - def forall(f: B => Boolean) = e match { - case Left(_) => true + def forall(f: B => Boolean): Boolean = e match { case Right(b) => f(b) + case Left(_) => true } /** @@ -505,9 +679,9 @@ object Either { * Left(12).right.exists(_ > 10) // false * }}} */ - def exists(@deprecatedName('f) p: B => Boolean) = e match { - case Left(_) => false + def exists(@deprecatedName('f) p: B => Boolean): Boolean = e match { case Right(b) => p(b) + case Left(_) => false } /** @@ -515,9 +689,9 @@ object Either { * * @param f The function to bind across `Right`. 
*/ - def flatMap[AA >: A, Y](f: B => Either[AA, Y]) = e match { - case Left(a) => Left(a) + def flatMap[AA >: A, Y](f: B => Either[AA, Y]): Either[AA, Y] = e match { case Right(b) => f(b) + case Left(a) => e.asInstanceOf[Either[AA, Y]] } /** @@ -528,9 +702,9 @@ object Either { * Left(12).right.map(x => "flower") // Result: Left(12) * }}} */ - def map[Y](f: B => Y) = e match { - case Left(a) => Left(a) + def map[Y](f: B => Y): Either[A, Y] = e match { case Right(b) => Right(f(b)) + case Left(a) => e.asInstanceOf[Either[A, Y]] } /** Returns `None` if this is a `Left` or if the @@ -544,8 +718,8 @@ object Either { * }}} */ def filter[X](p: B => Boolean): Option[Either[X, B]] = e match { - case Left(_) => None case Right(b) => if(p(b)) Some(Right(b)) else None + case Left(_) => None } /** Returns a `Seq` containing the `Right` value if @@ -556,9 +730,9 @@ object Either { * Left(12).right.toSeq // Seq() * }}} */ - def toSeq = e match { - case Left(_) => Seq.empty + def toSeq: Seq[B] = e match { case Right(b) => Seq(b) + case Left(_) => Seq.empty } /** Returns a `Some` containing the `Right` value @@ -569,23 +743,9 @@ object Either { * Left(12).right.toOption // None * }}} */ - def toOption = e match { - case Left(_) => None + def toOption: Option[B] = e match { case Right(b) => Some(b) + case Left(_) => None } } - - /** If the condition is satisfied, return the given `B` in `Right`, - * otherwise, return the given `A` in `Left`. - * - * {{{ - * val userInput: String = ... - * Either.cond( - * userInput.forall(_.isDigit) && userInput.size == 10, - * PhoneNumber(userInput), - * "The input (%s) does not look like a phone number".format(userInput) - * }}} - */ - def cond[A, B](test: Boolean, right: => B, left: => A): Either[A, B] = - if (test) Right(right) else Left(left) } diff --git a/test/files/presentation/doc/doc.scala b/test/files/presentation/doc/doc.scala index ce431910ee94..8c60af557b96 100644 --- a/test/files/presentation/doc/doc.scala +++ b/test/files/presentation/doc/doc.scala @@ -62,7 +62,7 @@ object Test extends InteractiveTest { def getComment(sym: Symbol, source: SourceFile, fragments: List[(Symbol,SourceFile)]): Option[Comment] = { val docResponse = new Response[(String, String, Position)] askDocComment(sym, source, sym.owner, fragments, docResponse) - docResponse.get.left.toOption flatMap { + docResponse.get.swap.toOption flatMap { case (expanded, raw, pos) => if (expanded.isEmpty) None @@ -85,13 +85,13 @@ object Test extends InteractiveTest { val batch = new BatchSourceFile(source.file, newText.toCharArray) val reloadResponse = new Response[Unit] compiler.askReload(List(batch), reloadResponse) - reloadResponse.get.left.toOption match { + reloadResponse.get.swap.toOption match { case None => println("Couldn't reload") case Some(_) => val parseResponse = new Response[Tree] askParsedEntered(batch, true, parseResponse) - parseResponse.get.left.toOption match { + parseResponse.get.swap.toOption match { case None => println("Couldn't parse") case Some(_) => diff --git a/test/files/presentation/t7678/Runner.scala b/test/files/presentation/t7678/Runner.scala index 14d6dc2a7051..c6736a65b020 100644 --- a/test/files/presentation/t7678/Runner.scala +++ b/test/files/presentation/t7678/Runner.scala @@ -7,7 +7,7 @@ object Test extends InteractiveTest { override def runDefaultTests() { def resolveTypeTagHyperlink() { - val sym = compiler.askForResponse(() => compiler.currentRun.runDefinitions.TypeTagClass).get.left.get + val sym = compiler.askForResponse(() => 
compiler.currentRun.runDefinitions.TypeTagClass).get.swap.getOrElse(???) val r = new Response[Position] compiler.askLinkPos(sym, new BatchSourceFile("", source), r) r.get diff --git a/test/files/scalacheck/CheckEither.scala b/test/files/scalacheck/CheckEither.scala index 48f732a22d6e..f0ec79704544 100644 --- a/test/files/scalacheck/CheckEither.scala +++ b/test/files/scalacheck/CheckEither.scala @@ -132,6 +132,58 @@ object Test extends Properties("Either") { case Right(a) => a })) + val prop_getOrElse = forAll((e: Either[Int, Int], or: Int) => e.getOrElse(or) == (e match { + case Left(_) => or + case Right(b) => b + })) + + val prop_contains = forAll((e: Either[Int, Int], n: Int) => + e.contains(n) == (e.isRight && e.right.get == n)) + + val prop_forall = forAll((e: Either[Int, Int]) => + e.forall(_ % 2 == 0) == (e.isLeft || e.right.get % 2 == 0)) + + val prop_exists = forAll((e: Either[Int, Int]) => + e.exists(_ % 2 == 0) == (e.isRight && e.right.get % 2 == 0)) + + val prop_flatMapLeftIdentity = forAll((e: Either[Int, Int], n: Int, s: String) => { + def f(x: Int) = if(x % 2 == 0) Left(s) else Right(s) + Right(n).flatMap(f(_)) == f(n)}) + + val prop_flatMapRightIdentity = forAll((e: Either[Int, Int]) => e.flatMap(Right(_)) == e) + + val prop_flatMapComposition = forAll((e: Either[Int, Int]) => { + def f(x: Int) = if(x % 2 == 0) Left(x) else Right(x) + def g(x: Int) = if(x % 7 == 0) Right(x) else Left(x) + e.flatMap(f(_)).flatMap(g(_)) == e.flatMap(f(_).flatMap(g(_)))}) + + val prop_mapIdentity = forAll((e: Either[Int, Int]) => e.map(x => x) == e) + + val prop_mapComposition = forAll((e: Either[Int, String]) => { + def f(s: String) = s.toLowerCase + def g(s: String) = s.reverse + e.map(x => f(g(x))) == e.map(x => g(x)).map(f(_))}) + + val prop_filterOrElse = forAll((e: Either[Int, Int], x: Int) => e.filterOrElse(_ % 2 == 0, -x) == + (if(e.isLeft) e + else if(e.right.get % 2 == 0) e + else Left(-x))) + + val prop_seq = forAll((e: Either[Int, Int]) => e.toSeq == (e match { + case Left(_) => collection.immutable.Seq.empty + case Right(b) => collection.immutable.Seq(b) + })) + + val prop_option = forAll((e: Either[Int, Int]) => e.toOption == (e match { + case Left(_) => None + case Right(b) => Some(b) + })) + + val prop_try = forAll((e: Either[Throwable, Int]) => e.toTry == (e match { + case Left(a) => util.Failure(a) + case Right(b) => util.Success(b) + })) + /** Hard to believe I'm "fixing" a test to reflect B before A ... 
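
For intuition on the argument order the property below checks: `Either.cond` takes the `Right` value as its second argument and the `Left` value as its third. A tiny sketch with made-up values:

```scala
object CondDemo extends App {
  println(Either.cond(true,  "ok", -1))  // Right(ok)
  println(Either.cond(false, "ok", -1))  // Left(-1)
}
```
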
*/ val prop_Either_cond = forAll((c: Boolean, a: Int, b: Int) => Either.cond(c, a, b) == (if(c) Right(a) else Left(b))) @@ -169,9 +221,21 @@ object Test extends Properties("Either") { ("prop_Either_right", prop_Either_right), ("prop_Either_joinLeft", prop_Either_joinLeft), ("prop_Either_joinRight", prop_Either_joinRight), - ("prop_Either_reduce", prop_Either_reduce), - ("prop_Either_cond", prop_Either_cond) - ) + ("prop_Either_reduce", prop_Either_reduce), + ("prop_getOrElse", prop_getOrElse), + ("prop_contains", prop_contains), + ("prop_forall", prop_forall), + ("prop_exists", prop_exists), + ("prop_flatMapLeftIdentity", prop_flatMapLeftIdentity), + ("prop_flatMapRightIdentity", prop_flatMapRightIdentity), + ("prop_flatMapComposition", prop_flatMapComposition), + ("prop_mapIdentity", prop_mapIdentity), + ("prop_mapComposition", prop_mapComposition), + ("prop_filterOrElse", prop_filterOrElse), + ("prop_seq", prop_seq), + ("prop_option", prop_option), + ("prop_try", prop_try), + ("prop_Either_cond", prop_Either_cond)) for ((label, prop) <- tests) { property(label) = prop From 981e3c51d5040ea808b5f699718db55241ee42cf Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Sat, 2 Apr 2016 12:40:25 +0200 Subject: [PATCH 0092/2793] SI-9483 Add `since` to `@deprecatedName` --- src/library/scala/deprecatedName.scala | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/library/scala/deprecatedName.scala b/src/library/scala/deprecatedName.scala index a0d3aa829b28..da8b76efc90f 100644 --- a/src/library/scala/deprecatedName.scala +++ b/src/library/scala/deprecatedName.scala @@ -29,6 +29,4 @@ import scala.annotation.meta._ * @since 2.8.1 */ @param -class deprecatedName(name: Symbol) extends scala.annotation.StaticAnnotation { - def this() = this(Symbol("")) -} +class deprecatedName(name: Symbol = Symbol(""), since: String = "") extends scala.annotation.StaticAnnotation From 673350e08af72454fe9df87ae7f3292893e44d3c Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Sat, 2 Apr 2016 13:38:15 +0200 Subject: [PATCH 0093/2793] SI-9084 Add `since` (if available) to deprecation warnings --- src/compiler/scala/tools/nsc/Reporting.scala | 5 ++-- .../tools/nsc/typechecker/Adaptations.scala | 2 +- .../tools/nsc/typechecker/NamesDefaults.scala | 5 ++-- .../tools/nsc/typechecker/RefChecks.scala | 7 +++-- .../scala/tools/nsc/typechecker/Typers.scala | 7 +++-- .../scala/collection/convert/package.scala | 12 ++++---- .../scala/reflect/internal/Symbols.scala | 27 ++++++++++-------- test/files/jvm/t8582.check | 2 +- test/files/neg/beanInfoDeprecation.check | 2 +- .../neg/classmanifests_new_deprecations.check | 16 +++++------ test/files/neg/delayed-init-ref.check | 2 +- test/files/neg/names-defaults-neg-warn.check | 4 +-- test/files/neg/names-defaults-neg.check | 6 ++-- test/files/neg/t4851.check | 8 +++--- test/files/neg/t6120.check | 4 +-- test/files/neg/t6162-inheritance.check | 2 +- test/files/neg/t6162-overriding.check | 2 +- test/files/neg/t6406-regextract.check | 2 +- test/files/neg/t7294b.check | 2 +- test/files/neg/t7783.check | 10 +++---- test/files/neg/t8035-deprecated.check | 6 ++-- test/files/neg/t8685.check | 28 +++++++++---------- test/files/neg/t9684.check | 4 +-- test/files/run/t3235-minimal.check | 8 +++--- test/files/run/t4542.check | 2 +- test/files/run/t4594-repl-settings.scala | 2 +- test/files/run/toolbox_console_reporter.check | 4 +-- test/files/run/toolbox_silent_reporter.check | 2 +- 28 files changed, 96 insertions(+), 87 deletions(-) diff --git 
a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala index 8d0aedc76db8..325537a5a8d9 100644 --- a/src/compiler/scala/tools/nsc/Reporting.scala +++ b/src/compiler/scala/tools/nsc/Reporting.scala @@ -68,8 +68,9 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w // behold! the symbol that caused the deprecation warning (may not be deprecated itself) def deprecationWarning(pos: Position, sym: Symbol, msg: String): Unit = _deprecationWarnings.warn(pos, msg) def deprecationWarning(pos: Position, sym: Symbol): Unit = { - val suffix = sym.deprecationMessage match { case Some(msg) => ": "+ msg case _ => "" } - deprecationWarning(pos, sym, s"$sym${sym.locationString} is deprecated$suffix") + val version = sym.deprecationVersion match { case Some(ver) => s" (since $ver)" case _ => "" } + val message = sym.deprecationMessage match { case Some(msg) => s": $msg" case _ => "" } + deprecationWarning(pos, sym, s"$sym${sym.locationString} is deprecated$version$message") } private[this] var reportedFeature = Set[Symbol]() diff --git a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala index 2f4d2283476a..46561de78f13 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala @@ -74,7 +74,7 @@ trait Adaptations { if (settings.future) context.error(t.pos, adaptWarningMessage("Adaptation of argument list by inserting () has been removed.", showAdaptation = false)) else { - val msg = "Adaptation of argument list by inserting () has been deprecated: " + ( + val msg = "Adaptation of argument list by inserting () is deprecated: " + ( if (isLeakyTarget) "leaky (Object-receiving) target makes this especially dangerous." else "this is unlikely to be what you want.") context.deprecationWarning(t.pos, t.symbol, adaptWarningMessage(msg)) diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 5062289ed1f3..063cfd38054e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -565,14 +565,15 @@ trait NamesDefaults { self: Analyzer => case Some(`name`) => true case Some(nme.NO_NAME) => anonOK } + def since = param.deprecatedParamVersion.map(ver => s" (since $ver)").getOrElse("") def checkName = { val res = param.name == name - if (res && checkDeprecation(true)) warn(s"naming parameter $name has been deprecated.") + if (res && checkDeprecation(true)) warn(s"naming parameter $name is deprecated$since.") res } def checkAltName = { val res = checkDeprecation(false) - if (res) warn(s"the parameter name $name has been deprecated. 
Use ${param.name} instead.") + if (res) warn(s"the parameter name $name is deprecated$since: Use ${param.name} instead") res } !param.isSynthetic && (checkName || checkAltName) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index da269168ec61..3aea64a1f277 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -548,9 +548,10 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans def checkOverrideDeprecated() { if (other.hasDeprecatedOverridingAnnotation && !member.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) { - val suffix = other.deprecatedOverridingMessage map (": " + _) getOrElse "" - val msg = s"overriding ${other.fullLocationString} is deprecated$suffix" - currentRun.reporting.deprecationWarning(member.pos, other, msg) + val version = other.deprecatedOverridingVersion map (ver => s" (since $ver)") getOrElse "" + val message = other.deprecatedOverridingMessage map (msg => s": $msg") getOrElse "" + val report = s"overriding ${other.fullLocationString} is deprecated$version$message" + currentRun.reporting.deprecationWarning(member.pos, other, report) } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 1aed9c3a64cf..d44a0eaf59fd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1704,9 +1704,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!isPastTyper && psym.hasDeprecatedInheritanceAnnotation && !sameSourceFile && !context.owner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) { - val suffix = psym.deprecatedInheritanceMessage map (": " + _) getOrElse "" - val msg = s"inheritance from ${psym.fullLocationString} is deprecated$suffix" - context.deprecationWarning(parent.pos, psym, msg) + val version = psym.deprecatedInheritanceVersion map (ver => s" (since $ver)") getOrElse "" + val message = psym.deprecatedInheritanceMessage map (msg => s": $msg") getOrElse "" + val report = s"inheritance from ${psym.fullLocationString} is deprecated$version$message" + context.deprecationWarning(parent.pos, psym, report) } if (psym.isSealed && !phase.erasedTypes) diff --git a/src/library/scala/collection/convert/package.scala b/src/library/scala/collection/convert/package.scala index 7f48023b58b6..fe1951b6cf70 100644 --- a/src/library/scala/collection/convert/package.scala +++ b/src/library/scala/collection/convert/package.scala @@ -10,17 +10,17 @@ package scala package collection package object convert { - @deprecated("Use JavaConverters", since="2.12") + @deprecated("use JavaConverters", since="2.12") val decorateAsJava = new DecorateAsJava { } - @deprecated("Use JavaConverters", since="2.12") + @deprecated("use JavaConverters", since="2.12") val decorateAsScala = new DecorateAsScala { } - @deprecated("Use JavaConverters", since="2.12") + @deprecated("use JavaConverters", since="2.12") val decorateAll = JavaConverters - @deprecated("Use JavaConverters or consider ImplicitConversionsToJava", since="2.12") + @deprecated("use JavaConverters or consider ImplicitConversionsToJava", since="2.12") val wrapAsJava = new WrapAsJava { } - @deprecated("Use JavaConverters or consider ImplicitConversionsToScala", since="2.12") + @deprecated("use JavaConverters or consider 
ImplicitConversionsToScala", since="2.12") val wrapAsScala = new WrapAsScala { } - @deprecated("Use JavaConverters or consider ImplicitConversions", since="2.12") + @deprecated("use JavaConverters or consider ImplicitConversions", since="2.12") val wrapAll = new WrapAsJava with WrapAsScala { } } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index ed514143822d..8f24b435b3c1 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -871,21 +871,26 @@ trait Symbols extends api.Symbols { self: SymbolTable => ) } - def isStrictFP = hasAnnotation(ScalaStrictFPAttr) || (enclClass hasAnnotation ScalaStrictFPAttr) - def isSerializable = info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass) - def hasBridgeAnnotation = hasAnnotation(BridgeClass) - def isDeprecated = hasAnnotation(DeprecatedAttr) - def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0) - def deprecationVersion = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 1) - def deprecatedParamName = getAnnotation(DeprecatedNameAttr) flatMap (_ symbolArg 0 orElse Some(nme.NO_NAME)) + def isStrictFP = hasAnnotation(ScalaStrictFPAttr) || (enclClass hasAnnotation ScalaStrictFPAttr) + def isSerializable = info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass) + def hasBridgeAnnotation = hasAnnotation(BridgeClass) + def isDeprecated = hasAnnotation(DeprecatedAttr) + def deprecationMessage = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 0) + def deprecationVersion = getAnnotation(DeprecatedAttr) flatMap (_ stringArg 1) + def deprecatedParamName = getAnnotation(DeprecatedNameAttr) flatMap (_ symbolArg 0 orElse Some(nme.NO_NAME)) + def deprecatedParamVersion = getAnnotation(DeprecatedNameAttr) flatMap (_ stringArg 1) def hasDeprecatedInheritanceAnnotation - = hasAnnotation(DeprecatedInheritanceAttr) + = hasAnnotation(DeprecatedInheritanceAttr) def deprecatedInheritanceMessage - = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 0) + = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 0) + def deprecatedInheritanceVersion + = getAnnotation(DeprecatedInheritanceAttr) flatMap (_ stringArg 1) def hasDeprecatedOverridingAnnotation - = hasAnnotation(DeprecatedOverridingAttr) + = hasAnnotation(DeprecatedOverridingAttr) def deprecatedOverridingMessage - = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 0) + = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 0) + def deprecatedOverridingVersion + = getAnnotation(DeprecatedOverridingAttr) flatMap (_ stringArg 1) // !!! 
when annotation arguments are not literal strings, but any sort of // assembly of strings, there is a fair chance they will turn up here not as diff --git a/test/files/jvm/t8582.check b/test/files/jvm/t8582.check index 0e4da90398e0..0a23cb0c9385 100644 --- a/test/files/jvm/t8582.check +++ b/test/files/jvm/t8582.check @@ -1,4 +1,4 @@ -t8582.scala:17: warning: class BeanInfo in package beans is deprecated: the generation of BeanInfo classes is no longer supported +t8582.scala:17: warning: class BeanInfo in package beans is deprecated (since 2.12.0): the generation of BeanInfo classes is no longer supported class C1 ^ getClass on module gives module class diff --git a/test/files/neg/beanInfoDeprecation.check b/test/files/neg/beanInfoDeprecation.check index 788b277818f0..a91cdabae2ac 100644 --- a/test/files/neg/beanInfoDeprecation.check +++ b/test/files/neg/beanInfoDeprecation.check @@ -1,4 +1,4 @@ -beanInfoDeprecation.scala:2: warning: class BeanInfo in package beans is deprecated: the generation of BeanInfo classes is no longer supported +beanInfoDeprecation.scala:2: warning: class BeanInfo in package beans is deprecated (since 2.12.0): the generation of BeanInfo classes is no longer supported class C ^ error: No warnings can be incurred under -Xfatal-warnings. diff --git a/test/files/neg/classmanifests_new_deprecations.check b/test/files/neg/classmanifests_new_deprecations.check index fd1e2728c31b..1b63303fd4c8 100644 --- a/test/files/neg/classmanifests_new_deprecations.check +++ b/test/files/neg/classmanifests_new_deprecations.check @@ -1,25 +1,25 @@ -classmanifests_new_deprecations.scala:2: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead +classmanifests_new_deprecations.scala:2: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): Use `scala.reflect.ClassTag` instead def cm1[T: ClassManifest] = ??? ^ -classmanifests_new_deprecations.scala:3: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead +classmanifests_new_deprecations.scala:3: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): Use `scala.reflect.ClassTag` instead def cm2[T](implicit evidence$1: ClassManifest[T]) = ??? ^ -classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead +classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): Use `scala.reflect.ClassTag` instead val cm3: ClassManifest[Int] = null ^ -classmanifests_new_deprecations.scala:6: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:6: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): Use scala.reflect.ClassTag instead def rcm1[T: scala.reflect.ClassManifest] = ??? ^ -classmanifests_new_deprecations.scala:7: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:7: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): Use scala.reflect.ClassTag instead def rcm2[T](implicit evidence$1: scala.reflect.ClassManifest[T]) = ??? 
^ -classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): Use scala.reflect.ClassTag instead val rcm3: scala.reflect.ClassManifest[Int] = null ^ -classmanifests_new_deprecations.scala:10: warning: type ClassManifest in object Predef is deprecated: Use `scala.reflect.ClassTag` instead +classmanifests_new_deprecations.scala:10: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): Use `scala.reflect.ClassTag` instead type CM[T] = ClassManifest[T] ^ -classmanifests_new_deprecations.scala:15: warning: type ClassManifest in package reflect is deprecated: Use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:15: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): Use scala.reflect.ClassTag instead type RCM[T] = scala.reflect.ClassManifest[T] ^ error: No warnings can be incurred under -Xfatal-warnings. diff --git a/test/files/neg/delayed-init-ref.check b/test/files/neg/delayed-init-ref.check index 90bc02796940..1b6ba5c3641f 100644 --- a/test/files/neg/delayed-init-ref.check +++ b/test/files/neg/delayed-init-ref.check @@ -4,7 +4,7 @@ delayed-init-ref.scala:17: warning: Selecting value vall from object O, which ex delayed-init-ref.scala:19: warning: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value println(vall) // warn ^ -delayed-init-ref.scala:28: warning: trait DelayedInit in package scala is deprecated: DelayedInit semantics can be surprising. Support for `App` will continue. +delayed-init-ref.scala:28: warning: trait DelayedInit in package scala is deprecated (since 2.11.0): DelayedInit semantics can be surprising. Support for `App` will continue. See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0-RC1 trait Before extends DelayedInit { ^ diff --git a/test/files/neg/names-defaults-neg-warn.check b/test/files/neg/names-defaults-neg-warn.check index 0f4edef84e55..14a1e50e6892 100644 --- a/test/files/neg/names-defaults-neg-warn.check +++ b/test/files/neg/names-defaults-neg-warn.check @@ -1,7 +1,7 @@ -names-defaults-neg-warn.scala:11: warning: the parameter name s has been deprecated. Use x instead. +names-defaults-neg-warn.scala:11: warning: the parameter name s is deprecated: Use x instead deprNam2.f(s = "dlfkj") ^ -names-defaults-neg-warn.scala:12: warning: the parameter name x has been deprecated. Use s instead. +names-defaults-neg-warn.scala:12: warning: the parameter name x is deprecated: Use s instead deprNam2.g(x = "dlkjf") ^ error: No warnings can be incurred under -Xfatal-warnings. diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check index 875bc2ade00d..e6929cb52ea6 100644 --- a/test/files/neg/names-defaults-neg.check +++ b/test/files/neg/names-defaults-neg.check @@ -112,16 +112,16 @@ names-defaults-neg.scala:90: error: deprecated parameter name x has to be distin names-defaults-neg.scala:91: error: deprecated parameter name a has to be distinct from any other parameter name (deprecated or not). def deprNam2(a: String)(@deprecatedName('a) b: Int) = 1 ^ -names-defaults-neg.scala:93: warning: the parameter name y has been deprecated. Use b instead. 
+names-defaults-neg.scala:93: warning: the parameter name y is deprecated: Use b instead deprNam3(y = 10, b = 2) ^ names-defaults-neg.scala:93: error: parameter 'b' is already specified at parameter position 1 deprNam3(y = 10, b = 2) ^ -names-defaults-neg.scala:96: warning: naming parameter deprNam4Arg has been deprecated. +names-defaults-neg.scala:96: warning: naming parameter deprNam4Arg is deprecated. deprNam4(deprNam4Arg = null) ^ -names-defaults-neg.scala:98: warning: naming parameter deprNam5Arg has been deprecated. +names-defaults-neg.scala:98: warning: naming parameter deprNam5Arg is deprecated. deprNam5(deprNam5Arg = null) ^ names-defaults-neg.scala:102: error: unknown parameter name: m diff --git a/test/files/neg/t4851.check b/test/files/neg/t4851.check index d5711a889bf4..ac0854f8105c 100644 --- a/test/files/neg/t4851.check +++ b/test/files/neg/t4851.check @@ -1,10 +1,10 @@ -S.scala:2: warning: Adaptation of argument list by inserting () has been deprecated: leaky (Object-receiving) target makes this especially dangerous. +S.scala:2: warning: Adaptation of argument list by inserting () is deprecated: leaky (Object-receiving) target makes this especially dangerous. signature: J(x: Any): J given arguments: after adaptation: new J((): Unit) val x1 = new J ^ -S.scala:3: warning: Adaptation of argument list by inserting () has been deprecated: leaky (Object-receiving) target makes this especially dangerous. +S.scala:3: warning: Adaptation of argument list by inserting () is deprecated: leaky (Object-receiving) target makes this especially dangerous. signature: J(x: Any): J given arguments: after adaptation: new J((): Unit) @@ -28,13 +28,13 @@ S.scala:7: warning: Adapting argument list by creating a 3-tuple: this may not b after adaptation: new Some((1, 2, 3): (Int, Int, Int)) val y2 = new Some(1, 2, 3) ^ -S.scala:9: warning: Adaptation of argument list by inserting () has been deprecated: this is unlikely to be what you want. +S.scala:9: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. signature: J2(x: T): J2[T] given arguments: after adaptation: new J2((): Unit) val z1 = new J2 ^ -S.scala:10: warning: Adaptation of argument list by inserting () has been deprecated: this is unlikely to be what you want. +S.scala:10: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. signature: J2(x: T): J2[T] given arguments: after adaptation: new J2((): Unit) diff --git a/test/files/neg/t6120.check b/test/files/neg/t6120.check index f432fde32f47..b7a5d8bf17a5 100644 --- a/test/files/neg/t6120.check +++ b/test/files/neg/t6120.check @@ -6,13 +6,13 @@ See the Scaladoc for value scala.language.postfixOps for a discussion why the feature should be explicitly enabled. def f = null == null bippy ^ -t6120.scala:5: warning: method bippy in class BooleanOps is deprecated: bobo +t6120.scala:5: warning: method bippy in class BooleanOps is deprecated (since 2.11.0): bobo def f = null == null bippy ^ t6120.scala:5: warning: comparing values of types Null and Null using `==' will always yield true def f = null == null bippy ^ -t6120.scala:6: warning: method bippy in class BooleanOps is deprecated: bobo +t6120.scala:6: warning: method bippy in class BooleanOps is deprecated (since 2.11.0): bobo def g = true.bippy ^ error: No warnings can be incurred under -Xfatal-warnings. 
diff --git a/test/files/neg/t6162-inheritance.check b/test/files/neg/t6162-inheritance.check index c9f4ddaec14b..9b0a8990da95 100644 --- a/test/files/neg/t6162-inheritance.check +++ b/test/files/neg/t6162-inheritance.check @@ -1,4 +1,4 @@ -usage.scala:3: warning: inheritance from class Foo in package t6126 is deprecated: `Foo` will be made final in a future version. +usage.scala:3: warning: inheritance from class Foo in package t6126 is deprecated (since 2.10.0): `Foo` will be made final in a future version. class SubFoo extends Foo ^ usage.scala:5: warning: inheritance from trait T in package t6126 is deprecated diff --git a/test/files/neg/t6162-overriding.check b/test/files/neg/t6162-overriding.check index 6bff75d88dc4..586bfb4b35fb 100644 --- a/test/files/neg/t6162-overriding.check +++ b/test/files/neg/t6162-overriding.check @@ -1,4 +1,4 @@ -t6162-overriding.scala:14: warning: overriding method bar in class Bar is deprecated: `bar` will be made private in a future version. +t6162-overriding.scala:14: warning: overriding method bar in class Bar is deprecated (since 2.10.0): `bar` will be made private in a future version. override def bar = 43 ^ t6162-overriding.scala:15: warning: overriding method baz in class Bar is deprecated diff --git a/test/files/neg/t6406-regextract.check b/test/files/neg/t6406-regextract.check index 19425a68b051..b49b6656f540 100644 --- a/test/files/neg/t6406-regextract.check +++ b/test/files/neg/t6406-regextract.check @@ -1,4 +1,4 @@ -t6406-regextract.scala:4: warning: method unapplySeq in class Regex is deprecated: Extracting a match result from anything but a CharSequence or Match is deprecated +t6406-regextract.scala:4: warning: method unapplySeq in class Regex is deprecated (since 2.11.0): Extracting a match result from anything but a CharSequence or Match is deprecated List(1) collect { case r(i) => i } ^ error: No warnings can be incurred under -Xfatal-warnings. diff --git a/test/files/neg/t7294b.check b/test/files/neg/t7294b.check index 0033b7212513..707266f0cccf 100644 --- a/test/files/neg/t7294b.check +++ b/test/files/neg/t7294b.check @@ -1,4 +1,4 @@ -t7294b.scala:1: warning: inheritance from class Tuple2 in package scala is deprecated: Tuples will be made final in a future version. +t7294b.scala:1: warning: inheritance from class Tuple2 in package scala is deprecated (since 2.11.0): Tuples will be made final in a future version. class C extends Tuple2[Int, Int](0, 0) ^ error: No warnings can be incurred under -Xfatal-warnings. 
diff --git a/test/files/neg/t7783.check b/test/files/neg/t7783.check index 647cfee12152..18dc84a8dbe5 100644 --- a/test/files/neg/t7783.check +++ b/test/files/neg/t7783.check @@ -1,16 +1,16 @@ -t7783.scala:1: warning: type D in object O is deprecated: +t7783.scala:1: warning: type D in object O is deprecated (since ): object O { class C; @deprecated("", "") type D = C; def foo: Seq[D] = Nil } ^ -t7783.scala:11: warning: type D in object O is deprecated: +t7783.scala:11: warning: type D in object O is deprecated (since ): type T = O.D ^ -t7783.scala:12: warning: type D in object O is deprecated: +t7783.scala:12: warning: type D in object O is deprecated (since ): locally(null: O.D) ^ -t7783.scala:13: warning: type D in object O is deprecated: +t7783.scala:13: warning: type D in object O is deprecated (since ): val x: O.D = null ^ -t7783.scala:14: warning: type D in object O is deprecated: +t7783.scala:14: warning: type D in object O is deprecated (since ): locally(null.asInstanceOf[O.D]) ^ error: No warnings can be incurred under -Xfatal-warnings. diff --git a/test/files/neg/t8035-deprecated.check b/test/files/neg/t8035-deprecated.check index 01f27e5310e4..35aba5551db1 100644 --- a/test/files/neg/t8035-deprecated.check +++ b/test/files/neg/t8035-deprecated.check @@ -1,16 +1,16 @@ -t8035-deprecated.scala:2: warning: Adaptation of argument list by inserting () has been deprecated: this is unlikely to be what you want. +t8035-deprecated.scala:2: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. signature: GenSetLike.apply(elem: A): Boolean given arguments: after adaptation: GenSetLike((): Unit) List(1,2,3).toSet() ^ -t8035-deprecated.scala:5: warning: Adaptation of argument list by inserting () has been deprecated: this is unlikely to be what you want. +t8035-deprecated.scala:5: warning: Adaptation of argument list by inserting () is deprecated: this is unlikely to be what you want. signature: A(x: T): Foo.A[T] given arguments: after adaptation: new A((): Unit) new A ^ -t8035-deprecated.scala:9: warning: Adaptation of argument list by inserting () has been deprecated: leaky (Object-receiving) target makes this especially dangerous. +t8035-deprecated.scala:9: warning: Adaptation of argument list by inserting () is deprecated: leaky (Object-receiving) target makes this especially dangerous. 
signature: Format.format(x$1: Any): String given arguments: after adaptation: Format.format((): Unit) diff --git a/test/files/neg/t8685.check b/test/files/neg/t8685.check index 1780a20b6eda..685fd2e951b8 100644 --- a/test/files/neg/t8685.check +++ b/test/files/neg/t8685.check @@ -1,43 +1,43 @@ -t8685.scala:6: warning: constructor D in class D is deprecated: ctor D is depr +t8685.scala:6: warning: constructor D in class D is deprecated (since now): ctor D is depr case class D @deprecated("ctor D is depr", since="now") (i: Int) ^ -t8685.scala:35: warning: class C is deprecated: class C is depr +t8685.scala:35: warning: class C is deprecated (since now): class C is depr def f = C(42) ^ -t8685.scala:37: warning: object E is deprecated: module E is depr +t8685.scala:37: warning: object E is deprecated (since now): module E is depr def h = E(42) ^ -t8685.scala:37: warning: class E is deprecated: class E is depr +t8685.scala:37: warning: class E is deprecated (since now): class E is depr def h = E(42) ^ -t8685.scala:38: warning: object F is deprecated: module F is depr +t8685.scala:38: warning: object F is deprecated (since now): module F is depr def i = F.G(42) ^ -t8685.scala:39: warning: object F in object Extra is deprecated: Extra module F is depr +t8685.scala:39: warning: object F in object Extra is deprecated (since now): Extra module F is depr def j = Extra.F.G(42) ^ -t8685.scala:43: warning: value gg in trait Applies is deprecated: member gg +t8685.scala:43: warning: value gg in trait Applies is deprecated (since now): member gg def k = this.gg.H(0) ^ -t8685.scala:45: warning: class K in object J is deprecated: Inner K is depr +t8685.scala:45: warning: class K in object J is deprecated (since now): Inner K is depr def l = J.K(42) ^ -t8685.scala:48: warning: class C is deprecated: class C is depr +t8685.scala:48: warning: class C is deprecated (since now): class C is depr def f = new C(42) ^ -t8685.scala:49: warning: constructor D in class D is deprecated: ctor D is depr +t8685.scala:49: warning: constructor D in class D is deprecated (since now): ctor D is depr def g = new D(42) ^ -t8685.scala:50: warning: class E is deprecated: class E is depr +t8685.scala:50: warning: class E is deprecated (since now): class E is depr def h = new E(42) ^ -t8685.scala:51: warning: object F is deprecated: module F is depr +t8685.scala:51: warning: object F is deprecated (since now): module F is depr def i = new F.G(42) ^ -t8685.scala:52: warning: object F in object Extra is deprecated: Extra module F is depr +t8685.scala:52: warning: object F in object Extra is deprecated (since now): Extra module F is depr def j = new Extra.F.G(42) ^ -t8685.scala:53: warning: class K in object J is deprecated: Inner K is depr +t8685.scala:53: warning: class K in object J is deprecated (since now): Inner K is depr def l = new J.K(42) ^ error: No warnings can be incurred under -Xfatal-warnings. 
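The check-file diffs above all exercise the same change: the deprecation warning now appends the `since` argument of `@deprecated` in parentheses, as in `deprecated (since 2.11.0): ...`, or `deprecated (since now): ...` when `since = "now"` is given, and `deprecated (since ): ...` when it is empty. Below is a minimal sketch of the behavior these tests verify; the names and version string are invented for illustration and are not part of the patch itself.

```scala
object Legacy {
  // The second argument of @deprecated is the "since" version; with this
  // change it is rendered in the warning as "deprecated (since 2.12.0): ...".
  @deprecated("use Fresh.value instead", "2.12.0")
  def value: Int = 0
}

object Client {
  // Compiling with -deprecation is expected to report:
  //   method value in object Legacy is deprecated (since 2.12.0): use Fresh.value instead
  def n: Int = Legacy.value
}
```
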
diff --git a/test/files/neg/t9684.check b/test/files/neg/t9684.check index 833ca3341a1d..ab36479a472a 100644 --- a/test/files/neg/t9684.check +++ b/test/files/neg/t9684.check @@ -1,7 +1,7 @@ -t9684.scala:6: warning: object JavaConversions in package collection is deprecated: Use JavaConverters +t9684.scala:6: warning: object JavaConversions in package collection is deprecated (since 2.12): Use JavaConverters null.asInstanceOf[java.util.List[Int]] : Buffer[Int] ^ -t9684.scala:8: warning: object JavaConversions in package collection is deprecated: Use JavaConverters +t9684.scala:8: warning: object JavaConversions in package collection is deprecated (since 2.12): Use JavaConverters null.asInstanceOf[Iterable[Int]] : java.util.Collection[Int] ^ error: No warnings can be incurred under -Xfatal-warnings. diff --git a/test/files/run/t3235-minimal.check b/test/files/run/t3235-minimal.check index d7f716002f0c..5b0657219161 100644 --- a/test/files/run/t3235-minimal.check +++ b/test/files/run/t3235-minimal.check @@ -1,12 +1,12 @@ -t3235-minimal.scala:3: warning: method round in class RichInt is deprecated: This is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? +t3235-minimal.scala:3: warning: method round in class RichInt is deprecated (since 2.11.0): This is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? assert(123456789.round == 123456789) ^ -t3235-minimal.scala:4: warning: method round in package math is deprecated: This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value? +t3235-minimal.scala:4: warning: method round in package math is deprecated (since 2.11.0): This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value? assert(math.round(123456789) == 123456789) ^ -t3235-minimal.scala:5: warning: method round in class RichLong is deprecated: This is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? +t3235-minimal.scala:5: warning: method round in class RichLong is deprecated (since 2.11.0): This is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? assert(1234567890123456789L.round == 1234567890123456789L) ^ -t3235-minimal.scala:6: warning: method round in package math is deprecated: This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value? +t3235-minimal.scala:6: warning: method round in package math is deprecated (since 2.11.0): This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value? 
assert(math.round(1234567890123456789L) == 1234567890123456789L) ^ diff --git a/test/files/run/t4542.check b/test/files/run/t4542.check index 6e099222b08e..942de545b517 100644 --- a/test/files/run/t4542.check +++ b/test/files/run/t4542.check @@ -5,7 +5,7 @@ scala> @deprecated("foooo", "ReplTest version 1.0-FINAL") class Foo() { defined class Foo scala> val f = new Foo -:12: warning: class Foo is deprecated: foooo +:12: warning: class Foo is deprecated (since ReplTest version 1.0-FINAL): foooo val f = new Foo ^ f: Foo = Bippy diff --git a/test/files/run/t4594-repl-settings.scala b/test/files/run/t4594-repl-settings.scala index f2d1a8b3f806..8ececce24af3 100644 --- a/test/files/run/t4594-repl-settings.scala +++ b/test/files/run/t4594-repl-settings.scala @@ -15,7 +15,7 @@ object Test extends SessionTest { |scala> :settings -deprecation | |scala> def b = depp - |:12: warning: method depp is deprecated: Please don't do that. + |:12: warning: method depp is deprecated (since Time began.): Please don't do that. | def b = depp | ^ |b: String diff --git a/test/files/run/toolbox_console_reporter.check b/test/files/run/toolbox_console_reporter.check index 1395c6874072..fca10ba458de 100644 --- a/test/files/run/toolbox_console_reporter.check +++ b/test/files/run/toolbox_console_reporter.check @@ -1,8 +1,8 @@ hello ============compiler console============= -warning: method foo in object Utils is deprecated: test +warning: method foo in object Utils is deprecated (since 2.10.0): test ========================================= ============compiler messages============ -Info(NoPosition,method foo in object Utils is deprecated: test,WARNING) +Info(NoPosition,method foo in object Utils is deprecated (since 2.10.0): test,WARNING) ========================================= diff --git a/test/files/run/toolbox_silent_reporter.check b/test/files/run/toolbox_silent_reporter.check index 2d05b1e3f864..dff89f635fa8 100644 --- a/test/files/run/toolbox_silent_reporter.check +++ b/test/files/run/toolbox_silent_reporter.check @@ -1,4 +1,4 @@ hello ============compiler messages============ -Info(NoPosition,method foo in object Utils is deprecated: test,WARNING) +Info(NoPosition,method foo in object Utils is deprecated (since 2.10.0): test,WARNING) ========================================= From 5562e1a2eb07b9a541b3eac85a809847e2d48763 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Thu, 19 May 2016 14:57:25 +0200 Subject: [PATCH 0094/2793] Lower-case spelling of @deprecated messages --- .../tools/nsc/typechecker/NamesDefaults.scala | 2 +- src/library/scala/App.scala | 2 +- src/library/scala/Console.scala | 40 +++++----- src/library/scala/DelayedInit.scala | 4 +- src/library/scala/Function.scala | 8 +- src/library/scala/NotNull.scala | 2 +- src/library/scala/Predef.scala | 72 +++++++++--------- src/library/scala/Responder.scala | 4 +- src/library/scala/annotation/bridge.scala | 2 +- .../scala/collection/IterableProxy.scala | 2 +- .../scala/collection/IterableProxyLike.scala | 2 +- src/library/scala/collection/MapProxy.scala | 2 +- .../scala/collection/MapProxyLike.scala | 2 +- src/library/scala/collection/SeqProxy.scala | 2 +- .../scala/collection/SeqProxyLike.scala | 2 +- src/library/scala/collection/SetProxy.scala | 2 +- .../scala/collection/SetProxyLike.scala | 2 +- .../scala/collection/TraversableProxy.scala | 2 +- .../collection/TraversableProxyLike.scala | 2 +- .../scala/collection/concurrent/TrieMap.scala | 10 +-- .../generic/IterableForwarder.scala | 2 +- .../collection/generic/SeqForwarder.scala | 2 +- 
.../generic/TraversableForwarder.scala | 2 +- .../scala/collection/immutable/MapProxy.scala | 2 +- .../scala/collection/immutable/PagedSeq.scala | 4 +- .../scala/collection/immutable/Range.scala | 16 ++-- .../scala/collection/immutable/SetProxy.scala | 2 +- .../scala/collection/immutable/Stack.scala | 2 +- .../collection/immutable/StringLike.scala | 2 +- .../scala/collection/mutable/BufferLike.scala | 2 +- .../collection/mutable/BufferProxy.scala | 4 +- .../collection/mutable/DefaultMapModel.scala | 2 +- .../collection/mutable/DoubleLinkedList.scala | 4 +- .../mutable/DoubleLinkedListLike.scala | 2 +- .../mutable/ImmutableMapAdaptor.scala | 2 +- .../mutable/ImmutableSetAdaptor.scala | 2 +- .../scala/collection/mutable/LinkedList.scala | 4 +- .../collection/mutable/LinkedListLike.scala | 2 +- .../scala/collection/mutable/MapProxy.scala | 2 +- .../collection/mutable/ObservableBuffer.scala | 5 +- .../collection/mutable/ObservableMap.scala | 5 +- .../collection/mutable/ObservableSet.scala | 5 +- .../collection/mutable/PriorityQueue.scala | 8 +- .../scala/collection/mutable/Queue.scala | 2 +- .../scala/collection/mutable/QueueProxy.scala | 2 +- .../scala/collection/mutable/SetLike.scala | 2 +- .../scala/collection/mutable/SetProxy.scala | 2 +- .../scala/collection/mutable/StackProxy.scala | 2 +- .../mutable/SynchronizedBuffer.scala | 4 +- .../collection/mutable/SynchronizedMap.scala | 4 +- .../mutable/SynchronizedQueue.scala | 2 +- .../collection/mutable/SynchronizedSet.scala | 4 +- .../mutable/SynchronizedStack.scala | 2 +- .../collection/parallel/ParIterableLike.scala | 2 +- .../collection/parallel/TaskSupport.scala | 2 +- .../scala/collection/parallel/Tasks.scala | 10 +-- .../scala/collection/parallel/package.scala | 6 +- .../scala/collection/script/Location.scala | 10 +-- .../scala/collection/script/Message.scala | 12 +-- .../scala/collection/script/Scriptable.scala | 2 +- .../scala/concurrent/ExecutionContext.scala | 2 +- src/library/scala/concurrent/Future.scala | 10 +-- src/library/scala/concurrent/Lock.scala | 2 +- src/library/scala/concurrent/SyncVar.scala | 4 +- .../scala/concurrent/forkjoin/package.scala | 20 ++--- src/library/scala/concurrent/package.scala | 14 ++-- src/library/scala/io/Position.scala | 2 +- src/library/scala/math/BigDecimal.scala | 16 ++-- src/library/scala/math/package.scala | 8 +- .../reflect/ClassManifestDeprecatedApis.scala | 24 +++--- src/library/scala/reflect/Manifest.scala | 4 +- src/library/scala/reflect/package.scala | 6 +- src/library/scala/runtime/RichException.scala | 2 +- src/library/scala/runtime/RichInt.scala | 4 +- src/library/scala/runtime/RichLong.scala | 4 +- .../scala/runtime/SeqCharSequence.scala | 2 +- src/library/scala/runtime/StringAdd.scala | 2 +- src/library/scala/runtime/StringFormat.scala | 2 +- src/library/scala/sys/SystemProperties.scala | 2 +- .../scala/sys/process/ProcessBuilder.scala | 16 ++-- src/library/scala/text/Document.scala | 16 ++-- src/library/scala/util/MurmurHash.scala | 4 +- src/library/scala/util/matching/Regex.scala | 8 +- .../scala/reflect/api/Annotations.scala | 58 +++++++------- src/reflect/scala/reflect/api/FlagSets.scala | 2 +- src/reflect/scala/reflect/api/Internals.scala | 76 +++++++++---------- src/reflect/scala/reflect/api/Names.scala | 12 +-- src/reflect/scala/reflect/api/Position.scala | 32 ++++---- .../scala/reflect/api/StandardNames.scala | 4 +- src/reflect/scala/reflect/api/Symbols.scala | 12 +-- src/reflect/scala/reflect/api/Trees.scala | 54 ++++++------- src/reflect/scala/reflect/api/Types.scala | 38 
+++++----- .../reflect/internal/AnnotationCheckers.scala | 6 +- .../scala/reflect/internal/Definitions.scala | 4 +- .../scala/reflect/internal/Flags.scala | 4 +- .../scala/reflect/internal/HasFlags.scala | 4 +- .../scala/reflect/internal/Mirrors.scala | 2 +- .../scala/reflect/internal/Scopes.scala | 2 +- .../scala/reflect/internal/StdNames.scala | 16 ++-- .../scala/reflect/internal/SymbolTable.scala | 4 +- .../scala/reflect/internal/Symbols.scala | 12 +-- .../scala/reflect/internal/Trees.scala | 6 +- .../scala/reflect/internal/Types.scala | 2 +- .../internal/annotations/package.scala | 4 +- .../scala/reflect/internal/tpe/TypeMaps.scala | 2 +- .../reflect/internal/util/Position.scala | 6 +- .../scala/reflect/macros/ExprUtils.scala | 26 +++---- src/reflect/scala/reflect/macros/Names.scala | 6 +- src/reflect/scala/reflect/macros/Typers.scala | 4 +- .../scala/reflect/macros/Universe.scala | 44 +++++------ .../scala/reflect/macros/package.scala | 4 +- .../neg/classmanifests_new_deprecations.check | 16 ++-- test/files/neg/delayed-init-ref.check | 3 +- test/files/neg/names-defaults-neg-warn.check | 4 +- test/files/neg/names-defaults-neg.check | 2 +- test/files/neg/t6406-regextract.check | 2 +- test/files/run/t3235-minimal.check | 8 +- 117 files changed, 484 insertions(+), 490 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 063cfd38054e..d7405c28d532 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -573,7 +573,7 @@ trait NamesDefaults { self: Analyzer => } def checkAltName = { val res = checkDeprecation(false) - if (res) warn(s"the parameter name $name is deprecated$since: Use ${param.name} instead") + if (res) warn(s"the parameter name $name is deprecated$since: use ${param.name} instead") res } !param.isSynthetic && (checkName || checkAltName) diff --git a/src/library/scala/App.scala b/src/library/scala/App.scala index 62245322daec..52ef9ca60f2e 100644 --- a/src/library/scala/App.scala +++ b/src/library/scala/App.scala @@ -59,7 +59,7 @@ trait App extends DelayedInit { * themselves define a `delayedInit` method. 
* @param body the initialization code to be stored for later execution */ - @deprecated("The delayedInit mechanism will disappear.", "2.11.0") + @deprecated("the delayedInit mechanism will disappear", "2.11.0") override def delayedInit(body: => Unit) { initCode += (() => body) } diff --git a/src/library/scala/Console.scala b/src/library/scala/Console.scala index 0b079aae1590..bc702cfaad41 100644 --- a/src/library/scala/Console.scala +++ b/src/library/scala/Console.scala @@ -287,54 +287,54 @@ private[scala] abstract class DeprecatedConsole { protected def setErrDirect(err: PrintStream): Unit protected def setInDirect(in: BufferedReader): Unit - @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readBoolean(): Boolean = StdIn.readBoolean() - @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readByte(): Byte = StdIn.readByte() - @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readChar(): Char = StdIn.readChar() - @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readDouble(): Double = StdIn.readDouble() - @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readFloat(): Float = StdIn.readFloat() - @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readInt(): Int = StdIn.readInt() - @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readLine(): String = StdIn.readLine() - @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readLine(text: String, args: Any*): String = StdIn.readLine(text, args: _*) - @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readLong(): Long = StdIn.readLong() - @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readShort(): Short = StdIn.readShort() - @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readf(format: String): List[Any] = StdIn.readf(format) - @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readf1(format: String): Any = StdIn.readf1(format) - @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readf2(format: String): (Any, Any) = StdIn.readf2(format) - @deprecated("Use the method in scala.io.StdIn", "2.11.0") def readf3(format: String): (Any, Any, Any) = StdIn.readf3(format) + @deprecated("use the method in scala.io.StdIn", "2.11.0") def readBoolean(): Boolean = StdIn.readBoolean() + @deprecated("use the method in scala.io.StdIn", "2.11.0") def readByte(): Byte = StdIn.readByte() + @deprecated("use the method in scala.io.StdIn", "2.11.0") def readChar(): Char = StdIn.readChar() + @deprecated("use the method in scala.io.StdIn", "2.11.0") def readDouble(): Double = StdIn.readDouble() + @deprecated("use the method in scala.io.StdIn", "2.11.0") def readFloat(): Float = StdIn.readFloat() + @deprecated("use the method in scala.io.StdIn", "2.11.0") def readInt(): Int = StdIn.readInt() + @deprecated("use the method in scala.io.StdIn", "2.11.0") def readLine(): String = StdIn.readLine() + @deprecated("use the method in scala.io.StdIn", "2.11.0") def readLine(text: String, args: Any*): String = StdIn.readLine(text, args: _*) + @deprecated("use the method in scala.io.StdIn", "2.11.0") def readLong(): Long = StdIn.readLong() + @deprecated("use the method in scala.io.StdIn", "2.11.0") def readShort(): Short = StdIn.readShort() + @deprecated("use the method in scala.io.StdIn", "2.11.0") def readf(format: String): List[Any] = StdIn.readf(format) + @deprecated("use the method in scala.io.StdIn", "2.11.0") def readf1(format: String): Any = StdIn.readf1(format) + @deprecated("use the method in scala.io.StdIn", "2.11.0") def 
readf2(format: String): (Any, Any) = StdIn.readf2(format) + @deprecated("use the method in scala.io.StdIn", "2.11.0") def readf3(format: String): (Any, Any, Any) = StdIn.readf3(format) /** Sets the default output stream. * * @param out the new output stream. */ - @deprecated("Use withOut", "2.11.0") def setOut(out: PrintStream): Unit = setOutDirect(out) + @deprecated("use withOut", "2.11.0") def setOut(out: PrintStream): Unit = setOutDirect(out) /** Sets the default output stream. * * @param out the new output stream. */ - @deprecated("Use withOut", "2.11.0") def setOut(out: OutputStream): Unit = setOutDirect(new PrintStream(out)) + @deprecated("use withOut", "2.11.0") def setOut(out: OutputStream): Unit = setOutDirect(new PrintStream(out)) /** Sets the default error stream. * * @param err the new error stream. */ - @deprecated("Use withErr", "2.11.0") def setErr(err: PrintStream): Unit = setErrDirect(err) + @deprecated("use withErr", "2.11.0") def setErr(err: PrintStream): Unit = setErrDirect(err) /** Sets the default error stream. * * @param err the new error stream. */ - @deprecated("Use withErr", "2.11.0") def setErr(err: OutputStream): Unit = setErrDirect(new PrintStream(err)) + @deprecated("use withErr", "2.11.0") def setErr(err: OutputStream): Unit = setErrDirect(new PrintStream(err)) /** Sets the default input stream. * * @param reader specifies the new input stream. */ - @deprecated("Use withIn", "2.11.0") def setIn(reader: Reader): Unit = setInDirect(new BufferedReader(reader)) + @deprecated("use withIn", "2.11.0") def setIn(reader: Reader): Unit = setInDirect(new BufferedReader(reader)) /** Sets the default input stream. * * @param in the new input stream. */ - @deprecated("Use withIn", "2.11.0") def setIn(in: InputStream): Unit = setInDirect(new BufferedReader(new InputStreamReader(in))) + @deprecated("use withIn", "2.11.0") def setIn(in: InputStream): Unit = setInDirect(new BufferedReader(new InputStreamReader(in))) } diff --git a/src/library/scala/DelayedInit.scala b/src/library/scala/DelayedInit.scala index 7f976b073f37..8dc841a7e38c 100644 --- a/src/library/scala/DelayedInit.scala +++ b/src/library/scala/DelayedInit.scala @@ -43,7 +43,7 @@ package scala * * @author Martin Odersky */ -@deprecated("DelayedInit semantics can be surprising. Support for `App` will continue.\nSee the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0-RC1", "2.11.0") +@deprecated("DelayedInit semantics can be surprising. Support for `App` will continue. See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0-RC1", "2.11.0") trait DelayedInit { def delayedInit(x: => Unit): Unit -} \ No newline at end of file +} diff --git a/src/library/scala/Function.scala b/src/library/scala/Function.scala index 7bd12a2719a0..f28897c20bd3 100644 --- a/src/library/scala/Function.scala +++ b/src/library/scala/Function.scala @@ -72,7 +72,7 @@ object Function { * @note These functions are slotted for deprecation, but it is on * hold pending superior type inference for tupling anonymous functions. */ - // @deprecated("Use `f.tupled` instead") + // @deprecated("use `f.tupled` instead") def tupled[a1, a2, b](f: (a1, a2) => b): Tuple2[a1, a2] => b = { case Tuple2(x1, x2) => f(x1, x2) } @@ -80,7 +80,7 @@ object Function { /** Tupling for functions of arity 3. This transforms a function * of arity 3 into a unary function that takes a triple of arguments. 
*/ - // @deprecated("Use `f.tupled` instead") + // @deprecated("use `f.tupled` instead") def tupled[a1, a2, a3, b](f: (a1, a2, a3) => b): Tuple3[a1, a2, a3] => b = { case Tuple3(x1, x2, x3) => f(x1, x2, x3) } @@ -88,7 +88,7 @@ object Function { /** Tupling for functions of arity 4. This transforms a function * of arity 4 into a unary function that takes a 4-tuple of arguments. */ - // @deprecated("Use `f.tupled` instead") + // @deprecated("use `f.tupled` instead") def tupled[a1, a2, a3, a4, b](f: (a1, a2, a3, a4) => b): Tuple4[a1, a2, a3, a4] => b = { case Tuple4(x1, x2, x3, x4) => f(x1, x2, x3, x4) } @@ -96,7 +96,7 @@ object Function { /** Tupling for functions of arity 5. This transforms a function * of arity 5 into a unary function that takes a 5-tuple of arguments. */ - // @deprecated("Use `f.tupled` instead") + // @deprecated("use `f.tupled` instead") def tupled[a1, a2, a3, a4, a5, b](f: (a1, a2, a3, a4, a5) => b): Tuple5[a1, a2, a3, a4, a5] => b = { case Tuple5(x1, x2, x3, x4, x5) => f(x1, x2, x3, x4, x5) } diff --git a/src/library/scala/NotNull.scala b/src/library/scala/NotNull.scala index 3cbe9ed4ac09..6a9be79281ad 100644 --- a/src/library/scala/NotNull.scala +++ b/src/library/scala/NotNull.scala @@ -13,5 +13,5 @@ package scala * @since 2.5 */ -@deprecated("This trait will be removed", "2.11.0") +@deprecated("this trait will be removed", "2.11.0") trait NotNull extends Any {} diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index 216f6663b5d5..5f1a6b0bbbd5 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -153,31 +153,31 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { // Manifest types, companions, and incantations for summoning @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.") - @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0") + @deprecated("use `scala.reflect.ClassTag` instead", "2.10.0") type ClassManifest[T] = scala.reflect.ClassManifest[T] // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") + // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. 
Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") type OptManifest[T] = scala.reflect.OptManifest[T] @annotation.implicitNotFound(msg = "No Manifest available for ${T}.") // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") + // @deprecated("use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") type Manifest[T] = scala.reflect.Manifest[T] - @deprecated("Use `scala.reflect.ClassTag` instead", "2.10.0") + @deprecated("use `scala.reflect.ClassTag` instead", "2.10.0") val ClassManifest = scala.reflect.ClassManifest // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("Use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") + // @deprecated("use `scala.reflect.ClassTag` (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") val Manifest = scala.reflect.Manifest // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") + // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") val NoManifest = scala.reflect.NoManifest // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("Use scala.reflect.classTag[T] and scala.reflect.runtime.universe.typeTag[T] instead", "2.10.0") + // @deprecated("use scala.reflect.classTag[T] and scala.reflect.runtime.universe.typeTag[T] instead", "2.10.0") def manifest[T](implicit m: Manifest[T]) = m - @deprecated("Use scala.reflect.classTag[T] instead", "2.10.0") + @deprecated("use scala.reflect.classTag[T] instead", "2.10.0") def classManifest[T](implicit m: ClassManifest[T]) = m // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("This notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") + // @deprecated("this notion doesn't have a corresponding concept in 2.10, because scala.reflect.runtime.universe.TypeTag can capture arbitrary types. 
Use type tags instead of manifests, and there will be no need in opt manifests.", "2.10.0") def optManifest[T](implicit m: OptManifest[T]) = m // Minor variations on identity functions @@ -285,17 +285,17 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { // tupling ------------------------------------------------------------ - @deprecated("Use built-in tuple syntax or Tuple2 instead", "2.11.0") + @deprecated("use built-in tuple syntax or Tuple2 instead", "2.11.0") type Pair[+A, +B] = Tuple2[A, B] - @deprecated("Use built-in tuple syntax or Tuple2 instead", "2.11.0") + @deprecated("use built-in tuple syntax or Tuple2 instead", "2.11.0") object Pair { def apply[A, B](x: A, y: B) = Tuple2(x, y) def unapply[A, B](x: Tuple2[A, B]): Option[Tuple2[A, B]] = Some(x) } - @deprecated("Use built-in tuple syntax or Tuple3 instead", "2.11.0") + @deprecated("use built-in tuple syntax or Tuple3 instead", "2.11.0") type Triple[+A, +B, +C] = Tuple3[A, B, C] - @deprecated("Use built-in tuple syntax or Tuple3 instead", "2.11.0") + @deprecated("use built-in tuple syntax or Tuple3 instead", "2.11.0") object Triple { def apply[A, B, C](x: A, y: B, z: C) = Tuple3(x, y, z) def unapply[A, B, C](x: Tuple3[A, B, C]): Option[Tuple3[A, B, C]] = Some(x) @@ -334,7 +334,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { implicit final class RichException(private val self: Throwable) extends AnyVal { import scala.compat.Platform.EOL - @deprecated("Use Throwable#getStackTrace", "2.11.0") def getStackTraceString = self.getStackTrace().mkString("", EOL, EOL) + @deprecated("use Throwable#getStackTrace", "2.11.0") def getStackTraceString = self.getStackTrace().mkString("", EOL, EOL) } /** @group implicit-classes-char */ @@ -508,7 +508,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { /** @group type-constraints */ implicit def $conforms[A]: A <:< A = singleton_<:<.asInstanceOf[A <:< A] - @deprecated("Use `implicitly[T <:< U]` or `identity` instead.", "2.11.0") + @deprecated("use `implicitly[T <:< U]` or `identity` instead.", "2.11.0") def conforms[A]: A <:< A = $conforms[A] /** An instance of `A =:= B` witnesses that the types `A` and `B` are equal. @@ -542,27 +542,27 @@ private[scala] trait DeprecatedPredef { self: Predef.type => // Deprecated stubs for any who may have been calling these methods directly. 
- @deprecated("Use `ArrowAssoc`", "2.11.0") def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x) - @deprecated("Use `Ensuring`", "2.11.0") def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x) - @deprecated("Use `StringFormat`", "2.11.0") def any2stringfmt(x: Any): StringFormat[Any] = new StringFormat(x) - @deprecated("Use `Throwable` directly", "2.11.0") def exceptionWrapper(exc: Throwable) = new RichException(exc) - @deprecated("Use `SeqCharSequence`", "2.11.0") def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new SeqCharSequence(xs) - @deprecated("Use `ArrayCharSequence`", "2.11.0") def arrayToCharSequence(xs: Array[Char]): CharSequence = new ArrayCharSequence(xs) - - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readLine(): String = StdIn.readLine() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readLine(text: String, args: Any*) = StdIn.readLine(text, args: _*) - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readBoolean() = StdIn.readBoolean() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readByte() = StdIn.readByte() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readShort() = StdIn.readShort() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readChar() = StdIn.readChar() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readInt() = StdIn.readInt() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readLong() = StdIn.readLong() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readFloat() = StdIn.readFloat() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readDouble() = StdIn.readDouble() - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf(format: String) = StdIn.readf(format) - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf1(format: String) = StdIn.readf1(format) - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf2(format: String) = StdIn.readf2(format) - @deprecated("Use the method in `scala.io.StdIn`", "2.11.0") def readf3(format: String) = StdIn.readf3(format) + @deprecated("use `ArrowAssoc`", "2.11.0") def any2ArrowAssoc[A](x: A): ArrowAssoc[A] = new ArrowAssoc(x) + @deprecated("use `Ensuring`", "2.11.0") def any2Ensuring[A](x: A): Ensuring[A] = new Ensuring(x) + @deprecated("use `StringFormat`", "2.11.0") def any2stringfmt(x: Any): StringFormat[Any] = new StringFormat(x) + @deprecated("use `Throwable` directly", "2.11.0") def exceptionWrapper(exc: Throwable) = new RichException(exc) + @deprecated("use `SeqCharSequence`", "2.11.0") def seqToCharSequence(xs: scala.collection.IndexedSeq[Char]): CharSequence = new SeqCharSequence(xs) + @deprecated("use `ArrayCharSequence`", "2.11.0") def arrayToCharSequence(xs: Array[Char]): CharSequence = new ArrayCharSequence(xs) + + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLine(): String = StdIn.readLine() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLine(text: String, args: Any*) = StdIn.readLine(text, args: _*) + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readBoolean() = StdIn.readBoolean() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readByte() = StdIn.readByte() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readShort() = StdIn.readShort() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readChar() = StdIn.readChar() + @deprecated("use the 
method in `scala.io.StdIn`", "2.11.0") def readInt() = StdIn.readInt() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readLong() = StdIn.readLong() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readFloat() = StdIn.readFloat() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readDouble() = StdIn.readDouble() + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf(format: String) = StdIn.readf(format) + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf1(format: String) = StdIn.readf1(format) + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf2(format: String) = StdIn.readf2(format) + @deprecated("use the method in `scala.io.StdIn`", "2.11.0") def readf3(format: String) = StdIn.readf3(format) } /** The `LowPriorityImplicits` class provides implicit values that diff --git a/src/library/scala/Responder.scala b/src/library/scala/Responder.scala index 8a658e252ad7..eb8260dc9ab8 100644 --- a/src/library/scala/Responder.scala +++ b/src/library/scala/Responder.scala @@ -18,7 +18,7 @@ package scala * @see class Responder * @since 2.1 */ -@deprecated("This object will be removed", "2.11.0") +@deprecated("this object will be removed", "2.11.0") object Responder { /** Creates a responder that answer continuations with the constant `a`. @@ -59,7 +59,7 @@ object Responder { * @version 1.0 * @since 2.1 */ -@deprecated("This class will be removed", "2.11.0") +@deprecated("this class will be removed", "2.11.0") abstract class Responder[+A] extends Serializable { def respond(k: A => Unit): Unit diff --git a/src/library/scala/annotation/bridge.scala b/src/library/scala/annotation/bridge.scala index 9f25e2beb3b7..c0c6dba42439 100644 --- a/src/library/scala/annotation/bridge.scala +++ b/src/library/scala/annotation/bridge.scala @@ -10,5 +10,5 @@ package scala.annotation /** If this annotation is present on a method, it will be treated as a bridge method. 
*/ -@deprecated("Reconsider whether using this annotation will accomplish anything", "2.10.0") +@deprecated("reconsider whether using this annotation will accomplish anything", "2.10.0") private[scala] class bridge extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/collection/IterableProxy.scala b/src/library/scala/collection/IterableProxy.scala index 97aa830c5a0b..5f4d69c4117c 100644 --- a/src/library/scala/collection/IterableProxy.scala +++ b/src/library/scala/collection/IterableProxy.scala @@ -16,5 +16,5 @@ package collection * @version 2.8 * @since 2.8 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") trait IterableProxy[+A] extends Iterable[A] with IterableProxyLike[A, Iterable[A]] diff --git a/src/library/scala/collection/IterableProxyLike.scala b/src/library/scala/collection/IterableProxyLike.scala index 334b511fb9ef..f87089cba832 100644 --- a/src/library/scala/collection/IterableProxyLike.scala +++ b/src/library/scala/collection/IterableProxyLike.scala @@ -22,7 +22,7 @@ import generic._ * @version 2.8 * @since 2.8 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") trait IterableProxyLike[+A, +Repr <: IterableLike[A, Repr] with Iterable[A]] extends IterableLike[A, Repr] with TraversableProxyLike[A, Repr] { diff --git a/src/library/scala/collection/MapProxy.scala b/src/library/scala/collection/MapProxy.scala index 26a7c710ee43..2faf6899734f 100644 --- a/src/library/scala/collection/MapProxy.scala +++ b/src/library/scala/collection/MapProxy.scala @@ -17,5 +17,5 @@ package collection * @version 1.0, 21/07/2003 * @since 1 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] diff --git a/src/library/scala/collection/MapProxyLike.scala b/src/library/scala/collection/MapProxyLike.scala index dd80a538e357..73a69357882d 100644 --- a/src/library/scala/collection/MapProxyLike.scala +++ b/src/library/scala/collection/MapProxyLike.scala @@ -18,7 +18,7 @@ package collection * @version 2.8 * @since 2.8 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") trait MapProxyLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]] extends MapLike[A, B, This] with IterableProxyLike[(A, B), This] diff --git a/src/library/scala/collection/SeqProxy.scala b/src/library/scala/collection/SeqProxy.scala index f728ba858542..f2b39c7b55ff 100644 --- a/src/library/scala/collection/SeqProxy.scala +++ b/src/library/scala/collection/SeqProxy.scala @@ -18,5 +18,5 @@ package collection * @version 2.8 * @since 2.8 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") trait SeqProxy[+A] extends Seq[A] with SeqProxyLike[A, Seq[A]] diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala index b01d227d10a9..b493c707968e 100644 --- a/src/library/scala/collection/SeqProxyLike.scala 
+++ b/src/library/scala/collection/SeqProxyLike.scala @@ -23,7 +23,7 @@ import generic._ * @version 2.8 * @since 2.8 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") trait SeqProxyLike[+A, +Repr <: SeqLike[A, Repr] with Seq[A]] extends SeqLike[A, Repr] with IterableProxyLike[A, Repr] { override def size = self.size override def toSeq: Seq[A] = self.toSeq diff --git a/src/library/scala/collection/SetProxy.scala b/src/library/scala/collection/SetProxy.scala index e17fb215b972..4a3fc17a78b1 100644 --- a/src/library/scala/collection/SetProxy.scala +++ b/src/library/scala/collection/SetProxy.scala @@ -17,5 +17,5 @@ package collection * @author Martin Odersky * @version 2.0, 01/01/2007 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] diff --git a/src/library/scala/collection/SetProxyLike.scala b/src/library/scala/collection/SetProxyLike.scala index 4cd215cd89eb..fa23fe545026 100644 --- a/src/library/scala/collection/SetProxyLike.scala +++ b/src/library/scala/collection/SetProxyLike.scala @@ -17,7 +17,7 @@ package collection * @author Martin Odersky * @version 2.8 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") trait SetProxyLike[A, +This <: SetLike[A, This] with Set[A]] extends SetLike[A, This] with IterableProxyLike[A, This] { def empty: This override def contains(elem: A): Boolean = self.contains(elem) diff --git a/src/library/scala/collection/TraversableProxy.scala b/src/library/scala/collection/TraversableProxy.scala index 9eec685d101d..0c7219c5f943 100644 --- a/src/library/scala/collection/TraversableProxy.scala +++ b/src/library/scala/collection/TraversableProxy.scala @@ -21,5 +21,5 @@ package collection * @version 2.8 * @since 2.8 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.3") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") trait TraversableProxy[+A] extends Traversable[A] with TraversableProxyLike[A, Traversable[A]] diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala index fa470ea238d7..c8b641f88bab 100644 --- a/src/library/scala/collection/TraversableProxyLike.scala +++ b/src/library/scala/collection/TraversableProxyLike.scala @@ -24,7 +24,7 @@ import scala.reflect.ClassTag * @version 2.8 * @since 2.8 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") trait TraversableProxyLike[+A, +Repr <: TraversableLike[A, Repr] with Traversable[A]] extends TraversableLike[A, Repr] with Proxy { def self: Repr diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index 5dc01547e6c9..769d7b0dacbf 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -639,7 +639,7 @@ extends scala.collection.concurrent.Map[K, V] private var rootupdater = rtupd def hashing = 
hashingobj def equality = equalityobj - @deprecated("This field will be made private", "2.12.0") + @deprecated("this field will be made private", "2.12.0") @volatile /*private*/ var root = r def this(hashf: Hashing[K], ef: Equiv[K]) = this( @@ -684,13 +684,13 @@ extends scala.collection.concurrent.Map[K, V] } while (obj != TrieMapSerializationEnd) } - @deprecated("This method will be made private", "2.12.0") + @deprecated("this method will be made private", "2.12.0") /*private*/ def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv) - @deprecated("This method will be made private", "2.12.0") + @deprecated("this method will be made private", "2.12.0") /*private[collection]*/ def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort) - @deprecated("This method will be made private", "2.12.0") + @deprecated("this method will be made private", "2.12.0") /*private[concurrent]*/ def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = { val r = /*READ*/root r match { @@ -1085,7 +1085,7 @@ private[collection] class TrieMapIterator[K, V](var level: Int, private var ct: Seq(this) } - @deprecated("This method will be removed", "2.12.0") + @deprecated("this method will be removed", "2.12.0") def printDebug() { println("ctrie iterator") println(stackpos.mkString(",")) diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala index 7387dbe66745..7f6eb6e131be 100644 --- a/src/library/scala/collection/generic/IterableForwarder.scala +++ b/src/library/scala/collection/generic/IterableForwarder.scala @@ -26,7 +26,7 @@ import scala.collection._ * @version 2.8 * @since 2.8 */ -@deprecated("Forwarding is inherently unreliable since it is not automated and methods can be forgotten.", "2.11.0") +@deprecated("forwarding is inherently unreliable since it is not automated and methods can be forgotten", "2.11.0") trait IterableForwarder[+A] extends Iterable[A] with TraversableForwarder[A] { /** The iterable object to which calls are forwarded */ diff --git a/src/library/scala/collection/generic/SeqForwarder.scala b/src/library/scala/collection/generic/SeqForwarder.scala index e21e2ea0168d..cee93d2ddbcf 100644 --- a/src/library/scala/collection/generic/SeqForwarder.scala +++ b/src/library/scala/collection/generic/SeqForwarder.scala @@ -25,7 +25,7 @@ import scala.collection.immutable.Range * @version 2.8 * @since 2.8 */ -@deprecated("Forwarding is inherently unreliable since it is not automated and new methods can be forgotten.", "2.11.0") +@deprecated("forwarding is inherently unreliable since it is not automated and new methods can be forgotten", "2.11.0") trait SeqForwarder[+A] extends Seq[A] with IterableForwarder[A] { protected override def underlying: Seq[A] diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala index 359ea402b6d3..b94507d6ef5b 100644 --- a/src/library/scala/collection/generic/TraversableForwarder.scala +++ b/src/library/scala/collection/generic/TraversableForwarder.scala @@ -27,7 +27,7 @@ import scala.reflect.ClassTag * @version 2.8 * @since 2.8 */ -@deprecated("Forwarding is inherently unreliable since it is not automated and new methods can be forgotten.", "2.11.0") +@deprecated("forwarding is inherently unreliable since it is not automated and new methods can be forgotten", "2.11.0") trait TraversableForwarder[+A] extends Traversable[A] { /** The traversable object to which calls are 
forwarded. */ protected def underlying: Traversable[A] diff --git a/src/library/scala/collection/immutable/MapProxy.scala b/src/library/scala/collection/immutable/MapProxy.scala index d126b9e7a6c5..0d1c17d4b337 100644 --- a/src/library/scala/collection/immutable/MapProxy.scala +++ b/src/library/scala/collection/immutable/MapProxy.scala @@ -23,7 +23,7 @@ package immutable * @version 2.0, 31/12/2006 * @since 2.8 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] { override def repr = this private def newProxy[B1 >: B](newSelf: Map[A, B1]): MapProxy[A, B1] = diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala index fab5ad47ebf8..01854b17978a 100644 --- a/src/library/scala/collection/immutable/PagedSeq.scala +++ b/src/library/scala/collection/immutable/PagedSeq.scala @@ -22,7 +22,7 @@ import scala.reflect.ClassTag * `fromIterator` and `fromIterable` provide generalised instances of `PagedSeq` * @since 2.7 */ -@deprecated("This object will be moved to the scala-parser-combinators module", "2.11.8") +@deprecated("this object will be moved to the scala-parser-combinators module", "2.11.8") object PagedSeq { final val UndeterminedEnd = Int.MaxValue @@ -126,7 +126,7 @@ import PagedSeq._ * @define mayNotTerminateInf * @define willNotTerminateInf */ -@deprecated("This class will be moved to the scala-parser-combinators module", "2.11.8") +@deprecated("this class will be moved to the scala-parser-combinators module", "2.11.8") class PagedSeq[T: ClassTag] protected( more: (Array[T], Int, Int) => Int, first1: Page[T], diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index 47be5b507ebc..36e2fa25dd2e 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -33,7 +33,7 @@ import scala.collection.parallel.immutable.ParRange * `init`) are also permitted on overfull ranges. * * @param start the start of this range. - * @param end the end of the range. For exclusive ranges, e.g. + * @param end the end of the range. For exclusive ranges, e.g. * `Range(0,3)` or `(0 until 3)`, this is one * step past the last one in the range. For inclusive * ranges, e.g. 
`Range.inclusive(0,3)` or `(0 to 3)`, @@ -80,7 +80,7 @@ extends scala.collection.AbstractSeq[Int] || (start < end && step < 0) || (start == end && !isInclusive) ) - @deprecated("This method will be made private, use `length` instead.", "2.11") + @deprecated("this method will be made private, use `length` instead.", "2.11") final val numRangeElements: Int = { if (step == 0) throw new IllegalArgumentException("step cannot be 0.") else if (isEmpty) 0 @@ -90,8 +90,8 @@ extends scala.collection.AbstractSeq[Int] else len.toInt } } - @deprecated("This method will be made private, use `last` instead.", "2.11") - final val lastElement = + @deprecated("this method will be made private, use `last` instead.", "2.11") + final val lastElement = if (isEmpty) start - step else step match { case 1 => if (isInclusive) end else end-1 @@ -102,8 +102,8 @@ extends scala.collection.AbstractSeq[Int] else if (isInclusive) end else end - step } - - @deprecated("This method will be made private.", "2.11") + + @deprecated("this method will be made private.", "2.11") final val terminalElement = lastElement + step /** The last element of this range. This method will return the correct value @@ -197,7 +197,7 @@ extends scala.collection.AbstractSeq[Int] copy(locationAfterN(n), end, step) } ) - + /** Creates a new range containing the elements starting at `from` up to but not including `until`. * * $doesNotUseBuilders @@ -214,7 +214,7 @@ extends scala.collection.AbstractSeq[Int] if (from >= until) newEmptyRange(fromValue) else new Range.Inclusive(fromValue, locationAfterN(until-1), step) } - + /** Creates a new range containing all the elements of this range except the last one. * * $doesNotUseBuilders diff --git a/src/library/scala/collection/immutable/SetProxy.scala b/src/library/scala/collection/immutable/SetProxy.scala index d505185e1d65..e1cf3620a256 100644 --- a/src/library/scala/collection/immutable/SetProxy.scala +++ b/src/library/scala/collection/immutable/SetProxy.scala @@ -22,7 +22,7 @@ package immutable * * @since 2.8 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") +@deprecated("proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] { override def repr = this private def newProxy[B >: A](newSelf: Set[B]): SetProxy[B] = diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala index 1c28093b2c51..02bdadb5dd11 100644 --- a/src/library/scala/collection/immutable/Stack.scala +++ b/src/library/scala/collection/immutable/Stack.scala @@ -46,7 +46,7 @@ object Stack extends SeqFactory[Stack] { * @define willNotTerminateInf */ @SerialVersionUID(1976480595012942526L) -@deprecated("Stack is an inelegant and potentially poorly-performing wrapper around List. Use List instead: stack push x becomes x :: list; stack.pop is list.tail.", "2.11.0") +@deprecated("Stack is an inelegant and potentially poorly-performing wrapper around List. 
Use List instead: stack push x becomes x :: list; stack.pop is list.tail.", "2.11.0") class Stack[+A] protected (protected val elems: List[A]) extends AbstractSeq[A] with LinearSeq[A] diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala index 3c7507f480da..b468b09a9d1a 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -133,7 +133,7 @@ self => * end characters; i.e., apply `.stripLineEnd` to all lines * returned by `linesWithSeparators`. */ - @deprecated("Use `lines` instead.","2.11.0") + @deprecated("use `lines` instead","2.11.0") def linesIterator: Iterator[String] = linesWithSeparators map (line => new WrappedString(line).stripLineEnd) diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala index 98c9771a056e..c78d59297be9 100644 --- a/src/library/scala/collection/mutable/BufferLike.scala +++ b/src/library/scala/collection/mutable/BufferLike.scala @@ -187,7 +187,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]] * * @param cmd the message to send. */ - @deprecated("Scripting is deprecated.", "2.11.0") + @deprecated("scripting is deprecated", "2.11.0") def <<(cmd: Message[A]): Unit = cmd match { case Include(Start, x) => prepend(x) case Include(End, x) => append(x) diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala index 2d52831d37ac..60f0e297466d 100644 --- a/src/library/scala/collection/mutable/BufferProxy.scala +++ b/src/library/scala/collection/mutable/BufferProxy.scala @@ -26,7 +26,7 @@ import script._ * @define Coll `BufferProxy` * @define coll buffer proxy */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") trait BufferProxy[A] extends Buffer[A] with Proxy { def self: Buffer[A] @@ -130,7 +130,7 @@ trait BufferProxy[A] extends Buffer[A] with Proxy { * * @param cmd the message to send. */ - @deprecated("Scripting is deprecated.", "2.11.0") + @deprecated("scripting is deprecated", "2.11.0") override def <<(cmd: Message[A]) { self << cmd } /** Return a clone of this buffer. 
diff --git a/src/library/scala/collection/mutable/DefaultMapModel.scala b/src/library/scala/collection/mutable/DefaultMapModel.scala index 00886205403d..7f832c0766a9 100644 --- a/src/library/scala/collection/mutable/DefaultMapModel.scala +++ b/src/library/scala/collection/mutable/DefaultMapModel.scala @@ -19,7 +19,7 @@ package mutable * @version 1.0, 08/07/2003 * @since 1 */ -@deprecated("This trait will be removed.", "2.11.0") +@deprecated("this trait will be removed", "2.11.0") trait DefaultMapModel[A, B] extends Map[A, B] { type Entry = DefaultEntry[A, B] diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala index fd95e74fbcb2..537cebd90342 100644 --- a/src/library/scala/collection/mutable/DoubleLinkedList.scala +++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala @@ -41,7 +41,7 @@ import generic._ * @define mayNotTerminateInf * @define willNotTerminateInf */ -@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0") +@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0") @SerialVersionUID(-8144992287952814767L) class DoubleLinkedList[A]() extends AbstractSeq[A] with LinearSeq[A] @@ -78,7 +78,7 @@ class DoubleLinkedList[A]() extends AbstractSeq[A] * @define coll double linked list * @define Coll `DoubleLinkedList` */ -@deprecated("Low-level linked lists are deprecated.", "2.11.0") +@deprecated("low-level linked lists are deprecated", "2.11.0") object DoubleLinkedList extends SeqFactory[DoubleLinkedList] { /** $genericCanBuildFromInfo */ implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, DoubleLinkedList[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala index aafe34f50a60..e85ef05319ca 100644 --- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala +++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala @@ -56,7 +56,7 @@ import scala.annotation.migration * @define Coll `DoubleLinkedList` * @define coll double linked list */ -@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0") +@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0") trait DoubleLinkedListLike[A, This <: Seq[A] with DoubleLinkedListLike[A, This]] extends SeqLike[A, This] with LinkedListLike[A, This] { self => /** A reference to the node in the linked list preceding the current node. 
*/ diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala index 9ece8b133584..7ab4dd2d9df2 100644 --- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala +++ b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala @@ -25,7 +25,7 @@ import scala.annotation.migration * @version 2.0, 01/01/2007 * @since 1 */ -@deprecated("Adaptors are inherently unreliable and prone to performance problems.", "2.11.0") +@deprecated("adaptors are inherently unreliable and prone to performance problems", "2.11.0") class ImmutableMapAdaptor[A, B](protected var imap: immutable.Map[A, B]) extends AbstractMap[A, B] with Map[A, B] diff --git a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala index d7eec70b15f7..aa21c4cc112d 100644 --- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala +++ b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala @@ -20,7 +20,7 @@ package mutable * @version 1.0, 21/07/2003 * @since 1 */ -@deprecated("Adaptors are inherently unreliable and prone to performance problems.", "2.11.0") +@deprecated("adaptors are inherently unreliable and prone to performance problems", "2.11.0") class ImmutableSetAdaptor[A](protected var set: immutable.Set[A]) extends AbstractSet[A] with Set[A] diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala index b3500367af1a..5d03cd44102e 100644 --- a/src/library/scala/collection/mutable/LinkedList.scala +++ b/src/library/scala/collection/mutable/LinkedList.scala @@ -76,7 +76,7 @@ import generic._ * }}} */ @SerialVersionUID(-7308240733518833071L) -@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0") +@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0") class LinkedList[A]() extends AbstractSeq[A] with LinearSeq[A] with GenericTraversableTemplate[A, LinkedList] @@ -114,7 +114,7 @@ class LinkedList[A]() extends AbstractSeq[A] * @define Coll `LinkedList` * @define coll linked list */ -@deprecated("Low-level linked lists are deprecated.", "2.11.0") +@deprecated("low-level linked lists are deprecated", "2.11.0") object LinkedList extends SeqFactory[LinkedList] { override def empty[A]: LinkedList[A] = new LinkedList[A] implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, LinkedList[A]] = ReusableCBF.asInstanceOf[GenericCanBuildFrom[A]] diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala index d0748b8a9f73..27c4466c9968 100644 --- a/src/library/scala/collection/mutable/LinkedListLike.scala +++ b/src/library/scala/collection/mutable/LinkedListLike.scala @@ -55,7 +55,7 @@ import scala.annotation.tailrec * * }}} */ -@deprecated("Low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features.", "2.11.0") +@deprecated("low-level linked lists are deprecated due to idiosyncrasies in interface and incomplete features", "2.11.0") trait LinkedListLike[A, This <: Seq[A] with LinkedListLike[A, This]] extends SeqLike[A, This] { self => var elem: A = _ diff --git a/src/library/scala/collection/mutable/MapProxy.scala b/src/library/scala/collection/mutable/MapProxy.scala index 552cd9769b8f..63b14d328a94 100644 --- a/src/library/scala/collection/mutable/MapProxy.scala 
+++ b/src/library/scala/collection/mutable/MapProxy.scala @@ -20,7 +20,7 @@ package mutable * @version 2.0, 31/12/2006 * @since 1 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") trait MapProxy[A, B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] { private def newProxy[B1 >: B](newSelf: Map[A, B1]): MapProxy[A, B1] = new MapProxy[A, B1] { val self = newSelf } diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala index 9c3247f83b61..53d26f4c6f00 100644 --- a/src/library/scala/collection/mutable/ObservableBuffer.scala +++ b/src/library/scala/collection/mutable/ObservableBuffer.scala @@ -23,9 +23,8 @@ import script._ * @version 1.0, 08/07/2003 * @since 1 */ -@deprecated("Observables are deprecated because scripting is deprecated.", "2.11.0") -trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoable] -{ +@deprecated("observables are deprecated because scripting is deprecated", "2.11.0") +trait ObservableBuffer[A] extends Buffer[A] with Publisher[Message[A] with Undoable] { type Pub <: ObservableBuffer[A] abstract override def +=(element: A): this.type = { diff --git a/src/library/scala/collection/mutable/ObservableMap.scala b/src/library/scala/collection/mutable/ObservableMap.scala index 7509b725688b..421302b70036 100644 --- a/src/library/scala/collection/mutable/ObservableMap.scala +++ b/src/library/scala/collection/mutable/ObservableMap.scala @@ -25,9 +25,8 @@ import script._ * @version 2.0, 31/12/2006 * @since 1 */ -@deprecated("Observables are deprecated because scripting is deprecated.", "2.11.0") -trait ObservableMap[A, B] extends Map[A, B] with Publisher[Message[(A, B)] with Undoable] -{ +@deprecated("observables are deprecated because scripting is deprecated", "2.11.0") +trait ObservableMap[A, B] extends Map[A, B] with Publisher[Message[(A, B)] with Undoable] { type Pub <: ObservableMap[A, B] diff --git a/src/library/scala/collection/mutable/ObservableSet.scala b/src/library/scala/collection/mutable/ObservableSet.scala index 19b4a5e39f3f..eb55a1f822a5 100644 --- a/src/library/scala/collection/mutable/ObservableSet.scala +++ b/src/library/scala/collection/mutable/ObservableSet.scala @@ -23,9 +23,8 @@ import script._ * @version 1.0, 08/07/2003 * @since 1 */ -@deprecated("Observables are deprecated because scripting is deprecated.", "2.11.0") -trait ObservableSet[A] extends Set[A] with Publisher[Message[A] with Undoable] -{ +@deprecated("observables are deprecated because scripting is deprecated", "2.11.0") +trait ObservableSet[A] extends Set[A] with Publisher[Message[A] with Undoable] { type Pub <: ObservableSet[A] diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala index a6c0fc207791..107a2bfa0e88 100644 --- a/src/library/scala/collection/mutable/PriorityQueue.scala +++ b/src/library/scala/collection/mutable/PriorityQueue.scala @@ -360,10 +360,8 @@ object PriorityQueue extends OrderedTraversableFactory[PriorityQueue] { * @version 1.0, 03/05/2004 * @since 1 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") -sealed abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends PriorityQueue[A] - with Proxy -{ +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") 
+sealed abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends PriorityQueue[A] with Proxy { def self: PriorityQueue[A] /** Creates a new iterator over all elements contained in this @@ -449,7 +447,7 @@ sealed abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends P * @define Coll `SynchronizedPriorityQueue` * @define coll synchronized priority queue */ -@deprecated("Comprehensive synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.ConcurrentSkipListSet as an alternative.", "2.11.0") +@deprecated("Comprehensive synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.ConcurrentSkipListSet as an alternative.", "2.11.0") sealed class SynchronizedPriorityQueue[A](implicit ord: Ordering[A]) extends PriorityQueue[A] { /** Checks if the queue is empty. diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala index ad60173b641c..fd5fe9aecc9c 100644 --- a/src/library/scala/collection/mutable/Queue.scala +++ b/src/library/scala/collection/mutable/Queue.scala @@ -143,7 +143,7 @@ extends MutableList[A] /** Return the proper suffix of this list which starts with the first element that satisfies `p`. * That element is unlinked from the list. If no element satisfies `p`, return None. */ - @deprecated("extractFirst inappropriately exposes implementation details. Use dequeue or dequeueAll.", "2.11.0") + @deprecated("extractFirst inappropriately exposes implementation details. Use dequeue or dequeueAll.", "2.11.0") def extractFirst(start: LinkedList[A], p: A => Boolean): Option[LinkedList[A]] = { if (isEmpty) None else { diff --git a/src/library/scala/collection/mutable/QueueProxy.scala b/src/library/scala/collection/mutable/QueueProxy.scala index 22ff3306d541..e780cc2cf05f 100644 --- a/src/library/scala/collection/mutable/QueueProxy.scala +++ b/src/library/scala/collection/mutable/QueueProxy.scala @@ -21,7 +21,7 @@ package mutable * @version 1.1, 03/05/2004 * @since 1 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") trait QueueProxy[A] extends Queue[A] with Proxy { def self: Queue[A] diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala index a19130e74207..0797a8315461 100644 --- a/src/library/scala/collection/mutable/SetLike.scala +++ b/src/library/scala/collection/mutable/SetLike.scala @@ -224,7 +224,7 @@ trait SetLike[A, +This <: SetLike[A, This] with Set[A]] * @throws UnsupportedOperationException * if the message was not understood. 
*/ - @deprecated("Scripting is deprecated.", "2.11.0") + @deprecated("scripting is deprecated", "2.11.0") def <<(cmd: Message[A]): Unit = cmd match { case Include(_, x) => this += x case Remove(_, x) => this -= x diff --git a/src/library/scala/collection/mutable/SetProxy.scala b/src/library/scala/collection/mutable/SetProxy.scala index 74279507ff12..43b6aa57af68 100644 --- a/src/library/scala/collection/mutable/SetProxy.scala +++ b/src/library/scala/collection/mutable/SetProxy.scala @@ -18,7 +18,7 @@ package mutable * @version 1.1, 09/05/2004 * @since 1 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] { override def repr = this override def empty = new SetProxy[A] { val self = SetProxy.this.self.empty } diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala index 81e63b05d27d..ac52bbba2197 100644 --- a/src/library/scala/collection/mutable/StackProxy.scala +++ b/src/library/scala/collection/mutable/StackProxy.scala @@ -19,7 +19,7 @@ package mutable * @version 1.0, 10/05/2004 * @since 1 */ -@deprecated("Proxying is deprecated due to lack of use and compiler-level support.", "2.11.0") +@deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") trait StackProxy[A] extends Stack[A] with Proxy { def self: Stack[A] diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala index 8c646b0ce514..9c27f8b003f9 100644 --- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala +++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala @@ -25,7 +25,7 @@ import script._ * @define Coll `SynchronizedBuffer` * @define coll synchronized buffer */ -@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. Consider java.util.concurrent.ConcurrentLinkedQueue as an alternative.", "2.11.0") +@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. Consider java.util.concurrent.ConcurrentLinkedQueue as an alternative.", "2.11.0") trait SynchronizedBuffer[A] extends Buffer[A] { import scala.collection.Traversable @@ -162,7 +162,7 @@ trait SynchronizedBuffer[A] extends Buffer[A] { super.clear() } - @deprecated("Scripting is deprecated.", "2.11.0") + @deprecated("scripting is deprecated", "2.11.0") override def <<(cmd: Message[A]): Unit = synchronized { super.<<(cmd) } diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala index 9876296ebe45..8618798dbd95 100644 --- a/src/library/scala/collection/mutable/SynchronizedMap.scala +++ b/src/library/scala/collection/mutable/SynchronizedMap.scala @@ -24,7 +24,7 @@ import scala.annotation.migration * @define Coll `SynchronizedMap` * @define coll synchronized map */ -@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. Consider java.util.concurrent.ConcurrentHashMap as an alternative.", "2.11.0") +@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. 
Consider java.util.concurrent.ConcurrentHashMap as an alternative.", "2.11.0") trait SynchronizedMap[A, B] extends Map[A, B] { abstract override def get(key: A): Option[B] = synchronized { super.get(key) } @@ -54,7 +54,7 @@ trait SynchronizedMap[A, B] extends Map[A, B] { override def contains(key: A): Boolean = synchronized {super.contains(key) } override def isDefinedAt(key: A) = synchronized { super.isDefinedAt(key) } - // @deprecated("See Map.+ for explanation") override def +(kv: (A, B)): this.type = synchronized[this.type] { super.+(kv) } + // @deprecated("see Map.+ for explanation") override def +(kv: (A, B)): this.type = synchronized[this.type] { super.+(kv) } // can't override -, -- same type! // @deprecated override def -(key: A): Self = synchronized { super.-(key) } diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala index 48e40ab27f5a..ee44f07df214 100644 --- a/src/library/scala/collection/mutable/SynchronizedQueue.scala +++ b/src/library/scala/collection/mutable/SynchronizedQueue.scala @@ -25,7 +25,7 @@ package mutable * @define Coll `SynchronizedQueue` * @define coll synchronized queue */ -@deprecated("Synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.ConcurrentLinkedQueue as an alternative.", "2.11.0") +@deprecated("Synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.ConcurrentLinkedQueue as an alternative.", "2.11.0") class SynchronizedQueue[A] extends Queue[A] { /** Checks if the queue is empty. * diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala index dd842f26cebf..399630eb3c72 100644 --- a/src/library/scala/collection/mutable/SynchronizedSet.scala +++ b/src/library/scala/collection/mutable/SynchronizedSet.scala @@ -24,7 +24,7 @@ import script._ * @define Coll `SynchronizedSet` * @define coll synchronized set */ -@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. Consider java.util.concurrent.ConcurrentHashMap[A,Unit] as an alternative.", "2.11.0") +@deprecated("Synchronization via traits is deprecated as it is inherently unreliable. Consider java.util.concurrent.ConcurrentHashMap[A,Unit] as an alternative.", "2.11.0") trait SynchronizedSet[A] extends Set[A] { abstract override def size: Int = synchronized { super.size @@ -94,7 +94,7 @@ trait SynchronizedSet[A] extends Set[A] { super.toString } - @deprecated("Scripting is deprecated.", "2.11.0") + @deprecated("scripting is deprecated", "2.11.0") override def <<(cmd: Message[A]): Unit = synchronized { super.<<(cmd) } diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala index c77a6fad620c..2954a1f768b6 100644 --- a/src/library/scala/collection/mutable/SynchronizedStack.scala +++ b/src/library/scala/collection/mutable/SynchronizedStack.scala @@ -25,7 +25,7 @@ package mutable * @define Coll `SynchronizedStack` * @define coll synchronized stack */ -@deprecated("Synchronization via selective overriding of methods is inherently unreliable. Consider java.util.concurrent.LinkedBlockingDequeue instead.", "2.11.0") +@deprecated("Synchronization via selective overriding of methods is inherently unreliable. 
Consider java.util.concurrent.LinkedBlockingDequeue instead.", "2.11.0") class SynchronizedStack[A] extends Stack[A] { /** Checks if the stack is empty. diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index 2ed7bc075e07..2e60089df5d6 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -842,7 +842,7 @@ self: ParIterableLike[T, Repr, Sequential] => tasksupport.executeAndWaitResult(new ToParMap(combinerFactory(cbf), splitter)(ev) mapResult { _.resultWithTaskSupport }) } - @deprecated("Use .seq.view instead", "2.11.0") + @deprecated("use .seq.view instead", "2.11.0") def view = seq.view override def toArray[U >: T: ClassTag]: Array[U] = { diff --git a/src/library/scala/collection/parallel/TaskSupport.scala b/src/library/scala/collection/parallel/TaskSupport.scala index 6ab694de04c5..728605af7b93 100644 --- a/src/library/scala/collection/parallel/TaskSupport.scala +++ b/src/library/scala/collection/parallel/TaskSupport.scala @@ -60,7 +60,7 @@ extends TaskSupport with AdaptiveWorkStealingForkJoinTasks * * @see [[scala.collection.parallel.TaskSupport]] for more information. */ -@deprecated("Use `ForkJoinTaskSupport` instead.", "2.11.0") +@deprecated("use `ForkJoinTaskSupport` instead", "2.11.0") class ThreadPoolTaskSupport(val environment: ThreadPoolExecutor = ThreadPoolTasks.defaultThreadPool) extends TaskSupport with AdaptiveWorkStealingThreadPoolTasks diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala index 2a4e40dd1617..f472c6be5c77 100644 --- a/src/library/scala/collection/parallel/Tasks.scala +++ b/src/library/scala/collection/parallel/Tasks.scala @@ -208,7 +208,7 @@ trait AdaptiveWorkStealingTasks extends Tasks { /** An implementation of tasks objects based on the Java thread pooling API. */ -@deprecated("Use `ForkJoinTasks` instead.", "2.11.0") +@deprecated("use `ForkJoinTasks` instead", "2.11.0") trait ThreadPoolTasks extends Tasks { import java.util.concurrent._ @@ -317,7 +317,7 @@ trait ThreadPoolTasks extends Tasks { } -@deprecated("Use `ForkJoinTasks` instead.", "2.11.0") +@deprecated("use `ForkJoinTasks` instead", "2.11.0") object ThreadPoolTasks { import java.util.concurrent._ @@ -445,7 +445,7 @@ trait AdaptiveWorkStealingForkJoinTasks extends ForkJoinTasks with AdaptiveWorkS def newWrappedTask[R, Tp](b: Task[R, Tp]) = new WrappedTask[R, Tp](b) } -@deprecated("Use `AdaptiveWorkStealingForkJoinTasks` instead.", "2.11.0") +@deprecated("use `AdaptiveWorkStealingForkJoinTasks` instead", "2.11.0") trait AdaptiveWorkStealingThreadPoolTasks extends ThreadPoolTasks with AdaptiveWorkStealingTasks { class WrappedTask[R, Tp](val body: Task[R, Tp]) @@ -523,7 +523,7 @@ private[parallel] final class FutureTasks(executor: ExecutionContext) extends Ta } /** This tasks implementation uses execution contexts to spawn a parallel computation. - * + * * As an optimization, it internally checks whether the execution context is the * standard implementation based on fork/join pools, and if it is, creates a * `ForkJoinTaskSupport` that shares the same pool to forward its request to it. @@ -537,7 +537,7 @@ trait ExecutionContextTasks extends Tasks { val environment: ExecutionContext /** A driver serves as a target for this proxy `Tasks` object. 
- * + * * If the execution context has the standard implementation and uses fork/join pools, * the driver is `ForkJoinTaskSupport` with the same pool, as an optimization. * Otherwise, the driver will be a Scala `Future`-based implementation. diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala index ba64ca505b99..eaa87b675af8 100644 --- a/src/library/scala/collection/parallel/package.scala +++ b/src/library/scala/collection/parallel/package.scala @@ -114,9 +114,9 @@ package parallel { def ifParSeq[R](isbody: ParSeq[T] => R): Otherwise[R] } - @deprecated("This trait will be removed.", "2.11.0") + @deprecated("this trait will be removed", "2.11.0") trait ThrowableOps { - @deprecated("This method will be removed.", "2.11.0") + @deprecated("this method will be removed", "2.11.0") def alongWith(that: Throwable): Throwable } @@ -135,7 +135,7 @@ package parallel { } /** Composite throwable - thrown when multiple exceptions are thrown at the same time. */ - @deprecated("This class will be removed.", "2.11.0") + @deprecated("this class will be removed.", "2.11.0") final case class CompositeThrowable(throwables: Set[Throwable]) extends Exception( "Multiple exceptions thrown during a parallel computation: " + throwables.map(t => t + "\n" + t.getStackTrace.take(10).++("...").mkString("\n")).mkString("\n\n") diff --git a/src/library/scala/collection/script/Location.scala b/src/library/scala/collection/script/Location.scala index bed74bf9ca9f..8a0b10c331ee 100644 --- a/src/library/scala/collection/script/Location.scala +++ b/src/library/scala/collection/script/Location.scala @@ -18,17 +18,17 @@ package script * @since 2.8 */ -@deprecated("Scripting is deprecated.", "2.11.0") +@deprecated("scripting is deprecated", "2.11.0") sealed abstract class Location -@deprecated("Scripting is deprecated.", "2.11.0") +@deprecated("scripting is deprecated", "2.11.0") case object Start extends Location -@deprecated("Scripting is deprecated.", "2.11.0") +@deprecated("scripting is deprecated", "2.11.0") case object End extends Location -@deprecated("Scripting is deprecated.", "2.11.0") +@deprecated("scripting is deprecated", "2.11.0") case object NoLo extends Location -@deprecated("Scripting is deprecated.", "2.11.0") +@deprecated("scripting is deprecated", "2.11.0") case class Index(n: Int) extends Location diff --git a/src/library/scala/collection/script/Message.scala b/src/library/scala/collection/script/Message.scala index 3fc2a0ec7e18..a6ba9d95233e 100644 --- a/src/library/scala/collection/script/Message.scala +++ b/src/library/scala/collection/script/Message.scala @@ -21,7 +21,7 @@ import mutable.ArrayBuffer * @version 1.0, 08/07/2003 * @since 2.8 */ -@deprecated("Scripting is deprecated.", "2.11.0") +@deprecated("scripting is deprecated", "2.11.0") trait Message[+A] /** This observable update refers to inclusion operations that add new elements @@ -30,7 +30,7 @@ trait Message[+A] * @author Matthias Zenger * @version 1.0, 08/07/2003 */ -@deprecated("Scripting is deprecated.", "2.11.0") +@deprecated("scripting is deprecated", "2.11.0") case class Include[+A](location: Location, elem: A) extends Message[A] { def this(elem: A) = this(NoLo, elem) } @@ -41,7 +41,7 @@ case class Include[+A](location: Location, elem: A) extends Message[A] { * @author Matthias Zenger * @version 1.0, 08/07/2003 */ -@deprecated("Scripting is deprecated.", "2.11.0") +@deprecated("scripting is deprecated", "2.11.0") case class Update[+A](location: Location, elem: A) extends 
Message[A] { def this(elem: A) = this(NoLo, elem) } @@ -52,7 +52,7 @@ case class Update[+A](location: Location, elem: A) extends Message[A] { * @author Matthias Zenger * @version 1.0, 08/07/2003 */ -@deprecated("Scripting is deprecated.", "2.11.0") +@deprecated("scripting is deprecated", "2.11.0") case class Remove[+A](location: Location, elem: A) extends Message[A] { def this(elem: A) = this(NoLo, elem) } @@ -62,7 +62,7 @@ case class Remove[+A](location: Location, elem: A) extends Message[A] { * @author Matthias Zenger * @version 1.0, 08/07/2003 */ -@deprecated("Scripting is deprecated.", "2.11.0") +@deprecated("scripting is deprecated", "2.11.0") case class Reset[+A]() extends Message[A] /** Objects of this class represent compound messages consisting @@ -71,7 +71,7 @@ case class Reset[+A]() extends Message[A] * @author Matthias Zenger * @version 1.0, 10/05/2004 */ -@deprecated("Scripting is deprecated.", "2.11.0") +@deprecated("scripting is deprecated", "2.11.0") class Script[A] extends ArrayBuffer[Message[A]] with Message[A] { override def toString(): String = { diff --git a/src/library/scala/collection/script/Scriptable.scala b/src/library/scala/collection/script/Scriptable.scala index 4db75ddd3ef6..8965286b0db0 100644 --- a/src/library/scala/collection/script/Scriptable.scala +++ b/src/library/scala/collection/script/Scriptable.scala @@ -17,7 +17,7 @@ package script * @version 1.0, 09/05/2004 * @since 2.8 */ -@deprecated("Scripting is deprecated.", "2.11.0") +@deprecated("scripting is deprecated", "2.11.0") trait Scriptable[A] { /** Send a message to this scriptable object. */ diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala index e022b94ea8ef..f2c3284f9236 100644 --- a/src/library/scala/concurrent/ExecutionContext.scala +++ b/src/library/scala/concurrent/ExecutionContext.scala @@ -87,7 +87,7 @@ trait ExecutionContext { * constructed, so that it doesn't need any additional * preparation later. */ - @deprecated("Preparation of ExecutionContexts will be removed.", "2.12") + @deprecated("preparation of ExecutionContexts will be removed", "2.12") def prepare(): ExecutionContext = this } diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index 8abd7feeb7ac..93e9fddcb3a8 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -274,8 +274,8 @@ trait Future[+T] extends Awaitable[T] { * val g = f map { x: String => x + " is now!" } * }}} * - * Note that a for comprehension involving a `Future` - * may expand to include a call to `map` and or `flatMap` + * Note that a for comprehension involving a `Future` + * may expand to include a call to `map` and or `flatMap` * and `withFilter`. See [[scala.concurrent.Future#flatMap]] for an example of such a comprehension. 
* * @@ -688,7 +688,7 @@ object Future { * @param p the predicate which indicates if it's a match * @return the `Future` holding the optional result of the search */ - @deprecated("Use the overloaded version of this method that takes a scala.collection.immutable.Iterable instead", "2.12") + @deprecated("use the overloaded version of this method that takes a scala.collection.immutable.Iterable instead", "2.12") def find[T](@deprecatedName('futurestravonce) futures: TraversableOnce[Future[T]])(@deprecatedName('predicate) p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = { val futuresBuffer = futures.toBuffer if (futuresBuffer.isEmpty) successful[Option[T]](None) @@ -775,7 +775,7 @@ object Future { * @param op the fold operation to be applied to the zero and futures * @return the `Future` holding the result of the fold */ - @deprecated("Use Future.foldLeft instead", "2.12") + @deprecated("use Future.foldLeft instead", "2.12") def fold[T, R](futures: TraversableOnce[Future[T]])(zero: R)(@deprecatedName('foldFun) op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { if (futures.isEmpty) successful(zero) else sequence(futures).map(_.foldLeft(zero)(op)) @@ -794,7 +794,7 @@ object Future { * @param op the reduce operation which is applied to the results of the futures * @return the `Future` holding the result of the reduce */ - @deprecated("Use Future.reduceLeft instead", "2.12") + @deprecated("use Future.reduceLeft instead", "2.12") def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { if (futures.isEmpty) failed(new NoSuchElementException("reduce attempted on empty collection")) else sequence(futures).map(_ reduceLeft op) diff --git a/src/library/scala/concurrent/Lock.scala b/src/library/scala/concurrent/Lock.scala index 8d18da2d387f..757fb94cc774 100644 --- a/src/library/scala/concurrent/Lock.scala +++ b/src/library/scala/concurrent/Lock.scala @@ -15,7 +15,7 @@ package scala.concurrent * @author Martin Odersky * @version 1.0, 10/03/2003 */ -@deprecated("Use java.util.concurrent.locks.Lock", "2.11.2") +@deprecated("use java.util.concurrent.locks.Lock", "2.11.2") class Lock { var available = true diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala index 127e6b58d269..5fabf553bd5c 100644 --- a/src/library/scala/concurrent/SyncVar.scala +++ b/src/library/scala/concurrent/SyncVar.scala @@ -91,7 +91,7 @@ class SyncVar[A] { // [Heather] the reason why: it doesn't take into consideration // whether or not the SyncVar is already defined. So, set has been // deprecated in order to eventually be able to make "setting" private - @deprecated("Use `put` instead, as `set` is potentially error-prone", "2.10.0") + @deprecated("use `put` instead, as `set` is potentially error-prone", "2.10.0") // NOTE: Used by SBT 0.13.0-M2 and below def set(x: A): Unit = setVal(x) @@ -111,7 +111,7 @@ class SyncVar[A] { // [Heather] the reason why: it doesn't take into consideration // whether or not the SyncVar is already defined. 
So, unset has been // deprecated in order to eventually be able to make "unsetting" private - @deprecated("Use `take` instead, as `unset` is potentially error-prone", "2.10.0") + @deprecated("use `take` instead, as `unset` is potentially error-prone", "2.10.0") // NOTE: Used by SBT 0.13.0-M2 and below def unset(): Unit = synchronized { isDefined = false diff --git a/src/library/scala/concurrent/forkjoin/package.scala b/src/library/scala/concurrent/forkjoin/package.scala index 7f4524fccf16..1915e25d7b8d 100644 --- a/src/library/scala/concurrent/forkjoin/package.scala +++ b/src/library/scala/concurrent/forkjoin/package.scala @@ -11,9 +11,9 @@ import java.util.{concurrent => juc} import java.util.Collection package object forkjoin { - @deprecated("Use java.util.concurrent.ForkJoinPool directly, instead of this alias.", "2.12.0") + @deprecated("use java.util.concurrent.ForkJoinPool directly, instead of this alias", "2.12.0") type ForkJoinPool = juc.ForkJoinPool - @deprecated("Use java.util.concurrent.ForkJoinPool directly, instead of this alias.", "2.12.0") + @deprecated("use java.util.concurrent.ForkJoinPool directly, instead of this alias", "2.12.0") object ForkJoinPool { type ForkJoinWorkerThreadFactory = juc.ForkJoinPool.ForkJoinWorkerThreadFactory type ManagedBlocker = juc.ForkJoinPool.ManagedBlocker @@ -22,9 +22,9 @@ package object forkjoin { def managedBlock(blocker: ManagedBlocker): Unit = juc.ForkJoinPool.managedBlock(blocker) } - @deprecated("Use java.util.concurrent.ForkJoinTask directly, instead of this alias.", "2.12.0") + @deprecated("use java.util.concurrent.ForkJoinTask directly, instead of this alias", "2.12.0") type ForkJoinTask[T] = juc.ForkJoinTask[T] - @deprecated("Use java.util.concurrent.ForkJoinTask directly, instead of this alias.", "2.12.0") + @deprecated("use java.util.concurrent.ForkJoinTask directly, instead of this alias", "2.12.0") object ForkJoinTask { def adapt(runnable: Runnable): ForkJoinTask[_] = juc.ForkJoinTask.adapt(runnable) def adapt[T](callable: juc.Callable[_ <: T]): ForkJoinTask[T] = juc.ForkJoinTask.adapt(callable) @@ -39,18 +39,18 @@ package object forkjoin { def invokeAll[T](tasks: ForkJoinTask[T]*): Unit = juc.ForkJoinTask.invokeAll(tasks: _*) } - @deprecated("Use java.util.concurrent.ForkJoinWorkerThread directly, instead of this alias.", "2.12.0") + @deprecated("use java.util.concurrent.ForkJoinWorkerThread directly, instead of this alias", "2.12.0") type ForkJoinWorkerThread = juc.ForkJoinWorkerThread - @deprecated("Use java.util.concurrent.LinkedTransferQueue directly, instead of this alias.", "2.12.0") + @deprecated("use java.util.concurrent.LinkedTransferQueue directly, instead of this alias", "2.12.0") type LinkedTransferQueue[T] = juc.LinkedTransferQueue[T] - @deprecated("Use java.util.concurrent.RecursiveAction directly, instead of this alias.", "2.12.0") + @deprecated("use java.util.concurrent.RecursiveAction directly, instead of this alias", "2.12.0") type RecursiveAction = juc.RecursiveAction - @deprecated("Use java.util.concurrent.RecursiveTask directly, instead of this alias.", "2.12.0") + @deprecated("use java.util.concurrent.RecursiveTask directly, instead of this alias", "2.12.0") type RecursiveTask[T] = juc.RecursiveTask[T] - @deprecated("Use java.util.concurrent.ThreadLocalRandom directly, instead of this alias.", "2.12.0") + @deprecated("use java.util.concurrent.ThreadLocalRandom directly, instead of this alias", "2.12.0") type ThreadLocalRandom = juc.ThreadLocalRandom - @deprecated("Use java.util.concurrent.ThreadLocalRandom 
directly, instead of this alias.", "2.12.0") + @deprecated("use java.util.concurrent.ThreadLocalRandom directly, instead of this alias", "2.12.0") object ThreadLocalRandom { // For source compatibility, current must declare the empty argument list. // Having no argument list makes more sense since it doesn't have any side effects, diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala index d159dda414a9..667a7547ac58 100644 --- a/src/library/scala/concurrent/package.scala +++ b/src/library/scala/concurrent/package.scala @@ -20,11 +20,11 @@ import scala.annotation.implicitNotFound * [[http://docs.scala-lang.org/overviews/core/futures.html]]. * * == Common Imports == - * + * * When working with Futures, you will often find that importing the whole concurrent * package is convenient, furthermore you are likely to need an implicit ExecutionContext * in scope for many operations involving Futures and Promises: - * + * * {{{ * import scala.concurrent._ * import ExecutionContext.Implicits.global @@ -41,7 +41,7 @@ import scala.annotation.implicitNotFound * }}} * * == Using Futures For Non-blocking Computation == - * + * * Basic use of futures is easy with the factory method on Future, which executes a * provided function asynchronously, handing you back a future result of that function * without blocking the current thread. In order to create the Future you will need @@ -50,7 +50,7 @@ import scala.annotation.implicitNotFound * {{{ * import scala.concurrent._ * import ExecutionContext.Implicits.global // implicit execution context - * + * * val firstZebra: Future[Int] = Future { * val source = scala.io.Source.fromFile("/etc/dictionaries-common/words") * source.toSeq.indexOfSlice("zebra") @@ -80,7 +80,7 @@ import scala.annotation.implicitNotFound * animalRange.onSuccess { * case x if x > 500000 => println("It's a long way from Aardvark to Zebra") * } - * }}} + * }}} */ package object concurrent { type ExecutionException = java.util.concurrent.ExecutionException @@ -96,7 +96,7 @@ package object concurrent { * @param executor the execution context on which the future is run * @return the `Future` holding the result of the computation */ - @deprecated("Use `Future { ... }` instead.", "2.11.0") + @deprecated("use `Future { ... }` instead", "2.11.0") // removal planned for 2.13.0 def future[T](body: =>T)(implicit @deprecatedName('execctx) executor: ExecutionContext): Future[T] = Future[T](body) @@ -105,7 +105,7 @@ package object concurrent { * @tparam T the type of the value in the promise * @return the newly created `Promise` object */ - @deprecated("Use `Promise[T]()` instead.", "2.11.0") + @deprecated("use `Promise[T]()` instead", "2.11.0") // removal planned for 2.13.0 def promise[T](): Promise[T] = Promise[T]() diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala index 011d0f17af17..0435ca95ad8e 100644 --- a/src/library/scala/io/Position.scala +++ b/src/library/scala/io/Position.scala @@ -33,7 +33,7 @@ package io * }}} * @author Burak Emir (translated from work by Matthias Zenger and others) */ -@deprecated("This class will be removed.", "2.10.0") +@deprecated("this class will be removed", "2.10.0") private[scala] abstract class Position { /** Definable behavior for overflow conditions. 
*/ diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala index e769dfb8cbe0..8e03017f0964 100644 --- a/src/library/scala/math/BigDecimal.scala +++ b/src/library/scala/math/BigDecimal.scala @@ -148,7 +148,7 @@ object BigDecimal { * @param mc the `MathContext` used for future computations * @return the constructed `BigDecimal` */ - @deprecated("MathContext is not applied to Doubles in valueOf. Use BigDecimal.decimal to use rounding, or java.math.BigDecimal.valueOf to avoid it.","2.11") + @deprecated("MathContext is not applied to Doubles in valueOf. Use BigDecimal.decimal to use rounding, or java.math.BigDecimal.valueOf to avoid it.", "2.11") def valueOf(d: Double, mc: MathContext): BigDecimal = apply(BigDec valueOf d, mc) /** Constructs a `BigDecimal` using the java BigDecimal static @@ -163,14 +163,14 @@ object BigDecimal { * valueOf constructor. This is unlikely to do what you want; * use `valueOf(f.toDouble)` or `decimal(f)` instead. */ - @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).","2.11") + @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).", "2.11") def valueOf(f: Float): BigDecimal = valueOf(f.toDouble) /** Constructs a `BigDecimal` using the java BigDecimal static * valueOf constructor. This is unlikely to do what you want; * use `valueOf(f.toDouble)` or `decimal(f)` instead. */ - @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).","2.11") + @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).", "2.11") def valueOf(f: Float, mc: MathContext): BigDecimal = valueOf(f.toDouble, mc) @@ -259,10 +259,10 @@ object BigDecimal { */ def apply(d: Double, mc: MathContext): BigDecimal = decimal(d, mc) - @deprecated("The default conversion from Float may not do what you want. Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11") + @deprecated("The default conversion from Float may not do what you want. Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11") def apply(x: Float): BigDecimal = apply(x.toDouble) - @deprecated("The default conversion from Float may not do what you want. Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11") + @deprecated("The default conversion from Float may not do what you want. Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11") def apply(x: Float, mc: MathContext): BigDecimal = apply(x.toDouble, mc) /** Translates a character array representation of a `BigDecimal` @@ -329,7 +329,7 @@ object BigDecimal { /** Constructs a `BigDecimal` from a `java.math.BigDecimal`. */ def apply(bd: BigDec): BigDecimal = apply(bd, defaultMathContext) - @deprecated("This method appears to round a java.math.BigDecimal but actually doesn't. Use new BigDecimal(bd, mc) instead for no rounding, or BigDecimal.decimal(bd, mc) for rounding.", "2.11") + @deprecated("This method appears to round a java.math.BigDecimal but actually doesn't. Use new BigDecimal(bd, mc) instead for no rounding, or BigDecimal.decimal(bd, mc) for rounding.", "2.11") def apply(bd: BigDec, mc: MathContext): BigDecimal = new BigDecimal(bd, mc) /** Implicit conversion from `Int` to `BigDecimal`. 
*/ @@ -467,7 +467,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ * `isExactFloat`, `isBinaryFloat`, or `isDecimalFloat`, depending on the intended meaning. * By default, `decimal` creation is used, so `isDecimalFloat` is probably what you want. */ - @deprecated("What constitutes validity is unclear. Use `isExactFloat`, `isBinaryFloat`, or `isDecimalFloat` instead.", "2.11") + @deprecated("What constitutes validity is unclear. Use `isExactFloat`, `isBinaryFloat`, or `isDecimalFloat` instead.", "2.11") def isValidFloat = { val f = toFloat !f.isInfinity && bigDecimal.compareTo(new BigDec(f.toDouble)) == 0 @@ -476,7 +476,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ * `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble`, depending on the intended meaning. * By default, `decimal` creation is used, so `isDecimalDouble` is probably what you want. */ - @deprecated("Validity has distinct meanings. Use `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble` instead.", "2.11") + @deprecated("Validity has distinct meanings. Use `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble` instead.", "2.11") def isValidDouble = { val d = toDouble !d.isInfinity && bigDecimal.compareTo(new BigDec(d)) == 0 diff --git a/src/library/scala/math/package.scala b/src/library/scala/math/package.scala index 54c81ed6134c..0e39af2febd9 100644 --- a/src/library/scala/math/package.scala +++ b/src/library/scala/math/package.scala @@ -65,9 +65,9 @@ package object math { * @return the value `logₑ(x)` where `e` is Eulers number */ def log(x: Double): Double = java.lang.Math.log(x) - + /** Returns the square root of a `double` value. - * + * * @param x the number to take the square root of * @return the value √x */ @@ -106,7 +106,7 @@ package object math { def pow(x: Double, y: Double): Double = java.lang.Math.pow(x, y) /** There is no reason to round a `Long`, but this method prevents unintended conversion to `Float` followed by rounding to `Int`. */ - @deprecated("This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value?", "2.11.0") + @deprecated("This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value?", "2.11.0") def round(x: Long): Long = x /** Returns the closest `Int` to the argument. @@ -115,7 +115,7 @@ package object math { * @return the value of the argument rounded to the nearest `Int` value. */ def round(x: Float): Int = java.lang.Math.round(x) - + /** Returns the closest `Long` to the argument. * * @param x a floating-point value to be rounded to a `Long`. diff --git a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala index 82ec87280631..30a99340cc49 100644 --- a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala +++ b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala @@ -12,12 +12,12 @@ package reflect import scala.collection.mutable.{ WrappedArray, ArrayBuilder } import java.lang.{ Class => jClass } -@deprecated("Use scala.reflect.ClassTag instead", "2.10.0") +@deprecated("use scala.reflect.ClassTag instead", "2.10.0") trait ClassManifestDeprecatedApis[T] extends OptManifest[T] { self: ClassManifest[T] => // Still in use in target test.junit.comp. 
- @deprecated("Use runtimeClass instead", "2.10.0") + @deprecated("use runtimeClass instead", "2.10.0") def erasure: jClass[_] = runtimeClass private def subtype(sub: jClass[_], sup: jClass[_]): Boolean = { @@ -44,7 +44,7 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] { * of the type represented by `that` manifest, subject to the limitations * described in the header. */ - @deprecated("Use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0") + @deprecated("use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0") def <:<(that: ClassManifest[_]): Boolean = { // All types which could conform to these types will override <:<. def cannotMatch = { @@ -78,7 +78,7 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] { * of the type represented by `that` manifest, subject to the limitations * described in the header. */ - @deprecated("Use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0") + @deprecated("use scala.reflect.runtime.universe.TypeTag for subtype checking instead", "2.10.0") def >:>(that: ClassManifest[_]): Boolean = that <:< this @@ -90,44 +90,44 @@ trait ClassManifestDeprecatedApis[T] extends OptManifest[T] { protected def arrayClass[T](tp: jClass[_]): jClass[Array[T]] = java.lang.reflect.Array.newInstance(tp, 0).getClass.asInstanceOf[jClass[Array[T]]] - @deprecated("Use wrap instead", "2.10.0") + @deprecated("use wrap instead", "2.10.0") def arrayManifest: ClassManifest[Array[T]] = ClassManifest.classType[Array[T]](arrayClass[T](runtimeClass), this) override def newArray(len: Int): Array[T] = java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] - @deprecated("Use wrap.newArray instead", "2.10.0") + @deprecated("use wrap.newArray instead", "2.10.0") def newArray2(len: Int): Array[Array[T]] = java.lang.reflect.Array.newInstance(arrayClass[T](runtimeClass), len) .asInstanceOf[Array[Array[T]]] - @deprecated("Use wrap.wrap.newArray instead", "2.10.0") + @deprecated("use wrap.wrap.newArray instead", "2.10.0") def newArray3(len: Int): Array[Array[Array[T]]] = java.lang.reflect.Array.newInstance(arrayClass[Array[T]](arrayClass[T](runtimeClass)), len) .asInstanceOf[Array[Array[Array[T]]]] - @deprecated("Use wrap.wrap.wrap.newArray instead", "2.10.0") + @deprecated("use wrap.wrap.wrap.newArray instead", "2.10.0") def newArray4(len: Int): Array[Array[Array[Array[T]]]] = java.lang.reflect.Array.newInstance(arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](runtimeClass))), len) .asInstanceOf[Array[Array[Array[Array[T]]]]] - @deprecated("Use wrap.wrap.wrap.wrap.newArray instead", "2.10.0") + @deprecated("use wrap.wrap.wrap.wrap.newArray instead", "2.10.0") def newArray5(len: Int): Array[Array[Array[Array[Array[T]]]]] = java.lang.reflect.Array.newInstance(arrayClass[Array[Array[Array[T]]]](arrayClass[Array[Array[T]]](arrayClass[Array[T]](arrayClass[T](runtimeClass)))), len) .asInstanceOf[Array[Array[Array[Array[Array[T]]]]]] - @deprecated("Create WrappedArray directly instead", "2.10.0") + @deprecated("create WrappedArray directly instead", "2.10.0") def newWrappedArray(len: Int): WrappedArray[T] = // it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests new WrappedArray.ofRef[T with AnyRef](newArray(len).asInstanceOf[Array[T with AnyRef]]).asInstanceOf[WrappedArray[T]] - @deprecated("Use ArrayBuilder.make(this) instead", "2.10.0") + @deprecated("use ArrayBuilder.make(this) instead", "2.10.0") def 
newArrayBuilder(): ArrayBuilder[T] = // it's safe to assume T <: AnyRef here because the method is overridden for all value type manifests new ArrayBuilder.ofRef[T with AnyRef]()(this.asInstanceOf[ClassManifest[T with AnyRef]]).asInstanceOf[ArrayBuilder[T]] - @deprecated("Use scala.reflect.runtime.universe.TypeTag to capture type structure instead", "2.10.0") + @deprecated("use scala.reflect.runtime.universe.TypeTag to capture type structure instead", "2.10.0") def typeArguments: List[OptManifest[_]] = List() protected def argString = diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala index e0998534631e..369676c27398 100644 --- a/src/library/scala/reflect/Manifest.scala +++ b/src/library/scala/reflect/Manifest.scala @@ -41,7 +41,7 @@ import scala.collection.mutable.{ ArrayBuilder, WrappedArray } */ @scala.annotation.implicitNotFound(msg = "No Manifest available for ${T}.") // TODO undeprecated until Scala reflection becomes non-experimental -// @deprecated("Use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") +// @deprecated("use scala.reflect.ClassTag (to capture erasures) or scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") trait Manifest[T] extends ClassManifest[T] with Equals { override def typeArguments: List[Manifest[_]] = Nil @@ -63,7 +63,7 @@ trait Manifest[T] extends ClassManifest[T] with Equals { } // TODO undeprecated until Scala reflection becomes non-experimental -// @deprecated("Use type tags and manually check the corresponding class or type instead", "2.10.0") +// @deprecated("use type tags and manually check the corresponding class or type instead", "2.10.0") @SerialVersionUID(1L) abstract class AnyValManifest[T <: AnyVal](override val toString: String) extends Manifest[T] with Equals { override def <:<(that: ClassManifest[_]): Boolean = diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala index 509d181d87d2..88cdfb0ed495 100644 --- a/src/library/scala/reflect/package.scala +++ b/src/library/scala/reflect/package.scala @@ -25,21 +25,21 @@ package object reflect { * be wrong when variance is involved or when a subtype has a different * number of type arguments than a supertype. */ - @deprecated("Use scala.reflect.ClassTag instead", "2.10.0") + @deprecated("use scala.reflect.ClassTag instead", "2.10.0") @annotation.implicitNotFound(msg = "No ClassManifest available for ${T}.") type ClassManifest[T] = scala.reflect.ClassTag[T] /** The object `ClassManifest` defines factory methods for manifests. * It is intended for use by the compiler and should not be used in client code. */ - @deprecated("Use scala.reflect.ClassTag instead", "2.10.0") + @deprecated("use scala.reflect.ClassTag instead", "2.10.0") val ClassManifest = ClassManifestFactory /** The object `Manifest` defines factory methods for manifests. * It is intended for use by the compiler and should not be used in client code. 
*/ // TODO undeprecated until Scala reflection becomes non-experimental - // @deprecated("Use scala.reflect.ClassTag (to capture erasures), scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") + // @deprecated("use scala.reflect.ClassTag (to capture erasures), scala.reflect.runtime.universe.TypeTag (to capture types) or both instead", "2.10.0") val Manifest = ManifestFactory def classTag[T](implicit ctag: ClassTag[T]) = ctag diff --git a/src/library/scala/runtime/RichException.scala b/src/library/scala/runtime/RichException.scala index f01788a4e918..2863fb6d7ce2 100644 --- a/src/library/scala/runtime/RichException.scala +++ b/src/library/scala/runtime/RichException.scala @@ -11,7 +11,7 @@ package runtime import scala.compat.Platform.EOL -@deprecated("Use Throwable#getStackTrace", "2.11.0") +@deprecated("use Throwable#getStackTrace", "2.11.0") final class RichException(exc: Throwable) { def getStackTraceString = exc.getStackTrace().mkString("", EOL, EOL) } diff --git a/src/library/scala/runtime/RichInt.scala b/src/library/scala/runtime/RichInt.scala index cda9d2907a6e..37d236dfe90a 100644 --- a/src/library/scala/runtime/RichInt.scala +++ b/src/library/scala/runtime/RichInt.scala @@ -36,9 +36,9 @@ final class RichInt(val self: Int) extends AnyVal with ScalaNumberProxy[Int] wit override def max(that: Int): Int = math.max(self, that) override def min(that: Int): Int = math.min(self, that) override def signum: Int = math.signum(self) - + /** There is no reason to round an `Int`, but this method is provided to avoid accidental loss of precision from a detour through `Float`. */ - @deprecated("This is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value?", "2.11.0") + @deprecated("this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value?", "2.11.0") def round: Int = self def toBinaryString: String = java.lang.Integer.toBinaryString(self) diff --git a/src/library/scala/runtime/RichLong.scala b/src/library/scala/runtime/RichLong.scala index b405fcda3d5e..233ce231b4c8 100644 --- a/src/library/scala/runtime/RichLong.scala +++ b/src/library/scala/runtime/RichLong.scala @@ -32,9 +32,9 @@ final class RichLong(val self: Long) extends AnyVal with IntegralProxy[Long] { override def max(that: Long): Long = math.max(self, that) override def min(that: Long): Long = math.min(self, that) override def signum: Int = math.signum(self).toInt - + /** There is no reason to round a `Long`, but this method is provided to avoid accidental conversion to `Int` through `Float`. */ - @deprecated("This is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value?", "2.11.0") + @deprecated("this is an integer type; there is no reason to round it. 
Perhaps you meant to call this on a floating-point value?", "2.11.0") def round: Long = self def toBinaryString: String = java.lang.Long.toBinaryString(self) diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala index 293bf950dbbc..7751bf815c8a 100644 --- a/src/library/scala/runtime/SeqCharSequence.scala +++ b/src/library/scala/runtime/SeqCharSequence.scala @@ -9,7 +9,7 @@ package scala package runtime -@deprecated("Use Predef.SeqCharSequence", "2.11.0") +@deprecated("use Predef.SeqCharSequence", "2.11.0") final class SeqCharSequence(val xs: scala.collection.IndexedSeq[Char]) extends CharSequence { def length: Int = xs.length def charAt(index: Int): Char = xs(index) diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala index d5b51a6e9210..37f077bcadfa 100644 --- a/src/library/scala/runtime/StringAdd.scala +++ b/src/library/scala/runtime/StringAdd.scala @@ -11,7 +11,7 @@ package runtime /** A wrapper class that adds string concatenation `+` to any value */ -@deprecated("Use Predef.StringAdd", "2.11.0") +@deprecated("use Predef.StringAdd", "2.11.0") final class StringAdd(val self: Any) extends AnyVal { def +(other: String) = String.valueOf(self) + other } diff --git a/src/library/scala/runtime/StringFormat.scala b/src/library/scala/runtime/StringFormat.scala index de32ac7e86fe..5376c3f98201 100644 --- a/src/library/scala/runtime/StringFormat.scala +++ b/src/library/scala/runtime/StringFormat.scala @@ -12,7 +12,7 @@ package runtime /** A wrapper class that adds a `formatted` operation to any value */ -@deprecated("Use Predef.StringFormat", "2.11.0") +@deprecated("use Predef.StringFormat", "2.11.0") final class StringFormat(val self: Any) extends AnyVal { /** Returns string formatted according to given `format` string. * Format strings are as for `String.format` diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala index ebe94651f984..e5606f3c3b74 100644 --- a/src/library/scala/sys/SystemProperties.scala +++ b/src/library/scala/sys/SystemProperties.scala @@ -87,7 +87,7 @@ object SystemProperties { lazy val preferIPv4Stack: BooleanProp = BooleanProp.keyExists(PreferIPv4StackKey) lazy val preferIPv6Addresses: BooleanProp = BooleanProp.keyExists(PreferIPv6AddressesKey) lazy val noTraceSuppression: BooleanProp = BooleanProp.valueIsTrue(NoTraceSuppressionKey) - @deprecated("Use noTraceSuppression", "2.12.0") + @deprecated("use noTraceSuppression", "2.12.0") def noTraceSupression = noTraceSuppression } diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala index ac864950011d..35f3f4d7a5f4 100644 --- a/src/library/scala/sys/process/ProcessBuilder.scala +++ b/src/library/scala/sys/process/ProcessBuilder.scala @@ -172,9 +172,9 @@ trait ProcessBuilder extends Source with Sink { * and then throw an exception. */ def lineStream: Stream[String] - + /** Deprecated (renamed). Use `lineStream` instead. */ - @deprecated("Use lineStream instead.", "2.11.0") + @deprecated("use lineStream instead", "2.11.0") def lines: Stream[String] = lineStream /** Starts the process represented by this builder. The output is returned as @@ -184,9 +184,9 @@ trait ProcessBuilder extends Source with Sink { * to termination and then throw an exception. */ def lineStream(log: ProcessLogger): Stream[String] - + /** Deprecated (renamed). Use `lineStream(log: ProcessLogger)` instead. 
*/ - @deprecated("Use stream instead.", "2.11.0") + @deprecated("use stream instead", "2.11.0") def lines(log: ProcessLogger): Stream[String] = lineStream(log) /** Starts the process represented by this builder. The output is returned as @@ -196,9 +196,9 @@ trait ProcessBuilder extends Source with Sink { * but will not throw an exception. */ def lineStream_! : Stream[String] - + /** Deprecated (renamed). Use `lineStream_!` instead. */ - @deprecated("Use lineStream_! instead.", "2.11.0") + @deprecated("use lineStream_! instead", "2.11.0") def lines_! : Stream[String] = lineStream_! /** Starts the process represented by this builder. The output is returned as @@ -208,9 +208,9 @@ trait ProcessBuilder extends Source with Sink { * to termination but will not throw an exception. */ def lineStream_!(log: ProcessLogger): Stream[String] - + /** Deprecated (renamed). Use `lineStream_!(log: ProcessLogger)` instead. */ - @deprecated("Use stream_! instead.", "2.11.0") + @deprecated("use stream_! instead", "2.11.0") def lines_!(log: ProcessLogger): Stream[String] = lineStream_!(log) /** Starts the process represented by this builder, blocks until it exits, and diff --git a/src/library/scala/text/Document.scala b/src/library/scala/text/Document.scala index aa55ac4f0f14..0c747c99a83d 100644 --- a/src/library/scala/text/Document.scala +++ b/src/library/scala/text/Document.scala @@ -10,17 +10,17 @@ package scala.text import java.io.Writer -@deprecated("This object will be removed.", "2.11.0") +@deprecated("this object will be removed", "2.11.0") case object DocNil extends Document -@deprecated("This object will be removed.", "2.11.0") +@deprecated("this object will be removed", "2.11.0") case object DocBreak extends Document -@deprecated("This class will be removed.", "2.11.0") +@deprecated("this class will be removed", "2.11.0") case class DocText(txt: String) extends Document -@deprecated("This class will be removed.", "2.11.0") +@deprecated("this class will be removed", "2.11.0") case class DocGroup(doc: Document) extends Document -@deprecated("This class will be removed.", "2.11.0") +@deprecated("this class will be removed", "2.11.0") case class DocNest(indent: Int, doc: Document) extends Document -@deprecated("This class will be removed.", "2.11.0") +@deprecated("this class will be removed", "2.11.0") case class DocCons(hd: Document, tl: Document) extends Document /** @@ -30,7 +30,7 @@ case class DocCons(hd: Document, tl: Document) extends Document * @author Michel Schinz * @version 1.0 */ -@deprecated("This class will be removed.", "2.11.0") +@deprecated("this class will be removed", "2.11.0") abstract class Document { def ::(hd: Document): Document = DocCons(hd, this) def ::(hd: String): Document = DocCons(DocText(hd), this) @@ -103,7 +103,7 @@ abstract class Document { } } -@deprecated("This object will be removed.", "2.11.0") +@deprecated("this object will be removed", "2.11.0") object Document { /** The empty document */ def empty = DocNil diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala index 1b6db5d6aa78..013825292e99 100644 --- a/src/library/scala/util/MurmurHash.scala +++ b/src/library/scala/util/MurmurHash.scala @@ -28,7 +28,7 @@ import scala.collection.Iterator * or can take individual hash values with append. Its own hash code is * set equal to the hash code of whatever it is hashing. 
*/ -@deprecated("Use the object MurmurHash3 instead.", "2.10.0") +@deprecated("use the object MurmurHash3 instead", "2.10.0") class MurmurHash[@specialized(Int,Long,Float,Double) T](seed: Int) extends (T => Unit) { import MurmurHash._ @@ -81,7 +81,7 @@ class MurmurHash[@specialized(Int,Long,Float,Double) T](seed: Int) extends (T => * incorporate a new integer) to update the values. Only one method * needs to be called to finalize the hash. */ -@deprecated("Use the object MurmurHash3 instead.", "2.10.0") +@deprecated("use the object MurmurHash3 instead", "2.10.0") // NOTE: Used by SBT 0.13.0-M2 and below object MurmurHash { // Magic values used for MurmurHash's 32 bit hash. diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index bd55fb5d040a..c4a3f1effa4e 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -308,7 +308,7 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * @param target The string to match * @return The matches */ - @deprecated("Extracting a match result from anything but a CharSequence or Match is deprecated", "2.11.0") + @deprecated("extracting a match result from anything but a CharSequence or Match is deprecated", "2.11.0") def unapplySeq(target: Any): Option[List[String]] = target match { case s: CharSequence => val m = pattern matcher s @@ -321,16 +321,16 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends // @see UnanchoredRegex protected def runMatcher(m: Matcher) = m.matches() - /** Return all non-overlapping matches of this `Regex` in the given character + /** Return all non-overlapping matches of this `Regex` in the given character * sequence as a [[scala.util.matching.Regex.MatchIterator]], * which is a special [[scala.collection.Iterator]] that returns the * matched strings but can also be queried for more data about the last match, * such as capturing groups and start position. - * + * * A `MatchIterator` can also be converted into an iterator * that returns objects of type [[scala.util.matching.Regex.Match]], * such as is normally returned by `findAllMatchIn`. - * + * * Where potential matches overlap, the first possible match is returned, * followed by the next match that follows the input consumed by the * first match: diff --git a/src/reflect/scala/reflect/api/Annotations.scala b/src/reflect/scala/reflect/api/Annotations.scala index b880fad756fb..a7a564785984 100644 --- a/src/reflect/scala/reflect/api/Annotations.scala +++ b/src/reflect/scala/reflect/api/Annotations.scala @@ -55,10 +55,10 @@ trait Annotations { self: Universe => abstract class AnnotationExtractor { def apply(tree: Tree): Annotation = treeToAnnotation(tree) - @deprecated("Use `apply(tree: Tree): Annotation` instead", "2.11.0") + @deprecated("use `apply(tree: Tree): Annotation` instead", "2.11.0") def apply(tpe: Type, scalaArgs: List[Tree], javaArgs: ListMap[Name, JavaArgument]): Annotation - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") def unapply(ann: Annotation): Option[(Type, List[Tree], ListMap[Name, JavaArgument])] } @@ -71,19 +71,19 @@ trait Annotations { self: Universe => def tree: Tree = annotationToTree(this.asInstanceOf[Annotation]) /** The type of the annotation. 
*/ - @deprecated("Use `tree.tpe` instead", "2.11.0") + @deprecated("use `tree.tpe` instead", "2.11.0") def tpe: Type /** Payload of the Scala annotation: a list of abstract syntax trees that represent the argument. * Empty for Java annotations. */ - @deprecated("Use `tree.children.tail` instead", "2.11.0") + @deprecated("use `tree.children.tail` instead", "2.11.0") def scalaArgs: List[Tree] /** Payload of the Java annotation: a list of name-value pairs. * Empty for Scala annotations. */ - @deprecated("Use `tree.children.tail` instead", "2.11.0") + @deprecated("use `tree.children.tail` instead", "2.11.0") def javaArgs: ListMap[Name, JavaArgument] } @@ -94,37 +94,37 @@ trait Annotations { self: Universe => * @template * @group Annotations */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") type JavaArgument >: Null <: AnyRef with JavaArgumentApi /** Has no special methods. Is here to provides erased identity for `CompoundType`. * @group API */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") trait JavaArgumentApi - /** A literal argument to a Java annotation as `"Use X instead"` in `@Deprecated("Use X instead")` + /** A literal argument to a Java annotation as `"use X instead"` in `@Deprecated("use X instead")` * @template * @group Annotations */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") type LiteralArgument >: Null <: LiteralArgumentApi with JavaArgument /** The constructor/extractor for `LiteralArgument` instances. * @group Extractors */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") val LiteralArgument: LiteralArgumentExtractor /** An extractor class to create and pattern match with syntax `LiteralArgument(value)` * where `value` is the constant argument. * @group Extractors */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") abstract class LiteralArgumentExtractor { - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") def apply(value: Constant): LiteralArgument - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") def unapply(arg: LiteralArgument): Option[Constant] } @@ -132,10 +132,10 @@ trait Annotations { self: Universe => * The main source of information about annotations is the [[scala.reflect.api.Annotations]] page. * @group API */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") trait LiteralArgumentApi { /** The underlying compile-time constant value. 
*/ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") def value: Constant } @@ -143,24 +143,24 @@ trait Annotations { self: Universe => * @template * @group Annotations */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") type ArrayArgument >: Null <: ArrayArgumentApi with JavaArgument /** The constructor/extractor for `ArrayArgument` instances. * @group Extractors */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") val ArrayArgument: ArrayArgumentExtractor /** An extractor class to create and pattern match with syntax `ArrayArgument(args)` * where `args` is the argument array. * @group Extractors */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") abstract class ArrayArgumentExtractor { - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") def apply(args: Array[JavaArgument]): ArrayArgument - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") def unapply(arg: ArrayArgument): Option[Array[JavaArgument]] } @@ -168,10 +168,10 @@ trait Annotations { self: Universe => * The main source of information about annotations is the [[scala.reflect.api.Annotations]] page. * @group API */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") trait ArrayArgumentApi { /** The underlying array of Java annotation arguments. */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") def args: Array[JavaArgument] } @@ -179,24 +179,24 @@ trait Annotations { self: Universe => * @template * @group Annotations */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") type NestedArgument >: Null <: NestedArgumentApi with JavaArgument /** The constructor/extractor for `NestedArgument` instances. * @group Extractors */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") val NestedArgument: NestedArgumentExtractor /** An extractor class to create and pattern match with syntax `NestedArgument(annotation)` * where `annotation` is the nested annotation. 
* @group Extractors */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") abstract class NestedArgumentExtractor { - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") def apply(annotation: Annotation): NestedArgument - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") def unapply(arg: NestedArgument): Option[Annotation] } @@ -204,10 +204,10 @@ trait Annotations { self: Universe => * The main source of information about annotations is the [[scala.reflect.api.Annotations]] page. * @group API */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") trait NestedArgumentApi { /** The underlying nested annotation. */ - @deprecated("Use `Annotation.tree` to inspect annotation arguments", "2.11.0") + @deprecated("use `Annotation.tree` to inspect annotation arguments", "2.11.0") def annotation: Annotation } } diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala index 2d5d1d5d6bc0..14852c0231ce 100644 --- a/src/reflect/scala/reflect/api/FlagSets.scala +++ b/src/reflect/scala/reflect/api/FlagSets.scala @@ -173,7 +173,7 @@ trait FlagSets { self: Universe => * - the enum's class * - enum constants **/ - @deprecated("Use `isJavaEnum` on the corresponding symbol instead.", since = "2.11.8") + @deprecated("use `isJavaEnum` on the corresponding symbol instead", since = "2.11.8") val ENUM: FlagSet /** Flag indicating that tree represents a parameter of the primary constructor of some class diff --git a/src/reflect/scala/reflect/api/Internals.scala b/src/reflect/scala/reflect/api/Internals.scala index 1457fdc13302..9b7112f01178 100644 --- a/src/reflect/scala/reflect/api/Internals.scala +++ b/src/reflect/scala/reflect/api/Internals.scala @@ -841,10 +841,10 @@ trait Internals { self: Universe => } } - @deprecated("Use `internal.reificationSupport` instead", "2.11.0") + @deprecated("use `internal.reificationSupport` instead", "2.11.0") val build: ReificationSupportApi - @deprecated("Use `internal.ReificationSupportApi` instead", "2.11.0") + @deprecated("use `internal.ReificationSupportApi` instead", "2.11.0") type BuildApi = ReificationSupportApi /** This trait provides support for importers, a facility to migrate reflection artifacts between universes. @@ -934,7 +934,7 @@ trait Internals { self: Universe => def importPosition(pos: from.Position): Position } - @deprecated("Use `internal.createImporter` instead", "2.11.0") + @deprecated("use `internal.createImporter` instead", "2.11.0") def mkImporter(from0: Universe): Importer { val from: from0.type } = internal.createImporter(from0) /** Marks underlying reference to id as boxed. 
@@ -1078,71 +1078,71 @@ trait Internals { self: Universe => implicit val token = new CompatToken /** @see [[InternalApi.typeTagToManifest]] */ - @deprecated("Use `internal.typeTagToManifest` instead", "2.11.0") + @deprecated("use `internal.typeTagToManifest` instead", "2.11.0") def typeTagToManifest[T: ClassTag](mirror: Any, tag: Universe#TypeTag[T]): Manifest[T] = internal.typeTagToManifest(mirror, tag) /** @see [[InternalApi.manifestToTypeTag]] */ - @deprecated("Use `internal.manifestToTypeTag` instead", "2.11.0") + @deprecated("use `internal.manifestToTypeTag` instead", "2.11.0") def manifestToTypeTag[T](mirror: Any, manifest: Manifest[T]): Universe#TypeTag[T] = internal.manifestToTypeTag(mirror, manifest) /** @see [[InternalApi.newScopeWith]] */ - @deprecated("Use `internal.newScopeWith` instead", "2.11.0") + @deprecated("use `internal.newScopeWith` instead", "2.11.0") def newScopeWith(elems: Symbol*): Scope = internal.newScopeWith(elems: _*) /** Scala 2.10 compatibility enrichments for BuildApi. */ implicit class CompatibleBuildApi(api: BuildApi) { /** @see [[BuildApi.setInfo]] */ - @deprecated("Use `internal.reificationSupport.setInfo` instead", "2.11.0") + @deprecated("use `internal.reificationSupport.setInfo` instead", "2.11.0") def setTypeSignature[S <: Symbol](sym: S, tpe: Type): S = internal.reificationSupport.setInfo(sym, tpe) /** @see [[BuildApi.FlagsRepr]] */ - @deprecated("Use `internal.reificationSupport.FlagsRepr` instead", "2.11.0") + @deprecated("use `internal.reificationSupport.FlagsRepr` instead", "2.11.0") def flagsFromBits(bits: Long): FlagSet = internal.reificationSupport.FlagsRepr(bits) /** @see [[BuildApi.noSelfType]] */ - @deprecated("Use `noSelfType` instead", "2.11.0") + @deprecated("use `noSelfType` instead", "2.11.0") def emptyValDef: ValDef = noSelfType /** @see [[BuildApi.mkThis]] */ - @deprecated("Use `internal.reificationSupport.mkThis` instead", "2.11.0") + @deprecated("use `internal.reificationSupport.mkThis` instead", "2.11.0") def This(sym: Symbol): Tree = internal.reificationSupport.mkThis(sym) /** @see [[BuildApi.mkSelect]] */ - @deprecated("Use `internal.reificationSupport.mkSelect` instead", "2.11.0") + @deprecated("use `internal.reificationSupport.mkSelect` instead", "2.11.0") def Select(qualifier: Tree, sym: Symbol): Select = internal.reificationSupport.mkSelect(qualifier, sym) /** @see [[BuildApi.mkIdent]] */ - @deprecated("Use `internal.reificationSupport.mkIdent` instead", "2.11.0") + @deprecated("use `internal.reificationSupport.mkIdent` instead", "2.11.0") def Ident(sym: Symbol): Ident = internal.reificationSupport.mkIdent(sym) /** @see [[BuildApi.mkTypeTree]] */ - @deprecated("Use `internal.reificationSupport.mkTypeTree` instead", "2.11.0") + @deprecated("use `internal.reificationSupport.mkTypeTree` instead", "2.11.0") def TypeTree(tp: Type): TypeTree = internal.reificationSupport.mkTypeTree(tp) } /** Scala 2.10 compatibility enrichments for Tree. 
*/ implicit class CompatibleTree(tree: Tree) { /** @see [[InternalApi.freeTerms]] */ - @deprecated("Use `internal.freeTerms` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.freeTerms` instead or import `internal.decorators._` for infix syntax", "2.11.0") def freeTerms: List[FreeTermSymbol] = internal.freeTerms(tree) /** @see [[InternalApi.freeTypes]] */ - @deprecated("Use `internal.freeTerms` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.freeTerms` instead or import `internal.decorators._` for infix syntax", "2.11.0") def freeTypes: List[FreeTypeSymbol] = internal.freeTypes(tree) /** @see [[InternalApi.substituteSymbols]] */ - @deprecated("Use `internal.substituteSymbols` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.substituteSymbols` instead or import `internal.decorators._` for infix syntax", "2.11.0") def substituteSymbols(from: List[Symbol], to: List[Symbol]): Tree = internal.substituteSymbols(tree, from, to) /** @see [[InternalApi.substituteTypes]] */ - @deprecated("Use `internal.substituteTypes` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.substituteTypes` instead or import `internal.decorators._` for infix syntax", "2.11.0") def substituteTypes(from: List[Symbol], to: List[Type]): Tree = internal.substituteTypes(tree, from, to) /** @see [[InternalApi.substituteThis]] */ - @deprecated("Use `internal.substituteThis` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.substituteThis` instead or import `internal.decorators._` for infix syntax", "2.11.0") def substituteThis(clazz: Symbol, to: Tree): Tree = internal.substituteThis(tree, clazz, to) } @@ -1155,84 +1155,84 @@ trait Internals { self: Universe => def isOverride: Boolean = symbol.asInstanceOf[scala.reflect.internal.Symbols#Symbol].isOverride /** @see [[InternalApi.isFreeTerm]] */ - @deprecated("Use `internal.isFreeTerm` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.isFreeTerm` instead or import `internal.decorators._` for infix syntax", "2.11.0") def isFreeTerm: Boolean = internal.isFreeTerm(symbol) /** @see [[InternalApi.asFreeTerm]] */ - @deprecated("Use `internal.asFreeTerm` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.asFreeTerm` instead or import `internal.decorators._` for infix syntax", "2.11.0") def asFreeTerm: FreeTermSymbol = internal.asFreeTerm(symbol) /** @see [[InternalApi.isFreeType]] */ - @deprecated("Use `internal.isFreeType` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.isFreeType` instead or import `internal.decorators._` for infix syntax", "2.11.0") def isFreeType: Boolean = internal.isFreeType(symbol) /** @see [[InternalApi.asFreeType]] */ - @deprecated("Use `internal.asFreeType` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.asFreeType` instead or import `internal.decorators._` for infix syntax", "2.11.0") def asFreeType: FreeTypeSymbol = internal.asFreeType(symbol) /** @see [[InternalApi.asFreeType]] */ - @deprecated("Use `internal.newTermSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.newTermSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") def 
newTermSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TermSymbol = internal.newTermSymbol(symbol, name, pos, flags) /** @see [[InternalApi.asFreeType]] */ - @deprecated("Use `internal.newModuleAndClassSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.newModuleAndClassSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") def newModuleAndClassSymbol(name: Name, pos: Position = NoPosition, flags: FlagSet = NoFlags): (ModuleSymbol, ClassSymbol) = internal.newModuleAndClassSymbol(symbol, name, pos, flags) /** @see [[InternalApi.asFreeType]] */ - @deprecated("Use `internal.newMethodSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.newMethodSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") def newMethodSymbol(name: TermName, pos: Position = NoPosition, flags: FlagSet = NoFlags): MethodSymbol = internal.newMethodSymbol(symbol, name, pos, flags) /** @see [[InternalApi.asFreeType]] */ - @deprecated("Use `internal.newTypeSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.newTypeSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") def newTypeSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): TypeSymbol = internal.newTypeSymbol(symbol, name, pos, flags) /** @see [[InternalApi.asFreeType]] */ - @deprecated("Use `internal.newClassSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.newClassSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") def newClassSymbol(name: TypeName, pos: Position = NoPosition, flags: FlagSet = NoFlags): ClassSymbol = internal.newClassSymbol(symbol, name, pos, flags) /** @see [[InternalApi.asFreeType]] */ - @deprecated("Use `internal.isErroneous` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.isErroneous` instead or import `internal.decorators._` for infix syntax", "2.11.0") def isErroneous: Boolean = internal.isErroneous(symbol) /** @see [[InternalApi.asFreeType]] */ - @deprecated("Use `internal.isSkolem` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.isSkolem` instead or import `internal.decorators._` for infix syntax", "2.11.0") def isSkolem: Boolean = internal.isSkolem(symbol) /** @see [[InternalApi.asFreeType]] */ - @deprecated("Use `internal.deSkolemize` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.deSkolemize` instead or import `internal.decorators._` for infix syntax", "2.11.0") def deSkolemize: Symbol = internal.deSkolemize(symbol) } /** @see [[InternalApi.singleType]] */ - @deprecated("Use `internal.singleType` instead", "2.11.0") + @deprecated("use `internal.singleType` instead", "2.11.0") def singleType(pre: Type, sym: Symbol): Type = internal.singleType(pre, sym) /** @see [[InternalApi.refinedType]] */ - @deprecated("Use `internal.refinedType` instead", "2.11.0") + @deprecated("use `internal.refinedType` instead", "2.11.0") def refinedType(parents: List[Type], owner: Symbol, decls: Scope, pos: Position): Type = internal.refinedType(parents, owner, decls, pos) /** @see [[InternalApi.refinedType]] */ - @deprecated("Use `internal.refinedType` instead", "2.11.0") + @deprecated("use `internal.refinedType` instead", "2.11.0") def 
refinedType(parents: List[Type], owner: Symbol): Type = internal.refinedType(parents, owner) /** @see [[InternalApi.typeRef]] */ - @deprecated("Use `internal.typeRef` instead", "2.11.0") + @deprecated("use `internal.typeRef` instead", "2.11.0") def typeRef(pre: Type, sym: Symbol, args: List[Type]): Type = internal.typeRef(pre, sym, args) /** @see [[InternalApi.intersectionType]] */ - @deprecated("Use `internal.intersectionType` instead", "2.11.0") + @deprecated("use `internal.intersectionType` instead", "2.11.0") def intersectionType(tps: List[Type]): Type = internal.intersectionType(tps) /** @see [[InternalApi.intersectionType]] */ - @deprecated("Use `internal.intersectionType` instead", "2.11.0") + @deprecated("use `internal.intersectionType` instead", "2.11.0") def intersectionType(tps: List[Type], owner: Symbol): Type = internal.intersectionType(tps, owner) /** @see [[InternalApi.polyType]] */ - @deprecated("Use `internal.polyType` instead", "2.11.0") + @deprecated("use `internal.polyType` instead", "2.11.0") def polyType(tparams: List[Symbol], tpe: Type): Type = internal.polyType(tparams, tpe) /** @see [[InternalApi.existentialAbstraction]] */ - @deprecated("Use `internal.existentialAbstraction` instead", "2.11.0") + @deprecated("use `internal.existentialAbstraction` instead", "2.11.0") def existentialAbstraction(tparams: List[Symbol], tpe0: Type): Type = internal.existentialAbstraction(tparams, tpe0) } } diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala index cc0122528707..35009d7f591a 100644 --- a/src/reflect/scala/reflect/api/Names.scala +++ b/src/reflect/scala/reflect/api/Names.scala @@ -33,14 +33,14 @@ trait Names { * Enables an alternative notation `"map": TermName` as opposed to `TermName("map")`. * @group Names */ - @deprecated("Use explicit `TermName(s)` instead", "2.11.0") + @deprecated("use explicit `TermName(s)` instead", "2.11.0") implicit def stringToTermName(s: String): TermName = TermName(s) /** An implicit conversion from String to TypeName. * Enables an alternative notation `"List": TypeName` as opposed to `TypeName("List")`. * @group Names */ - @deprecated("Use explicit `TypeName(s)` instead", "2.11.0") + @deprecated("use explicit `TypeName(s)` instead", "2.11.0") implicit def stringToTypeName(s: String): TypeName = TypeName(s) /** The abstract type of names. @@ -87,13 +87,13 @@ trait Names { /** Replaces all occurrences of \$op_names in this name by corresponding operator symbols. * Example: `foo_\$plus\$eq` becomes `foo_+=` */ - @deprecated("Use `decodedName.toString` instead", "2.11.0") + @deprecated("use `decodedName.toString` instead", "2.11.0") def decoded: String /** Replaces all occurrences of operator symbols in this name by corresponding \$op_names. * Example: `foo_+=` becomes `foo_\$plus\$eq`. */ - @deprecated("Use `encodedName.toString` instead", "2.11.0") + @deprecated("use `encodedName.toString` instead", "2.11.0") def encoded: String /** The decoded name, still represented as a name. @@ -108,13 +108,13 @@ trait Names { /** Create a new term name. * @group Names */ - @deprecated("Use TermName instead", "2.11.0") + @deprecated("use TermName instead", "2.11.0") def newTermName(s: String): TermName /** Creates a new type name. * @group Names */ - @deprecated("Use TypeName instead", "2.11.0") + @deprecated("use TypeName instead", "2.11.0") def newTypeName(s: String): TypeName /** The constructor/extractor for `TermName` instances. 
diff --git a/src/reflect/scala/reflect/api/Position.scala b/src/reflect/scala/reflect/api/Position.scala index 9d1b7c3812a0..2e02d4a26f2f 100644 --- a/src/reflect/scala/reflect/api/Position.scala +++ b/src/reflect/scala/reflect/api/Position.scala @@ -130,78 +130,78 @@ trait Position extends Attachments { * If isDefined is true, offset and source are both defined. * @group Common */ - @deprecated("Removed from the public API", "2.11.0") def isDefined: Boolean + @deprecated("removed from the public API", "2.11.0") def isDefined: Boolean /** The point (where the ^ is) of the position, or else `default` if undefined. * @group Common */ - @deprecated("Removed from the public API", "2.11.0") def pointOrElse(default: Int): Int + @deprecated("removed from the public API", "2.11.0") def pointOrElse(default: Int): Int /** The start of the position's range, or point if not a range position. */ - @deprecated("Removed from the public API", "2.11.0") def startOrPoint: Int + @deprecated("removed from the public API", "2.11.0") def startOrPoint: Int /** The end of the position's range, or point if not a range position. */ - @deprecated("Removed from the public API", "2.11.0") def endOrPoint: Int + @deprecated("removed from the public API", "2.11.0") def endOrPoint: Int /** If this is a range, the union with the other range, with the point of this position. * Otherwise, this position */ - @deprecated("Removed from the public API", "2.11.0") def union(pos: Pos): Pos + @deprecated("removed from the public API", "2.11.0") def union(pos: Pos): Pos /** If this is a range position, the offset position of its start. * Otherwise the position itself */ - @deprecated("Removed from the public API", "2.11.0") def focusStart: Pos + @deprecated("removed from the public API", "2.11.0") def focusStart: Pos /** If this is a range position, the offset position of its end. * Otherwise the position itself */ - @deprecated("Removed from the public API", "2.11.0") def focusEnd: Pos + @deprecated("removed from the public API", "2.11.0") def focusEnd: Pos /** Does this position include the given position `pos`? * This holds if `this` is a range position and its range [start..end] * is the same or covers the range of the given position, which may or may not be a range position. */ - @deprecated("Removed from the public API", "2.11.0") def includes(pos: Pos): Boolean + @deprecated("removed from the public API", "2.11.0") def includes(pos: Pos): Boolean /** Does this position properly include the given position `pos` ("properly" meaning their * ranges are not the same)? */ - @deprecated("Removed from the public API", "2.11.0") def properlyIncludes(pos: Pos): Boolean + @deprecated("removed from the public API", "2.11.0") def properlyIncludes(pos: Pos): Boolean /** Does this position precede that position? * This holds if both positions are defined and the end point of this position * is not larger than the start point of the given position. */ - @deprecated("Removed from the public API", "2.11.0") def precedes(pos: Pos): Boolean + @deprecated("removed from the public API", "2.11.0") def precedes(pos: Pos): Boolean /** Does this position properly precede the given position `pos` ("properly" meaning their ranges * do not share a common point). */ - @deprecated("Removed from the public API", "2.11.0") def properlyPrecedes(pos: Pos): Boolean + @deprecated("removed from the public API", "2.11.0") def properlyPrecedes(pos: Pos): Boolean /** Does this position overlap with that position? 
* This holds if both positions are ranges and there is an interval of * non-zero length that is shared by both position ranges. */ - @deprecated("Removed from the public API", "2.11.0") def overlaps(pos: Pos): Boolean + @deprecated("removed from the public API", "2.11.0") def overlaps(pos: Pos): Boolean /** Does this position cover the same range as that position? * Holds only if both position are ranges */ - @deprecated("Removed from the public API", "2.11.0") def sameRange(pos: Pos): Boolean + @deprecated("removed from the public API", "2.11.0") def sameRange(pos: Pos): Boolean /** Convert this to a position around `point` that spans a single source line */ - @deprecated("Removed from the public API", "2.11.0") def toSingleLine: Pos + @deprecated("removed from the public API", "2.11.0") def toSingleLine: Pos /** The content of the line this Position refers to. * @group Common */ - @deprecated("Removed from the public API", "2.11.0") def lineContent: String + @deprecated("removed from the public API", "2.11.0") def lineContent: String /** Show a textual representation of the position. */ - @deprecated("Use `universe.show(position)` instead", "2.11.0") def show: String + @deprecated("use `universe.show(position)` instead", "2.11.0") def show: String } diff --git a/src/reflect/scala/reflect/api/StandardNames.scala b/src/reflect/scala/reflect/api/StandardNames.scala index 19bdfcae593b..38667ae15300 100644 --- a/src/reflect/scala/reflect/api/StandardNames.scala +++ b/src/reflect/scala/reflect/api/StandardNames.scala @@ -29,7 +29,7 @@ trait StandardNames { self: Universe => /** @see [[termNames]] */ - @deprecated("Use `termNames` instead", "2.11.0") + @deprecated("use `termNames` instead", "2.11.0") val nme: TermNamesApi /** A value containing all [[TermNamesApi standard term names]]. @@ -38,7 +38,7 @@ trait StandardNames { val termNames: TermNamesApi /** @see [[typeNames]] */ - @deprecated("Use `typeNames` instead", "2.11.0") + @deprecated("use `typeNames` instead", "2.11.0") val tpnme: TypeNamesApi /** A value containing all [[TypeNamesApi standard type names]]. diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala index b9fb323a4ce7..79bf9e969ca9 100644 --- a/src/reflect/scala/reflect/api/Symbols.scala +++ b/src/reflect/scala/reflect/api/Symbols.scala @@ -280,7 +280,7 @@ trait Symbols { self: Universe => * * @group Basics */ - @deprecated("Use `pos.source.file` instead", "2.11.0") + @deprecated("use `pos.source.file` instead", "2.11.0") def associatedFile: scala.reflect.io.AbstractFile /** A list of annotations attached to this Symbol. @@ -298,7 +298,7 @@ trait Symbols { self: Universe => * * @group Basics */ - @deprecated("Use `companion` instead, but beware of possible changes in behavior", "2.11.0") + @deprecated("use `companion` instead, but beware of possible changes in behavior", "2.11.0") def companionSymbol: Symbol /** For a class: its companion object if exists. @@ -333,7 +333,7 @@ trait Symbols { self: Universe => def info: Type /** @see [[overrides]] */ - @deprecated("Use `overrides` instead", "2.11.0") + @deprecated("use `overrides` instead", "2.11.0") def allOverriddenSymbols: List[Symbol] /** Returns all symbols overridden by this symbol. @@ -726,7 +726,7 @@ trait Symbols { self: Universe => * * @group Type */ - @deprecated("Use isAbstract instead", "2.11.0") + @deprecated("use isAbstract instead", "2.11.0") def isAbstractType : Boolean /** Does this symbol represent an existentially bound type? 
@@ -767,7 +767,7 @@ trait Symbols { self: Universe => /** @see [[paramLists]] * * The name ending with "ss" indicates that the result type is a list of lists. */ - @deprecated("Use `paramLists` instead", "2.11.0") + @deprecated("use `paramLists` instead", "2.11.0") def paramss: List[List[Symbol]] /** All parameter lists of the method. @@ -864,7 +864,7 @@ trait Symbols { self: Universe => * * @group Class */ - @deprecated("Use isAbstract instead", "2.11.0") + @deprecated("use isAbstract instead", "2.11.0") def isAbstractClass: Boolean /** Does this symbol represent a case class? diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala index a43195d9b630..a2d11cc60e27 100644 --- a/src/reflect/scala/reflect/api/Trees.scala +++ b/src/reflect/scala/reflect/api/Trees.scala @@ -388,7 +388,7 @@ trait Trees { self: Universe => def unapply(classDef: ClassDef): Option[(Modifiers, TypeName, List[TypeDef], Template)] /** @see [[InternalApi.classDef]] */ - @deprecated("Use `internal.classDef` instead", "2.11.0") + @deprecated("use `internal.classDef` instead", "2.11.0") def apply(sym: Symbol, impl: Template)(implicit token: CompatToken): ClassDef = internal.classDef(sym, impl) } @@ -437,7 +437,7 @@ trait Trees { self: Universe => def unapply(moduleDef: ModuleDef): Option[(Modifiers, TermName, Template)] /** @see [[InternalApi.moduleDef]] */ - @deprecated("Use `internal.moduleDef` instead", "2.11.0") + @deprecated("use `internal.moduleDef` instead", "2.11.0") def apply(sym: Symbol, impl: Template)(implicit token: CompatToken): ModuleDef = internal.moduleDef(sym, impl) } @@ -517,11 +517,11 @@ trait Trees { self: Universe => def unapply(valDef: ValDef): Option[(Modifiers, TermName, Tree, Tree)] /** @see [[InternalApi.valDef]] */ - @deprecated("Use `internal.valDef` instead", "2.11.0") + @deprecated("use `internal.valDef` instead", "2.11.0") def apply(sym: Symbol, rhs: Tree)(implicit token: CompatToken): ValDef = internal.valDef(sym, rhs) /** @see [[InternalApi.valDef]] */ - @deprecated("Use `internal.valDef` instead", "2.11.0") + @deprecated("use `internal.valDef` instead", "2.11.0") def apply(sym: Symbol)(implicit token: CompatToken): ValDef = internal.valDef(sym) } @@ -568,23 +568,23 @@ trait Trees { self: Universe => def unapply(defDef: DefDef): Option[(Modifiers, TermName, List[TypeDef], List[List[ValDef]], Tree, Tree)] /** @see [[InternalApi.defDef]] */ - @deprecated("Use `internal.defDef` instead", "2.11.0") + @deprecated("use `internal.defDef` instead", "2.11.0") def apply(sym: Symbol, mods: Modifiers, vparamss: List[List[ValDef]], rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, mods, vparamss, rhs) /** @see [[InternalApi.defDef]] */ - @deprecated("Use `internal.defDef` instead", "2.11.0") + @deprecated("use `internal.defDef` instead", "2.11.0") def apply(sym: Symbol, vparamss: List[List[ValDef]], rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, vparamss, rhs) /** @see [[InternalApi.defDef]] */ - @deprecated("Use `internal.defDef` instead", "2.11.0") + @deprecated("use `internal.defDef` instead", "2.11.0") def apply(sym: Symbol, mods: Modifiers, rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, mods, rhs) /** @see [[InternalApi.defDef]] */ - @deprecated("Use `internal.defDef` instead", "2.11.0") + @deprecated("use `internal.defDef` instead", "2.11.0") def apply(sym: Symbol, rhs: Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, rhs) /** @see [[InternalApi.defDef]] */ - 
@deprecated("Use `internal.defDef` instead", "2.11.0") + @deprecated("use `internal.defDef` instead", "2.11.0") def apply(sym: Symbol, rhs: List[List[Symbol]] => Tree)(implicit token: CompatToken): DefDef = internal.defDef(sym, rhs) } @@ -640,11 +640,11 @@ trait Trees { self: Universe => def unapply(typeDef: TypeDef): Option[(Modifiers, TypeName, List[TypeDef], Tree)] /** @see [[InternalApi.typeDef]] */ - @deprecated("Use `internal.typeDef` instead", "2.11.0") + @deprecated("use `internal.typeDef` instead", "2.11.0") def apply(sym: Symbol, rhs: Tree)(implicit token: CompatToken): TypeDef = internal.typeDef(sym, rhs) /** @see [[InternalApi.typeDef]] */ - @deprecated("Use `internal.typeDef` instead", "2.11.0") + @deprecated("use `internal.typeDef` instead", "2.11.0") def apply(sym: Symbol)(implicit token: CompatToken): TypeDef = internal.typeDef(sym) } @@ -708,7 +708,7 @@ trait Trees { self: Universe => def unapply(labelDef: LabelDef): Option[(TermName, List[Ident], Tree)] /** @see [[InternalApi.labelDef]] */ - @deprecated("Use `internal.labelDef` instead", "2.11.0") + @deprecated("use `internal.labelDef` instead", "2.11.0") def apply(sym: Symbol, params: List[Symbol], rhs: Tree)(implicit token: CompatToken): LabelDef = internal.labelDef(sym, params, rhs) } @@ -2104,7 +2104,7 @@ trait Trees { self: Universe => */ val noSelfType: ValDef - @deprecated("Use `noSelfType` instead", "2.11.0") + @deprecated("use `noSelfType` instead", "2.11.0") val emptyValDef: ValDef /** An empty superclass constructor call corresponding to: @@ -2122,68 +2122,68 @@ trait Trees { self: Universe => * Flattens directly nested blocks. * @group Factories */ - @deprecated("Use q\"{..$stats}\" instead. Flatten directly nested blocks manually if needed", "2.10.1") + @deprecated("use q\"{..$stats}\" instead. Flatten directly nested blocks manually if needed", "2.10.1") def Block(stats: Tree*): Block /** A factory method for `CaseDef` nodes. * @group Factories */ - @deprecated("Use cq\"$pat => $body\" instead", "2.10.1") + @deprecated("use cq\"$pat => $body\" instead", "2.10.1") def CaseDef(pat: Tree, body: Tree): CaseDef /** A factory method for `Bind` nodes. * @group Factories */ - @deprecated("Use the canonical Bind constructor to create a bind and then initialize its symbol manually", "2.10.1") + @deprecated("use the canonical Bind constructor to create a bind and then initialize its symbol manually", "2.10.1") def Bind(sym: Symbol, body: Tree): Bind /** A factory method for `Try` nodes. * @group Factories */ - @deprecated("Convert cases into casedefs and use q\"try $body catch { case ..$newcases }\" instead", "2.10.1") + @deprecated("convert cases into casedefs and use q\"try $body catch { case ..$newcases }\" instead", "2.10.1") def Try(body: Tree, cases: (Tree, Tree)*): Try /** A factory method for `Throw` nodes. * @group Factories */ - @deprecated("Use q\"throw new $tpe(..$args)\" instead", "2.10.1") + @deprecated("use q\"throw new $tpe(..$args)\" instead", "2.10.1") def Throw(tpe: Type, args: Tree*): Throw /** Factory method for object creation `new tpt(args_1)...(args_n)` * A `New(t, as)` is expanded to: `(new t).(as)` * @group Factories */ - @deprecated("Use q\"new $tpt(...$argss)\" instead", "2.10.1") + @deprecated("use q\"new $tpt(...$argss)\" instead", "2.10.1") def New(tpt: Tree, argss: List[List[Tree]]): Tree /** 0-1 argument list new, based on a type. 
* @group Factories */ - @deprecated("Use q\"new $tpe(..$args)\" instead", "2.10.1") + @deprecated("use q\"new $tpe(..$args)\" instead", "2.10.1") def New(tpe: Type, args: Tree*): Tree /** 0-1 argument list new, based on a symbol. * @group Factories */ - @deprecated("Use q\"new ${sym.toType}(..$args)\" instead", "2.10.1") + @deprecated("use q\"new ${sym.toType}(..$args)\" instead", "2.10.1") def New(sym: Symbol, args: Tree*): Tree /** A factory method for `Apply` nodes. * @group Factories */ - @deprecated("Use q\"$sym(..$args)\" instead", "2.10.1") + @deprecated("use q\"$sym(..$args)\" instead", "2.10.1") def Apply(sym: Symbol, args: Tree*): Tree /** 0-1 argument list new, based on a type tree. * @group Factories */ - @deprecated("Use q\"new $tpt(..$args)\" instead", "2.10.1") + @deprecated("use q\"new $tpt(..$args)\" instead", "2.10.1") def ApplyConstructor(tpt: Tree, args: List[Tree]): Tree /** A factory method for `Super` nodes. * @group Factories */ - @deprecated("Use q\"$sym.super[$mix].x\".qualifier instead", "2.10.1") + @deprecated("use q\"$sym.super[$mix].x\".qualifier instead", "2.10.1") def Super(sym: Symbol, mix: TypeName): Tree /** A factory method for `This` nodes. @@ -2195,7 +2195,7 @@ trait Trees { self: Universe => * The string `name` argument is assumed to represent a [[scala.reflect.api.Names#TermName `TermName`]]. * @group Factories */ - @deprecated("Use Select(tree, TermName(name)) instead", "2.10.1") + @deprecated("use Select(tree, TermName(name)) instead", "2.10.1") def Select(qualifier: Tree, name: String): Select /** A factory method for `Select` nodes. @@ -2206,7 +2206,7 @@ trait Trees { self: Universe => /** A factory method for `Ident` nodes. * @group Factories */ - @deprecated("Use Ident(TermName(name)) instead", "2.10.1") + @deprecated("use Ident(TermName(name)) instead", "2.10.1") def Ident(name: String): Ident /** A factory method for `Ident` nodes. @@ -2653,7 +2653,7 @@ trait Trees { self: Universe => */ val Modifiers: ModifiersExtractor - @deprecated("Use ModifiersExtractor instead", "2.11.0") + @deprecated("use ModifiersExtractor instead", "2.11.0") type ModifiersCreator = ModifiersExtractor /** An extractor class to create and pattern match with syntax `Modifiers(flags, privateWithin, annotations)`. diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala index f9b49f173071..ff61ae1901a4 100644 --- a/src/reflect/scala/reflect/api/Types.scala +++ b/src/reflect/scala/reflect/api/Types.scala @@ -95,7 +95,7 @@ trait Types { def typeSymbol: Symbol /** @see [[decl]] */ - @deprecated("Use `decl` instead", "2.11.0") + @deprecated("use `decl` instead", "2.11.0") def declaration(name: Name): Symbol /** The defined or declared members with name `name` in this type; @@ -105,7 +105,7 @@ trait Types { def decl(name: Name): Symbol /** @see [[decls]] */ - @deprecated("Use `decls` instead", "2.11.0") + @deprecated("use `decls` instead", "2.11.0") def declarations: MemberScope /** A `Scope` containing directly declared members of this type. @@ -150,7 +150,7 @@ trait Types { * TypeRef(pre, , List()) is replaced by * PolyType(X, TypeRef(pre, , List(X))) */ - @deprecated("Use `dealias` or `etaExpand` instead", "2.11.0") + @deprecated("use `dealias` or `etaExpand` instead", "2.11.0") def normalize: Type /** Converts higher-kinded TypeRefs to PolyTypes. 
@@ -263,7 +263,7 @@ trait Types { def typeArgs: List[Type] /** @see [[paramLists]] */ - @deprecated("Use `paramLists` instead", "2.11.0") + @deprecated("use `paramLists` instead", "2.11.0") def paramss: List[List[Symbol]] /** For a method or poly type, a list of its value parameter sections, @@ -430,7 +430,7 @@ trait Types { def unapply(tpe: ThisType): Option[Symbol] /** @see [[InternalApi.thisType]] */ - @deprecated("Use `internal.thisType` instead", "2.11.0") + @deprecated("use `internal.thisType` instead", "2.11.0") def apply(sym: Symbol)(implicit token: CompatToken): Type = internal.thisType(sym) } @@ -469,7 +469,7 @@ trait Types { def unapply(tpe: SingleType): Option[(Type, Symbol)] /** @see [[InternalApi.singleType]] */ - @deprecated("Use `ClassSymbol.thisPrefix` or `internal.singleType` instead", "2.11.0") + @deprecated("use `ClassSymbol.thisPrefix` or `internal.singleType` instead", "2.11.0") def apply(pre: Type, sym: Symbol)(implicit token: CompatToken): Type = internal.singleType(pre, sym) } @@ -509,7 +509,7 @@ trait Types { def unapply(tpe: SuperType): Option[(Type, Type)] /** @see [[InternalApi.superType]] */ - @deprecated("Use `ClassSymbol.superPrefix` or `internal.superType` instead", "2.11.0") + @deprecated("use `ClassSymbol.superPrefix` or `internal.superType` instead", "2.11.0") def apply(thistpe: Type, supertpe: Type)(implicit token: CompatToken): Type = internal.superType(thistpe, supertpe) } @@ -552,7 +552,7 @@ trait Types { def unapply(tpe: ConstantType): Option[Constant] /** @see [[InternalApi.constantType]] */ - @deprecated("Use `value.tpe` or `internal.constantType` instead", "2.11.0") + @deprecated("use `value.tpe` or `internal.constantType` instead", "2.11.0") def apply(value: Constant)(implicit token: CompatToken): ConstantType = internal.constantType(value) } @@ -595,7 +595,7 @@ trait Types { def unapply(tpe: TypeRef): Option[(Type, Symbol, List[Type])] /** @see [[InternalApi.typeRef]] */ - @deprecated("Use `internal.typeRef` instead", "2.11.0") + @deprecated("use `internal.typeRef` instead", "2.11.0") def apply(pre: Type, sym: Symbol, args: List[Type])(implicit token: CompatToken): Type = internal.typeRef(pre, sym, args) } @@ -655,11 +655,11 @@ trait Types { def unapply(tpe: RefinedType): Option[(List[Type], Scope)] /** @see [[InternalApi.refinedType]] */ - @deprecated("Use `internal.refinedType` instead", "2.11.0") + @deprecated("use `internal.refinedType` instead", "2.11.0") def apply(parents: List[Type], decls: Scope)(implicit token: CompatToken): RefinedType = internal.refinedType(parents, decls) /** @see [[InternalApi.refinedType]] */ - @deprecated("Use `internal.refinedType` instead", "2.11.0") + @deprecated("use `internal.refinedType` instead", "2.11.0") def apply(parents: List[Type], decls: Scope, clazz: Symbol)(implicit token: CompatToken): RefinedType = internal.refinedType(parents, decls, clazz) } @@ -704,7 +704,7 @@ trait Types { def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)] /** @see [[InternalApi.classInfoType]] */ - @deprecated("Use `internal.classInfoType` instead", "2.11.0") + @deprecated("use `internal.classInfoType` instead", "2.11.0") def apply(parents: List[Type], decls: Scope, typeSymbol: Symbol)(implicit token: CompatToken): ClassInfoType = internal.classInfoType(parents, decls, typeSymbol) } @@ -753,7 +753,7 @@ trait Types { def unapply(tpe: MethodType): Option[(List[Symbol], Type)] /** @see [[InternalApi.methodType]] */ - @deprecated("Use `internal.methodType` instead", "2.11.0") + @deprecated("use 
`internal.methodType` instead", "2.11.0") def apply(params: List[Symbol], resultType: Type)(implicit token: CompatToken): MethodType = internal.methodType(params, resultType) } @@ -789,7 +789,7 @@ trait Types { def unapply(tpe: NullaryMethodType): Option[(Type)] /** @see [[InternalApi.nullaryMethodType]] */ - @deprecated("Use `internal.nullaryMethodType` instead", "2.11.0") + @deprecated("use `internal.nullaryMethodType` instead", "2.11.0") def apply(resultType: Type)(implicit token: CompatToken): NullaryMethodType = internal.nullaryMethodType(resultType) } @@ -823,7 +823,7 @@ trait Types { def unapply(tpe: PolyType): Option[(List[Symbol], Type)] /** @see [[InternalApi.polyType]] */ - @deprecated("Use `internal.polyType` instead", "2.11.0") + @deprecated("use `internal.polyType` instead", "2.11.0") def apply(typeParams: List[Symbol], resultType: Type)(implicit token: CompatToken): PolyType = internal.polyType(typeParams, resultType) } @@ -861,7 +861,7 @@ trait Types { def unapply(tpe: ExistentialType): Option[(List[Symbol], Type)] /** @see [[InternalApi.existentialType]] */ - @deprecated("Use `internal.existentialType` instead", "2.11.0") + @deprecated("use `internal.existentialType` instead", "2.11.0") def apply(quantified: List[Symbol], underlying: Type)(implicit token: CompatToken): ExistentialType = internal.existentialType(quantified, underlying) } @@ -899,7 +899,7 @@ trait Types { def unapply(tpe: AnnotatedType): Option[(List[Annotation], Type)] /** @see [[InternalApi.annotatedType]] */ - @deprecated("Use `internal.annotatedType` instead", "2.11.0") + @deprecated("use `internal.annotatedType` instead", "2.11.0") def apply(annotations: List[Annotation], underlying: Type)(implicit token: CompatToken): AnnotatedType = internal.annotatedType(annotations, underlying) } @@ -943,7 +943,7 @@ trait Types { def unapply(tpe: TypeBounds): Option[(Type, Type)] /** @see [[InternalApi.typeBounds]] */ - @deprecated("Use `internal.typeBounds` instead", "2.11.0") + @deprecated("use `internal.typeBounds` instead", "2.11.0") def apply(lo: Type, hi: Type)(implicit token: CompatToken): TypeBounds = internal.typeBounds(lo, hi) } @@ -996,7 +996,7 @@ trait Types { def unapply(tpe: BoundedWildcardType): Option[TypeBounds] /** @see [[InternalApi.boundedWildcardType]] */ - @deprecated("Use `internal.boundedWildcardType` instead", "2.11.0") + @deprecated("use `internal.boundedWildcardType` instead", "2.11.0") def apply(bounds: TypeBounds)(implicit token: CompatToken): BoundedWildcardType = internal.boundedWildcardType(bounds) } diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala index 1ba014d19db0..9a6caff16064 100644 --- a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala +++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala @@ -45,14 +45,14 @@ trait AnnotationCheckers { * Modify the type that has thus far been inferred for a tree. All this should * do is add annotations. */ - @deprecated("Create an AnalyzerPlugin and use pluginsTyped", "2.10.1") + @deprecated("create an AnalyzerPlugin and use pluginsTyped", "2.10.1") def addAnnotations(tree: Tree, tpe: Type): Type = tpe /** * Decide whether this analyzer plugin can adapt a tree that has an annotated type to the * given type tp, taking into account the given mode (see method adapt in trait Typers). 
*/ - @deprecated("Create an AnalyzerPlugin and use canAdaptAnnotations", "2.10.1") + @deprecated("create an AnalyzerPlugin and use canAdaptAnnotations", "2.10.1") def canAdaptAnnotations(tree: Tree, mode: Mode, pt: Type): Boolean = false /** @@ -62,7 +62,7 @@ trait AnnotationCheckers { * An implementation cannot rely on canAdaptAnnotations being called before. If the implementing * class cannot do the adapting, it should return the tree unchanged. */ - @deprecated("Create an AnalyzerPlugin and use adaptAnnotations", "2.10.1") + @deprecated("create an AnalyzerPlugin and use adaptAnnotations", "2.10.1") def adaptAnnotations(tree: Tree, mode: Mode, pt: Type): Tree = tree /** diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index db8ac9b0cbe7..ca6c893d130d 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -714,12 +714,12 @@ trait Definitions extends api.StandardDefinitions { def productProj(z:Symbol, j: Int): TermSymbol = getMemberValue(z, nme.productAccessorName(j)) /** if tpe <: ProductN[T1,...,TN], returns List(T1,...,TN) else Nil */ - @deprecated("No longer used", "2.11.0") def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNSymbol match { + @deprecated("no longer used", "2.11.0") def getProductArgs(tpe: Type): List[Type] = tpe.baseClasses find isProductNSymbol match { case Some(x) => tpe.baseType(x).typeArgs case _ => Nil } - @deprecated("No longer used", "2.11.0") def unapplyUnwrap(tpe:Type) = tpe.finalResultType.dealiasWiden match { + @deprecated("no longer used", "2.11.0") def unapplyUnwrap(tpe:Type) = tpe.finalResultType.dealiasWiden match { case RefinedType(p :: _, _) => p.dealiasWiden case tp => tp } diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index f058acb7c018..230d30c74ec3 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -477,7 +477,7 @@ class Flags extends ModifierFlags { else "private[" + privateWithin + "]" ) - @deprecated("Use flagString on the flag-carrying member", "2.10.0") + @deprecated("use flagString on the flag-carrying member", "2.10.0") private[scala] def flagsToString(flags: Long, privateWithin: String): String = { val access = accessString(flags, privateWithin) val nonAccess = flagsToString(flags & ~AccessFlags) @@ -485,7 +485,7 @@ class Flags extends ModifierFlags { List(nonAccess, access) filterNot (_ == "") mkString " " } - @deprecated("Use flagString on the flag-carrying member", "2.10.0") + @deprecated("use flagString on the flag-carrying member", "2.10.0") private[scala] def flagsToString(flags: Long): String = { // Fast path for common case if (flags == 0L) "" else { diff --git a/src/reflect/scala/reflect/internal/HasFlags.scala b/src/reflect/scala/reflect/internal/HasFlags.scala index 673ea4fdefc4..e793586e1814 100644 --- a/src/reflect/scala/reflect/internal/HasFlags.scala +++ b/src/reflect/scala/reflect/internal/HasFlags.scala @@ -84,7 +84,7 @@ trait HasFlags { def hasDefault = hasFlag(DEFAULTPARAM) && hasFlag(METHOD | PARAM) // Second condition disambiguates with TRAIT def hasJavaEnumFlag = hasFlag(JAVA_ENUM) def hasJavaAnnotationFlag = hasFlag(JAVA_ANNOTATION) - @deprecated("Use isLocalToThis instead", "2.11.0") + @deprecated("use isLocalToThis instead", "2.11.0") def hasLocalFlag = hasFlag(LOCAL) def isLocalToThis = hasFlag(LOCAL) def hasModuleFlag = 
hasFlag(MODULE) @@ -109,7 +109,7 @@ trait HasFlags { def isOverride = hasFlag(OVERRIDE) def isParamAccessor = hasFlag(PARAMACCESSOR) def isPrivate = hasFlag(PRIVATE) - @deprecated ("Use `hasPackageFlag` instead", "2.11.0") + @deprecated ("use `hasPackageFlag` instead", "2.11.0") def isPackage = hasFlag(PACKAGE) def isPrivateLocal = hasAllFlags(PrivateLocal) def isProtected = hasFlag(PROTECTED) diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala index 0f0f16574ee6..756300d4036f 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -175,7 +175,7 @@ trait Mirrors extends api.Mirrors { def getPackageIfDefined(fullname: TermName): Symbol = wrapMissing(getPackage(fullname)) - @deprecated("Use getPackage", "2.11.0") def getRequiredPackage(fullname: String): ModuleSymbol = + @deprecated("use getPackage", "2.11.0") def getRequiredPackage(fullname: String): ModuleSymbol = getPackage(newTermNameCached(fullname)) def getPackageObject(fullname: String): ModuleSymbol = getPackageObject(newTermName(fullname)) diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 103f885ad4a9..a7bb127506e5 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -380,7 +380,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => if (toList forall p) this else newScopeWith(toList filter p: _*) ) - @deprecated("Use `toList.reverse` instead", "2.10.0") // Used in SBT 0.12.4 + @deprecated("use `toList.reverse` instead", "2.10.0") // Used in SBT 0.12.4 def reverse: List[Symbol] = toList.reverse override def mkString(start: String, sep: String, end: String) = diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 0ac72e7d8b4e..c93ecac3fad8 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -430,14 +430,14 @@ trait StdNames { name drop idx + 2 } - @deprecated("Use unexpandedName", "2.11.0") def originalName(name: Name): Name = unexpandedName(name) - @deprecated("Use Name#dropModule", "2.11.0") def stripModuleSuffix(name: Name): Name = name.dropModule - @deprecated("Use Name#dropLocal", "2.11.0") def localToGetter(name: TermName): TermName = name.dropLocal - @deprecated("Use Name#dropLocal", "2.11.0") def dropLocalSuffix(name: Name): TermName = name.dropLocal - @deprecated("Use Name#localName", "2.11.0") def getterToLocal(name: TermName): TermName = name.localName - @deprecated("Use Name#setterName", "2.11.0") def getterToSetter(name: TermName): TermName = name.setterName - @deprecated("Use Name#getterName", "2.11.0") def getterName(name: TermName): TermName = name.getterName - @deprecated("Use Name#getterName", "2.11.0") def setterToGetter(name: TermName): TermName = name.getterName + @deprecated("use unexpandedName", "2.11.0") def originalName(name: Name): Name = unexpandedName(name) + @deprecated("use Name#dropModule", "2.11.0") def stripModuleSuffix(name: Name): Name = name.dropModule + @deprecated("use Name#dropLocal", "2.11.0") def localToGetter(name: TermName): TermName = name.dropLocal + @deprecated("use Name#dropLocal", "2.11.0") def dropLocalSuffix(name: Name): TermName = name.dropLocal + @deprecated("use Name#localName", "2.11.0") def getterToLocal(name: TermName): TermName = name.localName + @deprecated("use Name#setterName", 
"2.11.0") def getterToSetter(name: TermName): TermName = name.setterName + @deprecated("use Name#getterName", "2.11.0") def getterName(name: TermName): TermName = name.getterName + @deprecated("use Name#getterName", "2.11.0") def setterToGetter(name: TermName): TermName = name.getterName /** * Convert `Tuple2$mcII` to `Tuple2`, or `T1$sp` to `T1`. diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 6d988479af1d..f60b4f6a8df4 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -63,7 +63,7 @@ abstract class SymbolTable extends macros.Universe def isPastTyper = false protected def isDeveloper: Boolean = settings.debug - @deprecated("Use devWarning if this is really a warning; otherwise use log", "2.11.0") + @deprecated("use devWarning if this is really a warning; otherwise use log", "2.11.0") def debugwarn(msg: => String): Unit = devWarning(msg) /** Override with final implementation for inlining. */ @@ -416,7 +416,7 @@ abstract class SymbolTable extends macros.Universe */ def isCompilerUniverse = false - @deprecated("Use enteringPhase", "2.10.0") // Used in SBT 0.12.4 + @deprecated("use enteringPhase", "2.10.0") // Used in SBT 0.12.4 @inline final def atPhase[T](ph: Phase)(op: => T): T = enteringPhase(ph)(op) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 8f24b435b3c1..49202cd71e2d 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -584,7 +584,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def isAnonymousClass = false def isCaseClass = false def isConcreteClass = false - @deprecated("Trait implementation classes have been removed in Scala 2.12", "2.12.0") + @deprecated("trait implementation classes have been removed in Scala 2.12", "2.12.0") def isImplClass = false def isJavaInterface = false def isNumericValueClass = false @@ -1002,7 +1002,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isTopLevel = owner.isPackageClass /** Is this symbol defined in a block? */ - @deprecated("Use isLocalToBlock instead", "2.11.0") + @deprecated("use isLocalToBlock instead", "2.11.0") final def isLocal: Boolean = owner.isTerm /** Is this symbol defined in a block? */ @@ -1218,7 +1218,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // ------ name attribute -------------------------------------------------------------- - @deprecated("Use unexpandedName", "2.11.0") def originalName: Name = unexpandedName + @deprecated("use unexpandedName", "2.11.0") def originalName: Name = unexpandedName /** If this symbol has an expanded name, its original (unexpanded) name, * otherwise the name itself. @@ -2374,7 +2374,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => Nil ) - @deprecated("Use `superSymbolIn` instead", "2.11.0") + @deprecated("use `superSymbolIn` instead", "2.11.0") final def superSymbol(base: Symbol): Symbol = superSymbolIn(base) /** The symbol accessed by a super in the definition of this symbol when @@ -2391,7 +2391,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => sym } - @deprecated("Use `getterIn` instead", "2.11.0") + @deprecated("use `getterIn` instead", "2.11.0") final def getter(base: Symbol): Symbol = getterIn(base) /** The getter of this value or setter definition in class `base`, or NoSymbol if none exists. 
*/ @@ -2402,7 +2402,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def setterName: TermName = name.setterName def localName: TermName = name.localName - @deprecated("Use `setterIn` instead", "2.11.0") + @deprecated("use `setterIn` instead", "2.11.0") final def setter(base: Symbol, hasExpandedName: Boolean = needsExpandedSetterName): Symbol = setterIn(base, hasExpandedName) diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index cba34aa22065..c9dfd0c337a8 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -44,7 +44,7 @@ trait Trees extends api.Trees { private[this] var rawtpe: Type = _ final def tpe = rawtpe - @deprecated("Use setType", "2.11.0") def tpe_=(t: Type): Unit = setType(t) + @deprecated("use setType", "2.11.0") def tpe_=(t: Type): Unit = setType(t) def clearType(): this.type = this setType null def setType(tp: Type): this.type = { rawtpe = tp; this } @@ -54,7 +54,7 @@ trait Trees extends api.Trees { def symbol_=(sym: Symbol) { throw new UnsupportedOperationException("symbol_= inapplicable for " + this) } def setSymbol(sym: Symbol): this.type = { symbol = sym; this } def hasSymbolField = false - @deprecated("Use hasSymbolField", "2.11.0") def hasSymbol = hasSymbolField + @deprecated("use hasSymbolField", "2.11.0") def hasSymbol = hasSymbolField def isDef = false @@ -1095,7 +1095,7 @@ trait Trees extends api.Trees { object noSelfType extends ValDef(Modifiers(PRIVATE), nme.WILDCARD, TypeTree(NoType), EmptyTree) with CannotHaveAttrs object pendingSuperCall extends Apply(Select(Super(This(tpnme.EMPTY), tpnme.EMPTY), nme.CONSTRUCTOR), List()) with CannotHaveAttrs - @deprecated("Use `noSelfType` instead", "2.11.0") lazy val emptyValDef = noSelfType + @deprecated("use `noSelfType` instead", "2.11.0") lazy val emptyValDef = noSelfType def newValDef(sym: Symbol, rhs: Tree)( mods: Modifiers = Modifiers(sym.flags), diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index a649f6f92658..895bb60a081f 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -3847,7 +3847,7 @@ trait Types case _ => false }) - @deprecated("Use isRawType", "2.10.1") // presently used by sbt + @deprecated("use isRawType", "2.10.1") // presently used by sbt def isRaw(sym: Symbol, args: List[Type]) = ( !phase.erasedTypes && args.isEmpty diff --git a/src/reflect/scala/reflect/internal/annotations/package.scala b/src/reflect/scala/reflect/internal/annotations/package.scala index ef299a600cc4..8a42f1479d96 100644 --- a/src/reflect/scala/reflect/internal/annotations/package.scala +++ b/src/reflect/scala/reflect/internal/annotations/package.scala @@ -1,6 +1,6 @@ package scala.reflect.internal package object annotations { - @deprecated("Use scala.annotation.compileTimeOnly instead", "2.11.0") + @deprecated("use scala.annotation.compileTimeOnly instead", "2.11.0") type compileTimeOnly = scala.annotation.compileTimeOnly -} \ No newline at end of file +} diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index af5128163b27..c3f92f1bce95 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -449,7 +449,7 @@ private[internal] trait TypeMaps { (pre eq NoType) || (pre eq NoPrefix) || !isPossiblePrefix(clazz) ) - @deprecated("Use new 
AsSeenFromMap instead", "2.12.0") + @deprecated("use new AsSeenFromMap instead", "2.12.0") final def newAsSeenFromMap(pre: Type, clazz: Symbol): AsSeenFromMap = new AsSeenFromMap(pre, clazz) /** A map to compute the asSeenFrom method. diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala index 0192d318069a..e0f681d910b6 100644 --- a/src/reflect/scala/reflect/internal/util/Position.scala +++ b/src/reflect/scala/reflect/internal/util/Position.scala @@ -240,12 +240,12 @@ private[util] trait DeprecatedPosition { @deprecated("use `lineCaret`", since="2.11.0") def lineWithCarat(maxWidth: Int): (String, String) = ("", "") - @deprecated("Use `withSource(source)` and `withShift`", "2.11.0") + @deprecated("use `withSource(source)` and `withShift`", "2.11.0") def withSource(source: SourceFile, shift: Int): Position = this withSource source withShift shift - @deprecated("Use `start` instead", "2.11.0") + @deprecated("use `start` instead", "2.11.0") def startOrPoint: Int = if (isRange) start else point - @deprecated("Use `end` instead", "2.11.0") + @deprecated("use `end` instead", "2.11.0") def endOrPoint: Int = if (isRange) end else point } diff --git a/src/reflect/scala/reflect/macros/ExprUtils.scala b/src/reflect/scala/reflect/macros/ExprUtils.scala index c438653c9286..3e2655b722c9 100644 --- a/src/reflect/scala/reflect/macros/ExprUtils.scala +++ b/src/reflect/scala/reflect/macros/ExprUtils.scala @@ -12,54 +12,54 @@ trait ExprUtils { self: blackbox.Context => /** Shorthand for `Literal(Constant(null))` in the underlying `universe`. */ - @deprecated("Use quasiquotes instead", "2.11.0") + @deprecated("use quasiquotes instead", "2.11.0") def literalNull: Expr[Null] /** Shorthand for `Literal(Constant(()))` in the underlying `universe`. */ - @deprecated("Use quasiquotes instead", "2.11.0") + @deprecated("use quasiquotes instead", "2.11.0") def literalUnit: Expr[Unit] /** Shorthand for `Literal(Constant(true))` in the underlying `universe`. */ - @deprecated("Use quasiquotes instead", "2.11.0") + @deprecated("use quasiquotes instead", "2.11.0") def literalTrue: Expr[Boolean] /** Shorthand for `Literal(Constant(false))` in the underlying `universe`. */ - @deprecated("Use quasiquotes instead", "2.11.0") + @deprecated("use quasiquotes instead", "2.11.0") def literalFalse: Expr[Boolean] /** Shorthand for `Literal(Constant(x: Boolean))` in the underlying `universe`. */ - @deprecated("Use quasiquotes instead", "2.11.0") + @deprecated("use quasiquotes instead", "2.11.0") def literal(x: Boolean): Expr[Boolean] /** Shorthand for `Literal(Constant(x: Byte))` in the underlying `universe`. */ - @deprecated("Use quasiquotes instead", "2.11.0") + @deprecated("use quasiquotes instead", "2.11.0") def literal(x: Byte): Expr[Byte] /** Shorthand for `Literal(Constant(x: Short))` in the underlying `universe`. */ - @deprecated("Use quasiquotes instead", "2.11.0") + @deprecated("use quasiquotes instead", "2.11.0") def literal(x: Short): Expr[Short] /** Shorthand for `Literal(Constant(x: Int))` in the underlying `universe`. */ - @deprecated("Use quasiquotes instead", "2.11.0") + @deprecated("use quasiquotes instead", "2.11.0") def literal(x: Int): Expr[Int] /** Shorthand for `Literal(Constant(x: Long))` in the underlying `universe`. */ - @deprecated("Use quasiquotes instead", "2.11.0") + @deprecated("use quasiquotes instead", "2.11.0") def literal(x: Long): Expr[Long] /** Shorthand for `Literal(Constant(x: Float))` in the underlying `universe`. 
*/ - @deprecated("Use quasiquotes instead", "2.11.0") + @deprecated("use quasiquotes instead", "2.11.0") def literal(x: Float): Expr[Float] /** Shorthand for `Literal(Constant(x: Double))` in the underlying `universe`. */ - @deprecated("Use quasiquotes instead", "2.11.0") + @deprecated("use quasiquotes instead", "2.11.0") def literal(x: Double): Expr[Double] /** Shorthand for `Literal(Constant(x: String))` in the underlying `universe`. */ - @deprecated("Use quasiquotes instead", "2.11.0") + @deprecated("use quasiquotes instead", "2.11.0") def literal(x: String): Expr[String] /** Shorthand for `Literal(Constant(x: Char))` in the underlying `universe`. */ - @deprecated("Use quasiquotes instead", "2.11.0") + @deprecated("use quasiquotes instead", "2.11.0") def literal(x: Char): Expr[Char] } diff --git a/src/reflect/scala/reflect/macros/Names.scala b/src/reflect/scala/reflect/macros/Names.scala index 4f3448e1ed02..028dda1de24f 100644 --- a/src/reflect/scala/reflect/macros/Names.scala +++ b/src/reflect/scala/reflect/macros/Names.scala @@ -34,15 +34,15 @@ trait Names { self: blackbox.Context => /** $freshNameNoParams */ - @deprecated("Use freshName instead", "2.11.0") + @deprecated("use freshName instead", "2.11.0") def fresh(): String /** $freshNameStringParam */ - @deprecated("Use freshName instead", "2.11.0") + @deprecated("use freshName instead", "2.11.0") def fresh(name: String): String /** $freshNameNameParam */ - @deprecated("Use freshName instead", "2.11.0") + @deprecated("use freshName instead", "2.11.0") def fresh[NameType <: Name](name: NameType): NameType /** $freshNameNoParams */ diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala index bd608601dc08..06d2e999b2a0 100644 --- a/src/reflect/scala/reflect/macros/Typers.scala +++ b/src/reflect/scala/reflect/macros/Typers.scala @@ -53,7 +53,7 @@ trait Typers { /** @see `Typers.typecheck` */ - @deprecated("Use `c.typecheck` instead", "2.11.0") + @deprecated("use `c.typecheck` instead", "2.11.0") def typeCheck(tree: Tree, pt: Type = universe.WildcardType, silent: Boolean = false, withImplicitViewsDisabled: Boolean = false, withMacrosDisabled: Boolean = false): Tree = typecheck(tree, TERMmode, pt, silent, withImplicitViewsDisabled, withMacrosDisabled) @@ -101,7 +101,7 @@ trait Typers { /** Recursively resets locally defined symbols and types in a given tree. * WARNING: Don't use this API, go for [[untypecheck]] instead. */ - @deprecated("Use `c.untypecheck` instead", "2.11.0") + @deprecated("use `c.untypecheck` instead", "2.11.0") def resetLocalAttrs(tree: Tree): Tree /** In the current implementation of Scala's reflection API, untyped trees (also known as parser trees or unattributed trees) diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala index 3b571695654e..51a7566bb811 100644 --- a/src/reflect/scala/reflect/macros/Universe.scala +++ b/src/reflect/scala/reflect/macros/Universe.scala @@ -332,7 +332,7 @@ abstract class Universe extends scala.reflect.api.Universe { } /** @see [[internal.gen]] */ - @deprecated("Use `internal.gen` instead", "2.11.0") + @deprecated("use `internal.gen` instead", "2.11.0") val treeBuild: TreeGen /** @inheritdoc */ @@ -345,94 +345,94 @@ abstract class Universe extends scala.reflect.api.Universe { /** Scala 2.10 compatibility enrichments for Symbol. 
*/ implicit class MacroCompatibleSymbol(symbol: Symbol) { /** @see [[InternalMacroApi.attachments]] */ - @deprecated("Use `internal.attachments` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.attachments` instead or import `internal.decorators._` for infix syntax", "2.11.0") def attachments: Attachments { type Pos = Position } = internal.attachments(symbol) /** @see [[InternalMacroApi.updateAttachment]] */ - @deprecated("Use `internal.updateAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.updateAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0") def updateAttachment[T: ClassTag](attachment: T): Symbol = internal.updateAttachment[T](symbol, attachment) /** @see [[InternalMacroApi.removeAttachment]] */ - @deprecated("Use `internal.removeAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.removeAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0") def removeAttachment[T: ClassTag]: Symbol = internal.removeAttachment[T](symbol) /** @see [[InternalMacroApi.setInfo]] */ - @deprecated("Use `internal.setInfo` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.setInfo` instead or import `internal.decorators._` for infix syntax", "2.11.0") def setTypeSignature(tpe: Type): Symbol = internal.setInfo(symbol, tpe) /** @see [[InternalMacroApi.setAnnotations]] */ - @deprecated("Use `internal.setAnnotations` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.setAnnotations` instead or import `internal.decorators._` for infix syntax", "2.11.0") def setAnnotations(annots: Annotation*): Symbol = internal.setAnnotations(symbol, annots: _*) /** @see [[InternalMacroApi.setName]] */ - @deprecated("Use `internal.setName` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.setName` instead or import `internal.decorators._` for infix syntax", "2.11.0") def setName(name: Name): Symbol = internal.setName(symbol, name) /** @see [[InternalMacroApi.setPrivateWithin]] */ - @deprecated("Use `internal.setPrivateWithin` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.setPrivateWithin` instead or import `internal.decorators._` for infix syntax", "2.11.0") def setPrivateWithin(sym: Symbol): Symbol = internal.setPrivateWithin(symbol, sym) } /** Scala 2.10 compatibility enrichments for TypeTree. 
*/ implicit class MacroCompatibleTree(tree: Tree) { /** @see [[InternalMacroApi.attachments]] */ - @deprecated("Use `internal.attachments` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.attachments` instead or import `internal.decorators._` for infix syntax", "2.11.0") def attachments: Attachments { type Pos = Position } = internal.attachments(tree) /** @see [[InternalMacroApi.updateAttachment]] */ - @deprecated("Use `internal.updateAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.updateAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0") def updateAttachment[T: ClassTag](attachment: T): Tree = internal.updateAttachment[T](tree, attachment) /** @see [[InternalMacroApi.removeAttachment]] */ - @deprecated("Use `internal.removeAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.removeAttachment` instead or import `internal.decorators._` for infix syntax", "2.11.0") def removeAttachment[T: ClassTag]: Tree = internal.removeAttachment[T](tree) /** @see [[InternalMacroApi.setPos]] */ - @deprecated("Use `internal.setPos` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.setPos` instead or import `internal.decorators._` for infix syntax", "2.11.0") def pos_=(pos: Position): Unit = internal.setPos(tree, pos) /** @see [[InternalMacroApi.setPos]] */ - @deprecated("Use `internal.setPos` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.setPos` instead or import `internal.decorators._` for infix syntax", "2.11.0") def setPos(newpos: Position): Tree = internal.setPos(tree, newpos) /** @see [[InternalMacroApi.setType]] */ - @deprecated("Use `internal.setType` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.setType` instead or import `internal.decorators._` for infix syntax", "2.11.0") def tpe_=(t: Type): Unit = internal.setType(tree, t) /** @see [[InternalMacroApi.setType]] */ - @deprecated("Use `internal.setType` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.setType` instead or import `internal.decorators._` for infix syntax", "2.11.0") def setType(tp: Type): Tree = internal.setType(tree, tp) /** @see [[InternalMacroApi.defineType]] */ - @deprecated("Use `internal.defineType` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.defineType` instead or import `internal.decorators._` for infix syntax", "2.11.0") def defineType(tp: Type): Tree = internal.defineType(tree, tp) /** @see [[InternalMacroApi.setSymbol]] */ - @deprecated("Use `internal.setSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.setSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") def symbol_=(sym: Symbol): Unit = internal.setSymbol(tree, sym) /** @see [[InternalMacroApi.setSymbol]] */ - @deprecated("Use `internal.setSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.setSymbol` instead or import `internal.decorators._` for infix syntax", "2.11.0") def setSymbol(sym: Symbol): Tree = internal.setSymbol(tree, sym) } /** Scala 2.10 compatibility enrichments for TypeTree. 
*/ implicit class CompatibleTypeTree(tt: TypeTree) { /** @see [[InternalMacroApi.setOriginal]] */ - @deprecated("Use `internal.setOriginal` instead or import `internal.decorators._` for infix syntax", "2.11.0") + @deprecated("use `internal.setOriginal` instead or import `internal.decorators._` for infix syntax", "2.11.0") def setOriginal(tree: Tree): TypeTree = internal.setOriginal(tt, tree) } /** @see [[InternalMacroApi.captureVariable]] */ - @deprecated("Use `internal.captureVariable` instead", "2.11.0") + @deprecated("use `internal.captureVariable` instead", "2.11.0") def captureVariable(vble: Symbol): Unit = internal.captureVariable(vble) /** @see [[InternalMacroApi.captureVariable]] */ - @deprecated("Use `internal.referenceCapturedVariable` instead", "2.11.0") + @deprecated("use `internal.referenceCapturedVariable` instead", "2.11.0") def referenceCapturedVariable(vble: Symbol): Tree = internal.referenceCapturedVariable(vble) /** @see [[InternalMacroApi.captureVariable]] */ - @deprecated("Use `internal.capturedVariableType` instead", "2.11.0") + @deprecated("use `internal.capturedVariableType` instead", "2.11.0") def capturedVariableType(vble: Symbol): Type = internal.capturedVariableType(vble) } diff --git a/src/reflect/scala/reflect/macros/package.scala b/src/reflect/scala/reflect/macros/package.scala index b63d419d617b..3bb1bdf7e31c 100644 --- a/src/reflect/scala/reflect/macros/package.scala +++ b/src/reflect/scala/reflect/macros/package.scala @@ -23,6 +23,6 @@ package object macros { * and `scala.reflect.macros.whitebox.Context`. The original `Context` is left in place for compatibility reasons, * but it is now deprecated, nudging the users to choose between blackbox and whitebox macros. */ - @deprecated("Use blackbox.Context or whitebox.Context instead", "2.11.0") + @deprecated("use blackbox.Context or whitebox.Context instead", "2.11.0") type Context = whitebox.Context -} \ No newline at end of file +} diff --git a/test/files/neg/classmanifests_new_deprecations.check b/test/files/neg/classmanifests_new_deprecations.check index 1b63303fd4c8..ed6f42d00ceb 100644 --- a/test/files/neg/classmanifests_new_deprecations.check +++ b/test/files/neg/classmanifests_new_deprecations.check @@ -1,25 +1,25 @@ -classmanifests_new_deprecations.scala:2: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): Use `scala.reflect.ClassTag` instead +classmanifests_new_deprecations.scala:2: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): use `scala.reflect.ClassTag` instead def cm1[T: ClassManifest] = ??? ^ -classmanifests_new_deprecations.scala:3: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): Use `scala.reflect.ClassTag` instead +classmanifests_new_deprecations.scala:3: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): use `scala.reflect.ClassTag` instead def cm2[T](implicit evidence$1: ClassManifest[T]) = ??? 
^ -classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): Use `scala.reflect.ClassTag` instead +classmanifests_new_deprecations.scala:4: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): use `scala.reflect.ClassTag` instead val cm3: ClassManifest[Int] = null ^ -classmanifests_new_deprecations.scala:6: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): Use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:6: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead def rcm1[T: scala.reflect.ClassManifest] = ??? ^ -classmanifests_new_deprecations.scala:7: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): Use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:7: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead def rcm2[T](implicit evidence$1: scala.reflect.ClassManifest[T]) = ??? ^ -classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): Use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:8: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead val rcm3: scala.reflect.ClassManifest[Int] = null ^ -classmanifests_new_deprecations.scala:10: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): Use `scala.reflect.ClassTag` instead +classmanifests_new_deprecations.scala:10: warning: type ClassManifest in object Predef is deprecated (since 2.10.0): use `scala.reflect.ClassTag` instead type CM[T] = ClassManifest[T] ^ -classmanifests_new_deprecations.scala:15: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): Use scala.reflect.ClassTag instead +classmanifests_new_deprecations.scala:15: warning: type ClassManifest in package reflect is deprecated (since 2.10.0): use scala.reflect.ClassTag instead type RCM[T] = scala.reflect.ClassManifest[T] ^ error: No warnings can be incurred under -Xfatal-warnings. diff --git a/test/files/neg/delayed-init-ref.check b/test/files/neg/delayed-init-ref.check index 1b6ba5c3641f..2913b1858f72 100644 --- a/test/files/neg/delayed-init-ref.check +++ b/test/files/neg/delayed-init-ref.check @@ -4,8 +4,7 @@ delayed-init-ref.scala:17: warning: Selecting value vall from object O, which ex delayed-init-ref.scala:19: warning: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value println(vall) // warn ^ -delayed-init-ref.scala:28: warning: trait DelayedInit in package scala is deprecated (since 2.11.0): DelayedInit semantics can be surprising. Support for `App` will continue. -See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0-RC1 +delayed-init-ref.scala:28: warning: trait DelayedInit in package scala is deprecated (since 2.11.0): DelayedInit semantics can be surprising. Support for `App` will continue. 
See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0-RC1 trait Before extends DelayedInit { ^ delayed-init-ref.scala:40: warning: Selecting value foo from trait UserContext, which extends scala.DelayedInit, is likely to yield an uninitialized value diff --git a/test/files/neg/names-defaults-neg-warn.check b/test/files/neg/names-defaults-neg-warn.check index 14a1e50e6892..3ff7d67cc367 100644 --- a/test/files/neg/names-defaults-neg-warn.check +++ b/test/files/neg/names-defaults-neg-warn.check @@ -1,7 +1,7 @@ -names-defaults-neg-warn.scala:11: warning: the parameter name s is deprecated: Use x instead +names-defaults-neg-warn.scala:11: warning: the parameter name s is deprecated: use x instead deprNam2.f(s = "dlfkj") ^ -names-defaults-neg-warn.scala:12: warning: the parameter name x is deprecated: Use s instead +names-defaults-neg-warn.scala:12: warning: the parameter name x is deprecated: use s instead deprNam2.g(x = "dlkjf") ^ error: No warnings can be incurred under -Xfatal-warnings. diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check index e6929cb52ea6..0a7b1a7157e2 100644 --- a/test/files/neg/names-defaults-neg.check +++ b/test/files/neg/names-defaults-neg.check @@ -112,7 +112,7 @@ names-defaults-neg.scala:90: error: deprecated parameter name x has to be distin names-defaults-neg.scala:91: error: deprecated parameter name a has to be distinct from any other parameter name (deprecated or not). def deprNam2(a: String)(@deprecatedName('a) b: Int) = 1 ^ -names-defaults-neg.scala:93: warning: the parameter name y is deprecated: Use b instead +names-defaults-neg.scala:93: warning: the parameter name y is deprecated: use b instead deprNam3(y = 10, b = 2) ^ names-defaults-neg.scala:93: error: parameter 'b' is already specified at parameter position 1 diff --git a/test/files/neg/t6406-regextract.check b/test/files/neg/t6406-regextract.check index b49b6656f540..41b362f455da 100644 --- a/test/files/neg/t6406-regextract.check +++ b/test/files/neg/t6406-regextract.check @@ -1,4 +1,4 @@ -t6406-regextract.scala:4: warning: method unapplySeq in class Regex is deprecated (since 2.11.0): Extracting a match result from anything but a CharSequence or Match is deprecated +t6406-regextract.scala:4: warning: method unapplySeq in class Regex is deprecated (since 2.11.0): extracting a match result from anything but a CharSequence or Match is deprecated List(1) collect { case r(i) => i } ^ error: No warnings can be incurred under -Xfatal-warnings. diff --git a/test/files/run/t3235-minimal.check b/test/files/run/t3235-minimal.check index 5b0657219161..374ddc79fe92 100644 --- a/test/files/run/t3235-minimal.check +++ b/test/files/run/t3235-minimal.check @@ -1,12 +1,12 @@ -t3235-minimal.scala:3: warning: method round in class RichInt is deprecated (since 2.11.0): This is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? +t3235-minimal.scala:3: warning: method round in class RichInt is deprecated (since 2.11.0): this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? assert(123456789.round == 123456789) ^ -t3235-minimal.scala:4: warning: method round in package math is deprecated (since 2.11.0): This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value? 
+t3235-minimal.scala:4: warning: method round in package math is deprecated (since 2.11.0): This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value? assert(math.round(123456789) == 123456789) ^ -t3235-minimal.scala:5: warning: method round in class RichLong is deprecated (since 2.11.0): This is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? +t3235-minimal.scala:5: warning: method round in class RichLong is deprecated (since 2.11.0): this is an integer type; there is no reason to round it. Perhaps you meant to call this on a floating-point value? assert(1234567890123456789L.round == 1234567890123456789L) ^ -t3235-minimal.scala:6: warning: method round in package math is deprecated (since 2.11.0): This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value? +t3235-minimal.scala:6: warning: method round in package math is deprecated (since 2.11.0): This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value? assert(math.round(1234567890123456789L) == 1234567890123456789L) ^ From be38ebba3f32816a150012727d3351570718bcf6 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Sat, 2 Apr 2016 15:27:56 +0200 Subject: [PATCH 0095/2793] Add since arg to deprecationWarning and use it --- .../scala/tools/nsc/CompilationUnits.scala | 2 +- src/compiler/scala/tools/nsc/Global.scala | 8 ++-- src/compiler/scala/tools/nsc/Reporting.scala | 41 +++++++++++++------ .../scala/tools/nsc/ast/parser/Parsers.scala | 18 ++++---- .../scala/tools/nsc/ast/parser/Scanners.scala | 18 ++++---- .../scala/tools/nsc/javac/JavaParsers.scala | 2 +- .../scala/tools/nsc/javac/JavaScanners.scala | 4 +- .../patmat/ScalacPatternExpanders.scala | 2 +- .../tools/nsc/typechecker/Adaptations.scala | 2 +- .../tools/nsc/typechecker/Contexts.scala | 4 +- .../tools/nsc/typechecker/NamesDefaults.scala | 9 ++-- .../tools/nsc/typechecker/RefChecks.scala | 11 ++--- .../scala/tools/nsc/typechecker/Typers.scala | 15 +++---- .../tools/reflect/FormatInterpolator.scala | 2 +- .../scala/collection/immutable/Range.scala | 8 ++-- .../scala/concurrent/ExecutionContext.scala | 2 +- src/library/scala/concurrent/Future.scala | 10 ++--- src/library/scala/math/BigDecimal.scala | 16 ++++---- src/library/scala/util/Try.scala | 2 +- .../scala/reflect/internal/Reporting.scala | 2 +- .../scala/reflect/internal/Variances.scala | 2 +- .../reflect/internal/util/StringOps.scala | 4 +- .../scala/reflect/runtime/JavaUniverse.scala | 2 +- .../scala/tools/nsc/interpreter/IMain.scala | 2 +- test/files/jvm/future-spec.check | 4 +- test/files/jvm/scala-concurrent-tck.check | 2 +- test/files/jvm/serialization-new.check | 4 +- test/files/jvm/serialization.check | 4 +- test/files/neg/checksensible.check | 2 +- test/files/neg/t7783.check | 10 ++--- test/files/run/bitsets.check | 2 +- test/files/run/collection-stacks.check | 2 +- test/files/run/colltest.check | 2 +- test/files/run/delay-bad.check | 2 +- .../files/run/future-flatmap-exec-count.check | 2 +- .../run/reflection-java-annotations.check | 2 +- .../run/repl-no-imports-no-predef-power.check | 2 +- test/files/run/repl-power.check | 2 +- test/files/run/richs.check | 2 +- test/files/run/sbt-icode-interface.check | 2 +- test/files/run/t2212.check | 2 +- test/files/run/t3361.check | 2 +- test/files/run/t3647.check | 2 +- test/files/run/t3888.check | 2 +- test/files/run/t3970.check | 2 +- 
test/files/run/t3996.check | 2 +- test/files/run/t4080.check | 2 +- test/files/run/t4396.check | 2 +- test/files/run/t4461.check | 2 +- test/files/run/t4594-repl-settings.scala | 2 +- test/files/run/t4680.check | 2 +- test/files/run/t4813.check | 2 +- test/files/run/t5428.check | 2 +- test/files/run/t576.check | 2 +- test/files/run/t6111.check | 2 +- test/files/run/t6292.check | 2 +- test/files/run/t6329_repl.check | 8 ++-- test/files/run/t6329_repl_bug.check | 2 +- test/files/run/t6329_vanilla_bug.check | 2 +- test/files/run/t6481.check | 2 +- test/files/run/t6690.check | 2 +- test/files/run/t6863.check | 2 +- test/files/run/t6935.check | 2 +- test/files/run/t8549.check | 2 +- test/files/run/unittest_collection.check | 2 +- .../symtab/SymbolTableForUnitTesting.scala | 2 +- 66 files changed, 160 insertions(+), 132 deletions(-) diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index 34b07a26513d..02d9b0d9133b 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -128,7 +128,7 @@ trait CompilationUnits { global: Global => final def warning(pos: Position, msg: String): Unit = reporter.warning(pos, msg) @deprecated("Call global.currentRun.reporting.deprecationWarning directly instead.", "2.11.2") - final def deprecationWarning(pos: Position, msg: String): Unit = currentRun.reporting.deprecationWarning(pos, msg) + final def deprecationWarning(pos: Position, msg: String, since: String): Unit = currentRun.reporting.deprecationWarning(pos, msg, since) @deprecated("Call global.currentRun.reporting.uncheckedWarning directly instead.", "2.11.2") final def uncheckedWarning(pos: Position, msg: String): Unit = currentRun.reporting.uncheckedWarning(pos, msg) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 7417d9c09ddc..9d6693c00fd8 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1054,9 +1054,9 @@ class Global(var currentSettings: Settings, var reporter: Reporter) var currentUnit: CompilationUnit = NoCompilationUnit // used in sbt - def uncheckedWarnings: List[(Position, String)] = reporting.uncheckedWarnings + def uncheckedWarnings: List[(Position, String)] = reporting.uncheckedWarnings.map{case (pos, (msg, since)) => (pos, msg)} // used in sbt - def deprecationWarnings: List[(Position, String)] = reporting.deprecationWarnings + def deprecationWarnings: List[(Position, String)] = reporting.deprecationWarnings.map{case (pos, (msg, since)) => (pos, msg)} private class SyncedCompilationBuffer { self => private val underlying = new mutable.ArrayBuffer[CompilationUnit] @@ -1267,11 +1267,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter) private def warnDeprecatedAndConflictingSettings(unit: CompilationUnit) { // issue warnings for any usage of deprecated settings settings.userSetSettings filter (_.isDeprecated) foreach { s => - currentRun.reporting.deprecationWarning(NoPosition, s.name + " is deprecated: " + s.deprecationMessage.get) + currentRun.reporting.deprecationWarning(NoPosition, s.name + " is deprecated: " + s.deprecationMessage.get, "") } val supportedTarget = "jvm-1.8" if (settings.target.value != supportedTarget) { - currentRun.reporting.deprecationWarning(NoPosition, settings.target.name + ":" + settings.target.value + " is deprecated and has no effect, setting to " + supportedTarget) + 
currentRun.reporting.deprecationWarning(NoPosition, settings.target.name + ":" + settings.target.value + " is deprecated and has no effect, setting to " + supportedTarget, "2.12.0") settings.target.value = supportedTarget } settings.conflictWarning.foreach(reporter.warning(NoPosition, _)) diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala index 325537a5a8d9..01c583bea31b 100644 --- a/src/compiler/scala/tools/nsc/Reporting.scala +++ b/src/compiler/scala/tools/nsc/Reporting.scala @@ -30,17 +30,33 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w def this(what: String, booleanSetting: Settings#BooleanSetting) { this(what, () => booleanSetting, booleanSetting) } - val warnings = mutable.LinkedHashMap[Position, String]() - def warn(pos: Position, msg: String) = + val warnings = mutable.LinkedHashMap[Position, (String, String)]() + def warn(pos: Position, msg: String, since: String = "") = if (doReport()) reporter.warning(pos, msg) - else if (!(warnings contains pos)) warnings += ((pos, msg)) + else if (!(warnings contains pos)) warnings += ((pos, (msg, since))) def summarize() = if (warnings.nonEmpty && (setting.isDefault || doReport())) { - val numWarnings = warnings.size - val warningVerb = if (numWarnings == 1) "was" else "were" - val warningCount = countElementsAsString(numWarnings, s"$what warning") - - reporter.warning(NoPosition, s"there $warningVerb $warningCount; re-run with ${setting.name} for details") + val sinceAndAmount = mutable.TreeMap[String, Int]() + warnings.valuesIterator.foreach { case (_, since) => + val value = sinceAndAmount.get(since) + if (value.isDefined) sinceAndAmount += ((since, value.get + 1)) + else sinceAndAmount += ((since, 1)) + } + val deprecationSummary = sinceAndAmount.size > 1 + sinceAndAmount.foreach { case (since, amount) => + val numWarnings = amount + val warningsSince = if (since.nonEmpty) s" (since $since)" else "" + val warningVerb = if (numWarnings == 1) "was" else "were" + val warningCount = countElementsAsString(numWarnings, s"$what warning") + val rerun = if (deprecationSummary) "" else s"; re-run with ${setting.name} for details" + reporter.warning(NoPosition, s"there $warningVerb $warningCount$warningsSince$rerun") + } + if (deprecationSummary) { + val numWarnings = warnings.size + val warningVerb = if (numWarnings == 1) "was" else "were" + val warningCount = countElementsAsString(numWarnings, s"$what warning") + reporter.warning(NoPosition, s"there $warningVerb $warningCount in total; re-run with ${setting.name} for details") + } } } @@ -53,7 +69,7 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w private val _allConditionalWarnings = List(_deprecationWarnings, _uncheckedWarnings, _featureWarnings, _inlinerWarnings) // TODO: remove in favor of the overload that takes a Symbol, give that argument a default (NoSymbol) - def deprecationWarning(pos: Position, msg: String): Unit = _deprecationWarnings.warn(pos, msg) + def deprecationWarning(pos: Position, msg: String, since: String): Unit = _deprecationWarnings.warn(pos, msg, since) def uncheckedWarning(pos: Position, msg: String): Unit = _uncheckedWarnings.warn(pos, msg) def featureWarning(pos: Position, msg: String): Unit = _featureWarnings.warn(pos, msg) def inlinerWarning(pos: Position, msg: String): Unit = _inlinerWarnings.warn(pos, msg) @@ -66,11 +82,12 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w def allConditionalWarnings = 
_allConditionalWarnings flatMap (_.warnings) // behold! the symbol that caused the deprecation warning (may not be deprecated itself) - def deprecationWarning(pos: Position, sym: Symbol, msg: String): Unit = _deprecationWarnings.warn(pos, msg) + def deprecationWarning(pos: Position, sym: Symbol, msg: String, since: String): Unit = _deprecationWarnings.warn(pos, msg, since) def deprecationWarning(pos: Position, sym: Symbol): Unit = { - val version = sym.deprecationVersion match { case Some(ver) => s" (since $ver)" case _ => "" } + val version = sym.deprecationVersion.getOrElse("") + val since = if (version.isEmpty) version else s" (since $version)" val message = sym.deprecationMessage match { case Some(msg) => s": $msg" case _ => "" } - deprecationWarning(pos, sym, s"$sym${sym.locationString} is deprecated$version$message") + deprecationWarning(pos, sym, s"$sym${sym.locationString} is deprecated$since$message", version) } private[this] var reportedFeature = Set[Symbol]() diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 8413183ab632..f59deafe1b4c 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -39,7 +39,7 @@ trait ParsersCommon extends ScannersCommon { self => */ abstract class ParserCommon { val in: ScannerCommon - def deprecationWarning(off: Offset, msg: String): Unit + def deprecationWarning(off: Offset, msg: String, since: String): Unit def accept(token: Token): Int /** Methods inParensOrError and similar take a second argument which, should @@ -154,7 +154,7 @@ self => // suppress warnings; silent abort on errors def warning(offset: Offset, msg: String): Unit = () - def deprecationWarning(offset: Offset, msg: String): Unit = () + def deprecationWarning(offset: Offset, msg: String, since: String): Unit = () def syntaxError(offset: Offset, msg: String): Unit = throw new MalformedInput(offset, msg) def incompleteInputError(msg: String): Unit = throw new MalformedInput(source.content.length - 1, msg) @@ -206,8 +206,8 @@ self => override def warning(offset: Offset, msg: String): Unit = reporter.warning(o2p(offset), msg) - override def deprecationWarning(offset: Offset, msg: String): Unit = - currentRun.reporting.deprecationWarning(o2p(offset), msg) + override def deprecationWarning(offset: Offset, msg: String, since: String): Unit = + currentRun.reporting.deprecationWarning(o2p(offset), msg, since) private var smartParsing = false @inline private def withSmartParsing[T](body: => T): T = { @@ -1822,7 +1822,7 @@ self => val hasEq = in.token == EQUALS if (hasVal) { - if (hasEq) deprecationWarning(in.offset, "val keyword in for comprehension is deprecated") + if (hasEq) deprecationWarning(in.offset, "val keyword in for comprehension is deprecated", "2.10.0") else syntaxError(in.offset, "val in for comprehension must be followed by assignment") } @@ -2358,7 +2358,7 @@ self => while (in.token == VIEWBOUND) { val msg = "Use an implicit parameter instead.\nExample: Instead of `def f[A <% Int](a: A)` use `def f[A](a: A)(implicit ev: A => Int)`." if (settings.future) - deprecationWarning(in.offset, s"View bounds are deprecated. $msg") + deprecationWarning(in.offset, s"View bounds are deprecated. 
$msg", "2.12.0") contextBoundBuf += atPos(in.skipToken())(makeFunctionTypeTree(List(Ident(pname)), typ())) } while (in.token == COLON) { @@ -2652,14 +2652,14 @@ self => if (isStatSep || in.token == RBRACE) { if (restype.isEmpty) { if (settings.future) - deprecationWarning(in.lastOffset, s"Procedure syntax is deprecated. Convert procedure `$name` to method by adding `: Unit`.") + deprecationWarning(in.lastOffset, s"Procedure syntax is deprecated. Convert procedure `$name` to method by adding `: Unit`.", "2.12.0") restype = scalaUnitConstr } newmods |= Flags.DEFERRED EmptyTree } else if (restype.isEmpty && in.token == LBRACE) { if (settings.future) - deprecationWarning(in.offset, s"Procedure syntax is deprecated. Convert procedure `$name` to method by adding `: Unit =`.") + deprecationWarning(in.offset, s"Procedure syntax is deprecated. Convert procedure `$name` to method by adding `: Unit =`.", "2.12.0") restype = scalaUnitConstr blockExpr() } else { @@ -2921,7 +2921,7 @@ self => case vdef @ ValDef(mods, _, _, _) if !mods.isDeferred => copyValDef(vdef)(mods = mods | Flags.PRESUPER) case tdef @ TypeDef(mods, name, tparams, rhs) => - deprecationWarning(tdef.pos.point, "early type members are deprecated. Move them to the regular body: the semantics are the same.") + deprecationWarning(tdef.pos.point, "early type members are deprecated. Move them to the regular body: the semantics are the same.", "2.11.0") treeCopy.TypeDef(tdef, mods | Flags.PRESUPER, name, tparams, rhs) case docdef @ DocDef(comm, rhs) => treeCopy.DocDef(docdef, comm, rhs) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 3d8f5a2dd3f5..891858ba7b95 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -35,7 +35,7 @@ trait ScannersCommon { // things to fill in, in addition to buf, decodeUni which come from CharArrayReader def error(off: Offset, msg: String): Unit def incompleteInputError(off: Offset, msg: String): Unit - def deprecationWarning(off: Offset, msg: String): Unit + def deprecationWarning(off: Offset, msg: String, since: String): Unit } def createKeywordArray(keywords: Seq[(Name, Token)], defaultToken: Token): (Token, Array[Token]) = { @@ -208,7 +208,7 @@ trait Scanners extends ScannersCommon { if (name == nme.MACROkw) syntaxError(s"$name is now a reserved word; usage as an identifier is disallowed") else if (emitIdentifierDeprecationWarnings) - deprecationWarning(s"$name is now a reserved word; usage as an identifier is deprecated") + deprecationWarning(s"$name is a reserved word (since 2.10.0); usage as an identifier is deprecated", "2.10.0") } } } @@ -824,7 +824,7 @@ trait Scanners extends ScannersCommon { if (settings.future) syntaxError(start, msg("unsupported")) else - deprecationWarning(start, msg("deprecated")) + deprecationWarning(start, msg("deprecated"), "2.11.0") putChar(oct.toChar) } else { ch match { @@ -1034,7 +1034,7 @@ trait Scanners extends ScannersCommon { /** generate an error at the current token offset */ def syntaxError(msg: String): Unit = syntaxError(offset, msg) - def deprecationWarning(msg: String): Unit = deprecationWarning(offset, msg) + def deprecationWarning(msg: String, since: String): Unit = deprecationWarning(offset, msg, since) /** signal an error where the input ended in the middle of a token */ def incompleteInputError(msg: String): Unit = { @@ -1204,8 +1204,8 @@ trait Scanners extends ScannersCommon { override val decodeUni: 
Boolean = !settings.nouescape // suppress warnings, throw exception on errors - def deprecationWarning(off: Offset, msg: String): Unit = () - def error (off: Offset, msg: String): Unit = throw new MalformedInput(off, msg) + def deprecationWarning(off: Offset, msg: String, since: String): Unit = () + def error(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg) def incompleteInputError(off: Offset, msg: String): Unit = throw new MalformedInput(off, msg) } @@ -1214,9 +1214,9 @@ trait Scanners extends ScannersCommon { class UnitScanner(val unit: CompilationUnit, patches: List[BracePatch]) extends SourceFileScanner(unit.source) { def this(unit: CompilationUnit) = this(unit, List()) - override def deprecationWarning(off: Offset, msg: String) = currentRun.reporting.deprecationWarning(unit.position(off), msg) - override def error (off: Offset, msg: String) = reporter.error(unit.position(off), msg) - override def incompleteInputError(off: Offset, msg: String) = currentRun.parsing.incompleteInputError(unit.position(off), msg) + override def deprecationWarning(off: Offset, msg: String, since: String) = currentRun.reporting.deprecationWarning(unit.position(off), msg, since) + override def error(off: Offset, msg: String) = reporter.error(unit.position(off), msg) + override def incompleteInputError(off: Offset, msg: String) = currentRun.parsing.incompleteInputError(unit.position(off), msg) private var bracePatches: List[BracePatch] = patches diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index cc851b633089..fd9c99a3b966 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -27,7 +27,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def freshName(prefix: String): Name = freshTermName(prefix) def freshTermName(prefix: String): TermName = unit.freshTermName(prefix) def freshTypeName(prefix: String): TypeName = unit.freshTypeName(prefix) - def deprecationWarning(off: Int, msg: String) = currentRun.reporting.deprecationWarning(off, msg) + def deprecationWarning(off: Int, msg: String, since: String) = currentRun.reporting.deprecationWarning(off, msg, since) implicit def i2p(offset : Int) : Position = Position.offset(unit.source, offset) def warning(pos : Int, msg : String) : Unit = reporter.warning(pos, msg) def syntaxError(pos: Int, msg: String) : Unit = reporter.error(pos, msg) diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala index ac86dfd66510..c74a6938c6dd 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala @@ -860,9 +860,9 @@ trait JavaScanners extends ast.parser.ScannersCommon { class JavaUnitScanner(unit: CompilationUnit) extends JavaScanner { in = new JavaCharArrayReader(unit.source.content, !settings.nouescape.value, syntaxError) init() - def error (pos: Int, msg: String) = reporter.error(pos, msg) + def error(pos: Int, msg: String) = reporter.error(pos, msg) def incompleteInputError(pos: Int, msg: String) = currentRun.parsing.incompleteInputError(pos, msg) - def deprecationWarning(pos: Int, msg: String) = currentRun.reporting.deprecationWarning(pos, msg) + def deprecationWarning(pos: Int, msg: String, since: String) = currentRun.reporting.deprecationWarning(pos, msg, since) implicit def g2p(pos: Int): Position = Position.offset(unit.source, pos) } } diff --git 
a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala index d4f44303bbde..2c1fb064cc00 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala @@ -148,7 +148,7 @@ trait ScalacPatternExpanders { val tupled = extractor.asSinglePattern if (effectivePatternArity(args) == 1 && isTupleType(extractor.typeOfSinglePattern)) { val sym = sel.symbol.owner - currentRun.reporting.deprecationWarning(sel.pos, sym, s"${sym} expects $productArity patterns$acceptMessage but crushing into $productArity-tuple to fit single pattern (SI-6675)") + currentRun.reporting.deprecationWarning(sel.pos, sym, s"${sym} expects $productArity patterns$acceptMessage but crushing into $productArity-tuple to fit single pattern (SI-6675)", "2.11.0") } tupled } else extractor diff --git a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala index 46561de78f13..57de44a038a1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala @@ -77,7 +77,7 @@ trait Adaptations { val msg = "Adaptation of argument list by inserting () is deprecated: " + ( if (isLeakyTarget) "leaky (Object-receiving) target makes this especially dangerous." else "this is unlikely to be what you want.") - context.deprecationWarning(t.pos, t.symbol, adaptWarningMessage(msg)) + context.deprecationWarning(t.pos, t.symbol, adaptWarningMessage(msg), "2.11.0") } } else if (settings.warnAdaptedArgs) context.warning(t.pos, adaptWarningMessage(s"Adapting argument list by creating a ${args.size}-tuple: this may not be what you want.")) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index bcc1ed3e645c..16ef75c8639d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -586,8 +586,8 @@ trait Contexts { self: Analyzer => } - def deprecationWarning(pos: Position, sym: Symbol, msg: String): Unit = - currentRun.reporting.deprecationWarning(fixPosition(pos), sym, msg) + def deprecationWarning(pos: Position, sym: Symbol, msg: String, since: String): Unit = + currentRun.reporting.deprecationWarning(fixPosition(pos), sym, msg, since) def deprecationWarning(pos: Position, sym: Symbol): Unit = currentRun.reporting.deprecationWarning(fixPosition(pos), sym) // TODO: allow this to escalate to an error, and implicit search will ignore deprecated implicits diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index d7405c28d532..ab6837ec0146 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -559,21 +559,22 @@ trait NamesDefaults { self: Analyzer => def removeNames(typer: Typer)(args: List[Tree], params: List[Symbol]): (List[Tree], Array[Int]) = { implicit val context0 = typer.context def matchesName(param: Symbol, name: Name, argIndex: Int) = { - def warn(w: String) = context0.deprecationWarning(args(argIndex).pos, param, w) + def warn(msg: String, since: String) = context0.deprecationWarning(args(argIndex).pos, param, msg, since) def checkDeprecation(anonOK: Boolean) = when (param.deprecatedParamName) { 
case Some(`name`) => true case Some(nme.NO_NAME) => anonOK } - def since = param.deprecatedParamVersion.map(ver => s" (since $ver)").getOrElse("") + def version = param.deprecatedParamVersion.getOrElse("") + def since = if (version.isEmpty) version else s" (since $version)" def checkName = { val res = param.name == name - if (res && checkDeprecation(true)) warn(s"naming parameter $name is deprecated$since.") + if (res && checkDeprecation(true)) warn(s"naming parameter $name is deprecated$since.", version) res } def checkAltName = { val res = checkDeprecation(false) - if (res) warn(s"the parameter name $name is deprecated$since: use ${param.name} instead") + if (res) warn(s"the parameter name $name is deprecated$since: use ${param.name} instead", version) res } !param.isSynthetic && (checkName || checkAltName) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 3aea64a1f277..2d454c2fe6cd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -548,10 +548,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans def checkOverrideDeprecated() { if (other.hasDeprecatedOverridingAnnotation && !member.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) { - val version = other.deprecatedOverridingVersion map (ver => s" (since $ver)") getOrElse "" - val message = other.deprecatedOverridingMessage map (msg => s": $msg") getOrElse "" - val report = s"overriding ${other.fullLocationString} is deprecated$version$message" - currentRun.reporting.deprecationWarning(member.pos, other, report) + val version = other.deprecatedOverridingVersion.getOrElse("") + val since = if (version.isEmpty) version else s" (since $version)" + val message = other.deprecatedOverridingMessage map (msg => s": $msg") getOrElse "" + val report = s"overriding ${other.fullLocationString} is deprecated$since$message" + currentRun.reporting.deprecationWarning(member.pos, other, report, version) } } } @@ -1423,7 +1424,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans currentRun.reporting.deprecationWarning( tree.pos, symbol, - s"${symbol.toString} overrides concrete, non-deprecated symbol(s): ${concrOvers.map(_.name.decode).mkString(", ")}") + s"${symbol.toString} overrides concrete, non-deprecated symbol(s): ${concrOvers.map(_.name.decode).mkString(", ")}", "") } } private def isRepeatedParamArg(tree: Tree) = currentApplication match { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index d44a0eaf59fd..dcf14612c9c8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1704,10 +1704,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!isPastTyper && psym.hasDeprecatedInheritanceAnnotation && !sameSourceFile && !context.owner.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) { - val version = psym.deprecatedInheritanceVersion map (ver => s" (since $ver)") getOrElse "" - val message = psym.deprecatedInheritanceMessage map (msg => s": $msg") getOrElse "" - val report = s"inheritance from ${psym.fullLocationString} is deprecated$version$message" - context.deprecationWarning(parent.pos, psym, report) + val version = psym.deprecatedInheritanceVersion.getOrElse("") + val since = if (version.isEmpty) 
version else s" (since $version)" + val message = psym.deprecatedInheritanceMessage.map(msg => s": $msg").getOrElse("") + val report = s"inheritance from ${psym.fullLocationString} is deprecated$since$message" + context.deprecationWarning(parent.pos, psym, report, version) } if (psym.isSealed && !phase.erasedTypes) @@ -3710,7 +3711,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } if (annType.typeSymbol == DeprecatedAttr && argss.flatten.size < 2) - context.deprecationWarning(ann.pos, DeprecatedAttr, "@deprecated now takes two arguments; see the scaladoc.") + context.deprecationWarning(ann.pos, DeprecatedAttr, "@deprecated now takes two arguments; see the scaladoc.", "2.11.0") if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) ErroneousAnnotation else annInfo(typedAnn) @@ -4785,7 +4786,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // temporarily use `filter` as an alternative for `withFilter` def tryWithFilterAndFilter(tree: Select, qual: Tree): Tree = { - def warn(sym: Symbol) = context.deprecationWarning(tree.pos, sym, s"`withFilter' method does not yet exist on ${qual.tpe.widen}, using `filter' method instead") + def warn(sym: Symbol) = context.deprecationWarning(tree.pos, sym, s"`withFilter' method does not yet exist on ${qual.tpe.widen}, using `filter' method instead", "2.11.0") silent(_ => typedSelect(tree, qual, nme.withFilter)) orElse { _ => silent(_ => typed1(Select(qual, nme.filter) setPos tree.pos, mode, pt)) match { case SilentResultValue(res) => warn(res.symbol) ; res @@ -5580,7 +5581,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def reportWarning(inferredType: Type) = { val explanation = s"inference of $inferredType from macro impl's c.Expr[$inferredType] is deprecated and is going to stop working in 2.12" - context.deprecationWarning(ddef.pos, ddef.symbol, s"$commonMessage ($explanation)") + context.deprecationWarning(ddef.pos, ddef.symbol, s"$commonMessage ($explanation)", "2.12.0") } computeMacroDefTypeFromMacroImplRef(ddef, rhs1) match { case ErrorType => ErrorType diff --git a/src/compiler/scala/tools/reflect/FormatInterpolator.scala b/src/compiler/scala/tools/reflect/FormatInterpolator.scala index cbdb01a10a95..9825acd39fd7 100644 --- a/src/compiler/scala/tools/reflect/FormatInterpolator.scala +++ b/src/compiler/scala/tools/reflect/FormatInterpolator.scala @@ -117,7 +117,7 @@ abstract class FormatInterpolator { c.error(errPoint, msg("unsupported")) s0 } else { - currentRun.reporting.deprecationWarning(errPoint, msg("deprecated")) + currentRun.reporting.deprecationWarning(errPoint, msg("deprecated"), "2.11.0") try StringContext.treatEscapes(s0) catch escapeHatch } } diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index 36e2fa25dd2e..0c24d17c1539 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -80,7 +80,8 @@ extends scala.collection.AbstractSeq[Int] || (start < end && step < 0) || (start == end && !isInclusive) ) - @deprecated("this method will be made private, use `length` instead.", "2.11") + + @deprecated("this method will be made private, use `length` instead", "2.11.0") final val numRangeElements: Int = { if (step == 0) throw new IllegalArgumentException("step cannot be 0.") else if (isEmpty) 0 @@ -90,7 +91,8 @@ extends scala.collection.AbstractSeq[Int] else len.toInt } } - @deprecated("this method 
will be made private, use `last` instead.", "2.11") + + @deprecated("this method will be made private, use `last` instead", "2.11.0") final val lastElement = if (isEmpty) start - step else step match { @@ -103,7 +105,7 @@ extends scala.collection.AbstractSeq[Int] else end - step } - @deprecated("this method will be made private.", "2.11") + @deprecated("this method will be made private", "2.11.0") final val terminalElement = lastElement + step /** The last element of this range. This method will return the correct value diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala index f2c3284f9236..fe684e4d46fb 100644 --- a/src/library/scala/concurrent/ExecutionContext.scala +++ b/src/library/scala/concurrent/ExecutionContext.scala @@ -87,7 +87,7 @@ trait ExecutionContext { * constructed, so that it doesn't need any additional * preparation later. */ - @deprecated("preparation of ExecutionContexts will be removed", "2.12") + @deprecated("preparation of ExecutionContexts will be removed", "2.12.0") def prepare(): ExecutionContext = this } diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index 93e9fddcb3a8..c0398605a6fd 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -113,7 +113,7 @@ trait Future[+T] extends Awaitable[T] { * * @group Callbacks */ - @deprecated("use `foreach` or `onComplete` instead (keep in mind that they take total rather than partial functions)", "2.12") + @deprecated("use `foreach` or `onComplete` instead (keep in mind that they take total rather than partial functions)", "2.12.0") def onSuccess[U](pf: PartialFunction[T, U])(implicit executor: ExecutionContext): Unit = onComplete { case Success(v) => pf.applyOrElse[T, Any](v, Predef.conforms[T]) // Exploiting the cached function to avoid MatchError @@ -138,7 +138,7 @@ trait Future[+T] extends Awaitable[T] { * * @group Callbacks */ - @deprecated("use `onComplete` or `failed.foreach` instead (keep in mind that they take total rather than partial functions)", "2.12") + @deprecated("use `onComplete` or `failed.foreach` instead (keep in mind that they take total rather than partial functions)", "2.12.0") def onFailure[U](@deprecatedName('callback) pf: PartialFunction[Throwable, U])(implicit executor: ExecutionContext): Unit = onComplete { case Failure(t) => pf.applyOrElse[Throwable, Any](t, Predef.conforms[Throwable]) // Exploiting the cached function to avoid MatchError @@ -688,7 +688,7 @@ object Future { * @param p the predicate which indicates if it's a match * @return the `Future` holding the optional result of the search */ - @deprecated("use the overloaded version of this method that takes a scala.collection.immutable.Iterable instead", "2.12") + @deprecated("use the overloaded version of this method that takes a scala.collection.immutable.Iterable instead", "2.12.0") def find[T](@deprecatedName('futurestravonce) futures: TraversableOnce[Future[T]])(@deprecatedName('predicate) p: T => Boolean)(implicit executor: ExecutionContext): Future[Option[T]] = { val futuresBuffer = futures.toBuffer if (futuresBuffer.isEmpty) successful[Option[T]](None) @@ -775,7 +775,7 @@ object Future { * @param op the fold operation to be applied to the zero and futures * @return the `Future` holding the result of the fold */ - @deprecated("use Future.foldLeft instead", "2.12") + @deprecated("use Future.foldLeft instead", "2.12.0") def fold[T, R](futures: 
TraversableOnce[Future[T]])(zero: R)(@deprecatedName('foldFun) op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { if (futures.isEmpty) successful(zero) else sequence(futures).map(_.foldLeft(zero)(op)) @@ -794,7 +794,7 @@ object Future { * @param op the reduce operation which is applied to the results of the futures * @return the `Future` holding the result of the reduce */ - @deprecated("use Future.reduceLeft instead", "2.12") + @deprecated("use Future.reduceLeft instead", "2.12.0") def reduce[T, R >: T](futures: TraversableOnce[Future[T]])(op: (R, T) => R)(implicit executor: ExecutionContext): Future[R] = { if (futures.isEmpty) failed(new NoSuchElementException("reduce attempted on empty collection")) else sequence(futures).map(_ reduceLeft op) diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala index 8e03017f0964..4bc0c0cf950d 100644 --- a/src/library/scala/math/BigDecimal.scala +++ b/src/library/scala/math/BigDecimal.scala @@ -148,7 +148,7 @@ object BigDecimal { * @param mc the `MathContext` used for future computations * @return the constructed `BigDecimal` */ - @deprecated("MathContext is not applied to Doubles in valueOf. Use BigDecimal.decimal to use rounding, or java.math.BigDecimal.valueOf to avoid it.", "2.11") + @deprecated("MathContext is not applied to Doubles in valueOf. Use BigDecimal.decimal to use rounding, or java.math.BigDecimal.valueOf to avoid it.", "2.11.0") def valueOf(d: Double, mc: MathContext): BigDecimal = apply(BigDec valueOf d, mc) /** Constructs a `BigDecimal` using the java BigDecimal static @@ -163,14 +163,14 @@ object BigDecimal { * valueOf constructor. This is unlikely to do what you want; * use `valueOf(f.toDouble)` or `decimal(f)` instead. */ - @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).", "2.11") + @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).", "2.11.0") def valueOf(f: Float): BigDecimal = valueOf(f.toDouble) /** Constructs a `BigDecimal` using the java BigDecimal static * valueOf constructor. This is unlikely to do what you want; * use `valueOf(f.toDouble)` or `decimal(f)` instead. */ - @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).", "2.11") + @deprecated("Float arguments to valueOf may not do what you wish. Use decimal or valueOf(f.toDouble).", "2.11.0") def valueOf(f: Float, mc: MathContext): BigDecimal = valueOf(f.toDouble, mc) @@ -259,10 +259,10 @@ object BigDecimal { */ def apply(d: Double, mc: MathContext): BigDecimal = decimal(d, mc) - @deprecated("The default conversion from Float may not do what you want. Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11") + @deprecated("The default conversion from Float may not do what you want. Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11.0") def apply(x: Float): BigDecimal = apply(x.toDouble) - @deprecated("The default conversion from Float may not do what you want. Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11") + @deprecated("The default conversion from Float may not do what you want. 
Use BigDecimal.decimal for a String representation, or explicitly convert the Float with .toDouble.", "2.11.0") def apply(x: Float, mc: MathContext): BigDecimal = apply(x.toDouble, mc) /** Translates a character array representation of a `BigDecimal` @@ -329,7 +329,7 @@ object BigDecimal { /** Constructs a `BigDecimal` from a `java.math.BigDecimal`. */ def apply(bd: BigDec): BigDecimal = apply(bd, defaultMathContext) - @deprecated("This method appears to round a java.math.BigDecimal but actually doesn't. Use new BigDecimal(bd, mc) instead for no rounding, or BigDecimal.decimal(bd, mc) for rounding.", "2.11") + @deprecated("This method appears to round a java.math.BigDecimal but actually doesn't. Use new BigDecimal(bd, mc) instead for no rounding, or BigDecimal.decimal(bd, mc) for rounding.", "2.11.0") def apply(bd: BigDec, mc: MathContext): BigDecimal = new BigDecimal(bd, mc) /** Implicit conversion from `Int` to `BigDecimal`. */ @@ -467,7 +467,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ * `isExactFloat`, `isBinaryFloat`, or `isDecimalFloat`, depending on the intended meaning. * By default, `decimal` creation is used, so `isDecimalFloat` is probably what you want. */ - @deprecated("What constitutes validity is unclear. Use `isExactFloat`, `isBinaryFloat`, or `isDecimalFloat` instead.", "2.11") + @deprecated("What constitutes validity is unclear. Use `isExactFloat`, `isBinaryFloat`, or `isDecimalFloat` instead.", "2.11.0") def isValidFloat = { val f = toFloat !f.isInfinity && bigDecimal.compareTo(new BigDec(f.toDouble)) == 0 @@ -476,7 +476,7 @@ extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[ * `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble`, depending on the intended meaning. * By default, `decimal` creation is used, so `isDecimalDouble` is probably what you want. */ - @deprecated("Validity has distinct meanings. Use `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble` instead.", "2.11") + @deprecated("Validity has distinct meanings. Use `isExactDouble`, `isBinaryDouble`, or `isDecimalDouble` instead.", "2.11.0") def isValidDouble = { val d = toDouble !d.isInfinity && bigDecimal.compareTo(new BigDec(d)) == 0 diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala index 3c8b21b03c1c..00e9585c38e2 100644 --- a/src/library/scala/util/Try.scala +++ b/src/library/scala/util/Try.scala @@ -132,7 +132,7 @@ sealed abstract class Try[+T] extends Product with Serializable { * collection" contract even though it seems unlikely to matter much in a * collection with max size 1. 
*/ - @deprecatedInheritance("You were never supposed to be able to extend this class.", "2.12") + @deprecatedInheritance("You were never supposed to be able to extend this class.", "2.12.0") class WithFilter(p: T => Boolean) { def map[U](f: T => U): Try[U] = Try.this filter p map f def flatMap[U](f: T => Try[U]): Try[U] = Try.this filter p flatMap f diff --git a/src/reflect/scala/reflect/internal/Reporting.scala b/src/reflect/scala/reflect/internal/Reporting.scala index afdae8128946..27fda9a7d453 100644 --- a/src/reflect/scala/reflect/internal/Reporting.scala +++ b/src/reflect/scala/reflect/internal/Reporting.scala @@ -25,7 +25,7 @@ trait Reporting { self : Positions => type PerRunReporting <: PerRunReportingBase protected def PerRunReporting: PerRunReporting abstract class PerRunReportingBase { - def deprecationWarning(pos: Position, msg: String): Unit + def deprecationWarning(pos: Position, msg: String, since: String): Unit /** Have we already supplemented the error message of a compiler crash? */ private[this] var supplementedError = false diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala index 8d74065207e1..69bade55f1a5 100644 --- a/src/reflect/scala/reflect/internal/Variances.scala +++ b/src/reflect/scala/reflect/internal/Variances.scala @@ -79,7 +79,7 @@ trait Variances { // Unsound pre-2.11 behavior preserved under -Xsource:2.10 if (settings.isScala211 || sym.isOverridingSymbol) Invariant else { - currentRun.reporting.deprecationWarning(sym.pos, s"Construct depends on unsound variance analysis and will not compile in scala 2.11 and beyond") + currentRun.reporting.deprecationWarning(sym.pos, "Construct depends on unsound variance analysis and will not compile in scala 2.11 and beyond", "2.11.0") Bivariant } ) diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala index efb8126ff013..79195e6eab5c 100644 --- a/src/reflect/scala/reflect/internal/util/StringOps.scala +++ b/src/reflect/scala/reflect/internal/util/StringOps.scala @@ -72,12 +72,12 @@ trait StringOps { */ def countElementsAsString(n: Int, elements: String): String = n match { - case 0 => "no " + elements + "s" + case 0 => s"no ${elements}s" case 1 => "one " + elements case 2 => "two " + elements + "s" case 3 => "three " + elements + "s" case 4 => "four " + elements + "s" - case _ => "" + n + " " + elements + "s" + case _ => s"$n ${elements}s" } /** Turns a count into a friendly English description if n<=4. 
diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala index 7848753e6911..a87d1d23cc38 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala @@ -30,7 +30,7 @@ class JavaUniverse extends InternalSymbolTable with JavaUniverseForce with Refle // minimal Run to get Reporting wired def currentRun = new RunReporting {} class PerRunReporting extends PerRunReportingBase { - def deprecationWarning(pos: Position, msg: String): Unit = reporter.warning(pos, msg) + def deprecationWarning(pos: Position, msg: String, since: String): Unit = reporter.warning(pos, msg) } protected def PerRunReporting = new PerRunReporting diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index a77e6f45f8fb..763a8ccd1bc7 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -780,7 +780,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends } ((pos, msg)) :: loop(filtered) } - val warnings = loop(run.reporting.allConditionalWarnings) + val warnings = loop(run.reporting.allConditionalWarnings.map{case (pos, (msg, since)) => (pos, msg)}) if (warnings.nonEmpty) mostRecentWarnings = warnings } diff --git a/test/files/jvm/future-spec.check b/test/files/jvm/future-spec.check index 50c5d446af97..562d699bde8e 100644 --- a/test/files/jvm/future-spec.check +++ b/test/files/jvm/future-spec.check @@ -1 +1,3 @@ -warning: there were 20 deprecation warnings; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.11.0) +warning: there were 19 deprecation warnings (since 2.12.0) +warning: there were 20 deprecation warnings in total; re-run with -deprecation for details diff --git a/test/files/jvm/scala-concurrent-tck.check b/test/files/jvm/scala-concurrent-tck.check index 8aec46e5d636..88cff75abba9 100644 --- a/test/files/jvm/scala-concurrent-tck.check +++ b/test/files/jvm/scala-concurrent-tck.check @@ -1 +1 @@ -warning: there were 75 deprecation warnings; re-run with -deprecation for details +warning: there were 75 deprecation warnings (since 2.12.0); re-run with -deprecation for details diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check index ca91ec107384..5b8a08da82e0 100644 --- a/test/files/jvm/serialization-new.check +++ b/test/files/jvm/serialization-new.check @@ -1,4 +1,6 @@ -warning: there were three deprecation warnings; re-run with -deprecation for details +warning: there were two deprecation warnings (since 2.11.0) +warning: there was one deprecation warning (since 2.11.6) +warning: there were three deprecation warnings in total; re-run with -deprecation for details a1 = Array[1,2,3] _a1 = Array[1,2,3] arrayEquals(a1, _a1): true diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check index ca91ec107384..5b8a08da82e0 100644 --- a/test/files/jvm/serialization.check +++ b/test/files/jvm/serialization.check @@ -1,4 +1,6 @@ -warning: there were three deprecation warnings; re-run with -deprecation for details +warning: there were two deprecation warnings (since 2.11.0) +warning: there was one deprecation warning (since 2.11.6) +warning: there were three deprecation warnings in total; re-run with -deprecation for details a1 = Array[1,2,3] _a1 = Array[1,2,3] arrayEquals(a1, _a1): true diff --git a/test/files/neg/checksensible.check 
b/test/files/neg/checksensible.check index 7de22fef5475..a6e9176c6954 100644 --- a/test/files/neg/checksensible.check +++ b/test/files/neg/checksensible.check @@ -97,7 +97,7 @@ checksensible.scala:84: warning: comparing values of types EqEqRefTest.this.C3 a checksensible.scala:95: warning: comparing values of types Unit and Int using `!=' will always yield true while ((c = in.read) != -1) ^ -warning: there were three deprecation warnings; re-run with -deprecation for details +warning: there were three deprecation warnings (since 2.11.0); re-run with -deprecation for details error: No warnings can be incurred under -Xfatal-warnings. 34 warnings found one error found diff --git a/test/files/neg/t7783.check b/test/files/neg/t7783.check index 18dc84a8dbe5..647cfee12152 100644 --- a/test/files/neg/t7783.check +++ b/test/files/neg/t7783.check @@ -1,16 +1,16 @@ -t7783.scala:1: warning: type D in object O is deprecated (since ): +t7783.scala:1: warning: type D in object O is deprecated: object O { class C; @deprecated("", "") type D = C; def foo: Seq[D] = Nil } ^ -t7783.scala:11: warning: type D in object O is deprecated (since ): +t7783.scala:11: warning: type D in object O is deprecated: type T = O.D ^ -t7783.scala:12: warning: type D in object O is deprecated (since ): +t7783.scala:12: warning: type D in object O is deprecated: locally(null: O.D) ^ -t7783.scala:13: warning: type D in object O is deprecated (since ): +t7783.scala:13: warning: type D in object O is deprecated: val x: O.D = null ^ -t7783.scala:14: warning: type D in object O is deprecated (since ): +t7783.scala:14: warning: type D in object O is deprecated: locally(null.asInstanceOf[O.D]) ^ error: No warnings can be incurred under -Xfatal-warnings. diff --git a/test/files/run/bitsets.check b/test/files/run/bitsets.check index c24fd6238f98..770d9b5e3ffe 100644 --- a/test/files/run/bitsets.check +++ b/test/files/run/bitsets.check @@ -1,4 +1,4 @@ -warning: there were three deprecation warnings; re-run with -deprecation for details +warning: there were three deprecation warnings (since 2.11.6); re-run with -deprecation for details ms0 = BitSet(2) ms1 = BitSet(2) ms2 = BitSet(2) diff --git a/test/files/run/collection-stacks.check b/test/files/run/collection-stacks.check index 3a366bfcdfd9..826e3a87f835 100644 --- a/test/files/run/collection-stacks.check +++ b/test/files/run/collection-stacks.check @@ -1,4 +1,4 @@ -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details 3-2-1: true 3-2-1: true apply diff --git a/test/files/run/colltest.check b/test/files/run/colltest.check index 9579d781aac6..f362f23547ba 100644 --- a/test/files/run/colltest.check +++ b/test/files/run/colltest.check @@ -1,4 +1,4 @@ -warning: there were two deprecation warnings; re-run with -deprecation for details +warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details true false true diff --git a/test/files/run/delay-bad.check b/test/files/run/delay-bad.check index cb6e329f7ac8..fcd05c827f06 100644 --- a/test/files/run/delay-bad.check +++ b/test/files/run/delay-bad.check @@ -4,7 +4,7 @@ delay-bad.scala:53: warning: a pure expression does nothing in statement positio delay-bad.scala:73: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses f(new { val x = 5 } with E() { 5 }) ^ -warning: there was one deprecation warning; re-run with -deprecation for details 
+warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details // new C { } diff --git a/test/files/run/future-flatmap-exec-count.check b/test/files/run/future-flatmap-exec-count.check index 7065c133e028..7c68bd76b5d6 100644 --- a/test/files/run/future-flatmap-exec-count.check +++ b/test/files/run/future-flatmap-exec-count.check @@ -1,4 +1,4 @@ -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.12.0); re-run with -deprecation for details mapping execute() flatmapping diff --git a/test/files/run/reflection-java-annotations.check b/test/files/run/reflection-java-annotations.check index 842037254ef8..67317d20729e 100644 --- a/test/files/run/reflection-java-annotations.check +++ b/test/files/run/reflection-java-annotations.check @@ -1,4 +1,4 @@ -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details List(JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = [101, 101], v102 = [102, 102], v103 = ['g', 'g'], v104 = [104, 104], v105 = [105L, 105L], v106 = [106.0, 106.0], v107 = [107.0, 107.0], v108 = [false, true], v11 = classOf[JavaAnnottee_1], v110 = ["hello", "world"], v111 = [classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]], v112 = [FOO, BAR], v113 = [JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = '\027', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)], v12 = FOO, v13 = JavaSimpleAnnotation_1(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation_1], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\03', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false)) ======= new JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = Array(101, 101), v102 = Array(102, 102), v103 = Array('g', 'g'), v104 = Array(104, 104), v105 = Array(105L, 105L), v106 = Array(106.0, 106.0), v107 = Array(107.0, 107.0), v108 = Array(false, true), v11 = classOf[JavaAnnottee_1], v110 = Array("hello", "world"), v111 = Array(classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]), v112 = Array(FOO, BAR), v113 = Array(new JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = '\027', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)), v12 = FOO, v13 = new JavaSimpleAnnotation_1(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation_1], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\03', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false) diff --git a/test/files/run/repl-no-imports-no-predef-power.check b/test/files/run/repl-no-imports-no-predef-power.check index 0d4a30b8e3b3..a76db3dbc2cb 100644 --- a/test/files/run/repl-no-imports-no-predef-power.check +++ b/test/files/run/repl-no-imports-no-predef-power.check @@ -7,7 +7,7 @@ Try :help or completions for vals._ and power._ scala> // guarding against "error: reference to global is ambiguous" scala> global.emptyValDef // "it is imported twice in the same scope by ..." 
-warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details res0: $r.global.noSelfType.type = private val _ = _ scala> val tp = ArrayClass[scala.util.Random] // magic with tags diff --git a/test/files/run/repl-power.check b/test/files/run/repl-power.check index 0d4a30b8e3b3..a76db3dbc2cb 100644 --- a/test/files/run/repl-power.check +++ b/test/files/run/repl-power.check @@ -7,7 +7,7 @@ Try :help or completions for vals._ and power._ scala> // guarding against "error: reference to global is ambiguous" scala> global.emptyValDef // "it is imported twice in the same scope by ..." -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details res0: $r.global.noSelfType.type = private val _ = _ scala> val tp = ArrayClass[scala.util.Random] // magic with tags diff --git a/test/files/run/richs.check b/test/files/run/richs.check index cf265ae00738..97b032393c6a 100644 --- a/test/files/run/richs.check +++ b/test/files/run/richs.check @@ -1,4 +1,4 @@ -warning: there were two deprecation warnings; re-run with -deprecation for details +warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details RichCharTest1: true diff --git a/test/files/run/sbt-icode-interface.check b/test/files/run/sbt-icode-interface.check index df1629dd7eb1..7421f077f62a 100644 --- a/test/files/run/sbt-icode-interface.check +++ b/test/files/run/sbt-icode-interface.check @@ -1 +1 @@ -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.12.0); re-run with -deprecation for details diff --git a/test/files/run/t2212.check b/test/files/run/t2212.check index 1465f1341aa2..d13ea43b0747 100644 --- a/test/files/run/t2212.check +++ b/test/files/run/t2212.check @@ -1,4 +1,4 @@ -warning: there were two deprecation warnings; re-run with -deprecation for details +warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details LinkedList(1) LinkedList(1) true diff --git a/test/files/run/t3361.check b/test/files/run/t3361.check index 5e0a76350199..7d2fa3b1559b 100644 --- a/test/files/run/t3361.check +++ b/test/files/run/t3361.check @@ -1 +1 @@ -warning: there were 16 deprecation warnings; re-run with -deprecation for details +warning: there were 16 deprecation warnings (since 2.11.0); re-run with -deprecation for details diff --git a/test/files/run/t3647.check b/test/files/run/t3647.check index e5c1ee17013e..cb16c6486f84 100644 --- a/test/files/run/t3647.check +++ b/test/files/run/t3647.check @@ -1 +1 @@ -warning: there were three deprecation warnings; re-run with -deprecation for details +warning: there were three deprecation warnings (since 2.11.8); re-run with -deprecation for details diff --git a/test/files/run/t3888.check b/test/files/run/t3888.check index df1629dd7eb1..6fda32d713ea 100644 --- a/test/files/run/t3888.check +++ b/test/files/run/t3888.check @@ -1 +1 @@ -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details diff --git a/test/files/run/t3970.check b/test/files/run/t3970.check index 0683a6c1a689..fd1c3af3bb92 100644 --- a/test/files/run/t3970.check +++ b/test/files/run/t3970.check @@ -1 +1 @@ -warning: there were 5 deprecation 
warnings; re-run with -deprecation for details +warning: there were 5 deprecation warnings (since 2.11.0); re-run with -deprecation for details diff --git a/test/files/run/t3996.check b/test/files/run/t3996.check index a9ecc29fea08..f214cd8e6a65 100644 --- a/test/files/run/t3996.check +++ b/test/files/run/t3996.check @@ -1 +1 @@ -warning: there were two deprecation warnings; re-run with -deprecation for details +warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details diff --git a/test/files/run/t4080.check b/test/files/run/t4080.check index 462e925b7694..18f18ef2ddab 100644 --- a/test/files/run/t4080.check +++ b/test/files/run/t4080.check @@ -1,2 +1,2 @@ -warning: there were three deprecation warnings; re-run with -deprecation for details +warning: there were three deprecation warnings (since 2.11.0); re-run with -deprecation for details LinkedList(1, 0, 2, 3) diff --git a/test/files/run/t4396.check b/test/files/run/t4396.check index d38fb7fae7ec..9eb1be0255a6 100644 --- a/test/files/run/t4396.check +++ b/test/files/run/t4396.check @@ -1,4 +1,4 @@ -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details hallo constructor out:22 diff --git a/test/files/run/t4461.check b/test/files/run/t4461.check index 346993af6f48..32c7f5c4873c 100644 --- a/test/files/run/t4461.check +++ b/test/files/run/t4461.check @@ -1,4 +1,4 @@ -warning: there were four deprecation warnings; re-run with -deprecation for details +warning: there were four deprecation warnings (since 2.11.0); re-run with -deprecation for details Include(End,1) Include(End,2) Include(End,3) diff --git a/test/files/run/t4594-repl-settings.scala b/test/files/run/t4594-repl-settings.scala index 8ececce24af3..524ec2884341 100644 --- a/test/files/run/t4594-repl-settings.scala +++ b/test/files/run/t4594-repl-settings.scala @@ -9,7 +9,7 @@ object Test extends SessionTest { |depp: String | |scala> def a = depp - |warning: there was one deprecation warning; re-run with -deprecation for details + |warning: there was one deprecation warning (since Time began.); re-run with -deprecation for details |a: String | |scala> :settings -deprecation diff --git a/test/files/run/t4680.check b/test/files/run/t4680.check index 21a1e0cd1505..21c5f9e56791 100644 --- a/test/files/run/t4680.check +++ b/test/files/run/t4680.check @@ -4,7 +4,7 @@ t4680.scala:51: warning: a pure expression does nothing in statement position; y t4680.scala:69: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses new { val x = 5 } with E() { 5 } ^ -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details // new C { } diff --git a/test/files/run/t4813.check b/test/files/run/t4813.check index a9ecc29fea08..f214cd8e6a65 100644 --- a/test/files/run/t4813.check +++ b/test/files/run/t4813.check @@ -1 +1 @@ -warning: there were two deprecation warnings; re-run with -deprecation for details +warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details diff --git a/test/files/run/t5428.check b/test/files/run/t5428.check index 52fce0939968..d298f0ef102e 100644 --- a/test/files/run/t5428.check +++ b/test/files/run/t5428.check @@ -1,2 +1,2 @@ -warning: there was one deprecation warning; re-run with -deprecation for details +warning: 
there was one deprecation warning (since 2.11.0); re-run with -deprecation for details Stack(8, 7, 6, 5, 4, 3) diff --git a/test/files/run/t576.check b/test/files/run/t576.check index 22f3843abfea..2934e395ba84 100644 --- a/test/files/run/t576.check +++ b/test/files/run/t576.check @@ -1,4 +1,4 @@ -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details 1 2 3 diff --git a/test/files/run/t6111.check b/test/files/run/t6111.check index 588065800198..99f9c551b254 100644 --- a/test/files/run/t6111.check +++ b/test/files/run/t6111.check @@ -1,3 +1,3 @@ -warning: there were two deprecation warnings; re-run with -deprecation for details +warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details (8,8) (x,x) diff --git a/test/files/run/t6292.check b/test/files/run/t6292.check index 6f7430d5b866..f7b8f483ab94 100644 --- a/test/files/run/t6292.check +++ b/test/files/run/t6292.check @@ -1 +1 @@ -warning: there were 7 deprecation warnings; re-run with -deprecation for details +warning: there were 7 deprecation warnings (since 2.11.0); re-run with -deprecation for details diff --git a/test/files/run/t6329_repl.check b/test/files/run/t6329_repl.check index 8909c47e79c4..86cd984e117a 100644 --- a/test/files/run/t6329_repl.check +++ b/test/files/run/t6329_repl.check @@ -3,28 +3,28 @@ scala> import scala.reflect.classTag import scala.reflect.classTag scala> classManifest[scala.List[_]] -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.10.0); re-run with -deprecation for details res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[] scala> classTag[scala.List[_]] res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List scala> classManifest[scala.collection.immutable.List[_]] -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.10.0); re-run with -deprecation for details res2: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[] scala> classTag[scala.collection.immutable.List[_]] res3: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List scala> classManifest[Predef.Set[_]] -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.10.0); re-run with -deprecation for details res4: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set[] scala> classTag[Predef.Set[_]] res5: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set scala> classManifest[scala.collection.immutable.Set[_]] -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.10.0); re-run with -deprecation for details res6: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set[] scala> classTag[scala.collection.immutable.Set[_]] diff --git a/test/files/run/t6329_repl_bug.check b/test/files/run/t6329_repl_bug.check index 4b539f9e580f..6476fa71fc07 100644 --- a/test/files/run/t6329_repl_bug.check +++ b/test/files/run/t6329_repl_bug.check @@ -6,7 +6,7 @@ scala> import scala.reflect.runtime._ import scala.reflect.runtime._ scala> classManifest[List[_]] -warning: there was one deprecation warning; re-run with 
-deprecation for details +warning: there was one deprecation warning (since 2.10.0); re-run with -deprecation for details res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[] scala> scala.reflect.classTag[List[_]] diff --git a/test/files/run/t6329_vanilla_bug.check b/test/files/run/t6329_vanilla_bug.check index 01bf0636ea13..4e139dd9544d 100644 --- a/test/files/run/t6329_vanilla_bug.check +++ b/test/files/run/t6329_vanilla_bug.check @@ -1,3 +1,3 @@ -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.10.0); re-run with -deprecation for details scala.collection.immutable.List[] scala.collection.immutable.List diff --git a/test/files/run/t6481.check b/test/files/run/t6481.check index 4a3f6f7ee944..0535110f7542 100644 --- a/test/files/run/t6481.check +++ b/test/files/run/t6481.check @@ -1,4 +1,4 @@ -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details delayed init new foo(1, 2) delayed init diff --git a/test/files/run/t6690.check b/test/files/run/t6690.check index a9ecc29fea08..f214cd8e6a65 100644 --- a/test/files/run/t6690.check +++ b/test/files/run/t6690.check @@ -1 +1 @@ -warning: there were two deprecation warnings; re-run with -deprecation for details +warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details diff --git a/test/files/run/t6863.check b/test/files/run/t6863.check index d4df5f7a74bc..010e82a41ef6 100644 --- a/test/files/run/t6863.check +++ b/test/files/run/t6863.check @@ -10,4 +10,4 @@ t6863.scala:46: warning: comparing values of types Unit and Unit using `==' will t6863.scala:59: warning: comparing values of types Unit and Unit using `==' will always yield true assert({ () => x }.apply == ()) ^ -warning: there were four deprecation warnings; re-run with -deprecation for details +warning: there were four deprecation warnings (since 2.11.0); re-run with -deprecation for details diff --git a/test/files/run/t6935.check b/test/files/run/t6935.check index df1629dd7eb1..6fda32d713ea 100644 --- a/test/files/run/t6935.check +++ b/test/files/run/t6935.check @@ -1 +1 @@ -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details diff --git a/test/files/run/t8549.check b/test/files/run/t8549.check index a9ecc29fea08..f214cd8e6a65 100644 --- a/test/files/run/t8549.check +++ b/test/files/run/t8549.check @@ -1 +1 @@ -warning: there were two deprecation warnings; re-run with -deprecation for details +warning: there were two deprecation warnings (since 2.11.0); re-run with -deprecation for details diff --git a/test/files/run/unittest_collection.check b/test/files/run/unittest_collection.check index df1629dd7eb1..6fda32d713ea 100644 --- a/test/files/run/unittest_collection.check +++ b/test/files/run/unittest_collection.check @@ -1 +1 @@ -warning: there was one deprecation warning; re-run with -deprecation for details +warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala index 8cc7aefdd34e..fb05ab8d5a2a 100644 --- a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala +++ 
b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala @@ -85,7 +85,7 @@ class SymbolTableForUnitTesting extends SymbolTable { // minimal Run to get Reporting wired def currentRun = new RunReporting {} class PerRunReporting extends PerRunReportingBase { - def deprecationWarning(pos: Position, msg: String): Unit = reporter.warning(pos, msg) + def deprecationWarning(pos: Position, msg: String, since: String): Unit = reporter.warning(pos, msg) } protected def PerRunReporting = new PerRunReporting From 85057d542c7e201191544415ff454afb243aa104 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Thu, 19 May 2016 18:46:49 +0200 Subject: [PATCH 0096/2793] Add documentation to @deprecated --- src/library/scala/deprecated.scala | 43 ++++++++++++++++++- src/library/scala/deprecatedInheritance.scala | 20 ++++++++- src/library/scala/deprecatedName.scala | 40 +++++++++-------- src/library/scala/deprecatedOverriding.scala | 19 ++++++++ 4 files changed, 101 insertions(+), 21 deletions(-) diff --git a/src/library/scala/deprecated.scala b/src/library/scala/deprecated.scala index e940a4bfbe2e..7338dffb8dd0 100644 --- a/src/library/scala/deprecated.scala +++ b/src/library/scala/deprecated.scala @@ -11,11 +11,52 @@ package scala import scala.annotation.meta._ /** An annotation that designates that a definition is deprecated. - * Access to the member then generates a deprecated warning. + * A deprecation warning is issued upon usage of the annotated definition. * + * Library authors should state the library's deprecation policy in their documentation to give + * developers guidance on how long a deprecated definition will be preserved. + * + * Library authors should prepend the name of their library to the version number to help + * developers distinguish deprecations coming from different libraries: + * + * {{{ + * @deprecated("this method will be removed", "FooLib 12.0") + * def oldMethod(x: Int) = ... + * }}} + * + * The compiler will emit deprecation warnings grouped by library and version: + * + * {{{ + * oldMethod(1) + * oldMethod(2) + * aDeprecatedMethodFromBarLibrary(3, 4) + * + * // warning: there were two deprecation warnings (since FooLib 12.0) + * // warning: there was one deprecation warning (since BarLib 3.2) + * // warning: there were three deprecation warnings in total; re-run with -deprecation for details + * }}} + * + * A deprecated element of the Scala language or a definition in the Scala standard library will + * be preserved for at least another major version. + * + * This means that an element deprecated since 2.12 will be preserved in 2.13 and will very likely + * not be part of 2.14, though sometimes a deprecated element might be kept for more than a major + * release to ease migration and upgrades from older Scala versions.
+ * Developers should not rely on this. + * + * @note The Scala team has decided to enact a special deprecation policy for the 2.12 release:
+ * + * As an upgrade from Scala 2.11 to Scala 2.12 also requires upgrading from Java 6 to Java 8, + * no deprecated elements will be removed in this release to ease migration and upgrades + * from older Scala versions. + * + * @see The official documentation on [[http://www.scala-lang.org/news/2.11.0/#binary-compatibility binary compatibility]]. * @param message the message to print during compilation if the definition is accessed * @param since a string identifying the first version in which the definition was deprecated * @since 2.3 + * @see [[scala.deprecatedInheritance]] + * @see [[scala.deprecatedOverriding]] + * @see [[scala.deprecatedName]] */ @getter @setter @beanGetter @beanSetter class deprecated(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/deprecatedInheritance.scala b/src/library/scala/deprecatedInheritance.scala index 7614a96f95f6..b85d07b0bdd5 100644 --- a/src/library/scala/deprecatedInheritance.scala +++ b/src/library/scala/deprecatedInheritance.scala @@ -11,12 +11,28 @@ package scala /** An annotation that designates that inheriting from a class is deprecated. * * This is usually done to warn about a non-final class being made final in a future version. - * Sub-classing such a class then generates a warning. No warnings are generated if the - * subclass is in the same compilation unit. + * Sub-classing such a class then generates a warning. + * + * No warnings are generated if the subclass is in the same compilation unit. + * + * {{{ + * @deprecatedInheritance("this class will be made final", "2.12") + * class Foo + * }}} + * + * {{{ + * val foo = new Foo // no deprecation warning + * class Bar extends Foo + * // warning: inheritance from class Foo is deprecated (since 2.12): this class will be made final + * // class Bar extends Foo + * // ^ + * }}} * * @param message the message to print during compilation if the class was sub-classed * @param since a string identifying the first version in which inheritance was deprecated * @since 2.10 + * @see [[scala.deprecated]] * @see [[scala.deprecatedOverriding]] + * @see [[scala.deprecatedName]] */ class deprecatedInheritance(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/deprecatedName.scala b/src/library/scala/deprecatedName.scala index da8b76efc90f..e2322f03639b 100644 --- a/src/library/scala/deprecatedName.scala +++ b/src/library/scala/deprecatedName.scala @@ -10,23 +10,27 @@ package scala import scala.annotation.meta._ -/** - * An annotation that designates the name of the parameter to which it is - * applied as deprecated. Using that name in a named argument generates - * a deprecation warning. - * - * For instance, evaluating the code below in the Scala interpreter - * {{{ - * def inc(x: Int, @deprecatedName('y) n: Int): Int = x + n - * inc(1, y = 2) - * }}} - * will produce the following output: - * {{{ - * warning: there were 1 deprecation warnings; re-run with -deprecation for details - * res0: Int = 3 - * }}} - * - * @since 2.8.1 - */ + + /** An annotation that designates that the name of a parameter is deprecated. + * + * Using this name in a named argument generates a deprecation warning. 
+ * + * For instance, evaluating the code below in the Scala interpreter (with `-deprecation`) + * {{{ + * def inc(x: Int, @deprecatedName('y, "2.12") n: Int): Int = x + n + * inc(1, y = 2) + * }}} + * will produce the following warning: + * {{{ + * warning: the parameter name y is deprecated (since 2.12): use n instead + * inc(1, y = 2) + * ^ + * }}} + * + * @since 2.8.1 + * @see [[scala.deprecated]] + * @see [[scala.deprecatedInheritance]] + * @see [[scala.deprecatedOverriding]] + */ @param class deprecatedName(name: Symbol = Symbol(""), since: String = "") extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/deprecatedOverriding.scala b/src/library/scala/deprecatedOverriding.scala index 26a9d9ee7d56..ee887db220c6 100644 --- a/src/library/scala/deprecatedOverriding.scala +++ b/src/library/scala/deprecatedOverriding.scala @@ -12,9 +12,28 @@ package scala * * Overriding such a member in a sub-class then generates a warning. * + * {{{ + * class Foo { + * @deprecatedOverriding("this method will be made final", "2.12") + * def add(x: Int, y: Int) = x + y + * } + * }}} + * + * {{{ + * class Bar extends Foo // no deprecation warning + * class Baz extends Foo { + * override def add(x: Int, y: Int) = x - y + * } + * // warning: overriding method add in class Foo is deprecated (since 2.12): this method will be made final + * // override def add(x: Int, y: Int) = x - y + * // ^ + * }}} + * * @param message the message to print during compilation if the member was overridden * @param since a string identifying the first version in which overriding was deprecated * @since 2.10 + * @see [[scala.deprecated]] * @see [[scala.deprecatedInheritance]] + * @see [[scala.deprecatedName]] */ class deprecatedOverriding(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation From a45509cfe9a40b83e92483471889f9a909bea590 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 30 May 2016 08:52:41 +0200 Subject: [PATCH 0097/2793] Don't pass -opt to starr / locker build in the bootstrap script Renaming -Yopt to -opt revealed that we're passing the flag when building the locker (and optionally the starr) version. This is not necessary: when building the next stage with the flag enabled, the same optimizations are performed no matter if the current stage was built with the flag or not. --- scripts/jobs/integrate/bootstrap | 2 -- 1 file changed, 2 deletions(-) diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index 05c7d22b26b4..76673b4f32c5 100644 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -494,7 +494,6 @@ bootstrap() { -Dremote.snapshot.repository=NOPE\ -Dremote.release.repository=$releaseTempRepoUrl\ -Drepository.credentials.id=$releaseTempRepoCred\ - -Dscalac.args.optimise=-opt:l:classpath\ -Ddocs.skip=1\ -Dlocker.skip=1\ $publishStarrPrivateTask >> $baseDir/logs/builds 2>&1 @@ -516,7 +515,6 @@ bootstrap() { $SET_STARR\ -Dremote.release.repository=$releaseTempRepoUrl\ -Drepository.credentials.id=$releaseTempRepoCred\ - -Dscalac.args.optimise=-opt:l:classpath\ -Ddocs.skip=1\ -Dlocker.skip=1\ $publishLockerPrivateTask >> $baseDir/logs/builds 2>&1 From 7b132f39b82e4fc47cd95eadce9e3f22da8c8d82 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 31 May 2016 15:19:49 +1000 Subject: [PATCH 0098/2793] Avoid tree sharing with substituteThis The underlying transformer has a by-name parameter to provide the `to` tree, but this was strict in the layers of API above.
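To make the mechanism concrete, here is a minimal self-contained sketch, with a toy `Tree` type and simplified signatures rather than the actual reflect API: because `to` is passed by name, the thunk is re-evaluated for every occurrence that gets replaced, so each substitution site receives a fresh tree instead of one shared instance.

```
// Toy sketch only: This/Select/Block are stand-ins, not scala.reflect trees.
// The point is the by-name `to` parameter, evaluated once per replaced occurrence.
sealed trait Tree { var tpe: String = "" }              // mutable field, like the compiler's trees
case class This(clazz: String)              extends Tree
case class Select(qual: Tree, name: String) extends Tree
case class Block(stats: List[Tree])         extends Tree

def substituteThis(tree: Tree, clazz: String, to: => Tree): Tree = tree match {
  case This(`clazz`)      => to                          // a fresh tree is built here, per occurrence
  case Select(qual, name) => Select(substituteThis(qual, clazz, to), name)
  case Block(stats)       => Block(stats.map(substituteThis(_, clazz, to)))
  case other              => other
}

val input  = Block(List(Select(This("C"), "x"), Select(This("C"), "y")))
val result = substituteThis(input, "C", Select(This("Outer"), "c"))
// The two occurrences of C.this are replaced by two distinct Select trees.
// Had `to` been evaluated eagerly and the result reused, both sites would share
// one tree, and a later mutation of its `tpe` would leak between them.
```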
Tree sharing is frowned upon in general as it leads to cross talk when, e.g., the erasure typechecker mutates the `tpe` field of the shared tree in different context. --- src/reflect/scala/reflect/api/Internals.scala | 6 +++--- src/reflect/scala/reflect/internal/Internals.scala | 2 +- src/reflect/scala/reflect/internal/Trees.scala | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/reflect/scala/reflect/api/Internals.scala b/src/reflect/scala/reflect/api/Internals.scala index 9b7112f01178..2c8f84be0baa 100644 --- a/src/reflect/scala/reflect/api/Internals.scala +++ b/src/reflect/scala/reflect/api/Internals.scala @@ -116,7 +116,7 @@ trait Internals { self: Universe => /** Substitute given tree `to` for occurrences of nodes that represent * `C.this`, where `C` refers to the given class `clazz`. */ - def substituteThis(tree: Tree, clazz: Symbol, to: Tree): Tree + def substituteThis(tree: Tree, clazz: Symbol, to: => Tree): Tree /** A factory method for `ClassDef` nodes. */ @@ -391,7 +391,7 @@ trait Internals { self: Universe => def substituteTypes(from: List[Symbol], to: List[Type]): Tree = internal.substituteTypes(tree, from, to) /** @see [[internal.substituteThis]] */ - def substituteThis(clazz: Symbol, to: Tree): Tree = internal.substituteThis(tree, clazz, to) + def substituteThis(clazz: Symbol, to: => Tree): Tree = internal.substituteThis(tree, clazz, to) } /** Extension methods for symbols */ @@ -1143,7 +1143,7 @@ trait Internals { self: Universe => /** @see [[InternalApi.substituteThis]] */ @deprecated("use `internal.substituteThis` instead or import `internal.decorators._` for infix syntax", "2.11.0") - def substituteThis(clazz: Symbol, to: Tree): Tree = internal.substituteThis(tree, clazz, to) + def substituteThis(clazz: Symbol, to: => Tree): Tree = internal.substituteThis(tree, clazz, to) } /** Scala 2.10 compatibility enrichments for Tree. 
*/ diff --git a/src/reflect/scala/reflect/internal/Internals.scala b/src/reflect/scala/reflect/internal/Internals.scala index 1a48701ca72b..a07441e3ca21 100644 --- a/src/reflect/scala/reflect/internal/Internals.scala +++ b/src/reflect/scala/reflect/internal/Internals.scala @@ -29,7 +29,7 @@ trait Internals extends api.Internals { def freeTypes(tree: Tree): List[FreeTypeSymbol] = tree.freeTypes def substituteSymbols(tree: Tree, from: List[Symbol], to: List[Symbol]): Tree = tree.substituteSymbols(from, to) def substituteTypes(tree: Tree, from: List[Symbol], to: List[Type]): Tree = tree.substituteTypes(from, to) - def substituteThis(tree: Tree, clazz: Symbol, to: Tree): Tree = tree.substituteThis(clazz, to) + def substituteThis(tree: Tree, clazz: Symbol, to: => Tree): Tree = tree.substituteThis(clazz, to) def attachments(tree: Tree): Attachments { type Pos = Position } = tree.attachments def updateAttachment[T: ClassTag](tree: Tree, attachment: T): tree.type = tree.updateAttachment(attachment) def removeAttachment[T: ClassTag](tree: Tree): tree.type = tree.removeAttachment[T] diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index c9dfd0c337a8..77097d892d43 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -181,7 +181,7 @@ trait Trees extends api.Trees { def substituteTypes(from: List[Symbol], to: List[Type]): Tree = new TreeTypeSubstituter(from, to)(this) - def substituteThis(clazz: Symbol, to: Tree): Tree = + def substituteThis(clazz: Symbol, to: => Tree): Tree = new ThisSubstituter(clazz, to) transform this def hasExistingSymbol = (symbol ne null) && (symbol ne NoSymbol) From 4c2f6f3cb1e0eee11facae8c44e3cd7db11378f1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Sat, 28 May 2016 09:23:05 +0200 Subject: [PATCH 0099/2793] Privatize the deprecated members of `immutable.Range`. The implementation of these obscure members of `Range` are uselessly complicated for the purposes of `Range` itself. Making them private will allow to relax their semantics to the specific needs of `Range`, making them simpler, together with the initialization code of `Range`. `terminalElement` becomes dead code and is removed. --- src/library/scala/collection/immutable/Range.scala | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index 0c24d17c1539..ca4f1f47a7db 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -81,8 +81,7 @@ extends scala.collection.AbstractSeq[Int] || (start == end && !isInclusive) ) - @deprecated("this method will be made private, use `length` instead", "2.11.0") - final val numRangeElements: Int = { + private val numRangeElements: Int = { if (step == 0) throw new IllegalArgumentException("step cannot be 0.") else if (isEmpty) 0 else { @@ -92,8 +91,7 @@ extends scala.collection.AbstractSeq[Int] } } - @deprecated("this method will be made private, use `last` instead", "2.11.0") - final val lastElement = + private val lastElement = if (isEmpty) start - step else step match { case 1 => if (isInclusive) end else end-1 @@ -105,9 +103,6 @@ extends scala.collection.AbstractSeq[Int] else end - step } - @deprecated("this method will be made private", "2.11.0") - final val terminalElement = lastElement + step - /** The last element of this range. 
This method will return the correct value * even if there are too many elements to iterate over. */ From ec1e80422676f02a088034d1658c7cb7f2e6e663 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Sat, 28 May 2016 09:44:33 +0200 Subject: [PATCH 0100/2793] Relax the semantics of `Range.lastElement` for internal use. `lastElement` is only used in code paths where the range is non-empty. It is therefore wasteful to try and give it a sort of sensible value for empty ranges. --- .../scala/collection/immutable/Range.scala | 21 +++++++++---------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index ca4f1f47a7db..2e567501155f 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -91,17 +91,16 @@ extends scala.collection.AbstractSeq[Int] } } - private val lastElement = - if (isEmpty) start - step - else step match { - case 1 => if (isInclusive) end else end-1 - case -1 => if (isInclusive) end else end+1 - case _ => - val remainder = (gap % step).toInt - if (remainder != 0) end - remainder - else if (isInclusive) end - else end - step - } + // This field has a sensible value only for non-empty ranges + private val lastElement = step match { + case 1 => if (isInclusive) end else end-1 + case -1 => if (isInclusive) end else end+1 + case _ => + val remainder = (gap % step).toInt + if (remainder != 0) end - remainder + else if (isInclusive) end + else end - step + } /** The last element of this range. This method will return the correct value * even if there are too many elements to iterate over. From 608387529b790c2a0fc87b4cf5b5324d68d02648 Mon Sep 17 00:00:00 2001 From: Mike Pheasant Date: Wed, 1 Jun 2016 08:51:41 +1000 Subject: [PATCH 0101/2793] SI-9767 document and test behaviour of String->integer/float conversions We delegate `String`'s extension methods `toInt`, `toFloat`, etc to corresponding methods in the Java standard library. These differ in the way they handle whitespace in the original string. This commit documents and tests the current behaviour. --- .../collection/immutable/StringLike.scala | 22 ++++++++----- .../collection/immutable/StringLikeTest.scala | 31 +++++++++++++++++++ 2 files changed, 46 insertions(+), 7 deletions(-) diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala index b468b09a9d1a..155d25d9339f 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -286,31 +286,39 @@ self => def r(groupNames: String*): Regex = new Regex(toString, groupNames: _*) /** - * @throws java.lang.IllegalArgumentException - If the string does not contain a parsable boolean. + * @throws java.lang.IllegalArgumentException If the string does not contain a parsable `Boolean`. */ def toBoolean: Boolean = parseBoolean(toString) /** - * @throws java.lang.NumberFormatException - If the string does not contain a parsable byte. + * Parse as a `Byte` (string must contain only decimal digits and optional leading `-`). + * @throws java.lang.NumberFormatException If the string does not contain a parsable `Byte`. */ def toByte: Byte = java.lang.Byte.parseByte(toString) /** - * @throws java.lang.NumberFormatException - If the string does not contain a parsable short. 
+ * Parse as a `Short` (string must contain only decimal digits and optional leading `-`). + * @throws java.lang.NumberFormatException If the string does not contain a parsable `Short`. */ def toShort: Short = java.lang.Short.parseShort(toString) /** - * @throws java.lang.NumberFormatException - If the string does not contain a parsable int. + * Parse as an `Int` (string must contain only decimal digits and optional leading `-`). + * @throws java.lang.NumberFormatException If the string does not contain a parsable `Int`. */ def toInt: Int = java.lang.Integer.parseInt(toString) /** - * @throws java.lang.NumberFormatException - If the string does not contain a parsable long. + * Parse as a `Long` (string must contain only decimal digits and optional leading `-`). + * @throws java.lang.NumberFormatException If the string does not contain a parsable `Long`. */ def toLong: Long = java.lang.Long.parseLong(toString) /** - * @throws java.lang.NumberFormatException - If the string does not contain a parsable float. + * Parse as a `Float` (surrounding whitespace is removed with a `trim`). + * @throws java.lang.NumberFormatException If the string does not contain a parsable `Float`. + * @throws java.lang.NullPointerException If the string is null. */ def toFloat: Float = java.lang.Float.parseFloat(toString) /** - * @throws java.lang.NumberFormatException - If the string does not contain a parsable double. + * Parse as a `Double` (surrounding whitespace is removed with a `trim`). + * @throws java.lang.NumberFormatException If the string does not contain a parsable `Double`. + * @throws java.lang.NullPointerException If the string is null. */ def toDouble: Double = java.lang.Double.parseDouble(toString) diff --git a/test/junit/scala/collection/immutable/StringLikeTest.scala b/test/junit/scala/collection/immutable/StringLikeTest.scala index 50be638b8908..44bade860ebd 100644 --- a/test/junit/scala/collection/immutable/StringLikeTest.scala +++ b/test/junit/scala/collection/immutable/StringLikeTest.scala @@ -1,5 +1,6 @@ package scala.collection.immutable +import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 @@ -40,4 +41,34 @@ class StringLikeTest { AssertUtil.assertSameElements("--ch--omp--".split("-"), Array("", "", "ch", "", "omp")) // All the cases! 
AssertUtil.assertSameElements(twopairs.split(high), Array(twopairs)) //don't split on characters that are half a surrogate pair } + + /* Test for SI-9767 */ + @Test + def testNumericConversion: Unit = { + val sOne = " \t\n 1 \n\r\t " + val sOk = "2" + val sNull:String = null + + AssertUtil.assertThrows[java.lang.NumberFormatException](sOne.toInt) + AssertUtil.assertThrows[java.lang.NumberFormatException](sOne.toLong) + AssertUtil.assertThrows[java.lang.NumberFormatException](sOne.toShort) + AssertUtil.assertThrows[java.lang.NumberFormatException](sOne.toByte) + assertTrue("trim toDouble", sOne.toDouble == 1.0d) + assertTrue("trim toFloat", sOne.toFloat == 1.0f) + + assertTrue("no trim toInt", sOk.toInt == 2) + assertTrue("no trim toLong", sOk.toLong == 2L) + assertTrue("no trim toShort", sOk.toShort == 2.toShort) + assertTrue("no trim toByte", sOk.toByte == 2.toByte) + assertTrue("no trim toDouble", sOk.toDouble == 2.0d) + assertTrue("no trim toFloat", sOk.toFloat == 2.0f) + + AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toInt, {s => s == "null"}) + AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toLong, {s => s == "null"}) + AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toShort, {s => s == "null"}) + AssertUtil.assertThrows[java.lang.NumberFormatException](sNull.toByte, {s => s == "null"}) + + AssertUtil.assertThrows[java.lang.NullPointerException](sNull.toDouble) + AssertUtil.assertThrows[java.lang.NullPointerException](sNull.toFloat) + } } From 0533a3df71e9c855ac68e10d060c2c87d16994e0 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 4 May 2016 21:16:41 +1000 Subject: [PATCH 0102/2793] Lambda impl methods static and more stably named The body of lambdas is compiled into a synthetic method in the enclosing class. Previously, this method was a public virtual method named `fully$qualified$Class$$anonfun$n`. For lambdas that didn't capture a `this` reference, a static method was used. This commit changes two aspects. Firstly, all lambda impl methods are now emitted static. An extra parameter is added to those that require a this reference. This is an improvement as it: - allows shorter, more readable names for the lambda impl method - avoids pollution of the vtable of the class. Note that javac uses private instance methods, rather than public static methods. If we followed its lead, we would be unable to support important use cases in our inliner. Secondly, the name of the enclosing method has been included in the name of the lambda impl method to improve debuggability and to improve serialization compatibility. The serialization improvement comes from the way that fresh names for the impl methods are allocated: adding or removing lambdas in methods not named "foo" won't change the numbering of the `anonfun$foo$n` impl methods from methods named "foo". This is in line with user expectations about anonymous class and lambda serialization stability. Brian Goetz has described this tricky area well in: http://cr.openjdk.java.net/~briangoetz/eg-attachments/lambda-serialization.html This commit doesn't go as far as Javac: we don't use the hash of the lambda type info, param names, etc to map to a lambda impl method name. As such, we are more prone to the type-1 and -2 failures described there. However, our Scala 2.11.8 has similar characteristics, so we aren't going backwards. Special case in the naming: Use "new" rather than "<init>" for constructor enclosed lambdas, as javac does.
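To illustrate the new scheme (a hypothetical example, not code from this patch): in a class like the one below, each lambda body is now emitted as a static impl method whose name embeds the enclosing method.

```scala
class C {
  def foo(xs: List[Int]) = xs.map(x => x + 1) // body emitted as static C.$anonfun$foo$1
  def bar(xs: List[Int]) = xs.map(x => x * 2) // body emitted as static C.$anonfun$bar$1
}
```

Adding or removing a lambda in `foo` only renumbers the `$anonfun$foo$n` methods; the impl method for `bar` keeps its name, which is what preserves serialization compatibility for unrelated lambdas.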
I have also changed the way that "delambdafy target" methods are identifed. Rather than relying on the naming convention, I have switched to using a symbol attachment. The assumption is that we only need to identify them from within the same compilation unit. This means we can distinguish impl metbods for expanded functions (ones called from an `apply` method of an ahead-of-time expanded anonfun class), from those that truly end up as targets for lambda metafactory. Only the latter are translated to static methods in this patch. --- .../scala/tools/nsc/ast/TreeGen.scala | 50 ++++++++++++++++-- .../backend/jvm/analysis/BackendUtils.scala | 2 +- .../tools/nsc/transform/Delambdafy.scala | 29 ++++++++--- .../tools/nsc/transform/SpecializeTypes.scala | 1 + .../scala/tools/nsc/transform/UnCurry.scala | 19 +++++-- .../tools/nsc/typechecker/Duplicators.scala | 7 ++- .../reflect/internal/StdAttachments.scala | 2 + .../scala/reflect/internal/Symbols.scala | 2 +- .../reflect/runtime/JavaUniverseForce.scala | 1 + test/files/run/delambdafy_t6028.check | 8 +-- test/files/run/delambdafy_t6555.check | 4 +- .../delambdafy_uncurry_byname_method.check | 4 +- .../files/run/delambdafy_uncurry_method.check | 4 +- test/files/run/t9097.scala | 2 +- .../tools/nsc/backend/jvm/IndySammyTest.scala | 2 +- .../backend/jvm/OptimizedBytecodeTest.scala | 14 ++--- .../jvm/opt/ClosureOptimizerTest.scala | 6 +-- .../nsc/backend/jvm/opt/InlinerTest.scala | 52 +++++++++---------- .../backend/jvm/opt/MethodLevelOptsTest.scala | 2 +- .../scala/tools/testing/BytecodeTesting.scala | 10 +++- 20 files changed, 153 insertions(+), 68 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 0786ceb7c246..3dff4a02c962 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -238,7 +238,8 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { * (outside the synchronized block). * * The idiom works only if the condition is using a volatile field. 
- * @see http://www.cs.umd.edu/~pugh/java/memoryModel/DoubleCheckedLocking.html + * + * @see http://www.cs.umd.edu/~pugh/java/memoryModel/DoubleCheckedLocking.html */ def mkSynchronizedCheck(clazz: Symbol, cond: Tree, syncBody: List[Tree], stats: List[Tree]): Tree = mkSynchronizedCheck(mkAttributedThis(clazz), cond, syncBody, stats) @@ -274,8 +275,19 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { } // used to create the lifted method that holds a function's body - def mkLiftedFunctionBodyMethod(localTyper: analyzer.Typer)(owner: Symbol, fun: Function) = - mkMethodForFunctionBody(localTyper)(owner, fun, nme.ANON_FUN_NAME)(additionalFlags = ARTIFACT) + def mkLiftedFunctionBodyMethod(localTyper: global.analyzer.Typer)(owner: global.Symbol, fun: global.Function) = { + def nonLocalEnclosingMember(sym: Symbol): Symbol = { + if (sym.isLocalDummy) sym.enclClass.primaryConstructor + else if (sym.isLocalToBlock) nonLocalEnclosingMember(sym.originalOwner) + else sym + } + val ownerName = nonLocalEnclosingMember(fun.symbol.originalOwner).name match { + case nme.CONSTRUCTOR => nme.NEWkw // do as javac does for the suffix, prefer "new" to "$lessinit$greater$1" + case x => x + } + val newName = nme.ANON_FUN_NAME.append(nme.NAME_JOIN_STRING).append(ownerName) + mkMethodForFunctionBody(localTyper)(owner, fun, newName)(additionalFlags = ARTIFACT) + } /** @@ -310,6 +322,38 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { newDefDef(methSym, moveToMethod(useMethodParams(fun.body)))(tpt = TypeTree(resTp)) } + /** + * Create a new `DefDef` based on `orig` with an explicit self parameter. + * + * Details: + * - Must by run after erasure + * - If `maybeClone` is the identity function, this runs "in place" + * and mutates the symbol of `orig`. `orig` should be discarded + * - Symbol owners and returns are substituted, as are parameter symbols + * - Recursive calls are not rewritten. This is correct if we assume + * that we either: + * - are in "in-place" mode, but can guarantee that no recursive calls exists + * - are associating the RHS with a cloned symbol, but intend for the original + * method to remain and for recursive calls to target it. 
+ */ + final def mkStatic(orig: DefDef, maybeClone: Symbol => Symbol): DefDef = { + assert(phase.erasedTypes, phase) + assert(!orig.symbol.hasFlag(SYNCHRONIZED), orig.symbol.defString) + val origSym = orig.symbol + val origParams = orig.symbol.info.params + val newSym = maybeClone(orig.symbol) + newSym.setFlag(STATIC) + // Add an explicit self parameter + val selfParamSym = newSym.newSyntheticValueParam(newSym.owner.typeConstructor, nme.SELF) + newSym.updateInfo(newSym.info match { + case mt @ MethodType(params, res) => copyMethodType(mt, selfParamSym :: params, res) + }) + val selfParam = ValDef(selfParamSym) + val rhs = orig.rhs.substituteThis(newSym.owner, atPos(newSym.pos)(gen.mkAttributedIdent(selfParamSym))) + .substituteSymbols(origParams, newSym.info.params.drop(1)).changeOwner(origSym -> newSym) + treeCopy.DefDef(orig, orig.mods, orig.name, orig.tparams, (selfParam :: orig.vparamss.head) :: Nil, orig.tpt, rhs).setSymbol(newSym) + } + // TODO: the rewrite to AbstractFunction is superfluous once we compile FunctionN to a SAM type (aka functional interface) def functionClassType(fun: Function): Type = if (isFunctionType(fun.tpe)) abstractFunctionType(fun.vparams.map(_.symbol.tpe), fun.body.tpe.deconst) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index f94642389df8..6d3c3f3863c2 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -121,7 +121,7 @@ class BackendUtils[BT <: BTypes](val btypes: BT) { def getBoxedUnit: FieldInsnNode = new FieldInsnNode(GETSTATIC, srBoxedUnitRef.internalName, "UNIT", srBoxedUnitRef.descriptor) - private val anonfunAdaptedName = """.*\$anonfun\$\d+\$adapted""".r + private val anonfunAdaptedName = """.*\$anonfun\$.*\$\d+\$adapted""".r def hasAdaptedImplMethod(closureInit: ClosureInstantiation): Boolean = { isBuiltinFunctionType(Type.getReturnType(closureInit.lambdaMetaFactoryCall.indy.desc).getInternalName) && anonfunAdaptedName.pattern.matcher(closureInit.lambdaMetaFactoryCall.implMethod.getName).matches diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index d350ca8e1757..1dfc1330c6a5 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -61,6 +61,9 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre private def mkLambdaMetaFactoryCall(fun: Function, target: Symbol, functionalInterface: Symbol, samUserDefined: Symbol, isSpecialized: Boolean): Tree = { val pos = fun.pos + def isSelfParam(p: Symbol) = p.isSynthetic && p.name == nme.SELF + val hasSelfParam = isSelfParam(target.firstParam) + val allCapturedArgRefs = { // find which variables are free in the lambda because those are captures that need to be // passed into the constructor of the anonymous function class @@ -68,7 +71,8 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre gen.mkAttributedRef(capture) setPos pos ).toList - if (target hasFlag STATIC) captureArgs // no `this` reference needed + if (!hasSelfParam) captureArgs.filterNot(arg => isSelfParam(arg.symbol)) + else if (currentMethod.hasFlag(Flags.STATIC)) captureArgs else (gen.mkAttributedThis(fun.symbol.enclClass) setPos pos) :: captureArgs } @@ -179,7 +183,7 @@ abstract class Delambdafy extends Transform with 
TypingTransformers with ast.Tre val numCaptures = targetParams.length - functionParamTypes.length val (targetCapturedParams, targetFunctionParams) = targetParams.splitAt(numCaptures) - val methSym = oldClass.newMethod(target.name.append("$adapted").toTermName, target.pos, target.flags | FINAL | ARTIFACT) + val methSym = oldClass.newMethod(target.name.append("$adapted").toTermName, target.pos, target.flags | FINAL | ARTIFACT | STATIC) val bridgeCapturedParams = targetCapturedParams.map(param => methSym.newSyntheticValueParam(param.tpe, param.name.toTermName)) val bridgeFunctionParams = map2(targetFunctionParams, bridgeParamTypes)((param, tp) => methSym.newSyntheticValueParam(tp, param.name.toTermName)) @@ -223,10 +227,8 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre private def transformFunction(originalFunction: Function): Tree = { val target = targetMethod(originalFunction) - target.makeNotPrivate(target.owner) - - // must be done before calling createBoxingBridgeMethod and mkLambdaMetaFactoryCall - if (!(target hasFlag STATIC) && !methodReferencesThis(target)) target setFlag STATIC + assert(target.hasFlag(Flags.STATIC)) + target.setFlag(notPRIVATE) val funSym = originalFunction.tpe.typeSymbolDirect // The functional interface that can be used to adapt the lambda target method `target` to the given function type. @@ -252,11 +254,22 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // here's the main entry point of the transform override def transform(tree: Tree): Tree = tree match { // the main thing we care about is lambdas - case fun: Function => super.transform(transformFunction(fun)) + case fun: Function => + super.transform(transformFunction(fun)) case Template(_, _, _) => + def pretransform(tree: Tree): Tree = tree match { + case dd: DefDef if dd.symbol.isDelambdafyTarget => + if (!dd.symbol.hasFlag(STATIC) && methodReferencesThis(dd.symbol)) { + gen.mkStatic(dd, sym => sym) + } else { + dd.symbol.setFlag(STATIC) + dd + } + case t => t + } try { // during this call boxingBridgeMethods will be populated from the Function case - val Template(parents, self, body) = super.transform(tree) + val Template(parents, self, body) = super.transform(deriveTemplate(tree)(_.mapConserve(pretransform))) Template(parents, self, body ++ boxingBridgeMethods) } finally boxingBridgeMethods.clear() case _ => super.transform(tree) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index e894c58b1ac9..40ab8c0cf896 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1329,6 +1329,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { class SpecializationDuplicator(casts: Map[Symbol, Type]) extends Duplicator(casts) { override def retyped(context: Context, tree: Tree, oldThis: Symbol, newThis: Symbol, env: scala.collection.Map[Symbol, Type]): Tree = enteringSpecialize(super.retyped(context, tree, oldThis, newThis, env)) + } /** A tree symbol substituter that substitutes on type skolems. 
diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index e0b1543f2441..374e8430d819 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -213,6 +213,7 @@ abstract class UnCurry extends InfoTransform // Expand the function body into an anonymous class gen.expandFunction(localTyper)(fun, inConstructorFlag) } else { + val mustExpand = mustExpandFunction(fun) // method definition with the same arguments, return type, and body as the original lambda val liftedMethod = gen.mkLiftedFunctionBodyMethod(localTyper)(fun.symbol.owner, fun) @@ -221,11 +222,18 @@ abstract class UnCurry extends InfoTransform gen.mkForwarder(gen.mkAttributedRef(liftedMethod.symbol), (fun.vparams map (_.symbol)) :: Nil) )) + if (!mustExpand) { + liftedMethod.symbol.updateAttachment(DelambdafyTarget) + liftedMethod.updateAttachment(DelambdafyTarget) + } + val typedNewFun = localTyper.typedPos(fun.pos)(Block(liftedMethod, super.transform(newFun))) - if (mustExpandFunction(fun)) { + if (mustExpand) { val Block(stats, expr : Function) = typedNewFun treeCopy.Block(typedNewFun, stats, gen.expandFunction(localTyper)(expr, inConstructorFlag)) - } else typedNewFun + } else { + typedNewFun + } } def transformArgs(pos: Position, fun: Symbol, args: List[Tree], formals: List[Type]) = { @@ -341,13 +349,18 @@ abstract class UnCurry extends InfoTransform private def isSelfSynchronized(ddef: DefDef) = ddef.rhs match { case Apply(fn @ TypeApply(Select(sel, _), _), _) => - fn.symbol == Object_synchronized && sel.symbol == ddef.symbol.enclClass && !ddef.symbol.enclClass.isTrait + fn.symbol == Object_synchronized && sel.symbol == ddef.symbol.enclClass && !ddef.symbol.enclClass.isTrait && + !ddef.symbol.isDelambdafyTarget /* these become static later, unsuitable for ACC_SYNCHRONIZED */ case _ => false } /** If an eligible method is entirely wrapped in a call to synchronized * locked on the same instance, remove the synchronized scaffolding and * mark the method symbol SYNCHRONIZED for bytecode generation. + * + * Delambdafy targets are deemed ineligible as the Delambdafy phase will + * replace `this.synchronized` with `$this.synchronzed` now that it emits + * all lambda impl methods as static. */ private def translateSynchronized(tree: Tree) = tree match { case dd @ DefDef(_, _, _, _, _, Apply(fn, body :: Nil)) if isSelfSynchronized(dd) => diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index 0c10242950ed..78e72cf771e7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -229,7 +229,12 @@ abstract class Duplicators extends Analyzer { case ddef @ DefDef(_, _, _, _, tpt, rhs) => ddef.tpt modifyType fixType - super.typed(ddef.clearType(), mode, pt) + val result = super.typed(ddef.clearType(), mode, pt) + // TODO this is a hack, we really need a cleaner way to transport symbol attachments to duplicated methods + // bodies in specialized subclasses. 
+ if (ddef.hasAttachment[DelambdafyTarget.type]) + result.symbol.updateAttachment(DelambdafyTarget) + result case fun: Function => debuglog("Clearing the type and retyping Function: " + fun) diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala index 0243dd48d2d8..ef95b3884317 100644 --- a/src/reflect/scala/reflect/internal/StdAttachments.scala +++ b/src/reflect/scala/reflect/internal/StdAttachments.scala @@ -51,6 +51,8 @@ trait StdAttachments { */ case class SAMFunction(samTp: Type, sam: Symbol) extends PlainAttachment + case object DelambdafyTarget extends PlainAttachment + /** When present, indicates that the host `Ident` has been created from a backquoted identifier. */ case object BackquotedIdentifierAttachment extends PlainAttachment diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 49202cd71e2d..3b886d357fde 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -807,7 +807,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isAnonymousFunction = isSynthetic && (name containsName tpnme.ANON_FUN_NAME) final def isDelambdafyFunction = isSynthetic && (name containsName tpnme.DELAMBDAFY_LAMBDA_CLASS_NAME) - final def isDelambdafyTarget = isArtifact && isMethod && (name containsName tpnme.ANON_FUN_NAME) + final def isDelambdafyTarget = isArtifact && isMethod && hasAttachment[DelambdafyTarget.type] final def isDefinedInPackage = effectiveOwner.isPackageClass final def needsFlatClasses = phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index d50debd7ee93..28222cf9a7fd 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -38,6 +38,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.FixedMirrorTypeCreator this.CompoundTypeTreeOriginalAttachment this.SAMFunction + this.DelambdafyTarget this.BackquotedIdentifierAttachment this.ForAttachment this.SyntheticUnitAttachment diff --git a/test/files/run/delambdafy_t6028.check b/test/files/run/delambdafy_t6028.check index b90dea62ed69..8b0ae7e9b97e 100644 --- a/test/files/run/delambdafy_t6028.check +++ b/test/files/run/delambdafy_t6028.check @@ -11,7 +11,7 @@ package { def foo(methodParam: String): Function0 = { val methodLocal: String = ""; { - (() => T.this.$anonfun$1(methodParam, methodLocal)) + (() => T.this.$anonfun$foo$1(methodParam, methodLocal)) } }; def bar(barParam: String): Object = { @@ -21,10 +21,10 @@ package { def tryy(tryyParam: String): Function0 = { var tryyLocal: runtime.ObjectRef = scala.runtime.ObjectRef.create(""); { - (() => T.this.$anonfun$2(tryyParam, tryyLocal)) + (() => T.this.$anonfun$tryy$1(tryyParam, tryyLocal)) } }; - final private[this] def $anonfun$1(methodParam$1: String, methodLocal$1: String): String = T.this.classParam.+(T.this.field()).+(methodParam$1).+(methodLocal$1); + final private[this] def $anonfun$foo$1(methodParam$1: String, methodLocal$1: String): String = T.this.classParam.+(T.this.field()).+(methodParam$1).+(methodLocal$1); abstract trait MethodLocalTrait$1 extends Object { def /*MethodLocalTrait$1*/$init$(barParam$1: String): Unit = { () @@ -54,7 +54,7 @@ package { T.this.MethodLocalObject$lzycompute$1(barParam$1, 
MethodLocalObject$module$1) else MethodLocalObject$module$1.elem.$asInstanceOf[T#MethodLocalObject$2.type](); - final private[this] def $anonfun$2(tryyParam$1: String, tryyLocal$1: runtime.ObjectRef): Unit = try { + final private[this] def $anonfun$tryy$1(tryyParam$1: String, tryyLocal$1: runtime.ObjectRef): Unit = try { tryyLocal$1.elem = tryyParam$1 } finally () } diff --git a/test/files/run/delambdafy_t6555.check b/test/files/run/delambdafy_t6555.check index b6ccebde78f0..d8b834edc7ce 100644 --- a/test/files/run/delambdafy_t6555.check +++ b/test/files/run/delambdafy_t6555.check @@ -6,8 +6,8 @@ package { () }; private[this] val f: String => String = { - final def $anonfun(param: String): String = param; - ((param: String) => $anonfun(param)) + final def $anonfun$f(param: String): String = param; + ((param: String) => $anonfun$f(param)) }; def f(): String => String = Foo.this.f } diff --git a/test/files/run/delambdafy_uncurry_byname_method.check b/test/files/run/delambdafy_uncurry_byname_method.check index e0f281b1cd8c..71e404ce64f5 100644 --- a/test/files/run/delambdafy_uncurry_byname_method.check +++ b/test/files/run/delambdafy_uncurry_byname_method.check @@ -7,8 +7,8 @@ package { }; def bar(x: () => String): String = x.apply(); def foo(): String = Foo.this.bar({ - final def $anonfun(): String = ""; - (() => $anonfun()) + final def $anonfun$foo(): String = ""; + (() => $anonfun$foo()) }) } } diff --git a/test/files/run/delambdafy_uncurry_method.check b/test/files/run/delambdafy_uncurry_method.check index 5ee3d174b3a0..8aa0b92054b3 100644 --- a/test/files/run/delambdafy_uncurry_method.check +++ b/test/files/run/delambdafy_uncurry_method.check @@ -7,8 +7,8 @@ package { }; def bar(): Unit = { val f: Int => Int = { - final def $anonfun(x: Int): Int = x.+(1); - ((x: Int) => $anonfun(x)) + final def $anonfun|(x: Int): Int = x.+(1); + ((x: Int) => $anonfun|(x)) }; () } diff --git a/test/files/run/t9097.scala b/test/files/run/t9097.scala index 49c0bbe79a19..49c9e2f2e5d8 100644 --- a/test/files/run/t9097.scala +++ b/test/files/run/t9097.scala @@ -28,6 +28,6 @@ object Test extends StoreReporterDirectTest { assert(!storeReporter.hasErrors, message = filteredInfos map (_.msg) mkString "; ") val out = baos.toString("UTF-8") // was 2 before the fix, the two PackageDefs for a would both contain the ClassDef for the closure - assert(out.lines.count(_ contains "def $anonfun$1(x$1: Int): String") == 1, out) + assert(out.lines.count(_ contains "def $anonfun$hihi$1(x$1: Int): String") == 1, out) } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala b/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala index 2bcbcc870cf5..1ad02c10cf00 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/IndySammyTest.scala @@ -43,7 +43,7 @@ class IndySammyTest extends BytecodeTesting { val c = compileClass(s"class C { ${lamDef(from, to, body)}; ${appDef(arg)} }", allowMessage = allowMessage) val applySig = getAsmMethod(funClass, "apply").desc - val anonfun = getMethod(c, "C$$$anonfun$1") + val anonfun = getMethod(c, "$anonfun$lam$1") val lamInsn = getInstructions(c, "lam").dropNonOp val applyInvoke = getMethod(c, "app") diff --git a/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala index 8cf6a655d232..b64a5ae3ceff 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala +++ 
b/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala @@ -43,9 +43,9 @@ class OptimizedBytecodeTest extends BytecodeTesting { val c = compileClass(code) assertSameSummary(getMethod(c, "t"), List( - LDC, ASTORE, ALOAD /*0*/, ALOAD /*1*/, "C$$$anonfun$1", IRETURN)) - assertSameSummary(getMethod(c, "C$$$anonfun$1"), List(LDC, "C$$$anonfun$2", IRETURN)) - assertSameSummary(getMethod(c, "C$$$anonfun$2"), List(-1 /*A*/, GOTO /*A*/)) + LDC, ASTORE, ALOAD /*0*/, ALOAD /*1*/, "$anonfun$t$1", IRETURN)) + assertSameSummary(getMethod(c, "$anonfun$t$1"), List(ALOAD, IFNONNULL, ACONST_NULL, ATHROW, -1, LDC, "$anonfun$t$2", IRETURN)) + assertSameSummary(getMethod(c, "$anonfun$t$2"), List(-1 /*A*/, GOTO /*A*/)) } @Test @@ -295,9 +295,9 @@ class OptimizedBytecodeTest extends BytecodeTesting { |} """.stripMargin val c = compileClass(code, allowMessage = _.msg.contains("exception handler declared in the inlined method")) - assertInvoke(getMethod(c, "f1a"), "C", "C$$$anonfun$1") + assertInvoke(getMethod(c, "f1a"), "C", "$anonfun$f1a$1") assertInvoke(getMethod(c, "f1b"), "C", "wrapper1") - assertInvoke(getMethod(c, "f2a"), "C", "C$$$anonfun$3") + assertInvoke(getMethod(c, "f2a"), "C", "$anonfun$f2a$1") assertInvoke(getMethod(c, "f2b"), "C", "wrapper2") } @@ -331,7 +331,7 @@ class OptimizedBytecodeTest extends BytecodeTesting { |class Listt """.stripMargin val List(c, nil, nilMod, listt) = compileClasses(code) - assertInvoke(getMethod(c, "t"), "C", "C$$$anonfun$1") + assertInvoke(getMethod(c, "t"), "C", "$anonfun$t$1") } @Test @@ -357,6 +357,6 @@ class OptimizedBytecodeTest extends BytecodeTesting { def optimiseEnablesNewOpt(): Unit = { val code = """class C { def t = (1 to 10) foreach println }""" val List(c) = readAsmClasses(newCompiler(extraArgs = "-optimise -deprecation").compileToBytes(code, allowMessage = _.msg.contains("is deprecated"))) - assertInvoke(getMethod(c, "t"), "C", "C$$$anonfun$1") // range-foreach inlined from classpath + assertInvoke(getMethod(c, "t"), "C", "$anonfun$t$1") // range-foreach inlined from classpath } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala index 2da2ecdb723a..f672237f103c 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ClosureOptimizerTest.scala @@ -28,7 +28,7 @@ class ClosureOptimizerTest extends BytecodeTesting { val c = compileClass(code) val t = getAsmMethod(c, "t") - val bodyCall = findInstr(t, "INVOKESTATIC C.C$$$anonfun$1 ()Lscala/runtime/Nothing$") + val bodyCall = findInstr(t, "INVOKESTATIC C.$anonfun$t$1 ()Lscala/runtime/Nothing$") assert(bodyCall.getNext.getOpcode == ATHROW) } @@ -44,7 +44,7 @@ class ClosureOptimizerTest extends BytecodeTesting { val c = compileClass(code) val t = getAsmMethod(c, "t") - val bodyCall = findInstr(t, "INVOKESTATIC C.C$$$anonfun$1 ()Lscala/runtime/Null$") + val bodyCall = findInstr(t, "INVOKESTATIC C.$anonfun$t$1 ()Lscala/runtime/Null$") assert(bodyCall.getNext.getOpcode == POP) assert(bodyCall.getNext.getNext.getOpcode == ACONST_NULL) } @@ -62,7 +62,7 @@ class ClosureOptimizerTest extends BytecodeTesting { val c = compileClass(code) assertSameCode(getMethod(c, "t"), List(VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "scala/collection/immutable/List", "head", "()Ljava/lang/Object;", false), - TypeOp(CHECKCAST, "java/lang/String"), Invoke(INVOKESTATIC, "C", "C$$$anonfun$1", "(Ljava/lang/String;)Ljava/lang/String;", false), + 
TypeOp(CHECKCAST, "java/lang/String"), Invoke(INVOKESTATIC, "C", "$anonfun$t$1", "(Ljava/lang/String;)Ljava/lang/String;", false), Op(ARETURN))) } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index 333792677aeb..7234659a1d52 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -945,11 +945,11 @@ class InlinerTest extends BytecodeTesting { val t1 = getMethod(c, "t1") assertNoIndy(t1) // the indy call is inlined into t, and the closure elimination rewrites the closure invocation to the body method - assertInvoke(t1, "C", "C$$$anonfun$2") + assertInvoke(t1, "C", "$anonfun$m$2") val t2 = getMethod(c, "t2") assertNoIndy(t2) - assertInvoke(t2, "M$", "M$$$anonfun$1") + assertInvoke(t2, "M$", "$anonfun$m$1") } @Test @@ -1033,7 +1033,7 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c) = compile(code) - assertInvoke(getMethod(c, "t1"), "C", "C$$$anonfun$1") + assertInvoke(getMethod(c, "t1"), "C", "$anonfun$t1$1") assertInvoke(getMethod(c, "t2"), "C", "a") assertInvoke(getMethod(c, "t3"), "C", "b") assertNoInvoke(getMethod(c, "t4")) @@ -1097,8 +1097,8 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c) = compile(code) - assertInvoke(getMethod(c, "t1"), "C", "C$$$anonfun$1") - assertInvoke(getMethod(c, "t2"), "C", "C$$$anonfun$2") + assertInvoke(getMethod(c, "t1"), "C", "$anonfun$t1$1") + assertInvoke(getMethod(c, "t2"), "C", "$anonfun$t2$1") assertInvoke(getMethod(c, "t3"), "scala/Function1", "apply$mcII$sp") assertInvoke(getMethod(c, "t4"), "scala/Function1", "apply$mcII$sp") assertInvoke(getMethod(c, "t5"), "C", "h") @@ -1273,39 +1273,39 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c, _, _) = compile(code) - assertSameSummary(getMethod(c, "t1"), List(BIPUSH, "C$$$anonfun$1", IRETURN)) - assertSameSummary(getMethod(c, "t1a"), List(LCONST_1, "C$$$anonfun$2", IRETURN)) - assertSameSummary(getMethod(c, "t2"), List(ICONST_1, ICONST_2, "C$$$anonfun$3",IRETURN)) + assertSameSummary(getMethod(c, "t1"), List(BIPUSH, "$anonfun$t1$1", IRETURN)) + assertSameSummary(getMethod(c, "t1a"), List(LCONST_1, "$anonfun$t1a$1", IRETURN)) + assertSameSummary(getMethod(c, "t2"), List(ICONST_1, ICONST_2, "$anonfun$t2$1",IRETURN)) // val a = new ValKl(n); new ValKl(anonfun(a.x)).x // value class instantiation-extraction should be optimized by boxing elim assertSameSummary(getMethod(c, "t3"), List( NEW, DUP, ICONST_1, "", ASTORE, NEW, DUP, ALOAD, "x", - "C$$$anonfun$4", + "$anonfun$t3$1", "", "x", IRETURN)) - assertSameSummary(getMethod(c, "t4"), List(BIPUSH, "C$$$anonfun$5", "boxToInteger", ARETURN)) - assertSameSummary(getMethod(c, "t4a"), List(ICONST_1, LDC, "C$$$anonfun$6", LRETURN)) - assertSameSummary(getMethod(c, "t5"), List(BIPUSH, ICONST_3, "C$$$anonfun$7", "boxToInteger", ARETURN)) - assertSameSummary(getMethod(c, "t5a"), List(BIPUSH, BIPUSH, I2B, "C$$$anonfun$8", IRETURN)) - assertSameSummary(getMethod(c, "t6"), List(BIPUSH, "C$$$anonfun$9", RETURN)) - assertSameSummary(getMethod(c, "t7"), List(ICONST_1, "C$$$anonfun$10", RETURN)) - assertSameSummary(getMethod(c, "t8"), List(ICONST_1, LDC, "C$$$anonfun$11", LRETURN)) - assertSameSummary(getMethod(c, "t9"), List(ICONST_1, "boxToInteger", "C$$$anonfun$12", RETURN)) + assertSameSummary(getMethod(c, "t4"), List(BIPUSH, "$anonfun$t4$1", "boxToInteger", ARETURN)) + assertSameSummary(getMethod(c, "t4a"), 
List(ICONST_1, LDC, "$anonfun$t4a$1", LRETURN)) + assertSameSummary(getMethod(c, "t5"), List(BIPUSH, ICONST_3, "$anonfun$t5$1", "boxToInteger", ARETURN)) + assertSameSummary(getMethod(c, "t5a"), List(BIPUSH, BIPUSH, I2B, "$anonfun$t5a$1", IRETURN)) + assertSameSummary(getMethod(c, "t6"), List(BIPUSH, "$anonfun$t6$1", RETURN)) + assertSameSummary(getMethod(c, "t7"), List(ICONST_1, "$anonfun$t7$1", RETURN)) + assertSameSummary(getMethod(c, "t8"), List(ICONST_1, LDC, "$anonfun$t8$1", LRETURN)) + assertSameSummary(getMethod(c, "t9"), List(ICONST_1, "boxToInteger", "$anonfun$t9$1", RETURN)) // t9a inlines Range.foreach, which is quite a bit of code, so just testing the core - assertInvoke(getMethod(c, "t9a"), "C", "C$$$anonfun$13") + assertInvoke(getMethod(c, "t9a"), "C", "$anonfun$t9a$1") assertInvoke(getMethod(c, "t9a"), "scala/runtime/BoxesRunTime", "boxToInteger") assertSameSummary(getMethod(c, "t10"), List( ICONST_1, ISTORE, ALOAD, ILOAD, - "C$$$anonfun$14", RETURN)) + "$anonfun$t10$1", RETURN)) // t10a inlines Range.foreach - assertInvoke(getMethod(c, "t10a"), "C", "C$$$anonfun$15") + assertInvoke(getMethod(c, "t10a"), "C", "$anonfun$t10a$1") assertDoesNotInvoke(getMethod(c, "t10a"), "boxToInteger") } @@ -1330,8 +1330,8 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val List(c) = compile(code) assertSameCode(getMethod(c, "t1"), List(Op(ICONST_0), Op(ICONST_1), Op(IADD), Op(IRETURN))) - assertEquals(getInstructions(c, "t2") collect { case i: Invoke => i.owner +"."+ i.name }, List( - "scala/runtime/IntRef.create", "C.C$$$anonfun$1")) + assertEquals(getMethod(c, "t2").instructions collect { case i: Invoke => i.owner +"."+ i.name }, List( + "scala/runtime/IntRef.create", "C.$anonfun$t2$1")) } @Test @@ -1449,9 +1449,9 @@ class InlinerTest extends BytecodeTesting { // box-unbox will clean it up assertSameSummary(getMethod(c, "t"), List( - ALOAD, "C$$$anonfun$1", IFEQ /*A*/, - "C$$$anonfun$2", IRETURN, - -1 /*A*/, "C$$$anonfun$3", IRETURN)) + ALOAD, "$anonfun$t$1", IFEQ /*A*/, + "$anonfun$t$2", IRETURN, + -1 /*A*/, "$anonfun$t$3", IRETURN)) } @Test @@ -1501,7 +1501,7 @@ class InlinerTest extends BytecodeTesting { val List(c) = compile(code) val t = getMethod(c, "t") assertNoIndy(t) - assertInvoke(t, "C", "C$$$anonfun$1") + assertInvoke(t, "C", "$anonfun$t$1") } @Test diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala index 9675e2e4456f..938bc7b84686 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala @@ -221,7 +221,7 @@ class MethodLevelOptsTest extends BytecodeTesting { VarOp(ILOAD, 1), VarOp(ILOAD, 2), VarOp(ILOAD, 3), - Invoke(INVOKESTATIC, "C", "C$$$anonfun$1", "(III)I", false), Op(IRETURN))) + Invoke(INVOKESTATIC, "C", "$anonfun$t$1", "(III)I", false), Op(IRETURN))) } @Test diff --git a/test/junit/scala/tools/testing/BytecodeTesting.scala b/test/junit/scala/tools/testing/BytecodeTesting.scala index b11ad271483d..1a0c1e210a48 100644 --- a/test/junit/scala/tools/testing/BytecodeTesting.scala +++ b/test/junit/scala/tools/testing/BytecodeTesting.scala @@ -1,5 +1,6 @@ package scala.tools.testing +import junit.framework.AssertionFailedError import org.junit.Assert._ import scala.collection.JavaConverters._ @@ -245,8 +246,13 @@ object BytecodeTesting { getAsmMethods(c, _ == name) def getAsmMethod(c: ClassNode, name: String): MethodNode = { - val List(m) = getAsmMethods(c, name) 
- m + val methods = getAsmMethods(c, name) + methods match { + case List(m) => m + case ms => + val allNames = getAsmMethods(c, _ => true).map(_.name) + throw new AssertionFailedError(s"Could not find method named $name among ${allNames}") + } } def getMethods(c: ClassNode, name: String): List[Method] = From f01d061caaae26b3fdff0e4db800292e9b3252c2 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 13 May 2016 13:43:13 +0200 Subject: [PATCH 0103/2793] Treat self parameter as non-null in the optimizer --- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 2 +- .../backend/jvm/analysis/NullnessAnalyzer.scala | 16 +++++++++++----- .../tools/nsc/backend/jvm/opt/CallGraph.scala | 2 +- .../tools/nsc/backend/jvm/opt/LocalOpt.scala | 2 +- .../nsc/backend/jvm/OptimizedBytecodeTest.scala | 2 +- .../jvm/analysis/NullnessAnalyzerTest.scala | 2 +- 6 files changed, 16 insertions(+), 10 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 3dff4a02c962..80a707461c36 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -344,7 +344,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { val newSym = maybeClone(orig.symbol) newSym.setFlag(STATIC) // Add an explicit self parameter - val selfParamSym = newSym.newSyntheticValueParam(newSym.owner.typeConstructor, nme.SELF) + val selfParamSym = newSym.newSyntheticValueParam(newSym.owner.typeConstructor, nme.SELF).setFlag(ARTIFACT) newSym.updateInfo(newSym.info match { case mt @ MethodType(params, res) => copyMethodType(mt, selfParamSym :: params, res) }) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala index 30e73f8ac24f..01afd0d2ef70 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala @@ -5,8 +5,8 @@ package analysis import java.util import scala.annotation.switch -import scala.tools.asm.{Type, Opcodes} -import scala.tools.asm.tree.{MethodInsnNode, LdcInsnNode, AbstractInsnNode} +import scala.tools.asm.{Opcodes, Type} +import scala.tools.asm.tree.{AbstractInsnNode, LdcInsnNode, MethodInsnNode, MethodNode} import scala.tools.asm.tree.analysis._ import scala.tools.nsc.backend.jvm.opt.BytecodeUtils import BytecodeUtils._ @@ -63,7 +63,7 @@ object NullnessValue { def unknown(insn: AbstractInsnNode) = if (BytecodeUtils.instructionResultSize(insn) == 2) UnknownValue2 else UnknownValue1 } -final class NullnessInterpreter(bTypes: BTypes) extends Interpreter[NullnessValue](Opcodes.ASM5) { +final class NullnessInterpreter(bTypes: BTypes, method: MethodNode) extends Interpreter[NullnessValue](Opcodes.ASM5) { def newValue(tp: Type): NullnessValue = { // ASM loves giving semantics to null. The behavior here is the same as in SourceInterpreter, // which is provided by the framework. @@ -80,7 +80,13 @@ final class NullnessInterpreter(bTypes: BTypes) extends Interpreter[NullnessValu override def newParameterValue(isInstanceMethod: Boolean, local: Int, tp: Type): NullnessValue = { // For instance methods, the `this` parameter is known to be not null. 
- if (isInstanceMethod && local == 0) NotNullValue + val isThis = local == 0 && (isInstanceMethod || { + method.parameters != null && !method.parameters.isEmpty && { + val p = method.parameters.get(0) + (p.access & Opcodes.ACC_SYNTHETIC) != 0 && p.name == "$this" + } + }) + if (isThis) NotNullValue else super.newParameterValue(isInstanceMethod, local, tp) } @@ -197,7 +203,7 @@ class NullnessFrame(nLocals: Int, nStack: Int) extends AliasingFrame[NullnessVal * This class is required to override the `newFrame` methods, which makes makes sure the analyzer * uses NullnessFrames. */ -class NullnessAnalyzer(bTypes: BTypes) extends Analyzer[NullnessValue](new NullnessInterpreter(bTypes)) { +class NullnessAnalyzer(bTypes: BTypes, method: MethodNode) extends Analyzer[NullnessValue](new NullnessInterpreter(bTypes, method)) { override def newFrame(nLocals: Int, nStack: Int): NullnessFrame = new NullnessFrame(nLocals, nStack) override def newFrame(src: Frame[_ <: NullnessValue]): NullnessFrame = new NullnessFrame(src) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala index e8d1bf203a42..d4ff6493a379 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala @@ -103,7 +103,7 @@ class CallGraph[BT <: BTypes](val btypes: BT) { val analyzer = { if (compilerSettings.optNullnessTracking && AsmAnalyzer.sizeOKForNullness(methodNode)) { - Some(new AsmAnalyzer(methodNode, definingClass.internalName, new NullnessAnalyzer(btypes))) + Some(new AsmAnalyzer(methodNode, definingClass.internalName, new NullnessAnalyzer(btypes, methodNode))) } else if (AsmAnalyzer.sizeOKForBasicValue(methodNode)) { Some(new AsmAnalyzer(methodNode, definingClass.internalName)) } else None diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala index 5ca0ad2773ae..447ee209b593 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala @@ -397,7 +397,7 @@ class LocalOpt[BT <: BTypes](val btypes: BT) { */ def nullnessOptimizations(method: MethodNode, ownerClassName: InternalName): Boolean = { AsmAnalyzer.sizeOKForNullness(method) && { - lazy val nullnessAnalyzer = new AsmAnalyzer(method, ownerClassName, new NullnessAnalyzer(btypes)) + lazy val nullnessAnalyzer = new AsmAnalyzer(method, ownerClassName, new NullnessAnalyzer(btypes, method)) // When running nullness optimizations the method may still have unreachable code. Analyzer // frames of unreachable instructions are `null`. 
diff --git a/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala index b64a5ae3ceff..9a0899ffc5f7 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/OptimizedBytecodeTest.scala @@ -44,7 +44,7 @@ class OptimizedBytecodeTest extends BytecodeTesting { assertSameSummary(getMethod(c, "t"), List( LDC, ASTORE, ALOAD /*0*/, ALOAD /*1*/, "$anonfun$t$1", IRETURN)) - assertSameSummary(getMethod(c, "$anonfun$t$1"), List(ALOAD, IFNONNULL, ACONST_NULL, ATHROW, -1, LDC, "$anonfun$t$2", IRETURN)) + assertSameSummary(getMethod(c, "$anonfun$t$1"), List(LDC, "$anonfun$t$2", IRETURN)) assertSameSummary(getMethod(c, "$anonfun$t$2"), List(-1 /*A*/, GOTO /*A*/)) } diff --git a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala index 1de5aa28ca3d..c173bacd4616 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzerTest.scala @@ -21,7 +21,7 @@ class NullnessAnalyzerTest extends BytecodeTesting { import compiler._ import global.genBCode.bTypes.backendUtils._ - def newNullnessAnalyzer(methodNode: MethodNode, classInternalName: InternalName = "C") = new AsmAnalyzer(methodNode, classInternalName, new NullnessAnalyzer(global.genBCode.bTypes)) + def newNullnessAnalyzer(methodNode: MethodNode, classInternalName: InternalName = "C") = new AsmAnalyzer(methodNode, classInternalName, new NullnessAnalyzer(global.genBCode.bTypes, methodNode)) def testNullness(analyzer: AsmAnalyzer[NullnessValue], method: MethodNode, query: String, index: Int, nullness: NullnessValue): Unit = { for (i <- findInstrs(method, query)) { From e077c24525bf8f9bd8b73684e630eb7fc6bcb5f6 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 14 Apr 2016 14:00:33 +1000 Subject: [PATCH 0104/2793] SI-9390 Emit local defs that don't capture this as static This avoids unnecessary memory retention, and allows lambdas that call the local methods to be serializable, regardless of whether or not the enclosing class is serializable. The second point is especially pressing, given that the enclosing class for local methods defined in a lambda used to be the (serializable) anonymous function class, but as of Scala 2.12 will be the enclosing class of the lambda. This change is similar in spirit to SI-9408 / 93bee55e.
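A minimal sketch of the effect (illustrative only; the real coverage is in the new t9390.scala / t9390b.scala tests below):

```scala
class C { // C itself is not Serializable
  def mkPredicate: Int => Boolean = {
    def isEven(i: Int) = i % 2 == 0 // local def, captures no `this`: now emitted as a static method
    val f: Int => Boolean = isEven  // the lambda only references the static method, not C
    f                               // can be serialized even though C cannot
  }
}
```

If `isEven` touched an instance member it would still need `this`, and the resulting lambda would remain non-serializable when `C` is not.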
--- .../tools/nsc/transform/Delambdafy.scala | 35 +++++++--- test/files/run/t5652.check | 6 +- test/files/run/t5652b.check | 4 +- test/files/run/t5652c.check | 4 +- test/files/run/t9390.scala | 67 +++++++++++++++++++ test/files/run/t9390b.scala | 31 +++++++++ .../backend/jvm/opt/InlineWarningTest.scala | 4 +- .../nsc/backend/jvm/opt/InlinerTest.scala | 2 +- 8 files changed, 135 insertions(+), 18 deletions(-) create mode 100644 test/files/run/t9390.scala create mode 100644 test/files/run/t9390b.scala diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index 1dfc1330c6a5..a8933a9ee6b9 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -225,6 +225,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre } } + private def transformFunction(originalFunction: Function): Tree = { val target = targetMethod(originalFunction) assert(target.hasFlag(Flags.STATIC)) @@ -272,6 +273,12 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre val Template(parents, self, body) = super.transform(deriveTemplate(tree)(_.mapConserve(pretransform))) Template(parents, self, body ++ boxingBridgeMethods) } finally boxingBridgeMethods.clear() + case dd: DefDef if dd.symbol.isLiftedMethod && !dd.symbol.isDelambdafyTarget => + // SI-9390 emit lifted methods that don't require a `this` reference as STATIC + // delambdafy targets are excluded as they are made static by `transformFunction`. + if (!dd.symbol.hasFlag(STATIC) && !methodReferencesThis(dd.symbol)) + dd.symbol.setFlag(STATIC) + super.transform(tree) case _ => super.transform(tree) } } // DelambdafyTransformer @@ -326,19 +333,28 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // recursively find methods that refer to 'this' directly or indirectly via references to other methods // for each method found add it to the referrers set - private def refersToThis(symbol: Symbol): Boolean = - (thisReferringMethods contains symbol) || - (liftedMethodReferences(symbol) exists refersToThis) && { - // add it early to memoize - debuglog(s"$symbol indirectly refers to 'this'") - thisReferringMethods += symbol - true + private def refersToThis(symbol: Symbol): Boolean = { + var seen = mutable.Set[Symbol]() + def loop(symbol: Symbol): Boolean = { + if (seen(symbol)) false + else { + seen += symbol + (thisReferringMethods contains symbol) || + (liftedMethodReferences(symbol) exists loop) && { + // add it early to memoize + debuglog(s"$symbol indirectly refers to 'this'") + thisReferringMethods += symbol + true + } } + } + loop(symbol) + } private var currentMethod: Symbol = NoSymbol override def traverse(tree: Tree) = tree match { - case DefDef(_, _, _, _, _, _) if tree.symbol.isDelambdafyTarget => + case DefDef(_, _, _, _, _, _) if tree.symbol.isDelambdafyTarget || tree.symbol.isLiftedMethod => // we don't expect defs within defs. At this phase trees should be very flat if (currentMethod.exists) devWarning("Found a def within a def at a phase where defs are expected to be flattened out.") currentMethod = tree.symbol @@ -349,6 +365,9 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // They'll be of the form {(args...) 
=> this.anonfun(args...)} // but we do need to make note of the lifted body method in case it refers to 'this' if (currentMethod.exists) liftedMethodReferences(currentMethod) += targetMethod(fun) + case Apply(sel @ Select(This(_), _), args) if sel.symbol.isLiftedMethod => + if (currentMethod.exists) liftedMethodReferences(currentMethod) += sel.symbol + super.traverseTrees(args) case This(_) => if (currentMethod.exists && tree.symbol == currentMethod.enclClass) { debuglog(s"$currentMethod directly refers to 'this'") diff --git a/test/files/run/t5652.check b/test/files/run/t5652.check index a0fb6fe9b4e5..7c65ba66981d 100644 --- a/test/files/run/t5652.check +++ b/test/files/run/t5652.check @@ -1,7 +1,7 @@ -public default int T1.T1$$g$1() public default int T1.f0() public default void T1.$init$() -public final int A1.A1$$g$2() +public static int T1.T1$$g$1() public int A1.f1() -public final int A2.A2$$g$1() +public static final int A1.A1$$g$2() public int A2.f2() +public static final int A2.A2$$g$1() diff --git a/test/files/run/t5652b.check b/test/files/run/t5652b.check index ca9d0a74f094..0f4290796f2d 100644 --- a/test/files/run/t5652b.check +++ b/test/files/run/t5652b.check @@ -1,4 +1,4 @@ -private final int A1.g$1() +private static final int A1.g$1() public int A1.f1() -private final int A2.g$1() +private static final int A2.g$1() public int A2.f2() diff --git a/test/files/run/t5652c.check b/test/files/run/t5652c.check index 3b889e066dab..5a6d535f0293 100644 --- a/test/files/run/t5652c.check +++ b/test/files/run/t5652c.check @@ -1,6 +1,6 @@ -public final int A1.A1$$g$1() -public final int A1.A1$$g$2() public int A1.f1() public int A1.f2() +public static final int A1.A1$$g$1() +public static final int A1.A1$$g$2() 1 2 diff --git a/test/files/run/t9390.scala b/test/files/run/t9390.scala new file mode 100644 index 000000000000..8d7e1be5572c --- /dev/null +++ b/test/files/run/t9390.scala @@ -0,0 +1,67 @@ +class C { + def methodLift1 = { + def isEven(c: Int) = c % 2 == 0 + val f: Int => Boolean = isEven + f + } + def methodLift2 = { + def isEven(c: Int) = c % 2 == 0 + def isEven0(c: Int) = isEven(c) + val f: Int => Boolean = isEven0 + f + } + + def methodLift3 = { + def isEven(c: Int) = {toString; c % 2 == 0} + def isEven0(c: Int) = isEven(c) + val f: Int => Boolean = isEven0 + f + } +} + +object Test { + def main(args: Array[String]): Unit = { + val c = new C + + { + val f = c.methodLift1 + assert(f(0)) + assert(!f(1)) + val f1 = serializeDeserialize(f) + assert(f1(0)) + assert(!f1(1)) + } + + + { + val f = c.methodLift2 + assert(f(0)) + assert(!f(1)) + val f1 = serializeDeserialize(f) + assert(f1(0)) + assert(!f1(1)) + } + + { + val f = c.methodLift3 + assert(f(0)) + assert(!f(1)) + try { + serializeDeserialize(this) + assert(false) + } catch { + case _: java.io.NotSerializableException => + // expected, the closure in methodLift3 must capture C which is not serializable + } + } + } + + def serializeDeserialize[T <: AnyRef](obj: T): T = { + import java.io._ + val buffer = new ByteArrayOutputStream + val out = new ObjectOutputStream(buffer) + out.writeObject(obj) + val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray)) + in.readObject.asInstanceOf[T] + } +} diff --git a/test/files/run/t9390b.scala b/test/files/run/t9390b.scala new file mode 100644 index 000000000000..439e21e0a036 --- /dev/null +++ b/test/files/run/t9390b.scala @@ -0,0 +1,31 @@ +class C { // C is not serializable + def foo = (x: Int) => (y: Int) => x + y + def bar = (x: Int) => (y: Int) => {toString; x + y} 
+} + +object Test { + def main(args: Array[String]): Unit = { + val c = new C + val f = c.foo + assert(f(1)(2) == 3) + val f1 = serializeDeserialize(f) + assert(f1(1)(2) == 3) + + try { + serializeDeserialize(c.bar) + assert(false) + } catch { + case _: java.io.NotSerializableException => + // expected, lambda transitively refers to this + } + } + + def serializeDeserialize[T <: AnyRef](obj: T): T = { + import java.io._ + val buffer = new ByteArrayOutputStream + val out = new ObjectOutputStream(buffer) + out.writeObject(obj) + val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray)) + in.readObject.asInstanceOf[T] + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala index 6161dc7b7324..024cf0c416df 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala @@ -113,7 +113,7 @@ class InlineWarningTest extends BytecodeTesting { val warn = """M::f()I is annotated @inline but could not be inlined: - |The callee M::f()I contains the instruction INVOKESPECIAL M.nested$1 ()I + |The callee M::f()I contains the instruction INVOKESTATIC M.nested$1 ()I |that would cause an IllegalAccessError when inlined into class N""".stripMargin var c = 0 @@ -140,7 +140,7 @@ class InlineWarningTest extends BytecodeTesting { val warn = """M::f(Lscala/Function1;)I could not be inlined: - |The callee M::f(Lscala/Function1;)I contains the instruction INVOKESPECIAL M.nested$1 ()I + |The callee M::f(Lscala/Function1;)I contains the instruction INVOKESTATIC M.nested$1 ()I |that would cause an IllegalAccessError when inlined into class N""".stripMargin var c = 0 diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index 7234659a1d52..02cd632af176 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -1141,7 +1141,7 @@ class InlinerTest extends BytecodeTesting { val warn = """C::h()I is annotated @inline but could not be inlined: - |The callee C::h()I contains the instruction INVOKESPECIAL C.f$1 ()I + |The callee C::h()I contains the instruction INVOKESTATIC C.f$1 ()I |that would cause an IllegalAccessError when inlined into class D.""".stripMargin val List(c, d) = compile(code, allowMessage = _.msg contains warn) From def22ff08692ba55e607b8948e6159bdda3f48e5 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 27 May 2016 11:12:48 -0700 Subject: [PATCH 0105/2793] opt: fuse some operations on `Scope`s `Scope`'s `filter` is implemented using `toList`, so may as well start with `toList`ourselves. Also fused some `filter`/`foreach` combos. 
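The shape of the rewrite, as a self-contained sketch (using a stand-in case class rather than the compiler's `Symbol`/`Scope` types):

```scala
object FuseExample {
  final case class Sym(name: String, isImplicit: Boolean)
  val decls = List(Sym("a", isImplicit = true), Sym("b", isImplicit = false))

  def process(s: Sym): Unit = println(s.name)

  // before: filter materializes an intermediate collection that is then traversed again
  def before(): Unit = decls.filter(_.isImplicit).foreach(process)

  // after: a single traversal, no intermediate collection
  def after(): Unit = decls.foreach(s => if (s.isImplicit) process(s))
}
```

On a `Scope` the saving is larger than on a `List`, since `Scope#filter` goes through `toList` first.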
--- .../nsc/transform/ExtensionMethods.scala | 3 +- .../scala/tools/nsc/typechecker/Namers.scala | 2 +- .../tools/nsc/typechecker/RefChecks.scala | 4 +- .../scala/tools/nsc/typechecker/Typers.scala | 37 ++++++++++--------- .../scala/reflect/internal/Definitions.scala | 7 ++-- .../scala/reflect/internal/Symbols.scala | 6 +-- 6 files changed, 30 insertions(+), 29 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index 9d02228ab543..f2237a07160d 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -192,8 +192,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { extensionDefs(currentOwner.companionModule) = new mutable.ListBuffer[Tree] currentOwner.primaryConstructor.makeNotPrivate(NoSymbol) // SI-7859 make param accessors accessible so the erasure can generate unbox operations. - val paramAccessors = currentOwner.info.decls.filter(sym => sym.isParamAccessor && sym.isMethod) - paramAccessors.foreach(_.makeNotPrivate(currentOwner)) + currentOwner.info.decls.foreach(sym => if (sym.isParamAccessor && sym.isMethod) sym.makeNotPrivate(currentOwner)) super.transform(tree) } else if (currentOwner.isStaticOwner) { super.transform(tree) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 8943ec810d7e..2773ee19cff1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -463,7 +463,7 @@ trait Namers extends MethodSynthesis { // opening up the package object on the classpath at all if one exists in source. if (m.isPackageObject) { val packageScope = m.enclosingPackageClass.rawInfo.decls - packageScope.filter(_.owner != m.enclosingPackageClass).toList.foreach(packageScope unlink _) + packageScope.foreach(mem => if (mem.owner != m.enclosingPackageClass) packageScope unlink mem) } updatePosFlags(m, tree.pos, moduleFlags) setPrivateWithin(tree, m) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 2d454c2fe6cd..a5a680d13523 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -172,12 +172,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // This has become noisy with implicit classes. 
if (settings.warnPolyImplicitOverload && settings.developer) { - clazz.info.decls filter (x => x.isImplicit && x.typeParams.nonEmpty) foreach { sym => + clazz.info.decls.foreach(sym => if (sym.isImplicit && sym.typeParams.nonEmpty) { // implicit classes leave both a module symbol and a method symbol as residue val alts = clazz.info.decl(sym.name).alternatives filterNot (_.isModule) if (alts.size > 1) alts foreach (x => reporter.warning(x.pos, "parameterized overloaded implicit methods are not visible as view bounds")) - } + }) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index dcf14612c9c8..0b55a732d8e4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2391,19 +2391,20 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } // The block is an anonymous class definitions/instantiation pair // -> members that are hidden by the type of the block are made private - val toHide = ( - classDecls filter (member => - member.isTerm - && member.isPossibleInRefinement - && member.isPublic - && !matchesVisibleMember(member) - ) map (member => member - resetFlag (PROTECTED | LOCAL) - setFlag (PRIVATE | SYNTHETIC_PRIVATE) - setPrivateWithin NoSymbol - ) - ) - syntheticPrivates ++= toHide + classDecls foreach { toHide => + if (toHide.isTerm + && toHide.isPossibleInRefinement + && toHide.isPublic + && !matchesVisibleMember(toHide)) { + (toHide + resetFlag (PROTECTED | LOCAL) + setFlag (PRIVATE | SYNTHETIC_PRIVATE) + setPrivateWithin NoSymbol) + + syntheticPrivates += toHide + } + } + case _ => } } @@ -3641,10 +3642,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper reportAnnotationError(MultipleArgumentListForAnnotationError(ann)) } else { - val annScope = annType.decls - .filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined) + val annScopeJava = + if (isJava) annType.decls.filter(sym => sym.isMethod && !sym.isConstructor && sym.isJavaDefined) + else EmptyScope // annScopeJava is only used if isJava + val names = mutable.Set[Symbol]() - names ++= (if (isJava) annScope.iterator + names ++= (if (isJava) annScopeJava.iterator else typedFun.tpe.params.iterator) def hasValue = names exists (_.name == nme.value) @@ -3655,7 +3658,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val nvPairs = args map { case arg @ AssignOrNamedArg(Ident(name), rhs) => - val sym = if (isJava) annScope.lookup(name) + val sym = if (isJava) annScopeJava.lookup(name) else findSymbol(typedFun.tpe.params)(_.name == name) if (sym == NoSymbol) { reportAnnotationError(UnknownAnnotationNameError(arg, name)) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index ca6c893d130d..fe6d88e7c749 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -863,12 +863,13 @@ trait Definitions extends api.StandardDefinitions { // Scopes() // must filter out "universal" members (getClass is deferred for some reason) val deferredMembers = ( - tp membersBasedOnFlags (excludedFlags = BridgeAndPrivateFlags, requiredFlags = METHOD) - filter (mem => mem.isDeferredNotJavaDefault && !isUniversalMember(mem)) // TODO: test + tp.membersBasedOnFlags(excludedFlags = BridgeAndPrivateFlags, requiredFlags = METHOD).toList.filter( + mem => 
mem.isDeferredNotJavaDefault && !isUniversalMember(mem) + ) // TODO: test ) // if there is only one, it's monomorphic and has a single argument list - if (deferredMembers.size == 1 && + if (deferredMembers.lengthCompare(1) == 0 && deferredMembers.head.typeParams.isEmpty && deferredMembers.head.info.paramSectionCount == 1) deferredMembers.head diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 49202cd71e2d..97b7f239f03a 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -2036,11 +2036,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => } } } - private final def caseFieldAccessorsUnsorted: List[Symbol] = - (info.decls filter (_.isCaseAccessorMethod)).toList + private final def caseFieldAccessorsUnsorted: List[Symbol] = info.decls.toList.filter(_.isCaseAccessorMethod) - final def constrParamAccessors: List[Symbol] = - info.decls.filter(sym => !sym.isMethod && sym.isParamAccessor).toList + final def constrParamAccessors: List[Symbol] = info.decls.toList.filter(sym => !sym.isMethod && sym.isParamAccessor) /** The symbol accessed by this accessor (getter or setter) function. */ final def accessed: Symbol = { From 6cd356a9ffc2054f2a3d729a47ceae13c7575e09 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Mon, 30 May 2016 22:45:51 -0700 Subject: [PATCH 0106/2793] Compute constrParamAccessors once. It's expensive --- .../scala/tools/nsc/typechecker/Typers.scala | 48 ++++++++++--------- 1 file changed, 26 insertions(+), 22 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 0b55a732d8e4..72da4498c761 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2068,35 +2068,39 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ => (call, Nil) } - val (superConstr, superArgs) = decompose(rhs) - assert(superConstr.symbol ne null, superConstr)//debug - def superClazz = superConstr.symbol.owner - def superParamAccessors = superClazz.constrParamAccessors // associate superclass paramaccessors with their aliases - if (superConstr.symbol.isPrimaryConstructor && !superClazz.isJavaDefined && sameLength(superParamAccessors, superArgs)) { - for ((superAcc, superArg @ Ident(name)) <- superParamAccessors zip superArgs) { - if (mexists(vparamss)(_.symbol == superArg.symbol)) { - val alias = ( - superAcc.initialize.alias - orElse (superAcc getterIn superAcc.owner) - filter (alias => superClazz.info.nonPrivateMember(alias.name) == alias) - ) - if (alias.exists && !alias.accessed.isVariable && !isRepeatedParamType(alias.accessed.info)) { - val ownAcc = clazz.info decl name suchThat (_.isParamAccessor) match { - case acc if !acc.isDeferred && acc.hasAccessorFlag => acc.accessed - case acc => acc - } - ownAcc match { - case acc: TermSymbol if !acc.isVariable && !isByNameParamType(acc.info) => - debuglog(s"$acc has alias ${alias.fullLocationString}") - acc setAlias alias - case _ => + val (superConstr, superArgs) = decompose(rhs) + if (superConstr.symbol.isPrimaryConstructor) { + val superClazz = superConstr.symbol.owner + if (!superClazz.isJavaDefined) { + val superParamAccessors = superClazz.constrParamAccessors + if (sameLength(superParamAccessors, superArgs)) { + for ((superAcc, superArg@Ident(name)) <- superParamAccessors zip superArgs) { + if (mexists(vparamss)(_.symbol == 
superArg.symbol)) { + val alias = ( + superAcc.initialize.alias + orElse (superAcc getterIn superAcc.owner) + filter (alias => superClazz.info.nonPrivateMember(alias.name) == alias) + ) + if (alias.exists && !alias.accessed.isVariable && !isRepeatedParamType(alias.accessed.info)) { + val ownAcc = clazz.info decl name suchThat (_.isParamAccessor) match { + case acc if !acc.isDeferred && acc.hasAccessorFlag => acc.accessed + case acc => acc + } + ownAcc match { + case acc: TermSymbol if !acc.isVariable && !isByNameParamType(acc.info) => + debuglog(s"$acc has alias ${alias.fullLocationString}") + acc setAlias alias + case _ => + } + } } } } } } + pending.foreach(ErrorUtils.issueTypeError) } From 3f685073923d76de08ffdba78075f2267ee56133 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 2 Jun 2016 11:18:02 +1000 Subject: [PATCH 0107/2793] Drop local suffix in lambda impl method name --- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 80a707461c36..14ee7d7a7897 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -283,7 +283,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { } val ownerName = nonLocalEnclosingMember(fun.symbol.originalOwner).name match { case nme.CONSTRUCTOR => nme.NEWkw // do as javac does for the suffix, prefer "new" to "$lessinit$greater$1" - case x => x + case x => x.dropLocal } val newName = nme.ANON_FUN_NAME.append(nme.NAME_JOIN_STRING).append(ownerName) mkMethodForFunctionBody(localTyper)(owner, fun, newName)(additionalFlags = ARTIFACT) From 7952cd1651b6ed1fe0cd68198e49cf90423242d8 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 1 Jun 2016 23:25:35 +0200 Subject: [PATCH 0108/2793] Fix comparisons involving NaN Floating point comparisons involving NaN should always return false, except for !=. Fixes a regression introduced by #4963. --- .../tools/nsc/backend/ScalaPrimitives.scala | 4 +- .../nsc/backend/jvm/BCodeBodyBuilder.scala | 26 ++++++------- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 1 + .../junit/scala/lang/primitives/NaNTest.scala | 38 +++++++++++++++++++ 4 files changed, 52 insertions(+), 17 deletions(-) create mode 100644 test/junit/scala/lang/primitives/NaNTest.scala diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala index 00771b6b8c2b..dfd5b07a3b30 100644 --- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala +++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala @@ -61,8 +61,8 @@ abstract class ScalaPrimitives { final val NE = 43 // x != y final val LT = 44 // x < y final val LE = 45 // x <= y - final val GE = 46 // x > y - final val GT = 47 // x >= y + final val GT = 46 // x > y + final val GE = 47 // x >= y // Boolean unary operations final val ZNOT = 50 // !x diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index 5d152ef0e8e2..d7106ae908ff 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -1110,22 +1110,19 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } /* Emit code to compare the two top-most stack values using the 'op' operator. 
*/ - private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label) { - if (targetIfNoJump == success) genCJUMP(failure, success, op.negate, tk, targetIfNoJump) + private def genCJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false) { + if (targetIfNoJump == success) genCJUMP(failure, success, op.negate, tk, targetIfNoJump, negated = !negated) else { if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT bc.emitIF_ICMP(op, success) } else if (tk.isRef) { // REFERENCE(_) | ARRAY(_) bc.emitIF_ACMP(op, success) } else { + def useCmpG = if (negated) op == TestOp.GT || op == TestOp.GE else op == TestOp.LT || op == TestOp.LE (tk: @unchecked) match { case LONG => emit(asm.Opcodes.LCMP) - case FLOAT => - if (op == TestOp.LT || op == TestOp.LE) emit(asm.Opcodes.FCMPG) - else emit(asm.Opcodes.FCMPL) - case DOUBLE => - if (op == TestOp.LT || op == TestOp.LE) emit(asm.Opcodes.DCMPG) - else emit(asm.Opcodes.DCMPL) + case FLOAT => emit(if (useCmpG) asm.Opcodes.FCMPG else asm.Opcodes.FCMPL) + case DOUBLE => emit(if (useCmpG) asm.Opcodes.DCMPG else asm.Opcodes.DCMPL) } bc.emitIF(op, success) } @@ -1134,8 +1131,8 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } /* Emits code to compare (and consume) stack-top and zero using the 'op' operator */ - private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label) { - if (targetIfNoJump == success) genCZJUMP(failure, success, op.negate, tk, targetIfNoJump) + private def genCZJUMP(success: asm.Label, failure: asm.Label, op: TestOp, tk: BType, targetIfNoJump: asm.Label, negated: Boolean = false) { + if (targetIfNoJump == success) genCZJUMP(failure, success, op.negate, tk, targetIfNoJump, negated = !negated) else { if (tk.isIntSizedType) { // BOOL, BYTE, CHAR, SHORT, or INT bc.emitIF(op, success) @@ -1145,18 +1142,17 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { case TestOp.NE => bc emitIFNONNULL success } } else { + def useCmpG = if (negated) op == TestOp.GT || op == TestOp.GE else op == TestOp.LT || op == TestOp.LE (tk: @unchecked) match { case LONG => emit(asm.Opcodes.LCONST_0) emit(asm.Opcodes.LCMP) case FLOAT => emit(asm.Opcodes.FCONST_0) - if (op == TestOp.LT || op == TestOp.LE) emit(asm.Opcodes.FCMPG) - else emit(asm.Opcodes.FCMPL) + emit(if (useCmpG) asm.Opcodes.FCMPG else asm.Opcodes.FCMPL) case DOUBLE => emit(asm.Opcodes.DCONST_0) - if (op == TestOp.LT || op == TestOp.LE) emit(asm.Opcodes.DCMPG) - else emit(asm.Opcodes.DCMPL) + emit(if (useCmpG) asm.Opcodes.DCMPG else asm.Opcodes.DCMPL) } bc.emitIF(op, success) } @@ -1171,8 +1167,8 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { case scalaPrimitives.NE => TestOp.NE case scalaPrimitives.LT => TestOp.LT case scalaPrimitives.LE => TestOp.LE - case scalaPrimitives.GE => TestOp.GE case scalaPrimitives.GT => TestOp.GT + case scalaPrimitives.GE => TestOp.GE } /** Some useful equality helpers. 
*/ diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index a5744983b280..5a5747c81f65 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -1336,6 +1336,7 @@ object BCodeHelpers { } object TestOp { + // the order here / op numbers are important to get the correct result when calling opcodeIF val EQ = new TestOp(0) val NE = new TestOp(1) val LT = new TestOp(2) diff --git a/test/junit/scala/lang/primitives/NaNTest.scala b/test/junit/scala/lang/primitives/NaNTest.scala new file mode 100644 index 000000000000..f4c42583952e --- /dev/null +++ b/test/junit/scala/lang/primitives/NaNTest.scala @@ -0,0 +1,38 @@ +package scala.lang.primitives + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.testing.RunTesting + +@RunWith(classOf[JUnit4]) +class NaNTest extends RunTesting { + + @Test + def compNaNFalse(): Unit = { + def code(tp: String) = + s"""val n = $tp.NaN + |def ne(x: $tp, y: $tp) = x != y + |val fs: List[($tp, $tp) => Boolean] = List(_ < _, _ <= _, _ > _, _ >= _, _ == _, (x, y) => !ne(x, y)) + |val vs = List[$tp](n, 1, -1, 0) + |for (f <- fs; v <- vs; (x, y) <- List((n, v), (v, n))) yield f(x, y) + """.stripMargin + + runner.run[List[Boolean]](code("Double")).foreach(assertFalse) + runner.run[List[Boolean]](code("Float")).foreach(assertFalse) + } + + @Test + def genericEqNe(): Unit = { + def code(tp: String) = + s"""def a[T](x: T, y: T) = x == y + |def b[T](x: T, y: T) = x != y + |val n = $tp.NaN + |a(n, n) :: a(n, 0) :: a (0, n) :: !b(n, n) :: !b(n, 0) :: !b(0, n) :: Nil + """.stripMargin + runner.run[List[Boolean]](code("Double")).foreach(assertFalse) + runner.run[List[Boolean]](code("Float")).foreach(assertFalse) + } +} From bd588ff2191ea71eab251c16b8ca5228bf02d0b1 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 2 Jun 2016 09:05:47 +0200 Subject: [PATCH 0109/2793] scala version in benchmark project --- test/benchmarks/build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/benchmarks/build.sbt b/test/benchmarks/build.sbt index 4806ecdde80c..31cee701ad61 100644 --- a/test/benchmarks/build.sbt +++ b/test/benchmarks/build.sbt @@ -1,5 +1,5 @@ scalaHome := Some(file("../../build/pack")) -scalaVersion := "2.11.8" +scalaVersion := "2.12.0-dev" scalacOptions ++= Seq("-feature", "-Yopt:l:classpath") lazy val root = (project in file(".")). From fe61bcf99c0c10054066f7ff41e4c7f44cf02e5d Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 27 May 2016 14:43:22 -0700 Subject: [PATCH 0110/2793] SI-9104 Autodetect raw pastage If `-raw` is not supplied explicitly to REPL `:paste`, see if the code text starts with `package` keyword or else see if it parses to a named package (to cope with leading commentary). In that case, take it as raw. But parse only on suspect comment slash. It's only worth parsing for a package if there's a chance that package keyword is buried behind comments. Small refactors to the `paste` object. 
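A rough sketch of the detection logic (helper and callback names below are illustrative; the real code lives in `ILoop.paste` and defers to `intp.parse.packaged`): a leading `package` keyword marks the paste as raw directly, and a leading comment slash triggers a parse to see whether a named package is buried behind the commentary.

```
object RawPasteDetect {
  private val leadingElement = raw"(?s)\s*(package\s|/)".r

  // `parsesToNamedPackage` stands in for the REPL's parser check
  def isPackaged(code: String, parsesToNamedPackage: String => Boolean): Boolean =
    leadingElement.findPrefixMatchOf(code) match {
      case Some(m) => if (m.group(1) == "/") parsesToNamedPackage(code) else true
      case None    => false
    }
}
```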
--- .../scala/tools/nsc/interpreter/ILoop.scala | 45 +++++++++++-------- .../scala/tools/nsc/interpreter/IMain.scala | 16 +++++-- test/files/run/repl-paste-b.check | 14 ++++++ test/files/run/repl-paste-b.scala | 13 ++++++ test/files/run/repl-paste-raw-b.pastie | 8 ++++ test/files/run/repl-paste-raw-b.scala | 18 ++++++++ test/files/run/repl-paste-raw-c.pastie | 5 +++ test/files/run/repl-paste-raw-c.scala | 16 +++++++ test/files/run/repl-paste-raw.pastie | 4 +- test/files/run/repl-paste-raw.scala | 2 +- 10 files changed, 117 insertions(+), 24 deletions(-) create mode 100644 test/files/run/repl-paste-b.check create mode 100644 test/files/run/repl-paste-b.scala create mode 100644 test/files/run/repl-paste-raw-b.pastie create mode 100644 test/files/run/repl-paste-raw-b.scala create mode 100644 test/files/run/repl-paste-raw-c.pastie create mode 100644 test/files/run/repl-paste-raw-c.scala diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index 8f19b4860a2d..66a5f08e964b 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -490,11 +490,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) def editCommand(what: String): Result = editCommand(what, Properties.envOrNone("EDITOR")) def editCommand(what: String, editor: Option[String]): Result = { - def diagnose(code: String) = { - echo("The edited code is incomplete!\n") - val errless = intp compileSources new BatchSourceFile("", s"object pastel {\n$code\n}") - if (errless) echo("The compiler reports no errors.") - } + def diagnose(code: String): Unit = paste.incomplete("The edited code is incomplete!\n", "", code) def edit(text: String): Result = editor match { case Some(ed) => @@ -756,21 +752,13 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) text } def interpretCode() = { - val res = intp.withLabel(label)(intp interpret code) - // if input is incomplete, let the compiler try to say why - if (res == IR.Incomplete) { - echo("The pasted code is incomplete!\n") - // Remembrance of Things Pasted in an object - val errless = intp compileSources new BatchSourceFile(label, s"object pastel {\n$code\n}") - if (errless) echo("...but compilation found no error? Good luck with that.") - } - } - def compileCode() = { - val errless = intp compileSources new BatchSourceFile(label, code) - if (!errless) echo("There were compilation errors!") + if (intp.withLabel(label)(intp interpret code) == IR.Incomplete) + paste.incomplete("The pasted code is incomplete!\n", label, code) } + def compileCode() = paste.compilePaste(label = label, code = code) + if (code.nonEmpty) { - if (raw) compileCode() else interpretCode() + if (raw || paste.isPackaged(code)) compileCode() else interpretCode() } result } @@ -778,6 +766,27 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) private object paste extends Pasted(prompt) { def interpret(line: String) = intp interpret line def echo(message: String) = ILoop.this echo message + + val leadingElement = raw"(?s)\s*(package\s|/)".r + def isPackaged(code: String): Boolean = { + leadingElement.findPrefixMatchOf(code) + .map(m => if (m.group(1) == "/") intp.parse.packaged(code) else true) + .getOrElse(false) + } + + // if input is incomplete, wrap and compile for diagnostics. 
+ def incomplete(message: String, label: String, code: String): Boolean = { + echo(message) + val errless = intp.compileSources(new BatchSourceFile(label, s"object pastel {\n$code\n}")) + if (errless) echo("No error found in incomplete source.") + errless + } + + def compilePaste(label: String, code: String): Boolean = { + val errless = intp.compileSources(new BatchSourceFile(label, code)) + if (!errless) echo("There were compilation errors!") + errless + } } private object invocation { diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index 2f20a1cd0aff..44784aa9534b 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -1101,7 +1101,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends case class Incomplete(trees: List[Tree]) extends Result case class Success(trees: List[Tree]) extends Result - def apply(line: String): Result = debugging(s"""parse("$line")""") { + def apply(line: String): Result = debugging(s"""parse("$line")""") { var isIncomplete = false def parse = { reporter.reset() @@ -1110,8 +1110,18 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends else if (isIncomplete) Incomplete(trees) else Success(trees) } - currentRun.parsing.withIncompleteHandler((_, _) => isIncomplete = true) {parse} - + currentRun.parsing.withIncompleteHandler((_, _) => isIncomplete = true)(parse) + } + // code has a named package + def packaged(line: String): Boolean = { + def parses = { + reporter.reset() + val tree = newUnitParser(line).parse() + !reporter.hasErrors && { + tree match { case PackageDef(Ident(id), _) => id != nme.EMPTY_PACKAGE_NAME case _ => false } + } + } + beSilentDuring(parses) } } diff --git a/test/files/run/repl-paste-b.check b/test/files/run/repl-paste-b.check new file mode 100644 index 000000000000..2e205d48d65f --- /dev/null +++ b/test/files/run/repl-paste-b.check @@ -0,0 +1,14 @@ + +scala> :paste < EOF +// Entering paste mode (EOF to finish) + +object X +EOF + +// Exiting paste mode, now interpreting. + +defined object X + +scala> assert(X.getClass.getName.contains("line")) + +scala> :quit diff --git a/test/files/run/repl-paste-b.scala b/test/files/run/repl-paste-b.scala new file mode 100644 index 000000000000..718f7d9e1703 --- /dev/null +++ b/test/files/run/repl-paste-b.scala @@ -0,0 +1,13 @@ +import scala.tools.partest.ReplTest + +// confirm X not in empty package +object Test extends ReplTest { + def code = + """ +:paste < EOF +object X +EOF +assert(X.getClass.getName.contains("line")) +""" + +} diff --git a/test/files/run/repl-paste-raw-b.pastie b/test/files/run/repl-paste-raw-b.pastie new file mode 100644 index 000000000000..f13b4bcf8bdb --- /dev/null +++ b/test/files/run/repl-paste-raw-b.pastie @@ -0,0 +1,8 @@ + +// a raw paste is not a script +// hence it can be packaged + +package brown_paper + +// these are a few of my favorite things +case class Gift (hasString: Boolean) diff --git a/test/files/run/repl-paste-raw-b.scala b/test/files/run/repl-paste-raw-b.scala new file mode 100644 index 000000000000..d1c7692f2f7f --- /dev/null +++ b/test/files/run/repl-paste-raw-b.scala @@ -0,0 +1,18 @@ + +import scala.tools.partest.SessionTest + +object Test extends SessionTest { + def session = +s"""| + |scala> :paste $pastie + |Pasting file $pastie... 
+ | + |scala> val favoriteThing = brown_paper.Gift(true) + |favoriteThing: brown_paper.Gift = Gift(true) + | + |scala> favoriteThing.hasString + |res0: Boolean = true + | + |scala> :quit""" + def pastie = testPath changeExtension "pastie" +} diff --git a/test/files/run/repl-paste-raw-c.pastie b/test/files/run/repl-paste-raw-c.pastie new file mode 100644 index 000000000000..364d8cef4be6 --- /dev/null +++ b/test/files/run/repl-paste-raw-c.pastie @@ -0,0 +1,5 @@ + +// not actually a candidate for raw paste + +val nope = 42 + diff --git a/test/files/run/repl-paste-raw-c.scala b/test/files/run/repl-paste-raw-c.scala new file mode 100644 index 000000000000..600ac4d2f084 --- /dev/null +++ b/test/files/run/repl-paste-raw-c.scala @@ -0,0 +1,16 @@ + +import scala.tools.partest.SessionTest + +object Test extends SessionTest { + def session = +s"""| + |scala> :paste -raw $pastie + |Pasting file $pastie... + |$pastie:3: error: expected class or object definition + |val nope = 42 + |^ + |There were compilation errors! + | + |scala> :quit""" + def pastie = testPath changeExtension "pastie" +} diff --git a/test/files/run/repl-paste-raw.pastie b/test/files/run/repl-paste-raw.pastie index f13b4bcf8bdb..a4a570aaa2cf 100644 --- a/test/files/run/repl-paste-raw.pastie +++ b/test/files/run/repl-paste-raw.pastie @@ -1,8 +1,8 @@ +package brown_paper + // a raw paste is not a script // hence it can be packaged -package brown_paper - // these are a few of my favorite things case class Gift (hasString: Boolean) diff --git a/test/files/run/repl-paste-raw.scala b/test/files/run/repl-paste-raw.scala index 9bd5e8e63e12..d1c7692f2f7f 100644 --- a/test/files/run/repl-paste-raw.scala +++ b/test/files/run/repl-paste-raw.scala @@ -4,7 +4,7 @@ import scala.tools.partest.SessionTest object Test extends SessionTest { def session = s"""| - |scala> :paste -raw $pastie + |scala> :paste $pastie |Pasting file $pastie... | |scala> val favoriteThing = brown_paper.Gift(true) From f07019ffa56ec2dfab8ab0d9a83133005761a877 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 15 Apr 2016 14:05:02 +1000 Subject: [PATCH 0111/2793] SI-9390 Avoid needless outer capture with local classes An existing optimization in `Constructors` elides the outer field in member and local classes, if the class doesn't use the outer reference. (Member classes also need to be final, which is a secret handshake to say we're also happy to weaken prefix matching in the pattern matcher.) That optimization leaves the constructor signature as is: the constructor still accepts the outer instance, but does not store it. For member classes, this means that we can separately compile code that calls the constructor. Local classes need not be hampered by this constraint, we could remove the outer instance from the constructor call too. Why would we want to do this? Let's look at the case before and after this commit. 
Before: ``` class C extends Object { def foo(): Function1 = $anonfun(); final def $anonfun$foo$1($this: C, x: Object): Object = new <$anon: Object>($this); def <init>(): C = { C.super.<init>(); () } }; final class anon$1 extends Object { def <init>($outer: C): <$anon: Object> = { anon$1.super.<init>(); () } } ``` After: ``` class C extends Object { def foo(): Function1 = $anonfun(); final def $anonfun$foo$1(x: Object): Object = new <$anon: Object>(null); def <init>(): C = { C.super.<init>(); () } }; final class anon$1 extends Object { def <init>($outer: C): <$anon: Object> = { anon$1.super.<init>(); () } } ``` However, the status quo means that a lambda that instantiates such a class still has to capture the enclosing instance, only to pass it to a constructor that discards it. Eliding the outer argument from the constructor call removes that needless capture. This in turn makes lambdas that refer to such classes serializable even when the outer class is not itself serializable. I have not attempted to extend this to calls to secondary constructors. --- .../tools/nsc/transform/Constructors.scala | 3 +++ .../tools/nsc/transform/Delambdafy.scala | 8 +++++++ .../tools/nsc/transform/ExplicitOuter.scala | 4 +--- .../reflect/internal/StdAttachments.scala | 5 +++++ .../scala/reflect/internal/StdNames.scala | 1 + .../scala/reflect/internal/Symbols.scala | 3 +++ .../reflect/runtime/JavaUniverseForce.scala | 1 + test/files/jvm/t9105.check | 14 ++----------- test/files/run/t9390c.scala | 21 +++++++++++++++++++ test/files/run/t9390d.scala | 12 +++++++++++ 10 files changed, 57 insertions(+), 15 deletions(-) create mode 100644 test/files/run/t9390c.scala create mode 100644 test/files/run/t9390d.scala diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index 636fb08b89ef..971a55f763c8 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -715,6 +715,9 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { primaryConstrBody.expr) }) + if (omittableAccessor.exists(_.isOuterField) && !constructorStats.exists(_.exists { case i: Ident if i.symbol.isOuterParam => true; case _ => false})) + primaryConstructor.symbol.updateAttachment(OuterArgCanBeElided) + val constructors = primaryConstructor :: auxConstructors // Unlink all fields that can be dropped from class scope diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index a8933a9ee6b9..2dd8def53e13 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -279,10 +279,15 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre if (!dd.symbol.hasFlag(STATIC) && !methodReferencesThis(dd.symbol)) dd.symbol.setFlag(STATIC) super.transform(tree) + case Apply(fun, outer :: rest) if shouldElideOuterArg(fun.symbol, outer) => + val nullOuter = gen.mkZero(outer.tpe) + treeCopy.Apply(tree, transform(fun), nullOuter :: transformTrees(rest)) case _ => super.transform(tree) } } // DelambdafyTransformer + private def shouldElideOuterArg(fun: Symbol, outerArg: Tree): Boolean = + fun.isConstructor && treeInfo.isQualifierSafeToElide(outerArg) && fun.hasAttachment[OuterArgCanBeElided.type] // A traverser that finds symbols used but not defined in the given Tree // TODO freeVarTraverser in LambdaLift does a very similar task.
With some @@ -368,6 +373,9 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre case Apply(sel @ Select(This(_), _), args) if sel.symbol.isLiftedMethod => if (currentMethod.exists) liftedMethodReferences(currentMethod) += sel.symbol super.traverseTrees(args) + case Apply(fun, outer :: rest) if shouldElideOuterArg(fun.symbol, outer) => + super.traverse(fun) + super.traverseTrees(rest) case This(_) => if (currentMethod.exists && tree.symbol == currentMethod.enclClass) { debuglog(s"$currentMethod directly refers to 'this'") diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 3d6fad4238ac..411ff6b9bec2 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -67,8 +67,6 @@ abstract class ExplicitOuter extends InfoTransform result } - private val innerClassConstructorParamName: TermName = newTermName("arg" + nme.OUTER) - class RemoveBindingsTransformer(toRemove: Set[Symbol]) extends Transformer { override def transform(tree: Tree) = tree match { case Bind(_, body) if toRemove(tree.symbol) => super.transform(body) @@ -169,7 +167,7 @@ abstract class ExplicitOuter extends InfoTransform val paramsWithOuter = if (sym.isClassConstructor && isInner(sym.owner)) // 1 - sym.newValueParameter(innerClassConstructorParamName, sym.pos).setInfo(sym.owner.outerClass.thisType) :: params + sym.newValueParameter(nme.OUTER_ARG, sym.pos).setInfo(sym.owner.outerClass.thisType) :: params else params if ((resTpTransformed ne resTp) || (paramsWithOuter ne params)) MethodType(paramsWithOuter, resTpTransformed) diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala index ef95b3884317..76e34153c9f1 100644 --- a/src/reflect/scala/reflect/internal/StdAttachments.scala +++ b/src/reflect/scala/reflect/internal/StdAttachments.scala @@ -71,4 +71,9 @@ trait StdAttachments { abstract class InlineAnnotatedAttachment case object NoInlineCallsiteAttachment extends InlineAnnotatedAttachment case object InlineCallsiteAttachment extends InlineAnnotatedAttachment + + /** Attached to a local class that has its outer field elided. A `null` constant may be passed + * in place of the outer parameter, can help callers to avoid capturing the outer instance. 
+ */ + case object OuterArgCanBeElided extends PlainAttachment } diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index c93ecac3fad8..d96d06ca9411 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -364,6 +364,7 @@ trait StdNames { val MODULE_INSTANCE_FIELD: NameType = NameTransformer.MODULE_INSTANCE_NAME // "MODULE$" val OUTER: NameType = "$outer" val OUTER_LOCAL: NameType = OUTER.localName + val OUTER_ARG: NameType = "arg" + OUTER val OUTER_SYNTH: NameType = "" // emitted by virtual pattern matcher, replaced by outer accessor in explicitouter val ROOTPKG: NameType = "_root_" val SELECTOR_DUMMY: NameType = "" diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 3b886d357fde..e2fb82718698 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -914,6 +914,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Is this symbol an accessor method for outer? */ final def isOuterField = isArtifact && (unexpandedName == nme.OUTER_LOCAL) + /** Is this symbol an outer parameter in a constructor */ + final def isOuterParam = isParameter && owner.isConstructor && (name == nme.OUTER_ARG || name == nme.OUTER) + /** Does this symbol denote a stable value, ignoring volatility? * * Stability and volatility are checked separately to allow volatile paths in patterns that amount to equality checks. SI-6815 diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index 28222cf9a7fd..0a90a141d3c0 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -45,6 +45,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.SubpatternsAttachment this.NoInlineCallsiteAttachment this.InlineCallsiteAttachment + this.OuterArgCanBeElided this.noPrint this.typeDebug this.Range diff --git a/test/files/jvm/t9105.check b/test/files/jvm/t9105.check index 48439ee004fb..9447e0cf2916 100644 --- a/test/files/jvm/t9105.check +++ b/test/files/jvm/t9105.check @@ -1,18 +1,8 @@ -#partest -Ydelambdafy:inline -(class C$$anonfun$1$A$1,class C$$anonfun$1,null) -(class C$$anonfun$1$B$1,class C$$anonfun$1,private final java.lang.Object C$$anonfun$1.m$1()) -(class C$$anonfun$1$C$1,class C$$anonfun$1,null) -(class C$$anonfun$1$$anonfun$2$D$1,class C$$anonfun$1$$anonfun$2,null) -(class C$$anonfun$met$1$E$1,class C$$anonfun$met$1,null) -(class C$$anonfun$met$1$F$1,class C$$anonfun$met$1,private final java.lang.Object C$$anonfun$met$1.m$2()) -(class C$$anonfun$met$1$G$1,class C$$anonfun$met$1,null) -(class C$$anonfun$met$1$$anonfun$3$H$1,class C$$anonfun$met$1$$anonfun$3,null) -#partest !-Ydelambdafy:inline (class C$A$1,class C,null) -(class C$B$1,class C,private final java.lang.Object C.m$1()) +(class C$B$1,class C,private static final java.lang.Object C.m$1()) (class C$C$1,class C,null) (class C$D$1,class C,null) (class C$E$1,class C,public scala.Function0 C.met()) -(class C$F$1,class C,private final java.lang.Object C.m$2()) +(class C$F$1,class C,private static final java.lang.Object C.m$2()) (class C$G$1,class C,public scala.Function0 C.met()) (class C$H$1,class C,public scala.Function0 C.met()) diff --git a/test/files/run/t9390c.scala b/test/files/run/t9390c.scala new file mode 100644 index 
000000000000..db39da57cddd --- /dev/null +++ b/test/files/run/t9390c.scala @@ -0,0 +1,21 @@ +class C { // C is not serializable + def foo = { + { (x: Any) => new Object {} } + } +} +object Test { + def main(args: Array[String]): Unit = { + val c = new C + val f = c.foo + val f1 = serializeDeserialize(f) + } + + def serializeDeserialize[T <: AnyRef](obj: T): T = { + import java.io._ + val buffer = new ByteArrayOutputStream + val out = new ObjectOutputStream(buffer) + out.writeObject(obj) + val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray)) + in.readObject.asInstanceOf[T] + } +} diff --git a/test/files/run/t9390d.scala b/test/files/run/t9390d.scala new file mode 100644 index 000000000000..3c5de3abf73d --- /dev/null +++ b/test/files/run/t9390d.scala @@ -0,0 +1,12 @@ +class C { // C is not serializable + def foo: () => Any = { + { () => class UseOuterInConstructor { C.this.toString }; new UseOuterInConstructor : Any} + } +} +object Test { + def main(args: Array[String]): Unit = { + val c = new C + val f = c.foo + f() // Doesn't NPE, as we didn't elide the outer instance in the constructor call. + } +} From 5667ff46c134878e35edca1ff57b8007ebec4f9a Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 2 Jun 2016 18:53:04 -0700 Subject: [PATCH 0112/2793] Prohibit @native method in trait On the JVM, a @native interface method results in a VerifyError. Other platforms could decide to be more permissive, but it seems like allowing them in classes is enough. --- .../tools/nsc/typechecker/RefChecks.scala | 28 ++++++++++++------- src/library/scala/native.scala | 9 ++++-- test/files/neg/trait-no-native.check | 4 +++ test/files/neg/trait-no-native.scala | 4 +++ 4 files changed, 32 insertions(+), 13 deletions(-) create mode 100644 test/files/neg/trait-no-native.check create mode 100644 test/files/neg/trait-no-native.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index a5a680d13523..d1764ea4829e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1659,24 +1659,32 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // inside annotations. 
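For illustration, the distinction this enforces (the trait case now reports the new error):

```
class HasNative {
  @native def now(): Long = ???   // ok in a class: the body is type checked, then discarded for the native stub
}

trait T {
  // @native def now(): Long = ??? // error: A trait cannot define a native method.
}
```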
applyRefchecksToAnnotations(tree) var result: Tree = tree match { - case DefDef(_, _, _, _, _, EmptyTree) if sym hasAnnotation NativeAttr => - sym resetFlag DEFERRED - transform(deriveDefDef(tree)(_ => typed(gen.mkSysErrorCall("native method stub")))) - - case ValDef(_, _, _, _) | DefDef(_, _, _, _, _, _) => + case vod: ValOrDefDef => checkDeprecatedOvers(tree) - checkInfiniteLoop(tree.asInstanceOf[ValOrDefDef]) + checkInfiniteLoop(vod) if (settings.warnNullaryUnit) checkNullaryMethodReturnType(sym) if (settings.warnInaccessible) { if (!sym.isConstructor && !sym.isEffectivelyFinalOrNotOverridden && !sym.isSynthetic) checkAccessibilityOfReferencedTypes(tree) } - tree match { - case dd: DefDef => checkByNameRightAssociativeDef(dd) - case _ => + vod match { + case dd: DefDef => + checkByNameRightAssociativeDef(dd) + + if (sym hasAnnotation NativeAttr) { + if (sym.owner.isTrait) { + reporter.error(tree.pos, "A trait cannot define a native method.") + tree + } else if (dd.rhs == EmptyTree) { + // pretend it had a stub implementation + sym resetFlag DEFERRED + deriveDefDef(dd)(_ => typed(gen.mkSysErrorCall("native method stub"))) + } else tree + } else tree + + case _ => tree } - tree case Template(parents, self, body) => localTyper = localTyper.atOwner(tree, currentOwner) diff --git a/src/library/scala/native.scala b/src/library/scala/native.scala index dbacc7861862..49d3ced805dd 100644 --- a/src/library/scala/native.scala +++ b/src/library/scala/native.scala @@ -16,8 +16,11 @@ package scala * @native def f(x: Int, y: List[Long]): String = ... * }}} * - * Method body is not generated if method is marked with `@native`, - * but it is type checked when present. + * A `@native` method is compiled to the platform's native method, + * while discarding the method's body (if any). The body will be type checked if present. * - * @since 2.6 */ + * A method marked @native must be a member of a class, not a trait (since 2.12). + * + * @since 2.6 + */ class native extends scala.annotation.StaticAnnotation {} diff --git a/test/files/neg/trait-no-native.check b/test/files/neg/trait-no-native.check new file mode 100644 index 000000000000..12bce4042dda --- /dev/null +++ b/test/files/neg/trait-no-native.check @@ -0,0 +1,4 @@ +trait-no-native.scala:3: error: A trait cannot define a native method. + @native def foo = ??? + ^ +one error found diff --git a/test/files/neg/trait-no-native.scala b/test/files/neg/trait-no-native.scala new file mode 100644 index 000000000000..463e604a48b7 --- /dev/null +++ b/test/files/neg/trait-no-native.scala @@ -0,0 +1,4 @@ +trait T { + // should not compile, because it would result in a VerifyError + @native def foo = ??? +} From be9c0febd65875615b318209e0aa994eb76c2011 Mon Sep 17 00:00:00 2001 From: Nicolas Stucki Date: Tue, 18 Aug 2015 10:11:29 +0200 Subject: [PATCH 0113/2793] SI-9737 [no-merge] Backport stringOf ParIterable Cherry-picked c5f3d3f286ee5c26c8ddcf10f6878058e8f7e040 Edited comment: in stringOf, let GenIterable subsume both Iterable and ParIterable. This change is required for Scala.js compatibility as it does not support parallel collections. 
Conflicts: src/library/scala/runtime/ScalaRunTime.scala --- src/library/scala/runtime/ScalaRunTime.scala | 4 +- .../scala/runtime/ScalaRunTimeTest.scala | 57 +++++++++++++++++++ 2 files changed, 59 insertions(+), 2 deletions(-) diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala index 20f067f34f6f..026d5edd293f 100644 --- a/src/library/scala/runtime/ScalaRunTime.scala +++ b/src/library/scala/runtime/ScalaRunTime.scala @@ -9,7 +9,7 @@ package scala package runtime -import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator } +import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator, GenIterable } import scala.collection.mutable.WrappedArray import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: } import scala.collection.generic.{ Sorted, IsTraversableLike } @@ -325,7 +325,7 @@ object ScalaRunTime { case x if useOwnToString(x) => x.toString case x: AnyRef if isArray(x) => arrayToString(x) case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")") - case x: Iterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") + case x: GenIterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")") diff --git a/test/junit/scala/runtime/ScalaRunTimeTest.scala b/test/junit/scala/runtime/ScalaRunTimeTest.scala index 9da197c71ace..e28deae786e2 100644 --- a/test/junit/scala/runtime/ScalaRunTimeTest.scala +++ b/test/junit/scala/runtime/ScalaRunTimeTest.scala @@ -67,4 +67,61 @@ class ScalaRunTimeTest { val c = new C() assertFalse(c.toString, isTuple(c)) } + + @Test + def testStringOf() { + import ScalaRunTime.stringOf + import scala.collection._ + import parallel.ParIterable + + assertEquals("null", stringOf(null)) + assertEquals( "\"\"", stringOf("")) + + assertEquals("abc", stringOf("abc")) + assertEquals("\" abc\"", stringOf(" abc")) + assertEquals("\"abc \"", stringOf("abc ")) + + assertEquals("""Array()""", stringOf(Array.empty[AnyRef])) + assertEquals("""Array()""", stringOf(Array.empty[Int])) + assertEquals("""Array(1, 2, 3)""", stringOf(Array(1, 2, 3))) + assertEquals("""Array(a, "", " c", null)""", stringOf(Array("a", "", " c", null))) + assertEquals("""Array(Array("", 1, Array(5)), Array(1))""", + stringOf(Array(Array("", 1, Array(5)), Array(1)))) + + val map = Map(1->"", 2->"a", 3->" a", 4->null) + assertEquals(s"""${map.stringPrefix}(1 -> "", 2 -> a, 3 -> " a", 4 -> null)""", stringOf(map)) + assertEquals(s"""${map.stringPrefix}(1 -> "", 2 -> a)""", stringOf(map, 2)) + + val iterable = Iterable("a", "", " c", null) + assertEquals(s"""${iterable.stringPrefix}(a, "", " c", null)""", stringOf(iterable)) + assertEquals(s"""${iterable.stringPrefix}(a, "")""", stringOf(iterable, 2)) + + val parIterable = ParIterable("a", "", " c", null) + assertEquals(s"""${parIterable.stringPrefix}(a, "", " c", null)""", stringOf(parIterable)) + assertEquals(s"""${parIterable.stringPrefix}(a, "")""", stringOf(parIterable, 2)) + + val traversable = new Traversable[Int] { + def foreach[U](f: Int => U): Unit = (0 to 3).foreach(f) + } + assertEquals(s"${traversable.stringPrefix}(0, 1, 2, 3)", 
stringOf(traversable)) + assertEquals(s"${traversable.stringPrefix}(0, 1)", stringOf(traversable, 2)) + + val tuple1 = Tuple1(0) + assertEquals("(0,)", stringOf(tuple1)) + assertEquals("(0,)", stringOf(tuple1, 0)) + + val tuple2 = Tuple2(0, 1) + assertEquals("(0,1)", stringOf(tuple2)) + assertEquals("(0,1)", stringOf(tuple2, 0)) + + val tuple3 = Tuple3(0, 1, 2) + assertEquals("(0,1,2)", stringOf(tuple3)) + assertEquals("(0,1,2)", stringOf(tuple3, 0)) + + val x = new Object { + override def toString(): String = "this is the stringOf string" + } + assertEquals(stringOf(x), "this is the stringOf string") + assertEquals(stringOf(x, 2), "this is the stringOf string") + } } From a23f6a7ba2c20096837a8d254a42d2bedd620f15 Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Sat, 4 Jun 2016 12:47:52 +0100 Subject: [PATCH 0114/2793] Remove experimental status from sbt build in load message sbt is now the preferred build tool. --- build.sbt | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/build.sbt b/build.sbt index 0194bc08b6a8..9fcfa01161b8 100644 --- a/build.sbt +++ b/build.sbt @@ -700,9 +700,7 @@ lazy val root = (project in file(".")) scaladoc, scalap, partestExtras, junit, libraryAll, scalaDist).settings( sources in Compile := Seq.empty, onLoadMessage := """|*** Welcome to the sbt build definition for Scala! *** - |This build definition has an EXPERIMENTAL status. If you are not - |interested in testing or working on the build itself, please use - |the Ant build definition for now. Check README.md for more information.""".stripMargin + |Check README.md for more information.""".stripMargin ) // The following subprojects' binaries are required for building "pack": From fcf17beee7304928cb97c4902a2e2833091d3a8f Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 4 Jun 2016 15:11:26 -0700 Subject: [PATCH 0115/2793] SI-7898 Preserve reader against subversion SBT tries to install its own SimpleReader (for some reason) if it needs to create its own IMain. Because Rube Goldberg needs to execute some postinit hooks, don't let SBT do that. 
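The guard is just the usual save-and-restore idiom around the call that runs the hooks; roughly the following sketch (field and types simplified here, the real `savingReader` wraps `createInterpreter()` in the diff below):

```
class ReplLike {
  var reader: AnyRef = "jline"        // stand-in for the REPL's interactive reader
  def savingReader[T](body: => T): T = {
    val saved = reader
    try body
    finally reader = saved            // undo whatever the hooks installed
  }
}
```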
--- src/repl/scala/tools/nsc/interpreter/ILoop.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index 7dab371cafd6..b086b2181e70 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -966,7 +966,10 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) // while we go fire up the REPL try { - createInterpreter() + // don't allow ancient sbt to hijack the reader + savingReader { + createInterpreter() + } intp.initializeSynchronous() globalFuture = Future successful true if (intp.reporter.hasErrors) { From 1f812e9482855d3fd5a8a5e9118942dc80f22db5 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 30 May 2016 14:38:50 +0200 Subject: [PATCH 0116/2793] Avoid separate traversal in inliner to remove line number nodes --- .../tools/nsc/backend/jvm/analysis/BackendUtils.scala | 10 ++++++---- .../scala/tools/nsc/backend/jvm/opt/Inliner.scala | 6 +----- 2 files changed, 7 insertions(+), 9 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index 6d3c3f3863c2..9abd1d80067d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -98,7 +98,7 @@ class BackendUtils[BT <: BTypes](val btypes: BT) { * a boolean indicating if the instruction list contains an instantiation of a serializable SAM * type. */ - def cloneInstructions(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode]): (InsnList, Map[AbstractInsnNode, AbstractInsnNode], Boolean) = { + def cloneInstructions(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode], keepLineNumbers: Boolean): (InsnList, Map[AbstractInsnNode, AbstractInsnNode], Boolean) = { val javaLabelMap = labelMap.asJava val result = new InsnList var map = Map.empty[AbstractInsnNode, AbstractInsnNode] @@ -112,9 +112,11 @@ class BackendUtils[BT <: BTypes](val btypes: BT) { } case _ => } - val cloned = ins.clone(javaLabelMap) - result add cloned - map += ((ins, cloned)) + if (keepLineNumbers || !ins.isInstanceOf[LineNumberNode]) { + val cloned = ins.clone(javaLabelMap) + result add cloned + map += ((ins, cloned)) + } } (result, map, hasSerializableClosureInstantiation) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala index 809b9e310dce..d18963ec8bbd 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala @@ -268,11 +268,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { // New labels for the cloned instructions val labelsMap = cloneLabels(callee) - val (clonedInstructions, instructionMap, hasSerializableClosureInstantiation) = cloneInstructions(callee, labelsMap) - val keepLineNumbers = callsiteClass == calleeDeclarationClass - if (!keepLineNumbers) { - removeLineNumberNodes(clonedInstructions) - } + val (clonedInstructions, instructionMap, hasSerializableClosureInstantiation) = cloneInstructions(callee, labelsMap, keepLineNumbers = callsiteClass == calleeDeclarationClass) // local vars in the callee are shifted by the number of locals at the callsite val localVarShift = callsiteMethod.maxLocals From 037a089ad4fb7137513777ccda6d47e30e151838 Mon Sep 17 00:00:00 2001 From: Lukas Rytz 
Date: Mon, 30 May 2016 19:25:14 +0200 Subject: [PATCH 0117/2793] Store source file paths of classes being compiled in the bytecode repo For classes being compiled (vs. being loaded from classfiles), keep the source file path in the bytecode repo. This will allow to keep line numbers when inlining from one class into another in case the two are defined in the same compilation unit. --- .../tools/nsc/backend/jvm/GenBCode.scala | 25 ++++--- .../backend/jvm/opt/ByteCodeRepository.scala | 67 +++++++++---------- .../tools/nsc/backend/jvm/opt/CallGraph.scala | 25 +++---- .../backend/jvm/opt/ClosureOptimizer.scala | 8 +-- .../backend/jvm/opt/InlinerHeuristics.scala | 10 +-- .../jvm/opt/InlinerIllegalAccessTest.scala | 2 +- .../nsc/backend/jvm/opt/InlinerTest.scala | 2 +- 7 files changed, 67 insertions(+), 72 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala index 02dc2b8edea2..584b11d4edac 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala @@ -77,15 +77,16 @@ abstract class GenBCode extends BCodeSyncAndTry { /* ---------------- q2 ---------------- */ - case class Item2(arrivalPos: Int, - mirror: asm.tree.ClassNode, - plain: asm.tree.ClassNode, - bean: asm.tree.ClassNode, - outFolder: scala.tools.nsc.io.AbstractFile) { + case class Item2(arrivalPos: Int, + mirror: asm.tree.ClassNode, + plain: asm.tree.ClassNode, + bean: asm.tree.ClassNode, + sourceFilePath: String, + outFolder: scala.tools.nsc.io.AbstractFile) { def isPoison = { arrivalPos == Int.MaxValue } } - private val poison2 = Item2(Int.MaxValue, null, null, null, null) + private val poison2 = Item2(Int.MaxValue, null, null, null, null, null) private val q2 = new _root_.java.util.LinkedList[Item2] /* ---------------- q3 ---------------- */ @@ -205,6 +206,7 @@ abstract class GenBCode extends BCodeSyncAndTry { val item2 = Item2(arrivalPos, mirrorC, plainC, beanC, + cunit.source.file.canonicalPath, outF) q2 add item2 // at the very end of this method so that no Worker2 thread starts mutating before we're done. @@ -226,10 +228,11 @@ abstract class GenBCode extends BCodeSyncAndTry { // add classes to the bytecode repo before building the call graph: the latter needs to // look up classes and methods in the code repo. 
if (settings.optAddToBytecodeRepository) q2.asScala foreach { - case Item2(_, mirror, plain, bean, _) => - if (mirror != null) byteCodeRepository.add(mirror, ByteCodeRepository.CompilationUnit) - if (plain != null) byteCodeRepository.add(plain, ByteCodeRepository.CompilationUnit) - if (bean != null) byteCodeRepository.add(bean, ByteCodeRepository.CompilationUnit) + case Item2(_, mirror, plain, bean, sourceFilePath, _) => + val someSourceFilePath = Some(sourceFilePath) + if (mirror != null) byteCodeRepository.add(mirror, someSourceFilePath) + if (plain != null) byteCodeRepository.add(plain, someSourceFilePath) + if (bean != null) byteCodeRepository.add(bean, someSourceFilePath) } if (settings.optBuildCallGraph) q2.asScala foreach { item => // skip call graph for mirror / bean: wd don't inline into tem, and they are not used in the plain class @@ -286,7 +289,7 @@ abstract class GenBCode extends BCodeSyncAndTry { cw.toByteArray } - val Item2(arrivalPos, mirror, plain, bean, outFolder) = item + val Item2(arrivalPos, mirror, plain, bean, _, outFolder) = item val mirrorC = if (mirror == null) null else SubItem3(mirror.name, getByteArray(mirror)) val plainC = SubItem3(plain.name, getByteArray(plain)) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala index 16590ec75c58..78acd72dbab6 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala @@ -15,7 +15,6 @@ import scala.tools.asm.Attribute import scala.tools.nsc.backend.jvm.BackendReporting._ import scala.tools.nsc.util.ClassPath import BytecodeUtils._ -import ByteCodeRepository._ import BTypes.InternalName import java.util.concurrent.atomic.AtomicLong @@ -29,9 +28,10 @@ class ByteCodeRepository[BT <: BTypes](val classPath: ClassPath, val btypes: BT) import btypes._ /** - * ClassNodes for classes being compiled in the current compilation run. + * Contains ClassNodes and the canonical path of the source file path of classes being compiled in + * the current compilation run. */ - val compilingClasses: concurrent.Map[InternalName, ClassNode] = recordPerRunCache(concurrent.TrieMap.empty) + val compilingClasses: concurrent.Map[InternalName, (ClassNode, String)] = recordPerRunCache(concurrent.TrieMap.empty) /** * Cache for parsed ClassNodes. @@ -67,20 +67,35 @@ class ByteCodeRepository[BT <: BTypes](val classPath: ClassPath, val btypes: BT) } } - def add(classNode: ClassNode, source: Source) = { - if (source == CompilationUnit) compilingClasses(classNode.name) = classNode - else parsedClasses(classNode.name) = Right((classNode, lruCounter.incrementAndGet())) + def add(classNode: ClassNode, sourceFilePath: Option[String]) = sourceFilePath match { + case Some(path) if path != "" => compilingClasses(classNode.name) = (classNode, path) + case _ => parsedClasses(classNode.name) = Right((classNode, lruCounter.incrementAndGet())) + } + + private def parsedClassNode(internalName: InternalName): Either[ClassNotFound, ClassNode] = { + val r = parsedClasses.get(internalName) match { + case Some(l @ Left(_)) => l + case Some(r @ Right((classNode, _))) => + parsedClasses(internalName) = Right((classNode, lruCounter.incrementAndGet())) + r + case None => + limitCacheSize() + val res = parseClass(internalName).map((_, lruCounter.incrementAndGet())) + parsedClasses(internalName) = res + res + } + r.map(_._1) } /** - * The class node and source for an internal name. 
If the class node is not yet available, it is - * parsed from the classfile on the compile classpath. + * The class node and source file path (if the class is being compiled) for an internal name. If + * the class node is not yet available, it is parsed from the classfile on the compile classpath. */ - def classNodeAndSource(internalName: InternalName): Either[ClassNotFound, (ClassNode, Source)] = { - classNode(internalName) map (n => { - val source = if (compilingClasses contains internalName) CompilationUnit else Classfile - (n, source) - }) + def classNodeAndSourceFilePath(internalName: InternalName): Either[ClassNotFound, (ClassNode, Option[String])] = { + compilingClasses.get(internalName) match { + case Some((c, p)) => Right((c, Some(p))) + case _ => parsedClassNode(internalName).map((_, None)) + } } /** @@ -88,19 +103,9 @@ class ByteCodeRepository[BT <: BTypes](val classPath: ClassPath, val btypes: BT) * the classfile on the compile classpath. */ def classNode(internalName: InternalName): Either[ClassNotFound, ClassNode] = { - compilingClasses.get(internalName).map(Right(_)) getOrElse { - val r = parsedClasses.get(internalName) match { - case Some(l @ Left(_)) => l - case Some(r @ Right((classNode, _))) => - parsedClasses(internalName) = Right((classNode, lruCounter.incrementAndGet())) - r - case None => - limitCacheSize() - val res = parseClass(internalName).map((_, lruCounter.incrementAndGet())) - parsedClasses(internalName) = res - res - } - r.map(_._1) + compilingClasses.get(internalName) match { + case Some((c, _)) => Right(c) + case None => parsedClassNode(internalName) } } @@ -289,13 +294,3 @@ class ByteCodeRepository[BT <: BTypes](val classPath: ClassPath, val btypes: BT) } } } - -object ByteCodeRepository { - /** - * The source of a ClassNode in the ByteCodeRepository. Can be either [[CompilationUnit]] if the - * class is being compiled or [[Classfile]] if the class was parsed from the compilation classpath. 
- */ - sealed trait Source - object CompilationUnit extends Source - object Classfile extends Source -} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala index d4ff6493a379..40344809bff6 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala @@ -16,7 +16,6 @@ import scala.collection.JavaConverters._ import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.backend.jvm.BackendReporting._ import scala.tools.nsc.backend.jvm.analysis._ -import ByteCodeRepository.{Source, CompilationUnit} import BytecodeUtils._ class CallGraph[BT <: BTypes](val btypes: BT) { @@ -128,17 +127,17 @@ class CallGraph[BT <: BTypes](val btypes: BT) { methodNode.instructions.iterator.asScala foreach { case call: MethodInsnNode if a.frameAt(call) != null => // skips over unreachable code val callee: Either[OptimizerWarning, Callee] = for { - (method, declarationClass) <- byteCodeRepository.methodNode(call.owner, call.name, call.desc): Either[OptimizerWarning, (MethodNode, InternalName)] - (declarationClassNode, source) <- byteCodeRepository.classNodeAndSource(declarationClass): Either[OptimizerWarning, (ClassNode, Source)] + (method, declarationClass) <- byteCodeRepository.methodNode(call.owner, call.name, call.desc): Either[OptimizerWarning, (MethodNode, InternalName)] + (declarationClassNode, calleeSourceFilePath) <- byteCodeRepository.classNodeAndSourceFilePath(declarationClass): Either[OptimizerWarning, (ClassNode, Option[String])] } yield { val declarationClassBType = classBTypeFromClassNode(declarationClassNode) - val info = analyzeCallsite(method, declarationClassBType, call, source) + val info = analyzeCallsite(method, declarationClassBType, call, calleeSourceFilePath) import info._ Callee( callee = method, calleeDeclarationClass = declarationClassBType, safeToInline = safeToInline, - canInlineFromSource = canInlineFromSource, + sourceFilePath = sourceFilePath, annotatedInline = annotatedInline, annotatedNoInline = annotatedNoInline, samParamTypes = info.samParamTypes, @@ -256,7 +255,7 @@ class CallGraph[BT <: BTypes](val btypes: BT) { /** * Just a named tuple used as return type of `analyzeCallsite`. */ - private case class CallsiteInfo(safeToInline: Boolean, canInlineFromSource: Boolean, + private case class CallsiteInfo(safeToInline: Boolean, sourceFilePath: Option[String], annotatedInline: Boolean, annotatedNoInline: Boolean, samParamTypes: IntMap[ClassBType], warning: Option[CalleeInfoWarning]) @@ -264,7 +263,7 @@ class CallGraph[BT <: BTypes](val btypes: BT) { /** * Analyze a callsite and gather meta-data that can be used for inlining decisions. */ - private def analyzeCallsite(calleeMethodNode: MethodNode, calleeDeclarationClassBType: ClassBType, call: MethodInsnNode, calleeSource: Source): CallsiteInfo = { + private def analyzeCallsite(calleeMethodNode: MethodNode, calleeDeclarationClassBType: ClassBType, call: MethodInsnNode, calleeSourceFilePath: Option[String]): CallsiteInfo = { val methodSignature = calleeMethodNode.name + calleeMethodNode.desc try { @@ -273,8 +272,6 @@ class CallGraph[BT <: BTypes](val btypes: BT) { // callee, we only check there for the methodInlineInfo, we should find it there. 
calleeDeclarationClassBType.info.orThrow.inlineInfo.methodInfos.get(methodSignature) match { case Some(methodInlineInfo) => - val canInlineFromSource = compilerSettings.optInlineGlobal || calleeSource == CompilationUnit - val isAbstract = BytecodeUtils.isAbstractMethod(calleeMethodNode) val receiverType = classBTypeFromParsedClassfile(call.owner) @@ -308,13 +305,13 @@ class CallGraph[BT <: BTypes](val btypes: BT) { // static impl method first (safeToRewrite). CallsiteInfo( safeToInline = - canInlineFromSource && + inlinerHeuristics.canInlineFromSource(calleeSourceFilePath) && isStaticallyResolved && // (1) !isAbstract && !BytecodeUtils.isConstructor(calleeMethodNode) && !BytecodeUtils.isNativeMethod(calleeMethodNode) && !BytecodeUtils.hasCallerSensitiveAnnotation(calleeMethodNode), - canInlineFromSource = canInlineFromSource, + sourceFilePath = calleeSourceFilePath, annotatedInline = methodInlineInfo.annotatedInline, annotatedNoInline = methodInlineInfo.annotatedNoInline, samParamTypes = samParamTypes(calleeMethodNode, receiverType), @@ -322,12 +319,12 @@ class CallGraph[BT <: BTypes](val btypes: BT) { case None => val warning = MethodInlineInfoMissing(calleeDeclarationClassBType.internalName, calleeMethodNode.name, calleeMethodNode.desc, calleeDeclarationClassBType.info.orThrow.inlineInfo.warning) - CallsiteInfo(false, false, false, false, IntMap.empty, Some(warning)) + CallsiteInfo(false, None, false, false, IntMap.empty, Some(warning)) } } catch { case Invalid(noInfo: NoClassBTypeInfo) => val warning = MethodInlineInfoError(calleeDeclarationClassBType.internalName, calleeMethodNode.name, calleeMethodNode.desc, noInfo) - CallsiteInfo(false, false, false, false, IntMap.empty, Some(warning)) + CallsiteInfo(false, None, false, false, IntMap.empty, Some(warning)) } } @@ -389,7 +386,7 @@ class CallGraph[BT <: BTypes](val btypes: BT) { * gathering the information about this callee. 
*/ final case class Callee(callee: MethodNode, calleeDeclarationClass: btypes.ClassBType, - safeToInline: Boolean, canInlineFromSource: Boolean, + safeToInline: Boolean, sourceFilePath: Option[String], annotatedInline: Boolean, annotatedNoInline: Boolean, samParamTypes: IntMap[btypes.ClassBType], calleeInfoWarning: Option[CalleeInfoWarning]) { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala index 7f9858286eae..081830d61da5 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala @@ -17,7 +17,6 @@ import scala.tools.nsc.backend.jvm.BTypes.InternalName import BytecodeUtils._ import BackendReporting._ import Opcodes._ -import scala.tools.nsc.backend.jvm.opt.ByteCodeRepository.CompilationUnit import scala.collection.JavaConverters._ class ClosureOptimizer[BT <: BTypes](val btypes: BT) { @@ -354,16 +353,15 @@ class ClosureOptimizer[BT <: BTypes](val btypes: BT) { // the method node is needed for building the call graph entry val bodyMethod = byteCodeRepository.methodNode(lambdaBodyHandle.getOwner, lambdaBodyHandle.getName, lambdaBodyHandle.getDesc) - def bodyMethodIsBeingCompiled = byteCodeRepository.classNodeAndSource(lambdaBodyHandle.getOwner).map(_._2 == CompilationUnit).getOrElse(false) + val sourceFilePath = byteCodeRepository.compilingClasses.get(lambdaBodyHandle.getOwner).map(_._2) val callee = bodyMethod.map({ case (bodyMethodNode, bodyMethodDeclClass) => val bodyDeclClassType = classBTypeFromParsedClassfile(bodyMethodDeclClass) - val canInlineFromSource = compilerSettings.optInlineGlobal || bodyMethodIsBeingCompiled Callee( callee = bodyMethodNode, calleeDeclarationClass = bodyDeclClassType, - safeToInline = canInlineFromSource, - canInlineFromSource = canInlineFromSource, + safeToInline = inlinerHeuristics.canInlineFromSource(sourceFilePath), + sourceFilePath = sourceFilePath, annotatedInline = false, annotatedNoInline = false, samParamTypes = callGraph.samParamTypes(bodyMethodNode, bodyDeclClassType), diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala index 17807fb385ed..009742501e9e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala @@ -22,6 +22,8 @@ class InlinerHeuristics[BT <: BTypes](val bTypes: BT) { for (pr <- post) assert(pr.callsite.callsiteMethod == callsite.callee.get.callee, s"Callsite method mismatch: main $callsite - post ${pr.callsite}") } + def canInlineFromSource(sourceFilePath: Option[String]) = compilerSettings.optInlineGlobal || sourceFilePath.isDefined + /** * Select callsites from the call graph that should be inlined, grouped by the containing method. * Cyclic inlining requests are allowed, the inliner will eliminate requests to break cycles. @@ -32,14 +34,14 @@ class InlinerHeuristics[BT <: BTypes](val bTypes: BT) { // classpath. In order to get only the callsites being compiled, we start at the map of // compilingClasses in the byteCodeRepository. 
val compilingMethods = for { - classNode <- byteCodeRepository.compilingClasses.valuesIterator - methodNode <- classNode.methods.iterator.asScala + (classNode, _) <- byteCodeRepository.compilingClasses.valuesIterator + methodNode <- classNode.methods.iterator.asScala } yield methodNode compilingMethods.map(methodNode => { var requests = Set.empty[InlineRequest] callGraph.callsites(methodNode).valuesIterator foreach { - case callsite @ Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, canInlineFromSource, calleeAnnotatedInline, _, _, callsiteWarning)), _, _, _, pos, _, _) => + case callsite @ Callsite(_, _, _, Right(Callee(callee, calleeDeclClass, safeToInline, sourceFilePath, calleeAnnotatedInline, _, _, callsiteWarning)), _, _, _, pos, _, _) => inlineRequest(callsite, requests) match { case Some(Right(req)) => requests += req case Some(Left(w)) => @@ -50,7 +52,7 @@ class InlinerHeuristics[BT <: BTypes](val bTypes: BT) { } case None => - if (canInlineFromSource && calleeAnnotatedInline && !callsite.annotatedNoInline && bTypes.compilerSettings.optWarningEmitAtInlineFailed) { + if (canInlineFromSource(sourceFilePath) && calleeAnnotatedInline && !callsite.annotatedNoInline && bTypes.compilerSettings.optWarningEmitAtInlineFailed) { // if the callsite is annotated @inline, we report an inline warning even if the underlying // reason is, for example, mixed compilation (which has a separate -opt-warning flag). def initMsg = s"${BackendReporting.methodSignature(calleeDeclClass.internalName, callee)} is annotated @inline but cannot be inlined" diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala index 3cb1fbdae6ff..3e0b889e9c62 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala @@ -19,7 +19,7 @@ class InlinerIllegalAccessTest extends BytecodeTesting { import compiler._ import global.genBCode.bTypes._ - def addToRepo(cls: List[ClassNode]): Unit = for (c <- cls) byteCodeRepository.add(c, ByteCodeRepository.Classfile) + def addToRepo(cls: List[ClassNode]): Unit = for (c <- cls) byteCodeRepository.add(c, None) def assertEmpty(ins: Option[AbstractInsnNode]) = for (i <- ins) throw new AssertionError(textify(i)) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index 02cd632af176..4023f1fd3a98 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -43,7 +43,7 @@ class InlinerTest extends BytecodeTesting { // Use the class nodes stored in the byteCodeRepository. The ones returned by compileClasses are not the same, // these are created new from the classfile byte array. They are completely separate instances which cannot // be used to look up methods / callsites in the callGraph hash maps for example. 
- byteCodeRepository.compilingClasses.valuesIterator.toList.sortBy(_.name) + byteCodeRepository.compilingClasses.valuesIterator.map(_._1).toList.sortBy(_.name) } def checkCallsite(callsite: callGraph.Callsite, callee: MethodNode) = { From 59d6dbc0aac912567d235048b5114cccf965c7ce Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 25 May 2016 15:21:50 +0200 Subject: [PATCH 0118/2793] clear all flags when resetting a symbol this change is a bit scary because it changes code that's not been changed in 11 years https://github.com/scala/scala/commit/7fa7c93#diff-d5789e5ae5061197d782d08324b260dbL214 --- src/reflect/scala/reflect/internal/Flags.scala | 1 - src/reflect/scala/reflect/internal/Symbols.scala | 2 +- .../tools/nsc/backend/jvm/DirectCompileTest.scala | 11 +++++++++++ 3 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index 230d30c74ec3..e2522ef280d3 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -235,7 +235,6 @@ class Flags extends ModifierFlags { final val AllFlags = -1L /** These flags can be set when class or module symbol is first created. - * They are the only flags to survive a call to resetFlags(). */ final val TopLevelCreationFlags = MODULE | PACKAGE | FINAL | JAVA diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 6f2d8d802a63..ab52a875f841 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -725,7 +725,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def setFlag(mask: Long): this.type = { _rawflags |= mask ; this } def resetFlag(mask: Long): this.type = { _rawflags &= ~mask ; this } - def resetFlags() { rawflags &= TopLevelCreationFlags } + def resetFlags() { rawflags = 0 } /** Default implementation calls the generic string function, which * will print overloaded flags as . Subclasses diff --git a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala index f835e9b14010..36bdb759a6be 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala @@ -88,4 +88,15 @@ class DirectCompileTest extends BytecodeTesting { def compileErroneous(): Unit = { compileToBytes("class C { def f: String = 1 }", allowMessage = _.msg contains "type mismatch") } + + @Test + def residentRedefineFinalFlag(): Unit = { + val compiler = newCompiler() + val a = "final class C { def c1 = 0 }" + // for re-defined class symbols (C), the compiler did not clear the `final` flag. + // so compiling `D` would give an error `illegal inheritance from final class C`. 
+ val b = "class C; class D extends C" + compiler.compileToBytes(a) + compiler.compileToBytes(b) + } } From e1084299350bcc20f5d412993d77b8f956ba3165 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 25 May 2016 15:22:19 +0200 Subject: [PATCH 0119/2793] SI-9256 check companions in same compilation unit only if same run --- .../scala/tools/nsc/typechecker/Namers.scala | 1 + .../nsc/backend/jvm/DirectCompileTest.scala | 12 ++++++++++ .../backend/jvm/opt/InlineWarningTest.scala | 24 +++++++++---------- .../nsc/backend/jvm/opt/InlinerTest.scala | 10 ++++---- 4 files changed, 30 insertions(+), 17 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 2773ee19cff1..9c1ba7ced1d7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -431,6 +431,7 @@ trait Namers extends MethodSynthesis { && !(module isCoDefinedWith clazz) && module.exists && clazz.exists + && (currentRun.compiles(clazz) == currentRun.compiles(module)) ) if (fails) { reporter.error(tree.pos, ( diff --git a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala index 36bdb759a6be..a28599cd9212 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala @@ -99,4 +99,16 @@ class DirectCompileTest extends BytecodeTesting { compiler.compileToBytes(a) compiler.compileToBytes(b) } + + @Test + def residentMultipleRunsNotCompanions(): Unit = { + val compiler = newCompiler() + val a = List(("public class A { }", "A.java")) + // when checking that a class and its companion are defined in the same compilation unit, the + // compiler would also emit a warning if the two symbols are defined in separate runs. this + // would lead to an error message when compiling the scala class A. 
+ val b = "class A" + compiler.compileToBytes("", a) + compiler.compileToBytes(b) + } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala index 024cf0c416df..5254d7e1f29f 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala @@ -97,7 +97,7 @@ class InlineWarningTest extends BytecodeTesting { @Test def cannotInlinePrivateCallIntoDifferentClass(): Unit = { val code = - """class M { + """class A { | @inline final def f = { | @noinline def nested = 0 | nested @@ -106,15 +106,15 @@ class InlineWarningTest extends BytecodeTesting { | def t = f // ok |} | - |class N { - | def t(a: M) = a.f // not possible + |class B { + | def t(a: A) = a.f // not possible |} """.stripMargin val warn = - """M::f()I is annotated @inline but could not be inlined: - |The callee M::f()I contains the instruction INVOKESTATIC M.nested$1 ()I - |that would cause an IllegalAccessError when inlined into class N""".stripMargin + """A::f()I is annotated @inline but could not be inlined: + |The callee A::f()I contains the instruction INVOKESTATIC A.nested$1 ()I + |that would cause an IllegalAccessError when inlined into class B""".stripMargin var c = 0 compileToBytes(code, allowMessage = i => { c += 1; i.msg contains warn }) @@ -124,7 +124,7 @@ class InlineWarningTest extends BytecodeTesting { @Test def dontWarnWhenNotIlnineAnnotated(): Unit = { val code = - """class M { + """class A { | final def f(t: Int => Int) = { | @noinline def nested = 0 | nested + t(1) @@ -132,16 +132,16 @@ class InlineWarningTest extends BytecodeTesting { | def t = f(x => x + 1) |} | - |class N { - | def t(a: M) = a.f(x => x + 1) + |class B { + | def t(a: A) = a.f(x => x + 1) |} """.stripMargin compileToBytes(code, allowMessage = _ => false) // no warnings allowed val warn = - """M::f(Lscala/Function1;)I could not be inlined: - |The callee M::f(Lscala/Function1;)I contains the instruction INVOKESTATIC M.nested$1 ()I - |that would cause an IllegalAccessError when inlined into class N""".stripMargin + """A::f(Lscala/Function1;)I could not be inlined: + |The callee A::f(Lscala/Function1;)I contains the instruction INVOKESTATIC A.nested$1 ()I + |that would cause an IllegalAccessError when inlined into class B""".stripMargin var c = 0 compilerWarnAll.compileToBytes(code, allowMessage = i => { c += 1; i.msg contains warn }) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index 4023f1fd3a98..9b538573dce4 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -835,11 +835,11 @@ class InlinerTest extends BytecodeTesting { @Test def inlineInvokeSpecial(): Unit = { val code = - """class Aa { + """class A { | def f1 = 0 |} - |class B extends Aa { - | @inline final override def f1 = 1 + super.f1 // invokespecial Aa.f1 + |class B extends A { + | @inline final override def f1 = 1 + super.f1 // invokespecial A.f1 | | private def f2m = 0 // public B$$f2m in bytecode | @inline final def f2 = f2m // invokevirtual B.B$$f2m @@ -863,13 +863,13 @@ class InlinerTest extends BytecodeTesting { val warn = """B::f1()I is annotated @inline but could not be inlined: - |The callee B::f1()I contains the instruction INVOKESPECIAL Aa.f1 ()I + |The callee B::f1()I contains the instruction INVOKESPECIAL A.f1 ()I |that 
would cause an IllegalAccessError when inlined into class T.""".stripMargin var c = 0 val List(a, b, t) = compile(code, allowMessage = i => {c += 1; i.msg contains warn}) assert(c == 1, c) - assertInvoke(getMethod(b, "t1"), "Aa", "f1") + assertInvoke(getMethod(b, "t1"), "A", "f1") assertInvoke(getMethod(b, "t2"), "B", "B$$f2m") assertInvoke(getMethod(b, "t3"), "B", "") assertInvoke(getMethod(b, "t4"), "B", "") From 2980c3921f1270f05add25239da93e05f64ad45f Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 31 May 2016 09:45:10 +0200 Subject: [PATCH 0120/2793] Keep line numbers when inlining from the same compilation unit So far, line numbers were kept only when inlining from the same class. We can also keep them when inlining from a different class defined in the same compilation unit. Longer-term we should support JSR-45, see SI-7518 and scala-dev#3. --- .../tools/nsc/backend/jvm/opt/Inliner.scala | 11 ++++- .../nsc/backend/jvm/opt/InlinerTest.scala | 48 +++++++++++++++++++ .../scala/tools/testing/BytecodeTesting.scala | 2 +- 3 files changed, 58 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala index d18963ec8bbd..7b4cfe2a18d2 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala @@ -253,7 +253,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { def inlineCallsite(callsite: Callsite): Unit = { import callsite.{callsiteClass, callsiteMethod, callsiteInstruction, receiverKnownNotNull, callsiteStackHeight} val Right(callsiteCallee) = callsite.callee - import callsiteCallee.{callee, calleeDeclarationClass} + import callsiteCallee.{callee, calleeDeclarationClass, sourceFilePath} // Inlining requires the callee not to have unreachable code, the analyzer used below should not // return any `null` frames. Note that inlining a method can create unreachable code. 
Example: @@ -268,7 +268,14 @@ class Inliner[BT <: BTypes](val btypes: BT) { // New labels for the cloned instructions val labelsMap = cloneLabels(callee) - val (clonedInstructions, instructionMap, hasSerializableClosureInstantiation) = cloneInstructions(callee, labelsMap, keepLineNumbers = callsiteClass == calleeDeclarationClass) + val sameSourceFile = sourceFilePath match { + case Some(calleeSource) => byteCodeRepository.compilingClasses.get(callsiteClass.internalName) match { + case Some((_, `calleeSource`)) => true + case _ => false + } + case _ => false + } + val (clonedInstructions, instructionMap, hasSerializableClosureInstantiation) = cloneInstructions(callee, labelsMap, keepLineNumbers = sameSourceFile) // local vars in the callee are shifted by the number of locals at the callsite val localVarShift = callsiteMethod.maxLocals diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index 9b538573dce4..9173a1d1893e 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -1526,4 +1526,52 @@ class InlinerTest extends BytecodeTesting { val c :: _ = compileClassesSeparately(codes, extraArgs = compilerArgs) assertInvoke(getMethod(c, "t"), "p1/Implicits$RichFunction1$", "toRx$extension") } + + @Test + def keepLineNumbersPerCompilationUnit(): Unit = { + val code1 = + """class A { + | def fx(): Unit = () + | @inline final def ma = { + | fx() + | 1 + | } + |} + """.stripMargin + val code2 = + """class B extends A { + | @inline final def mb = { + | fx() + | 1 + | } + |} + |class C extends B { + | @inline final def mc = { + | fx() + | 1 + | } + | def t1 = ma // no lines, not the same source file + | def t2 = mb // lines + | def t3 = mc // lines + |} + """.stripMargin + notPerRun.foreach(_.clear()) + val run = compiler.newRun + run.compileSources(List(makeSourceFile(code1, "A.scala"), makeSourceFile(code2, "B.scala"))) + val List(_, _, c) = readAsmClasses(getGeneratedClassfiles(global.settings.outputDirs.getSingleOutput.get)) + def is(name: String) = getMethod(c, name).instructions.filterNot(_.isInstanceOf[FrameEntry]) + + assertSameCode(is("t1"), List( + Label(0), LineNumber(12, Label(0)), + VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "A", "fx", "()V", false), + Op(ICONST_1), Op(IRETURN), Label(6))) + + assertSameCode(is("t2"), List( + Label(0), LineNumber(3, Label(0)), VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "B", "fx", "()V", false), + Label(4), LineNumber(4, Label(4)), Op(ICONST_1), Op(IRETURN), Label(8))) + + assertSameCode(is("t3"), List( + Label(0), LineNumber(9, Label(0)), VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "C", "fx", "()V", false), + Label(4), LineNumber(10, Label(4)), Op(ICONST_1), Op(IRETURN), Label(8))) + } } diff --git a/test/junit/scala/tools/testing/BytecodeTesting.scala b/test/junit/scala/tools/testing/BytecodeTesting.scala index 1a0c1e210a48..4ddb6580df9e 100644 --- a/test/junit/scala/tools/testing/BytecodeTesting.scala +++ b/test/junit/scala/tools/testing/BytecodeTesting.scala @@ -29,7 +29,7 @@ class Compiler(val global: Global) { global.settings.outputDirs.setSingleOutput(new VirtualDirectory("(memory)", None)) } - private def newRun: global.Run = { + def newRun: global.Run = { global.reporter.reset() resetOutput() new global.Run() From a1ea0aa0a6136c13baa41268d7dbd4197924d3c9 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 6 Jun 2016 10:50:09 +0200 Subject: [PATCH 0121/2793] Remove TopLevelCreationFlags --- 
src/reflect/scala/reflect/internal/Flags.scala | 9 ++------- src/reflect/scala/reflect/internal/Mirrors.scala | 2 +- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index e2522ef280d3..e06decea6d25 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -234,13 +234,8 @@ class Flags extends ModifierFlags { */ final val AllFlags = -1L - /** These flags can be set when class or module symbol is first created. - */ - final val TopLevelCreationFlags = - MODULE | PACKAGE | FINAL | JAVA - // TODO - there's no call to slap four flags onto every package. - final val PackageFlags = TopLevelCreationFlags + final val PackageFlags = MODULE | PACKAGE | FINAL | JAVA // FINAL not included here due to possibility of object overriding. // In fact, FINAL should not be attached regardless. We should be able @@ -300,7 +295,7 @@ class Flags extends ModifierFlags { final val ConstrFlags = JAVA /** Module flags inherited by their module-class */ - final val ModuleToClassFlags = AccessFlags | TopLevelCreationFlags | CASE | SYNTHETIC + final val ModuleToClassFlags = AccessFlags | PackageFlags | CASE | SYNTHETIC /** These flags are not pickled */ final val FlagsNotPickled = IS_ERROR | OVERLOADED | LIFTED | TRANS_FLAG | LOCKED | TRIEDCOOKING diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala index 756300d4036f..3d1c160d5299 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -273,7 +273,7 @@ trait Mirrors extends api.Mirrors { // is very beneficial for a handful of bootstrap symbols to have // first class identities sealed trait WellKnownSymbol extends Symbol { - this initFlags (TopLevelCreationFlags | STATIC) + this initFlags (PackageFlags | STATIC) } // Features common to RootClass and RootPackage, the roots of all // type and term symbols respectively. From 1149453c9a762b0a5792d6139ec2c201e418c914 Mon Sep 17 00:00:00 2001 From: Antoine Gourlay Date: Fri, 29 Apr 2016 19:33:47 +0200 Subject: [PATCH 0122/2793] SI-9585 hide auto-implicit conversions from scaladoc This hides implicit conversions (and potential members obtained through them) that convert a type into itself, because these conversions are usually non-sensical. They are not completely removed, just placed behind `-doc-implicits-show-all`, like other implicits deemed probably useless. --- Consider the scaladoc for the following class: ``` object Box { implicit def anyToBox[T](t: T): Box[T] = new Box(t) } class Box[T](val t: T) ``` When looking for implicit members to add to class `Box`, it finds the implicit conversion `anyToBox`, and applies it to `Box` itself to obtain an implicit conversion to Box[Box[T]], which brings in a useless implicit member `t: Box[T]`. This commit makes scaladoc ignore any conversion from a type to itself (even if type parameters differ) by default.
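The rule is easy to state on its own. Below is a minimal, self-contained sketch of it (the names `SelfConversionRule`, `keepView`, `classSym` and `viewResultSym` are illustrative only; the real check is the `viewSimplifiedType.resultType.typeSymbol == sym` test guarded by `settings.docImplicitsShowAll` in the diff that follows):

```scala
object SelfConversionRule {
  // A candidate view is kept unless it merely converts the documented class into itself
  // (possibly with different type arguments) and -doc-implicits-show-all is not set.
  def keepView(showAll: Boolean, classSym: String, viewResultSym: String): Boolean =
    showAll || viewResultSym != classSym

  // Box[T] => Box[Box[T]] is hidden by default, ordinary enrichments are kept:
  assert(!keepView(showAll = false, classSym = "Box", viewResultSym = "Box"))
  assert(keepView(showAll = false, classSym = "Box", viewResultSym = "Ensuring"))
}
```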
Using the (very useful) `tools/scaladoc-diff` script, I found that this change removes the following conversions from the library doc: ``` Ensuring[A] to Ensuring[Ensuring[A]] anytostringadd[A] to any2stringadd[anytostringadd[A]] ArrowAssoc[A] to ArrowAssoc[ArrowAssoc[A]] =:=[From,To] to =:=[From,To] SearchImpl[A,Repr] to SearchImpl[A,SearchImpl[A,Repr]] CollectionsHaveToParArray[C, T] to CollectionsHaveToParArray[CollectionsHaveToParArray[C, T], T] Ordered[A] to Ordered[Ordered[A]] StringFormat[A] to StringFormat[StringFormat[A]] ``` --- .../model/ModelFactoryImplicitSupport.scala | 14 +++++++++++ test/scaladoc/run/t9585.check | 6 +++++ test/scaladoc/run/t9585.scala | 25 +++++++++++++++++++ 3 files changed, 45 insertions(+) create mode 100644 test/scaladoc/run/t9585.check create mode 100644 test/scaladoc/run/t9585.scala diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index e67a71725734..cedbdd1547bb 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -171,6 +171,20 @@ trait ModelFactoryImplicitSupport { return Nil } + if (!settings.docImplicitsShowAll && viewSimplifiedType.resultType.typeSymbol == sym) { + // If, when looking at views for a class A, we find one that returns A as well + // (possibly with different type parameters), we ignore it. + // It usually is a way to build a "whatever" into an A, but we already have an A, as in: + // {{{ + // object Box { + // implicit def anyToBox[T](t: T): Box[T] = new Box(t) + // } + // class Box[T](val t: T) + // }}} + // We don't want the implicit conversion from Box[T] to Box[Box[T]] to appear. + return Nil + } + // type the view application so we get the exact type of the result (not the formal type) val viewTree = result.tree.setType(viewSimplifiedType) val appliedTree = new ApplyImplicitView(viewTree, List(Ident("") setType viewTree.tpe.paramTypes.head)) diff --git a/test/scaladoc/run/t9585.check b/test/scaladoc/run/t9585.check new file mode 100644 index 000000000000..3784317d543f --- /dev/null +++ b/test/scaladoc/run/t9585.check @@ -0,0 +1,6 @@ +warning: there was one feature warning; re-run with -feature for details +any2stringadd[Box[T]] +StringFormat[Box[T]] +Ensuring[Box[T]] +ArrowAssoc[Box[T]] +Done. diff --git a/test/scaladoc/run/t9585.scala b/test/scaladoc/run/t9585.scala new file mode 100644 index 000000000000..af8350b6cf82 --- /dev/null +++ b/test/scaladoc/run/t9585.scala @@ -0,0 +1,25 @@ +import scala.tools.nsc.doc.model._ +import scala.tools.partest.ScaladocModelTest + +object Test extends ScaladocModelTest { + override def code = """ + object Box { + + implicit def anyToBox[T](t: T): Box[T] = new Box(t) + + } + + class Box[T](val t: T) + """ + + def scaladocSettings = "-implicits" + + def testModel(root: Package) = { + import access._ + + // this used to contain the conversion to Box[Box[T]], + // but not anymore. + val conversions = root._class("Box").conversions + println(conversions.map(_.targetType).mkString("\n")) + } +} From 9b6a65bc890081b48a86e72aa3eb49aaf2d69d09 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 7 Jun 2016 14:45:33 +1000 Subject: [PATCH 0123/2793] SI-9806 Fix incorrect codegen with optimizer, constants, try/catch The constant optimizer phase performs abstract interpretation of the icode representation of the program in order to eliminate dead code.
For each basic block, the possible and impossible states of each local variable are computed for both a normal and an exceptional exit. A bug in this code incorrectly tracked state for exception exits. This appears to have been an oversight: the new state was computed at each instruction, but it was discarded rather than folded through the interpreter. --- .../nsc/backend/opt/ConstantOptimization.scala | 4 ++-- test/files/run/t9806.scala | 18 ++++++++++++++++++ 2 files changed, 20 insertions(+), 2 deletions(-) create mode 100644 test/files/run/t9806.scala diff --git a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala index a7ce7dfa046a..eafaf4193201 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/ConstantOptimization.scala @@ -539,14 +539,14 @@ abstract class ConstantOptimization extends SubComponent { // number of instructions excluding the last one val normalCount = block.size - 1 - val exceptionState = in.cleanStack + var exceptionState = in.cleanStack var normalExitState = in var idx = 0 while (idx < normalCount) { val inst = block(idx) normalExitState = interpretInst(normalExitState, inst) if (normalExitState.locals ne exceptionState.locals) - exceptionState.copy(locals = exceptionState mergeLocals normalExitState.locals) + exceptionState = exceptionState.copy(locals = exceptionState mergeLocals normalExitState.locals) idx += 1 } diff --git a/test/files/run/t9806.scala b/test/files/run/t9806.scala new file mode 100644 index 000000000000..ccde989efeb6 --- /dev/null +++ b/test/files/run/t9806.scala @@ -0,0 +1,18 @@ +object Ex extends Exception +object Test { + def main(args: Array[String]) { + try foo catch { case Ex => } + } + + def isTrue(b: Boolean) = b + def foo = { + var streamErrors1 = true + try { + streamErrors1 = false + throw Ex + } catch { + case ex if streamErrors1 => + assert(isTrue(streamErrors1)) + } + } +} From 3f3a3bb57499fb919e798d03bbc4e84ede8e55d6 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 7 Jun 2016 00:38:01 -0700 Subject: [PATCH 0124/2793] SI-9245 Fresher name in Try and test Fresh name for catcher gets a dollar. "Here, have a dollar." Test due to retronym demonstrates possible conflict. Over the lifetime of the universe, surely at least one code monkey would type in that identifier to catch a banana.
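To make the renaming concrete, here is a toy model of prefix-plus-counter fresh-name generation (illustrative only; `FreshNameDemo` and `fresh` are not compiler API, and the real call site is the `freshTermName` call in TreeBuilder.scala below):

```scala
// The old prefix produced plain identifiers such as `catchExpr1`, which user code might
// also define (see test/files/pos/t9245.scala below); the new prefix yields `catchExpr$1`,
// and '$' conventionally marks compiler-synthesized names, so such clashes are not expected.
object FreshNameDemo extends App {
  private var counter = 0
  private def fresh(prefix: String): String = { counter += 1; prefix + counter }

  println(fresh("catchExpr"))  // catchExpr1  -- old scheme, can collide with user identifiers
  println(fresh("catchExpr$")) // catchExpr$2 -- new scheme
}
```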
--- .../tools/nsc/ast/parser/TreeBuilder.scala | 2 +- test/files/pos/t9245.scala | 27 +++++++++++++++++++ 2 files changed, 28 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t9245.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index cc9e39f43045..45f731686a44 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -105,7 +105,7 @@ abstract class TreeBuilder { def makeCatchFromExpr(catchExpr: Tree): CaseDef = { val binder = freshTermName() val pat = Bind(binder, Typed(Ident(nme.WILDCARD), Ident(tpnme.Throwable))) - val catchDef = ValDef(Modifiers(ARTIFACT), freshTermName("catchExpr"), TypeTree(), catchExpr) + val catchDef = ValDef(Modifiers(ARTIFACT), freshTermName("catchExpr$"), TypeTree(), catchExpr) val catchFn = Ident(catchDef.name) val body = atPos(catchExpr.pos.makeTransparent)(Block( List(catchDef), diff --git a/test/files/pos/t9245.scala b/test/files/pos/t9245.scala new file mode 100644 index 000000000000..87bc1fa0ef84 --- /dev/null +++ b/test/files/pos/t9245.scala @@ -0,0 +1,27 @@ + +/* +Was: +test/files/pos/t9245.scala:5: error: recursive value catchExpr1 needs type + try {} catch catchExpr1 + ^ + +Now: + def catchExpr1: PartialFunction[Throwable,Any] = scala.this.Predef.???; + def test: Any = try { + () + } catch { + case (x$1 @ (_: Throwable)) => { + val catchExpr$1: PartialFunction[Throwable,Any] = Test.this.catchExpr1; + if (catchExpr$1.isDefinedAt(x$1)) + catchExpr$1.apply(x$1) + else + throw x$1 + } + } +*/ +trait Test { + def catchExpr1: PartialFunction[Throwable, Any] = ??? + def test = { + try {} catch catchExpr1 + } +} From c7e2b2878d0128a1e74888dca98219182df78aa4 Mon Sep 17 00:00:00 2001 From: af Date: Fri, 10 Jun 2016 14:07:17 +0300 Subject: [PATCH 0125/2793] Typo fix --- build.sbt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.sbt b/build.sbt index da86f6274b79..13bbd8be033d 100644 --- a/build.sbt +++ b/build.sbt @@ -69,7 +69,7 @@ val scalaSwingDep = withoutScalaLang("org.scala-lang.modules" %% "scala-swing" % val scalaXmlDep = withoutScalaLang("org.scala-lang.modules" %% "scala-xml" % versionNumber("scala-xml")) val partestDep = withoutScalaLang("org.scala-lang.modules" %% "scala-partest" % versionNumber("partest")) val junitDep = "junit" % "junit" % "4.11" -val junitIntefaceDep = "com.novocode" % "junit-interface" % "0.11" % "test" +val junitInterfaceDep = "com.novocode" % "junit-interface" % "0.11" % "test" val asmDep = "org.scala-lang.modules" % "scala-asm" % versionProps("scala-asm.version") val jlineDep = "jline" % "jline" % versionProps("jline.version") val antDep = "org.apache.ant" % "ant" % "1.9.4" @@ -576,7 +576,7 @@ lazy val junit = project.in(file("test") / "junit") .settings(disablePublishing: _*) .settings( fork in Test := true, - libraryDependencies ++= Seq(junitDep, junitIntefaceDep), + libraryDependencies ++= Seq(junitDep, junitInterfaceDep), testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"), unmanagedSourceDirectories in Test := List(baseDirectory.value) ) From 956fee80ae15a0b0eebfb52bdd18f4df9511c479 Mon Sep 17 00:00:00 2001 From: Raul Bache Date: Sun, 12 Jun 2016 21:15:50 +0200 Subject: [PATCH 0126/2793] If Range is sealed, it makes sense to have Range.Inclusive final. 
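A brief note on the rationale (my reading of the change; `SneakyRange` below is a made-up name): `sealed` only restricts who may extend `Range` directly, so as long as the public subclass `Range.Inclusive` stayed open, third-party code could still enter the hierarchy through it. Making `Inclusive` final closes that indirect route:

```scala
// Illustrative only: this compiled before the change, effectively extending the sealed
// Range hierarchy from outside the library; with `final class Inclusive` it is rejected.
class SneakyRange(start: Int, end: Int) extends Range.Inclusive(start, end, 1)
```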
--- src/library/scala/collection/immutable/Range.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index 2e567501155f..82203b3d1a53 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -437,7 +437,7 @@ object Range { def count(start: Int, end: Int, step: Int): Int = count(start, end, step, isInclusive = false) - class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { + final class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { // override def par = new ParRange(this) override def isInclusive = true override protected def copy(start: Int, end: Int, step: Int): Range = new Inclusive(start, end, step) From f2d0f1e85d8b348cd2506a45502e4e59f9ec8e49 Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Tue, 14 Jun 2016 12:38:58 +0200 Subject: [PATCH 0127/2793] Use sbt for PR validation MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Support directories in `-doc-external-doc`: It is documented as accepting a “classpath_entry_path” for the keys but this only worked for JARs and not for individual class files. When checking for external-doc mappings for a Symbol, we now find the root directory relative to a class file instead of using the full class file path. The corresponding tests for SI-191 and SI-8557 are also fixed to support individual class files instead of JARs in partest. This is required for the sbt build which runs partest on “quick” instead of “pack”. - Fix version and repository handling for bootstrapping. The bootstrap `scalaInstance` can now be resolved from any repository added to the project (not just the bootstrap repositories) by using a different workaround for https://github.com/sbt/sbt/issues/1872. - Workaround for https://github.com/sbt/sbt/issues/2640 (putting the wrong `scalaInstance` on partest’s classpath). The required `ScalaInstance` constructor is deprecated, so we have to disable deprecation warnings and fatal warnings until there is a better fix. - Add MiMa to the sbt build (port of the old `test.bc` ant task). The sbt-mima plugin doesn’t have all the features we need, so we do it manually in a similar way to what the plugin does. Checks are done in both directions for the `library` and `compiler` projects. The base version has to be set in `build.sbt`. When set to `None`, MiMa checks are skipped. MiMa checks are run sequentially to avoid spurious errors (see https://github.com/typesafehub/migration-manager/issues/115). - Port the OSGi tests to the sbt build. The set of JARs that gets copied into build/osgi as bundles is a bit different from the ant build. We omit the source JARs but add additional modules that are part of the Scala distribution, which seems more correct. - Get rid of `pull-binary-libs.sh` for the sbt build. All artifacts are resolved from the special bootstrap repository through Ivy. The special `code.jar` and `instrumented.jar` artifacts are copied to the location where partest expects them (because these paths are hardcoded in partest). Other extra JARs for partest in `test/files/lib` are referenced directly from the Ivy cache. - Move common settings that should be available with unqualified names in local `.sbt` files and on the command line into an auto-plugin.
- Add an `antStyle` setting to sbt to allow users to easily enable ant-style incremental compilation instead of sbt’s standard name hashing with `set antStyle := true`. - Disable verbose `info`-level logging during sbt startup for both, `validate/test` and `validate/publish-core` jobs. Update logging is no longer disabled when running locally (where it is useful and doesn’t generate excessive output). - Pass optimization flags for scalac down to partest, using the new partest version 1.0.15\6. - Call the new sbt-based PR validation from `scripts/jobs/validate/test`. - Disable the tests `run/t7843-jsr223-service` and `run/t7933` from https://github.com/scala/scala/pull/4959 for now. We need to set up a new test project (either partest or junit) that can run them on a packaged version of Scala, or possibly move them into a separate project that would naturally run from a packaged Scala as part of the community build. --- build.sbt | 187 ++++++++++++------ project/BuildSettings.scala | 11 ++ project/MiMa.scala | 95 +++++++++ project/Osgi.scala | 4 +- project/Quiet.scala | 2 - project/ScalaTool.scala | 12 +- project/ScriptCommands.scala | 21 +- project/VersionUtil.scala | 34 +++- project/plugins.sbt | 5 + scripts/jobs/validate/publish-core | 4 +- scripts/jobs/validate/test | 35 +++- .../tools/nsc/doc/model/MemberLookup.scala | 18 +- .../run/t7843-jsr223-service.check | 0 .../run/t7843-jsr223-service.scala | 0 test/{files => disabled}/run/t7933.check | 0 test/{files => disabled}/run/t7933.scala | 0 test/osgi/src/logback.xml | 10 + test/scaladoc/.gitignore | 2 + test/scaladoc/run/SI-191.scala | 12 +- test/scaladoc/run/t8557.scala | 20 +- versions.properties | 2 +- 21 files changed, 382 insertions(+), 92 deletions(-) create mode 100644 project/BuildSettings.scala create mode 100644 project/MiMa.scala rename test/{files => disabled}/run/t7843-jsr223-service.check (100%) rename test/{files => disabled}/run/t7843-jsr223-service.scala (100%) rename test/{files => disabled}/run/t7933.check (100%) rename test/{files => disabled}/run/t7933.scala (100%) create mode 100644 test/osgi/src/logback.xml create mode 100644 test/scaladoc/.gitignore diff --git a/build.sbt b/build.sbt index 13bbd8be033d..3df0d43c0009 100644 --- a/build.sbt +++ b/build.sbt @@ -55,25 +55,23 @@ import VersionUtil._ -val bootstrapScalaVersion = versionProps("starr.version") - -def withoutScalaLang(moduleId: ModuleID): ModuleID = moduleId exclude("org.scala-lang", "*") - -// exclusion of the scala-library transitive dependency avoids eviction warnings during `update`. 
-val actorsMigrationDep = withoutScalaLang("org.scala-lang" %% "scala-actors-migration" % versionNumber("actors-migration")) -val akkaActorDep = withoutScalaLang("com.typesafe.akka" %% "akka-actor" % versionNumber("akka-actor")) -val scalaContinuationsLibraryDep = withoutScalaLang("org.scala-lang.plugins" %% "scala-continuations-library" % versionNumber("scala-continuations-library")) -val scalaContinuationsPluginDep = withoutScalaLang("org.scala-lang.plugins" % ("scala-continuations-plugin_" + versionProps("scala.full.version")) % versionNumber("scala-continuations-plugin")) -val scalaParserCombinatorsDep = withoutScalaLang("org.scala-lang.modules" %% "scala-parser-combinators" % versionNumber("scala-parser-combinators")) -val scalaSwingDep = withoutScalaLang("org.scala-lang.modules" %% "scala-swing" % versionNumber("scala-swing")) -val scalaXmlDep = withoutScalaLang("org.scala-lang.modules" %% "scala-xml" % versionNumber("scala-xml")) -val partestDep = withoutScalaLang("org.scala-lang.modules" %% "scala-partest" % versionNumber("partest")) -val junitDep = "junit" % "junit" % "4.11" -val junitInterfaceDep = "com.novocode" % "junit-interface" % "0.11" % "test" -val asmDep = "org.scala-lang.modules" % "scala-asm" % versionProps("scala-asm.version") -val jlineDep = "jline" % "jline" % versionProps("jline.version") -val antDep = "org.apache.ant" % "ant" % "1.9.4" -val scalacheckDep = withoutScalaLang("org.scalacheck" %% "scalacheck" % versionNumber("scalacheck") % "it") +// Scala dependencies: +val scalaContinuationsPluginDep = scalaDep("org.scala-lang.plugins", "scala-continuations-plugin", compatibility = "full") +val scalaContinuationsLibraryDep = scalaDep("org.scala-lang.plugins", "scala-continuations-library") +val scalaParserCombinatorsDep = scalaDep("org.scala-lang.modules", "scala-parser-combinators") +val scalaSwingDep = scalaDep("org.scala-lang.modules", "scala-swing") +val scalaXmlDep = scalaDep("org.scala-lang.modules", "scala-xml") +val partestDep = scalaDep("org.scala-lang.modules", "scala-partest", versionProp = "partest") +val akkaActorDep = scalaDep("com.typesafe.akka", "akka-actor") +val actorsMigrationDep = scalaDep("org.scala-lang", "scala-actors-migration", versionProp = "actors-migration") +val scalacheckDep = scalaDep("org.scalacheck", "scalacheck", scope = "it") + +// Non-Scala dependencies: +val junitDep = "junit" % "junit" % "4.11" +val junitInterfaceDep = "com.novocode" % "junit-interface" % "0.11" % "test" +val asmDep = "org.scala-lang.modules" % "scala-asm" % versionProps("scala-asm.version") +val jlineDep = "jline" % "jline" % versionProps("jline.version") +val antDep = "org.apache.ant" % "ant" % "1.9.4" /** Publish to ./dists/maven-sbt, similar to the ANT build which publishes to ./dists/maven. This * can be used to compare the output of the sbt and ANT builds during the transition period. 
Any @@ -117,20 +115,27 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( globalVersionSettings baseVersion in Global := "2.11.9" baseVersionSuffix in Global := "SNAPSHOT" +mimaReferenceVersion in Global := Some("2.11.0") lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings ++ Seq[Setting[_]]( organization := "org.scala-lang", - scalaVersion := bootstrapScalaVersion, // we don't cross build Scala itself crossPaths := false, // do not add Scala library jar as a dependency automatically autoScalaLibrary := false, - // we also do not add scala instance automatically because it introduces - // a circular instance, see: https://github.com/sbt/sbt/issues/1872 + // Avoid circular dependencies for scalaInstance (see https://github.com/sbt/sbt/issues/1872) managedScalaInstance := false, - // this is a way to workaround issue described in https://github.com/sbt/sbt/issues/1872 - // check it out for more details - scalaInstance := ScalaInstance(scalaVersion.value, appConfiguration.value.provider.scalaProvider.launcher getScala scalaVersion.value), + scalaInstance := { + val s = (scalaInstance in bootstrap).value + // sbt claims that s.isManagedVersion is false even though s was resolved by Ivy + // We create a managed copy to prevent sbt from putting it on the classpath where we don't want it + if(s.isManagedVersion) s else { + val s2 = new ScalaInstance(s.version, s.loader, s.libraryJar, s.compilerJar, s.extraJars, Some(s.actualVersion)) + assert(s2.isManagedVersion) + s2 + } + }, + scalaVersion := (scalaVersion in bootstrap).value, // we always assume that Java classes are standalone and do not have any dependency // on Scala classes compileOrder := CompileOrder.JavaThenScala, @@ -151,7 +156,7 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings + target := (baseDirectory in ThisBuild).value / "target" / thisProject.value.id, classDirectory in Compile := buildDirectory.value / "quick/classes" / thisProject.value.id, target in Compile in doc := buildDirectory.value / "scaladoc" / thisProject.value.id, - // given that classDirectory and doc target are overriden to be _outside_ of target directory, we have + // given that classDirectory and doc target are overridden to be _outside_ of target directory, we have // to make sure they are being cleaned properly cleanFiles += (classDirectory in Compile).value, cleanFiles += (target in Compile in doc).value, @@ -166,9 +171,10 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings + "-sourcepath", (baseDirectory in ThisBuild).value.toString, "-doc-source-url", s"https://github.com/scala/scala/tree/${versionProperties.value.githubTree}€{FILE_PATH}.scala#L1" ), + incOptions <<= (incOptions in LocalProject("root")), homepage := Some(url("http://www.scala-lang.org")), startYear := Some(2002), - licenses += ("BSD 3-Clause", url("http://www.scala-lang.org/license.html")), + licenses += (("BSD 3-Clause", url("http://www.scala-lang.org/license.html"))), apiURL := Some(url("http://www.scala-lang.org/api/" + versionProperties.value.mavenVersion + "/")), pomIncludeRepository := { _ => false }, pomExtra := { @@ -213,8 +219,7 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings + // Don't log process output (e.g. 
of forked `compiler/runMain ...Main`), just pass it // directly to stdout outputStrategy in run := Some(StdoutOutput), - Quiet.silenceScalaBinaryVersionWarning, - Quiet.silenceIvyUpdateInfoLogging + Quiet.silenceScalaBinaryVersionWarning ) /** Extra post-processing for the published POM files. These are needed to create POMs that @@ -250,7 +255,7 @@ def removePomDependencies(deps: (String, String)*): Seq[Setting[_]] = Seq( val n2 = pomPostProcess.value.apply(n) import scala.xml._ import scala.xml.transform._ - (new RuleTransformer(new RewriteRule { + new RuleTransformer(new RewriteRule { override def transform(node: Node) = node match { case e: Elem if e.label == "dependency" && deps.exists { case (g, a) => @@ -259,13 +264,13 @@ def removePomDependencies(deps: (String, String)*): Seq[Setting[_]] = Seq( } => Seq.empty case n => Seq(n) } - })).transform(Seq(n2)).head + }).transform(Seq(n2)).head }, deliverLocal := { import scala.xml._ import scala.xml.transform._ val f = deliverLocal.value - val e = (new RuleTransformer(new RewriteRule { + val e = new RuleTransformer(new RewriteRule { override def transform(node: Node) = node match { case e: Elem if e.label == "dependency" && { val org = e.attribute("org").getOrElse("").toString @@ -276,7 +281,7 @@ def removePomDependencies(deps: (String, String)*): Seq[Setting[_]] = Seq( } => Seq.empty case n => Seq(n) } - })).transform(Seq(XML.loadFile(f))).head + }).transform(Seq(XML.loadFile(f))).head XML.save(f.getAbsolutePath, e, xmlDecl = true) f } @@ -309,7 +314,7 @@ lazy val setJarLocation: Setting[_] = lazy val scalaSubprojectSettings: Seq[Setting[_]] = commonSettings :+ setJarLocation def filterDocSources(ff: FileFilter): Seq[Setting[_]] = Seq( - sources in (Compile, doc) ~= (_.filter(ff.accept _)), + sources in (Compile, doc) ~= (_.filter(ff.accept)), // Excluded sources may still be referenced by the included sources, so we add the compiler // output to the scaladoc classpath to resolve them. 
For the `library` project this is // always required because otherwise the compiler cannot even initialize Definitions without @@ -326,6 +331,11 @@ def regexFileFilter(s: String): FileFilter = new FileFilter { def accept(f: File) = pat.matcher(f.getAbsolutePath.replace('\\', '/')).matches() } +// This project provides the STARR scalaInstance for bootstrapping +lazy val bootstrap = (project in file("target/bootstrap")).settings( + scalaVersion := versionProps("starr.version") +) + lazy val library = configureAsSubproject(project) .settings(generatePropertiesFileSettings: _*) .settings(Osgi.settings: _*) @@ -363,6 +373,7 @@ lazy val library = configureAsSubproject(project) .settings(filterDocSources("*.scala" -- (regexFileFilter(".*/runtime/.*\\$\\.scala") || regexFileFilter(".*/runtime/ScalaRunTime\\.scala") || regexFileFilter(".*/runtime/StringAdd\\.scala"))): _*) + .settings(MiMa.settings: _*) .dependsOn(forkjoin) lazy val reflect = configureAsSubproject(project) @@ -385,6 +396,7 @@ lazy val reflect = configureAsSubproject(project) "/project/packaging" -> jar ) ) + .settings(MiMa.settings: _*) .dependsOn(library) lazy val compiler = configureAsSubproject(project) @@ -581,6 +593,56 @@ lazy val junit = project.in(file("test") / "junit") unmanagedSourceDirectories in Test := List(baseDirectory.value) ) +lazy val osgiTestFelix = osgiTestProject( + project.in(file(".") / "target" / "osgiTestFelix"), + "org.apache.felix" % "org.apache.felix.framework" % "4.4.0") + +lazy val osgiTestEclipse = osgiTestProject( + project.in(file(".") / "target" / "osgiTestEclipse"), + "org.eclipse.osgi" % "org.eclipse.osgi" % "3.7.1") + +def osgiTestProject(p: Project, framework: ModuleID) = p + .dependsOn(library, reflect, compiler, actors, forkjoin) + .settings(clearSourceAndResourceDirectories: _*) + .settings(commonSettings: _*) + .settings(disableDocs: _*) + .settings(disablePublishing: _*) + .settings( + fork in Test := true, + parallelExecution in Test := false, + libraryDependencies ++= { + val paxExamVersion = "3.5.0" // Last version which supports Java 6 + Seq( + junitDep, + junitInterfaceDep, + "org.ops4j.pax.exam" % "pax-exam-container-native" % paxExamVersion + exclude("org.osgi", "org.osgi.core"), // Avoid dragging in a dependency which requires Java >6 + "org.osgi" % "org.osgi.core" % "4.2.0" % "provided", // The framework (Felix / Eclipse) provides the classes + "org.ops4j.pax.exam" % "pax-exam-junit4" % paxExamVersion, + "org.ops4j.pax.exam" % "pax-exam-link-assembly" % paxExamVersion, + "org.ops4j.pax.url" % "pax-url-aether" % "2.2.0", + "org.ops4j.pax.swissbox" % "pax-swissbox-tracker" % "1.8.0", + "ch.qos.logback" % "logback-core" % "1.1.2", + "ch.qos.logback" % "logback-classic" % "1.1.2", + framework % "test" + ) + }, + Keys.test in Test <<= Keys.test in Test dependsOn (packageBin in Compile), + testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v", "-q"), + unmanagedSourceDirectories in Test := List((baseDirectory in ThisBuild).value / "test" / "osgi" / "src"), + unmanagedResourceDirectories in Compile := (unmanagedSourceDirectories in Test).value, + includeFilter in unmanagedResources in Compile := "*.xml", + packageBin in Compile := { // Put the bundle JARs required for the tests into build/osgi + val targetDir = (buildDirectory in ThisBuild).value / "osgi" + val mappings = ((mkPack in dist).value / "lib").listFiles.collect { + case f if f.getName.startsWith("scala-") && f.getName.endsWith(".jar") => (f, targetDir / f.getName) + } + IO.copy(mappings, overwrite = true) + targetDir 
+ }, + cleanFiles += (buildDirectory in ThisBuild).value / "osgi" + ) + lazy val partestJavaAgent = Project("partest-javaagent", file(".") / "src" / "partest-javaagent") .settings(commonSettings: _*) .settings(generatePropertiesFileSettings: _*) @@ -610,8 +672,15 @@ lazy val test = project .settings(Defaults.itSettings: _*) .settings( libraryDependencies ++= Seq(asmDep, partestDep, scalaXmlDep, scalacheckDep), - unmanagedBase in IntegrationTest := baseDirectory.value / "files" / "lib", - unmanagedJars in IntegrationTest <+= (unmanagedBase) (j => Attributed.blank(j)) map(identity), + libraryDependencies ++= { + // Resolve the JARs for all test/files/lib/*.jar.desired.sha1 files through Ivy + val baseDir = (baseDirectory in ThisBuild).value + (baseDir / "test/files/lib").list.toSeq.filter(_.endsWith(".jar.desired.sha1")) + .map(f => bootstrapDep(baseDir, "test/files/lib", f.dropRight(17))) + }, + // Two hardcoded dependencies in partest, resolved in the otherwise unused scope "test": + libraryDependencies += bootstrapDep((baseDirectory in ThisBuild).value, "test/files/codelib", "code") % "test", + libraryDependencies += bootstrapDep((baseDirectory in ThisBuild).value, "test/files/speclib", "instrumented") % "test", // no main sources sources in Compile := Seq.empty, // test sources are compiled in partest run, not here @@ -619,18 +688,24 @@ lazy val test = project fork in IntegrationTest := true, javaOptions in IntegrationTest += "-Xmx1G", testFrameworks += new TestFramework("scala.tools.partest.sbt.Framework"), - testOptions in IntegrationTest += Tests.Setup( () => root.base.getAbsolutePath + "/pull-binary-libs.sh" ! ), testOptions in IntegrationTest += Tests.Argument("-Dpartest.java_opts=-Xmx1024M -Xms64M -XX:MaxPermSize=128M"), - definedTests in IntegrationTest += ( - new sbt.TestDefinition( - "partest", - // marker fingerprint since there are no test classes - // to be discovered by sbt: - new sbt.testing.AnnotatedFingerprint { - def isModule = true - def annotationName = "partest" - }, true, Array()) - ) + testOptions in IntegrationTest += Tests.Argument("-Dpartest.scalac_opts=" + (scalacOptions in Compile).value.mkString(" ")), + testOptions in IntegrationTest += Tests.Setup { () => + val cp = (dependencyClasspath in Test).value + val baseDir = (baseDirectory in ThisBuild).value + // Copy code.jar and instrumented.jar to the location where partest expects them + copyBootstrapJar(cp, baseDir, "test/files/codelib", "code") + copyBootstrapJar(cp, baseDir, "test/files/speclib", "instrumented") + }, + definedTests in IntegrationTest += new sbt.TestDefinition( + "partest", + // marker fingerprint since there are no test classes + // to be discovered by sbt: + new sbt.testing.AnnotatedFingerprint { + def isModule = true + def annotationName = "partest" + }, true, Array() + ) ) lazy val manual = configureAsSubproject(project) @@ -704,7 +779,7 @@ lazy val scalaDist = Project("scala-dist", file(".") / "target" / "scala-dist-di ) .dependsOn(libraryAll, compiler, scalap) -lazy val root = (project in file(".")) +lazy val root: Project = (project in file(".")) .settings(disableDocs: _*) .settings(disablePublishing: _*) .settings(generateBuildCharacterFileSettings: _*) @@ -712,8 +787,9 @@ lazy val root = (project in file(".")) publish := {}, publishLocal := {}, commands ++= ScriptCommands.all, - Quiet.silenceIvyUpdateInfoLogging -) + antStyle := false, + incOptions := incOptions.value.withNameHashing(!antStyle.value).withAntStyle(antStyle.value) + ) .aggregate(library, forkjoin, reflect, compiler, 
interactive, repl, replJline, replJlineEmbedded, scaladoc, scalap, actors, partestExtras, junit, libraryAll, scalaDist).settings( sources in Compile := Seq.empty, @@ -737,8 +813,9 @@ lazy val dist = (project in file("dist")) val props = new java.util.Properties() props.setProperty("partest.classpath", cp.map(_.data.getAbsolutePath).mkString(sys.props("path.separator"))) IO.write(props, null, propsFile) + (buildDirectory in ThisBuild).value / "quick" } dependsOn ((distDependencies.map(products in Runtime in _) :+ mkBin): _*), - mkPack <<= Def.task {} dependsOn (packagedArtifact in (Compile, packageBin), mkBin), + mkPack <<= Def.task { (buildDirectory in ThisBuild).value / "pack" } dependsOn (packagedArtifact in (Compile, packageBin), mkBin), target := (baseDirectory in ThisBuild).value / "target" / thisProject.value.id, packageBin in Compile := { val extraDeps = Set(scalaContinuationsLibraryDep, scalaContinuationsPluginDep, scalaSwingDep, scalaParserCombinatorsDep, scalaXmlDep) @@ -749,7 +826,7 @@ lazy val dist = (project in file("dist")) case (Some(m), f) if extraModules contains uniqueModule(m) => f } val jlineJAR = (dependencyClasspath in Compile).value.find(_.get(moduleID.key) == Some(jlineDep)).get.data - val mappings = extraJars.map(f => (f, targetDir / f.getName)) :+ (jlineJAR, targetDir / "jline.jar") + val mappings = extraJars.map(f => (f, targetDir / f.getName)) :+ ((jlineJAR, targetDir / "jline.jar")) IO.copy(mappings, overwrite = true) targetDir }, @@ -802,8 +879,8 @@ def configureAsForkOfJavaProject(project: Project): Project = { lazy val buildDirectory = settingKey[File]("The directory where all build products go. By default ./build") lazy val mkBin = taskKey[Seq[File]]("Generate shell script (bash or Windows batch).") -lazy val mkQuick = taskKey[Unit]("Generate a full build, including scripts, in build/quick") -lazy val mkPack = taskKey[Unit]("Generate a full build, including scripts, in build/pack") +lazy val mkQuick = taskKey[File]("Generate a full build, including scripts, in build/quick") +lazy val mkPack = taskKey[File]("Generate a full build, including scripts, in build/pack") // Defining these settings is somewhat redundant as we also redefine settings that depend on them. // However, IntelliJ's project import works better when these are set correctly. 
@@ -929,7 +1006,7 @@ intellij := { def moduleDep(name: String, jars: Seq[File]) = { val entries = jars.map(f => s""" """).mkString("\n") - s"""| + s"""| | |$entries | diff --git a/project/BuildSettings.scala b/project/BuildSettings.scala new file mode 100644 index 000000000000..76cd888a2d43 --- /dev/null +++ b/project/BuildSettings.scala @@ -0,0 +1,11 @@ +import sbt._ + +/** This object defines keys that should be visible with an unqualified name in all .sbt files and the command line */ +object BuildSettings extends AutoPlugin { + object autoImport { + lazy val antStyle = settingKey[Boolean]("Use ant-style incremental builds instead of name-hashing") + lazy val baseVersion = settingKey[String]("The base version number from which all others are derived") + lazy val baseVersionSuffix = settingKey[String]("Identifies the kind of version to build") + lazy val mimaReferenceVersion = settingKey[Option[String]]("Scala version number to run MiMa against") + } +} diff --git a/project/MiMa.scala b/project/MiMa.scala new file mode 100644 index 000000000000..66442fc7256a --- /dev/null +++ b/project/MiMa.scala @@ -0,0 +1,95 @@ +// It would be nice to use sbt-mima-plugin here, but the plugin is missing +// at least two features we need: +// * ability to run MiMa twice, swapping `curr` and `prev`, to detect +// both forwards and backwards incompatibilities (possibly fixed as of +// https://github.com/typesafehub/migration-manager/commit/2844ffa48b6d2255aa64bd687703aec21dadd55e) +// * ability to pass a filter file (https://github.com/typesafehub/migration-manager/issues/102) +// So we invoke the MiMa CLI directly; it's also what the Ant build did. + +import sbt._ +import sbt.Keys._ +import BuildSettings.autoImport._ + +object MiMa { + lazy val mima = + taskKey[Unit]("run Migration Manager to detect binary incompatibilities") + + lazy val settings = + Seq( + mima := { + val log = streams.value.log + mimaReferenceVersion.value.fold { + log.info(s"No reference version defined - skipping binary compatibility checks") + } { refVersion => + def runOnce(prev: java.io.File, curr: java.io.File, isForward: Boolean): Unit = { + val direction = if (isForward) "forward" else "backward" + log.info(s"Checking $direction binary compatibility") + log.debug(s"prev = $prev, curr = $curr") + runMima( + prev = if (isForward) curr else prev, + curr = if (isForward) prev else curr, + // TODO: it would be nicer if each subproject had its own whitelist, but for now + // for compatibility with how Ant did things, there's just one at the root. + // once Ant is gone we'd be free to split it up. 
+ filter = (baseDirectory in ThisBuild).value / s"bincompat-$direction.whitelist.conf", + log) + } + val artifact = + getPreviousArtifact( + "org.scala-lang" % s"${name.value}" % refVersion, + ivySbt.value, streams.value) + for (isForward <- Seq(false, true)) + runOnce(artifact, (packageBin in Compile).value, isForward) + } + } + ) + + def runMima(prev: java.io.File, curr: java.io.File, filter: java.io.File, log: Logger): Unit = { + val args = Array( + "--prev", prev.getAbsolutePath, + "--curr", curr.getAbsolutePath, + "--filters", filter.getAbsolutePath, + "--generate-filters" + ) + val exitCode = TrapExit(com.typesafe.tools.mima.cli.Main.main(args), log) + if (exitCode != 0) + throw new RuntimeException(s"MiMa failed with exit code $exitCode") + } + + // cribbed from https://github.com/typesafehub/migration-manager/blob/master/sbtplugin/src/main/scala/com/typesafe/tools/mima/plugin/SbtMima.scala + def getPreviousArtifact(m: ModuleID, ivy: IvySbt, s: TaskStreams): File = { + val moduleSettings = InlineConfiguration( + "dummy" % "test" % "version", + ModuleInfo("dummy-test-project-for-resolving"), + dependencies = Seq(m)) + val module = new ivy.Module(moduleSettings) + val report = Deprecated.Inner.ivyUpdate(ivy)(module, s) + val optFile = (for { + config <- report.configurations + module <- config.modules + (artifact, file) <- module.artifacts + // TODO - Hardcode this? + if artifact.name == m.name + } yield file).headOption + optFile getOrElse sys.error("Could not resolve previous artifact: " + m) + } + +} + +// use the SI-7934 workaround to silence a deprecation warning on an sbt API +// we have no choice but to call. on the lack of any suitable alternative, +// see https://gitter.im/sbt/sbt-dev?at=5616e2681b0e279854bd74a4 : +// "it's my intention to eventually come up with a public API" says Eugene Y +object Deprecated { + @deprecated("", "") class Inner { + def ivyUpdate(ivy: IvySbt)(module: ivy.Module, s: TaskStreams) = + IvyActions.update( + module, + new UpdateConfiguration( + retrieve = None, + missingOk = false, + logging = UpdateLogging.DownloadOnly), + s.log) + } + object Inner extends Inner +} diff --git a/project/Osgi.scala b/project/Osgi.scala index d780be2f78ed..b557df1688f3 100644 --- a/project/Osgi.scala +++ b/project/Osgi.scala @@ -28,8 +28,8 @@ object Osgi { "Bundle-Name" -> bundleName.value, "Bundle-SymbolicName" -> bundleSymbolicName.value, "ver" -> v, - "Export-Package" -> ("*;version=${ver}"), - "Import-Package" -> ("scala.*;version=\"${range;[==,=+);${ver}}\",*"), + "Export-Package" -> "*;version=${ver}", + "Import-Package" -> "scala.*;version=\"${range;[==,=+);${ver}}\",*", "Bundle-Version" -> v, "Bundle-RequiredExecutionEnvironment" -> "JavaSE-1.6, JavaSE-1.7", "-eclipse" -> "false" diff --git a/project/Quiet.scala b/project/Quiet.scala index de30ebe6abac..84d01d5544b9 100644 --- a/project/Quiet.scala +++ b/project/Quiet.scala @@ -28,6 +28,4 @@ object Quiet { case x => x } } - - def silenceIvyUpdateInfoLogging = logLevel in update := Level.Warn } diff --git a/project/ScalaTool.scala b/project/ScalaTool.scala index e9531f229eee..5e3f20b1ba61 100644 --- a/project/ScalaTool.scala +++ b/project/ScalaTool.scala @@ -27,12 +27,12 @@ case class ScalaTool(mainClass: String, } else classpath.mkString(":").replace('\\', '/').replaceAll(varRegex, """\${$1}""") val variables = Map( - ("@@" -> "@"), // for backwards compatibility - ("@class@" -> mainClass), - ("@properties@" -> (properties map { case (k, v) => s"""-D$k="$v""""} mkString " ")), - ("@javaflags@" -> 
javaOpts), - ("@toolflags@" -> toolFlags), - ("@classpath@" -> platformClasspath) + "@@" -> "@", // for backwards compatibility + "@class@" -> mainClass, + "@properties@" -> (properties map { case (k, v) => s"""-D$k="$v""""} mkString " "), + "@javaflags@" -> javaOpts, + "@toolflags@" -> toolFlags, + "@classpath@" -> platformClasspath ) val (from, to) = variables.unzip diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index 537990d9853c..efeac95e6de3 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -1,19 +1,32 @@ import sbt._ import Keys._ -import complete.DefaultParsers._ +import BuildSettings.autoImport._ /** Custom commands for use by the Jenkins scripts. This keeps the surface area and call syntax small. */ object ScriptCommands { - def all = Seq(setupPublishCore) + def all = Seq(setupPublishCore, setupValidateTest) /** Set up the environment for `validate/publish-core`. The argument is the Artifactory snapshot repository URL. */ def setupPublishCore = Command.single("setupPublishCore") { case (state, url) => Project.extract(state).append(Seq( - VersionUtil.baseVersionSuffix in Global := "SHA-SNAPSHOT", + baseVersionSuffix in Global := "SHA-SNAPSHOT", // Append build.timestamp to Artifactory URL to get consistent build numbers (see https://github.com/sbt/sbt/issues/2088): publishTo in Global := Some("scala-pr" at url.replaceAll("/$", "") + ";build.timestamp=" + System.currentTimeMillis), publishArtifact in (Compile, packageDoc) in ThisBuild := false, - scalacOptions in Compile in ThisBuild += "-optimise" + scalacOptions in Compile in ThisBuild += "-optimise", + logLevel in ThisBuild := Level.Info, + logLevel in update in ThisBuild := Level.Warn ), state) } + + /** Set up the environment for `validate/test`. The argument is the Artifactory snapshot repository URL. 
*/ + def setupValidateTest = Command.single("setupValidateTest") { case (state, url) => + //TODO When ant is gone, pass starr version as an argument to this command instead of using version.properties + Project.extract(state).append(Seq( + resolvers in Global += "scala-pr" at url, + scalacOptions in Compile in ThisBuild += "-optimise", + logLevel in ThisBuild := Level.Info, + logLevel in update in ThisBuild := Level.Warn + ), state) + } } diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala index 6c8aebf74fab..4705bbb6ce36 100644 --- a/project/VersionUtil.scala +++ b/project/VersionUtil.scala @@ -1,12 +1,11 @@ import sbt._ import Keys._ import java.util.Properties -import java.io.FileInputStream +import java.io.{File, FileInputStream} import scala.collection.JavaConverters._ +import BuildSettings.autoImport._ object VersionUtil { - lazy val baseVersion = settingKey[String]("The base version number from which all others are derived") - lazy val baseVersionSuffix = settingKey[String]("Identifies the kind of version to build") lazy val copyrightString = settingKey[String]("Copyright string.") lazy val versionProperties = settingKey[Versions]("Version properties.") lazy val generateVersionPropertiesFile = taskKey[File]("Generating version properties file.") @@ -123,4 +122,33 @@ object VersionUtil { /** Get a subproject version number from `versionProps` */ def versionNumber(name: String): String = versionProps(s"$name.version.number") + + /** Build a dependency to a Scala module with the given group and artifact ID */ + def scalaDep(group: String, artifact: String, versionProp: String = null, scope: String = null, compatibility: String = "binary") = { + val vp = if(versionProp eq null) artifact else versionProp + val m = group % (artifact + "_" + versionProps(s"scala.$compatibility.version")) % versionNumber(vp) + val m2 = if(scope eq null) m else m % scope + // exclusion of the scala-library transitive dependency avoids eviction warnings during `update`: + m2.exclude("org.scala-lang", "*") + } + + private def bootstrapOrganization(path: String) = + "org.scala-lang.scala-sha-bootstrap." 
+ path.replace('/', '.') + + /** Build a dependency to a JAR file in the bootstrap repository */ + def bootstrapDep(baseDir: File, path: String, libName: String): ModuleID = { + val sha = IO.read(baseDir / path / s"$libName.jar.desired.sha1").split(' ')(0) + bootstrapOrganization(path) % libName % sha from + s"https://dl.bintray.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap/$sha/$path/$libName.jar" + } + + /** Copy a boostrap dependency JAR that is on the classpath to a file */ + def copyBootstrapJar(cp: Seq[Attributed[File]], baseDir: File, path: String, libName: String): Unit = { + val org = bootstrapOrganization(path) + val resolved = cp.find { a => + val mod = a.get(moduleID.key) + mod.map(_.organization) == Some(org) && mod.map(_.name) == Some(libName) + }.map(_.data).get + IO.copyFile(resolved, baseDir / path / s"$libName.jar") + } } diff --git a/project/plugins.sbt b/project/plugins.sbt index 46203565b483..4c0a6e7b8a3f 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,3 +1,6 @@ +scalacOptions ++= Seq("-unchecked", "-feature", /*"-deprecation",*/ + "-Xlint" /*, "-Xfatal-warnings"*/) + libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.3.2" libraryDependencies += "org.pantsbuild" % "jarjar" % "1.6.3" @@ -15,3 +18,5 @@ buildClasspath := (externalDependencyClasspath in Compile).value.map(_.data).mkS buildInfoKeys := Seq[BuildInfoKey](buildClasspath) buildInfoPackage := "scalabuild" + +libraryDependencies += "com.typesafe" %% "mima-reporter" % "0.1.8" diff --git a/scripts/jobs/validate/publish-core b/scripts/jobs/validate/publish-core index bb0056722dc0..b0bfd480836a 100755 --- a/scripts/jobs/validate/publish-core +++ b/scripts/jobs/validate/publish-core @@ -16,7 +16,7 @@ case $prDryRun in ;; *) echo ">>> Getting Scala version number." - $SBT_CMD "setupPublishCore $prRepoUrl" generateBuildCharacterPropertiesFile + $SBT_CMD --warn "setupPublishCore $prRepoUrl" generateBuildCharacterPropertiesFile parseScalaProperties buildcharacter.properties # produce maven_version_number echo ">>> Checking availability of Scala ${maven_version_number} in $prRepoUrl." @@ -27,7 +27,7 @@ case $prDryRun in if $libraryAvailable && $reflectAvailable && $compilerAvailable; then echo "Scala core already built!" else - $SBT_CMD "setupPublishCore $prRepoUrl" $antBuildArgs publish + $SBT_CMD --warn "setupPublishCore $prRepoUrl" publish fi mv buildcharacter.properties jenkins.properties # parsed by the jenkins job diff --git a/scripts/jobs/validate/test b/scripts/jobs/validate/test index bedef2e458b9..3cd8af56081b 100755 --- a/scripts/jobs/validate/test +++ b/scripts/jobs/validate/test @@ -1,17 +1,36 @@ -#!/bin/bash -e +#!/bin/bash -e -v -x + +baseDir=${WORKSPACE-`pwd`} +scriptsDir="$baseDir/scripts" +. 
$scriptsDir/common case $prDryRun in + yep) echo "DRY RUN" ;; + *) - ./pull-binary-libs.sh # build quick using STARR built upstream, as specified by scalaVersion - # (in that sense it's locker, since it was built with starr by that upstream job) - ant -Dstarr.version=$scalaVersion \ - -Dscalac.args.optimise=-optimise \ - -Dlocker.skip=1 -Dextra.repo.url=$prRepoUrl \ - $testExtraArgs ${testTarget-test.core docs.done} + # (in that sense it's locker, since it was built with starr by that upstream job); + # and run JUnit tests, partest, OSGi tests, MiMa and scaladoc + $SBT_CMD \ + -Dstarr.version=$scalaVersion \ + --warn \ + "setupValidateTest $prRepoUrl" \ + $testExtraArgs \ + "test" \ + "partest run pos neg jvm" \ + "partest res scalap specialized scalacheck" \ + "partest instrumented presentation" \ + "partest --srcpath scaladoc" \ + osgiTestFelix/test \ + osgiTestEclipse/test \ + library/mima \ + reflect/mima \ + doc + ;; -esac \ No newline at end of file + +esac diff --git a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala index 64eb1adbea24..a649c175d068 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala @@ -44,8 +44,22 @@ trait MemberLookup extends base.MemberLookupBase { /* Get package object which has associatedFile ne null */ sym.info.member(newTermName("package")) else sym - Option(sym1.associatedFile) flatMap (_.underlyingSource) flatMap { src => - val path = src.canonicalPath + def classpathEntryFor(s: Symbol): Option[String] = { + Option(s.associatedFile).flatMap(_.underlyingSource).map { src => + val path = src.canonicalPath + if(path.endsWith(".class")) { // Individual class file -> Classpath entry is root dir + var nesting = s.ownerChain.count(_.hasPackageFlag) + if(nesting > 0) { + val p = 0.until(nesting).foldLeft(src) { + case (null, _) => null + case (f, _) => f.container + } + if(p eq null) path else p.canonicalPath + } else path + } else path // JAR file (and fallback option) + } + } + classpathEntryFor(sym1) flatMap { path => settings.extUrlMapping get path map { url => LinkToExternal(name, url + "#" + name) } diff --git a/test/files/run/t7843-jsr223-service.check b/test/disabled/run/t7843-jsr223-service.check similarity index 100% rename from test/files/run/t7843-jsr223-service.check rename to test/disabled/run/t7843-jsr223-service.check diff --git a/test/files/run/t7843-jsr223-service.scala b/test/disabled/run/t7843-jsr223-service.scala similarity index 100% rename from test/files/run/t7843-jsr223-service.scala rename to test/disabled/run/t7843-jsr223-service.scala diff --git a/test/files/run/t7933.check b/test/disabled/run/t7933.check similarity index 100% rename from test/files/run/t7933.check rename to test/disabled/run/t7933.check diff --git a/test/files/run/t7933.scala b/test/disabled/run/t7933.scala similarity index 100% rename from test/files/run/t7933.scala rename to test/disabled/run/t7933.scala diff --git a/test/osgi/src/logback.xml b/test/osgi/src/logback.xml new file mode 100644 index 000000000000..692ccbfdd9c3 --- /dev/null +++ b/test/osgi/src/logback.xml @@ -0,0 +1,10 @@ + + + + *** \(%logger{30}\)%green(%X{debugId}) %msg%n + + + + + + diff --git a/test/scaladoc/.gitignore b/test/scaladoc/.gitignore new file mode 100644 index 000000000000..161be5b55fad --- /dev/null +++ b/test/scaladoc/.gitignore @@ -0,0 +1,2 @@ +*.log +*.obj/ diff --git a/test/scaladoc/run/SI-191.scala b/test/scaladoc/run/SI-191.scala 
index 29b1e7dd29ee..f3d269ceb0e9 100644 --- a/test/scaladoc/run/SI-191.scala +++ b/test/scaladoc/run/SI-191.scala @@ -33,10 +33,14 @@ object Test extends ScaladocModelTest { def scalaURL = "http://bog.us" override def scaladocSettings = { - val scalaLibUri = getClass.getClassLoader.getResource("scala/Function1.class").getPath.split("!")(0) - val scalaLibPath = new URI(scalaLibUri).getPath - val externalArg = s"$scalaLibPath#$scalaURL" - "-no-link-warnings -doc-external-doc " + externalArg + val samplePath = getClass.getClassLoader.getResource("scala/Function1.class").getPath + val scalaLibPath = if(samplePath.contains("!")) { // in scala-library.jar + val scalaLibUri = samplePath.split("!")(0) + new URI(scalaLibUri).getPath + } else { // individual class files on disk + samplePath.replace('\\', '/').dropRight("scala/Function1.class".length) + } + s"-no-link-warnings -doc-external-doc $scalaLibPath#$scalaURL" } def testModel(rootPackage: Package) { diff --git a/test/scaladoc/run/t8557.scala b/test/scaladoc/run/t8557.scala index 451f004d7d43..7876896bb7ea 100644 --- a/test/scaladoc/run/t8557.scala +++ b/test/scaladoc/run/t8557.scala @@ -1,3 +1,5 @@ +import java.net.URI + import scala.tools.nsc.doc.base._ import scala.tools.nsc.doc.model._ import scala.tools.partest.ScaladocModelTest @@ -15,10 +17,22 @@ object Test extends ScaladocModelTest { class A """ + def scalaURL = "http://www.scala-lang.org/api/current/" + // a non-canonical path to scala-library.jar should still work - // this is a bit fragile (depends on the current directory being the root of the repo ; - // ant & partest seem to do that properly) - def scaladocSettings = "-doc-external-doc build/pack/bin/../lib/scala-library.jar#http://www.scala-lang.org/api/current/" + override def scaladocSettings = { + val samplePath = getClass.getClassLoader.getResource("scala/Function1.class").getPath.replace('\\', '/') + val scalaLibPath = if(samplePath.contains("!")) { // in scala-library.jar + val scalaLibUri = samplePath.split("!")(0) + val p = new URI(scalaLibUri).getPath + // this is a bit fragile (depends on the scala library being in build/pack as produced by ant) + p.replace("/pack/lib/scala-library.jar", "/pack/bin/../lib/scala-library.jar") + } else { // individual class files on disk + val p = samplePath.dropRight("scala/Function1.class".length + 1) + p + "/.." 
+ p.takeRight(p.length - p.lastIndexOf('/')) + } + s"-doc-external-doc $scalaLibPath#$scalaURL" + } def testModel(rootPackage: Package) = { // get the quick access implicit defs in scope (_package(s), _class(es), _trait(s), object(s) _method(s), _value(s)) diff --git a/versions.properties b/versions.properties index afea93f666f1..9cfd3598000c 100644 --- a/versions.properties +++ b/versions.properties @@ -36,7 +36,7 @@ jline.version=2.12.1 scala-asm.version=5.0.4-scala-3 # external modules, used internally (not shipped) -partest.version.number=1.0.13 +partest.version.number=1.0.16 scalacheck.version.number=1.11.6 # TODO: modularize the compiler From c2c08a45a99f6ba48d8dc3b7c06d44196e50a2a2 Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Tue, 14 Jun 2016 16:27:01 +0200 Subject: [PATCH 0128/2793] Remove bnd warnings from log output --- project/Osgi.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Osgi.scala b/project/Osgi.scala index b557df1688f3..c5d4734cab8f 100644 --- a/project/Osgi.scala +++ b/project/Osgi.scala @@ -28,7 +28,7 @@ object Osgi { "Bundle-Name" -> bundleName.value, "Bundle-SymbolicName" -> bundleSymbolicName.value, "ver" -> v, - "Export-Package" -> "*;version=${ver}", + "Export-Package" -> "*;version=${ver};-split-package:=merge-first", "Import-Package" -> "scala.*;version=\"${range;[==,=+);${ver}}\",*", "Bundle-Version" -> v, "Bundle-RequiredExecutionEnvironment" -> "JavaSE-1.6, JavaSE-1.7", From 8eaa53d57c8d3346e03ccde7b7d4c4c8103d9253 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 16 Jun 2016 00:43:51 -0700 Subject: [PATCH 0129/2793] Avoid triple-quoting triple quotes The boolean test for triples was inadvertently flipped. Adds test for pretty printed multiline strings --- src/reflect/scala/reflect/internal/Printers.scala | 2 +- test/junit/scala/reflect/internal/PrintersTest.scala | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index 37b99c4345f6..4ad34ff8c789 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -1050,7 +1050,7 @@ trait Printers extends api.Printers { self: SymbolTable => x match { case Constant(v: String) if { val strValue = x.stringValue - strValue.contains(LF) && strValue.contains("\"\"\"") && strValue.size > 1 + strValue.contains(LF) && !strValue.contains("\"\"\"") && strValue.size > 1 } => val splitValue = x.stringValue.split(s"$LF").toList val multilineStringValue = if (x.stringValue.endsWith(s"$LF")) splitValue :+ "" else splitValue diff --git a/test/junit/scala/reflect/internal/PrintersTest.scala b/test/junit/scala/reflect/internal/PrintersTest.scala index 2305e7ea50de..916f21adc876 100644 --- a/test/junit/scala/reflect/internal/PrintersTest.scala +++ b/test/junit/scala/reflect/internal/PrintersTest.scala @@ -79,6 +79,14 @@ class BasePrintTest { @Test def testConstantLong = assertTreeCode(Literal(Constant(42l)))("42L") + @Test def testConstantMultiline = assertTreeCode(Literal(Constant("hello\nworld")))("\"\"\"hello\nworld\"\"\"") + + val sq = "\"" + val teq = "\\\"" * 3 + val tq = "\"" * 3 + + @Test def testConstantEmbeddedTriple = assertTreeCode(Literal(Constant(s"${tq}hello${tq}\nworld")))(s"${sq}${teq}hello${teq}\\nworld${sq}") + @Test def testOpExpr = assertPrintedCode("(5).+(4)", checkTypedTree = false) @Test def testName1 = assertPrintedCode("class test") From 14d3b9e7062662ae55cca02ad653a68aa6aef78d Mon Sep 
17 00:00:00 2001 From: Som Snytt Date: Thu, 16 Jun 2016 09:52:20 -0700 Subject: [PATCH 0130/2793] Refactor triple quote quoting To quote a triple quote, only quote one quote. Refactors the code for legibility. Adds test for other inline cruft like control chars. --- .../scala/reflect/internal/Printers.scala | 30 +++++++++---------- .../scala/reflect/internal/PrintersTest.scala | 12 +++++--- 2 files changed, 23 insertions(+), 19 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index 4ad34ff8c789..9602a2859bd6 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -1045,23 +1045,23 @@ trait Printers extends api.Printers { self: SymbolTable => print("") } - case l @ Literal(x) => - import Chars.LF - x match { - case Constant(v: String) if { - val strValue = x.stringValue - strValue.contains(LF) && !strValue.contains("\"\"\"") && strValue.size > 1 - } => - val splitValue = x.stringValue.split(s"$LF").toList - val multilineStringValue = if (x.stringValue.endsWith(s"$LF")) splitValue :+ "" else splitValue - val trQuotes = "\"\"\"" - print(trQuotes); printSeq(multilineStringValue) { print(_) } { print(LF) }; print(trQuotes) - case _ => - // processing Float constants - val printValue = x.escapedStringValue + (if (x.value.isInstanceOf[Float]) "F" else "") - print(printValue) + case Literal(k @ Constant(s: String)) if s.contains(Chars.LF) => + val tq = "\"" * 3 + val lines = s.lines.toList + if (lines.lengthCompare(1) <= 0) print(k.escapedStringValue) + else { + val tqp = """["]{3}""".r + val tqq = """""\\"""" // ""\" is triple-quote quoted + print(tq) + printSeq(lines.map(x => tqp.replaceAllIn(x, tqq)))(print(_))(print(Chars.LF)) + print(tq) } + case Literal(x) => + // processing Float constants + val suffix = x.value match { case _: Float => "F" case _ => "" } + print(s"${x.escapedStringValue}${suffix}") + case an @ Annotated(ap, tree) => val printParentheses = needsParentheses(tree)() parenthesize(printParentheses) { print(tree) }; print(if (tree.isType) " " else ": ") diff --git a/test/junit/scala/reflect/internal/PrintersTest.scala b/test/junit/scala/reflect/internal/PrintersTest.scala index 916f21adc876..38fe205af764 100644 --- a/test/junit/scala/reflect/internal/PrintersTest.scala +++ b/test/junit/scala/reflect/internal/PrintersTest.scala @@ -79,13 +79,17 @@ class BasePrintTest { @Test def testConstantLong = assertTreeCode(Literal(Constant(42l)))("42L") - @Test def testConstantMultiline = assertTreeCode(Literal(Constant("hello\nworld")))("\"\"\"hello\nworld\"\"\"") - val sq = "\"" - val teq = "\\\"" * 3 val tq = "\"" * 3 + val teq = "\"\"\\\"" + + @Test def testConstantMultiline = assertTreeCode(Literal(Constant("hello\nworld")))(s"${tq}hello\nworld${tq}") + + @Test def testConstantFormfeed = assertTreeCode(Literal(Constant("hello\fworld")))(s"${sq}hello\\fworld${sq}") + + @Test def testConstantControl = assertTreeCode(Literal(Constant("hello\u0003world")))(s"${sq}hello\\03world${sq}") - @Test def testConstantEmbeddedTriple = assertTreeCode(Literal(Constant(s"${tq}hello${tq}\nworld")))(s"${sq}${teq}hello${teq}\\nworld${sq}") + @Test def testConstantEmbeddedTriple = assertTreeCode(Literal(Constant(s"${tq}hello${tq}\nworld")))(s"${tq}${teq}hello${teq}\nworld${tq}") @Test def testOpExpr = assertPrintedCode("(5).+(4)", checkTypedTree = false) From 0eac3cb85dd4720b015a0d060691f68b63032b85 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 16 Jun 2016 
10:15:56 -0700 Subject: [PATCH 0131/2793] Constant print control in unicode Since octal escape is deprecated, use unicode escape for string representation of constants. --- src/reflect/scala/reflect/internal/Constants.scala | 3 +-- test/files/run/reflection-java-annotations.check | 5 ++--- test/files/run/reflection-java-annotations/Test_2.scala | 4 ++-- test/junit/scala/reflect/internal/PrintersTest.scala | 6 +++++- 4 files changed, 10 insertions(+), 8 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Constants.scala b/src/reflect/scala/reflect/internal/Constants.scala index 85d0efdcba79..7b47798ff725 100644 --- a/src/reflect/scala/reflect/internal/Constants.scala +++ b/src/reflect/scala/reflect/internal/Constants.scala @@ -7,7 +7,6 @@ package scala package reflect package internal -import java.lang.Integer.toOctalString import scala.annotation.switch trait Constants extends api.Constants { @@ -212,7 +211,7 @@ trait Constants extends api.Constants { case '"' => "\\\"" case '\'' => "\\\'" case '\\' => "\\\\" - case _ => if (ch.isControl) "\\0" + toOctalString(ch.toInt) else String.valueOf(ch) + case _ => if (ch.isControl) "\\u%04X".format(ch.toInt) else String.valueOf(ch) } def escapedStringValue: String = { diff --git a/test/files/run/reflection-java-annotations.check b/test/files/run/reflection-java-annotations.check index 67317d20729e..4c20727ea8fd 100644 --- a/test/files/run/reflection-java-annotations.check +++ b/test/files/run/reflection-java-annotations.check @@ -1,4 +1,3 @@ -warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details -List(JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = [101, 101], v102 = [102, 102], v103 = ['g', 'g'], v104 = [104, 104], v105 = [105L, 105L], v106 = [106.0, 106.0], v107 = [107.0, 107.0], v108 = [false, true], v11 = classOf[JavaAnnottee_1], v110 = ["hello", "world"], v111 = [classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]], v112 = [FOO, BAR], v113 = [JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = '\027', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)], v12 = FOO, v13 = JavaSimpleAnnotation_1(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation_1], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\03', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false)) +List(JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = [101, 101], v102 = [102, 102], v103 = ['g', 'g'], v104 = [104, 104], v105 = [105L, 105L], v106 = [106.0, 106.0], v107 = [107.0, 107.0], v108 = [false, true], v11 = classOf[JavaAnnottee_1], v110 = ["hello", "world"], v111 = [classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]], v112 = [FOO, BAR], v113 = [JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = '\u0017', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)], v12 = FOO, v13 = JavaSimpleAnnotation_1(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation_1], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\u0003', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false)) ======= -new JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = Array(101, 101), v102 = Array(102, 102), v103 = Array('g', 'g'), v104 = Array(104, 104), v105 = Array(105L, 105L), v106 = Array(106.0, 106.0), v107 = Array(107.0, 107.0), v108 = Array(false, true), v11 = classOf[JavaAnnottee_1], v110 = 
Array("hello", "world"), v111 = Array(classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]), v112 = Array(FOO, BAR), v113 = Array(new JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = '\027', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)), v12 = FOO, v13 = new JavaSimpleAnnotation_1(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation_1], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\03', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false) +new JavaComplexAnnotation_1(v1 = 1, v10 = "hello", v101 = Array(101, 101), v102 = Array(102, 102), v103 = Array('g', 'g'), v104 = Array(104, 104), v105 = Array(105L, 105L), v106 = Array(106.0, 106.0), v107 = Array(107.0, 107.0), v108 = Array(false, true), v11 = classOf[JavaAnnottee_1], v110 = Array("hello", "world"), v111 = Array(classOf[JavaSimpleAnnotation_1], classOf[JavaComplexAnnotation_1]), v112 = Array(FOO, BAR), v113 = Array(new JavaSimpleAnnotation_1(v1 = 21, v10 = "world2", v11 = classOf[JavaComplexAnnotation_1], v12 = BAR, v2 = 22, v3 = '\u0017', v4 = 24, v5 = 25L, v6 = 26.0, v7 = 27.0, v8 = false)), v12 = FOO, v13 = new JavaSimpleAnnotation_1(v1 = 11, v10 = "world1", v11 = classOf[JavaSimpleAnnotation_1], v12 = FOO, v2 = 12, v3 = '\r', v4 = 14, v5 = 15L, v6 = 16.0, v7 = 17.0, v8 = false), v2 = 2, v3 = '\u0003', v4 = 4, v5 = 5L, v6 = 6.0, v7 = 7.0, v8 = false) diff --git a/test/files/run/reflection-java-annotations/Test_2.scala b/test/files/run/reflection-java-annotations/Test_2.scala index dec5b45ca74e..6d457ebe64f5 100644 --- a/test/files/run/reflection-java-annotations/Test_2.scala +++ b/test/files/run/reflection-java-annotations/Test_2.scala @@ -2,8 +2,8 @@ object Test extends App { import scala.reflect.runtime.universe._ val sym = typeOf[JavaAnnottee_1].typeSymbol sym.info - sym.annotations foreach (_.javaArgs) + sym.annotations foreach (_.tree.children.tail) println(sym.annotations) println("=======") sym.annotations.map(_.tree).map(println) -} \ No newline at end of file +} diff --git a/test/junit/scala/reflect/internal/PrintersTest.scala b/test/junit/scala/reflect/internal/PrintersTest.scala index 38fe205af764..d581ca8cf432 100644 --- a/test/junit/scala/reflect/internal/PrintersTest.scala +++ b/test/junit/scala/reflect/internal/PrintersTest.scala @@ -87,7 +87,11 @@ class BasePrintTest { @Test def testConstantFormfeed = assertTreeCode(Literal(Constant("hello\fworld")))(s"${sq}hello\\fworld${sq}") - @Test def testConstantControl = assertTreeCode(Literal(Constant("hello\u0003world")))(s"${sq}hello\\03world${sq}") + @Test def testConstantControl = assertTreeCode(Literal(Constant("hello\u0003world")))(s"${sq}hello\\u0003world${sq}") + + @Test def testConstantFormfeedChar = assertTreeCode(Literal(Constant('\f')))("'\\f'") + + @Test def testConstantControlChar = assertTreeCode(Literal(Constant(3.toChar)))("'\\u0003'") @Test def testConstantEmbeddedTriple = assertTreeCode(Literal(Constant(s"${tq}hello${tq}\nworld")))(s"${tq}${teq}hello${teq}\nworld${tq}") From a6ce8e3a423b7118eab48b2c94e159e637ace13a Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 16 Jun 2016 00:43:51 -0700 Subject: [PATCH 0132/2793] Avoid triple-quoting triple quotes The boolean test for triples was inadvertently flipped. 
Adds test for pretty printed multiline strings --- src/reflect/scala/reflect/internal/Printers.scala | 2 +- test/junit/scala/reflect/internal/PrintersTest.scala | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index b44c4022f660..9a5314192fff 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -1050,7 +1050,7 @@ trait Printers extends api.Printers { self: SymbolTable => x match { case Constant(v: String) if { val strValue = x.stringValue - strValue.contains(LF) && strValue.contains("\"\"\"") && strValue.size > 1 + strValue.contains(LF) && !strValue.contains("\"\"\"") && strValue.size > 1 } => val splitValue = x.stringValue.split(s"$LF").toList val multilineStringValue = if (x.stringValue.endsWith(s"$LF")) splitValue :+ "" else splitValue diff --git a/test/junit/scala/reflect/internal/PrintersTest.scala b/test/junit/scala/reflect/internal/PrintersTest.scala index 9bfe6eecb8ed..cacff6a01286 100644 --- a/test/junit/scala/reflect/internal/PrintersTest.scala +++ b/test/junit/scala/reflect/internal/PrintersTest.scala @@ -86,6 +86,14 @@ trait BasePrintTests { @Test def testConstantLong = assertTreeCode(Literal(Constant(42l)))("42L") + @Test def testConstantMultiline = assertTreeCode(Literal(Constant("hello\nworld")))("\"\"\"hello\nworld\"\"\"") + + val sq = "\"" + val teq = "\\\"" * 3 + val tq = "\"" * 3 + + @Test def testConstantEmbeddedTriple = assertTreeCode(Literal(Constant(s"${tq}hello${tq}\nworld")))(s"${sq}${teq}hello${teq}\\nworld${sq}") + @Test def testOpExpr = assertPrintedCode("(5).+(4)", checkTypedTree = false) @Test def testName1 = assertPrintedCode("class test") From cf0390d94e59c637ba83170232864b069b90474e Mon Sep 17 00:00:00 2001 From: Dmitriy Pogretskiy Date: Tue, 14 Jun 2016 15:08:57 +0300 Subject: [PATCH 0133/2793] SI-9817 forall and exists SI-9817 Immutable queue formatting SI-9817 Added comments SI-9817 Comment formatting --- src/library/scala/collection/immutable/Queue.scala | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala index 3ad6656636d8..1dd0d7683a07 100644 --- a/src/library/scala/collection/immutable/Queue.scala +++ b/src/library/scala/collection/immutable/Queue.scala @@ -84,6 +84,14 @@ sealed class Queue[+A] protected(protected val in: List[A], protected val out: L else if (in.nonEmpty) new Queue(Nil, in.reverse.tail) else throw new NoSuchElementException("tail on empty queue") + /* This is made to avoid inefficient implementation of iterator. */ + override def forall(p: A => Boolean): Boolean = + in.forall(p) && out.forall(p) + + /* This is made to avoid inefficient implementation of iterator. */ + override def exists(p: A => Boolean): Boolean = + in.exists(p) || out.exists(p) + /** Returns the length of the queue. 
*/ override def length = in.length + out.length From ad77623b5d26b9139deb0663bac444217bb61297 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 27 May 2016 17:55:46 +0100 Subject: [PATCH 0134/2793] Make removing forkjoin dependant on the organization key --- build.sbt | 24 ++++++++++++++---------- 1 file changed, 14 insertions(+), 10 deletions(-) diff --git a/build.sbt b/build.sbt index 3df0d43c0009..3b0c74a0ee86 100644 --- a/build.sbt +++ b/build.sbt @@ -220,7 +220,7 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings + // directly to stdout outputStrategy in run := Some(StdoutOutput), Quiet.silenceScalaBinaryVersionWarning -) +) ++ removePomDependencies /** Extra post-processing for the published POM files. These are needed to create POMs that * are equivalent to the ones from the ANT build. In the long term this should be removed and @@ -249,10 +249,16 @@ def fixPom(extra: (String, scala.xml.Node)*): Setting[_] = { ) ++ extra) } } +val pomDependencyExclusions = + settingKey[Seq[(String, String)]]("List of (groupId, artifactId) pairs to exclude from the POM and ivy.xml") + +pomDependencyExclusions in Global := Nil + /** Remove unwanted dependencies from the POM and ivy.xml. */ -def removePomDependencies(deps: (String, String)*): Seq[Setting[_]] = Seq( +lazy val removePomDependencies: Seq[Setting[_]] = Seq( pomPostProcess := { n => val n2 = pomPostProcess.value.apply(n) + val deps = pomDependencyExclusions.value import scala.xml._ import scala.xml.transform._ new RuleTransformer(new RewriteRule { @@ -270,6 +276,7 @@ def removePomDependencies(deps: (String, String)*): Seq[Setting[_]] = Seq( import scala.xml._ import scala.xml.transform._ val f = deliverLocal.value + val deps = pomDependencyExclusions.value val e = new RuleTransformer(new RewriteRule { override def transform(node: Node) = node match { case e: Elem if e.label == "dependency" && { @@ -366,10 +373,10 @@ lazy val library = configureAsSubproject(project) "/project/name" -> Scala Library, "/project/description" -> Standard library for the Scala Programming Language, "/project/packaging" -> jar - ) + ), + // Remove the dependency on "forkjoin" from the POM because it is included in the JAR: + pomDependencyExclusions += ((organization.value, "forkjoin")) ) - // Remove the dependency on "forkjoin" from the POM because it is included in the JAR: - .settings(removePomDependencies(("org.scala-lang", "forkjoin")): _*) .settings(filterDocSources("*.scala" -- (regexFileFilter(".*/runtime/.*\\$\\.scala") || regexFileFilter(".*/runtime/ScalaRunTime\\.scala") || regexFileFilter(".*/runtime/StringAdd\\.scala"))): _*) @@ -451,12 +458,9 @@ lazy val compiler = configureAsSubproject(project) "/project/description" -> Compiler for the Scala Programming Language, "/project/packaging" -> jar ), - apiURL := None + apiURL := None, + pomDependencyExclusions ++= List(("org.apache.ant", "ant"), ("org.scala-lang.modules", "scala-asm")) ) - .settings(removePomDependencies( - ("org.apache.ant", "ant"), - ("org.scala-lang.modules", "scala-asm") - ): _*) .dependsOn(library, reflect) lazy val interactive = configureAsSubproject(project) From 7f2e6a2f22c97989ac130bfef95284047a29876d Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Tue, 21 Jun 2016 10:40:07 +0200 Subject: [PATCH 0135/2793] doc: capitalize only works on BMP characters --- src/library/scala/collection/immutable/StringLike.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/src/library/scala/collection/immutable/StringLike.scala 
b/src/library/scala/collection/immutable/StringLike.scala index 1b52e40b7235..232d67df4f9e 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -137,6 +137,7 @@ self => /** Returns this string with first character converted to upper case. * If the first character of the string is capitalized, it is returned unchanged. + * This method does not convert characters outside the Basic Multilingual Plane (BMP). */ def capitalize: String = if (toString == null) null From 8c01343908a4cb7a5d9d2432e06097e5f89592b8 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 21 Jun 2016 12:22:58 -0400 Subject: [PATCH 0136/2793] SI-9336 Enable paste detect in jline When the next char is available immediately after a tab, the tab is taken raw instead of invoking completion. --- .../scala/tools/nsc/interpreter/jline/JLineReader.scala | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala index b5db4c209845..0983f24fbb4f 100644 --- a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala +++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala @@ -33,11 +33,14 @@ class InteractiveReader(completer: () => Completion) extends interpreter.Interac private val consoleReader = { val reader = new JLineConsoleReader() - reader setPaginationEnabled interpreter.`package`.isPaged + reader setPaginationEnabled interpreter.isPaged - // ASAP + // turn off magic ! reader setExpandEvents false + // enable detecting pasted tab char (when next char is immediately available) which is taken raw, not completion + reader setCopyPasteDetection true + reader setHistory history.asInstanceOf[JHistory] reader From 2d85fe47fddd3759db37e0106920688dcbceb7ef Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 21 Jun 2016 12:07:06 +0200 Subject: [PATCH 0137/2793] Fix test failures of the sbt build in 2.12.x MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - More memory for compiling partest tests - Enable more detailed logging for partest failures: Partest’s `AntRunner` has extra code for logging details of all failed tests at the end of a run which is not present in `SbtRunner` but we can set the `--show-log` and `--show-diff` options to get similar output interspersed with the main test log. - Update OSGi test dependencies for sbt build to same versions as in ant (see 8247b8f077c96ba9a017b53de4d8062fe428c4a7 for the motivation for this change). - Disable MiMa checks (to be reactivated after M5). 
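In sbt terms, the extra partest logging boils down to passing partest's own flags through `testOptions`, roughly as the `ScriptCommands.scala` hunk below does (a sketch using the key and project names from this build):

```scala
// Forward partest's --show-log / --show-diff flags so that details of failed
// tests appear inline in the sbt test output.
testOptions in IntegrationTest in LocalProject("test") ++= Seq(
  Tests.Argument("--show-log"),
  Tests.Argument("--show-diff")
)
```
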
--- build.sbt | 23 +++++++++++------------ project/ScriptCommands.scala | 1 + 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/build.sbt b/build.sbt index 98ea8f0b3266..20ae42b3e6b1 100644 --- a/build.sbt +++ b/build.sbt @@ -112,7 +112,7 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( globalVersionSettings baseVersion in Global := "2.12.0" baseVersionSuffix in Global := "SNAPSHOT" -mimaReferenceVersion in Global := Some("2.11.0") +mimaReferenceVersion in Global := None lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings ++ Seq[Setting[_]]( organization := "org.scala-lang", @@ -567,11 +567,11 @@ lazy val junit = project.in(file("test") / "junit") lazy val osgiTestFelix = osgiTestProject( project.in(file(".") / "target" / "osgiTestFelix"), - "org.apache.felix" % "org.apache.felix.framework" % "4.4.0") + "org.apache.felix" % "org.apache.felix.framework" % "5.0.1") lazy val osgiTestEclipse = osgiTestProject( project.in(file(".") / "target" / "osgiTestEclipse"), - "org.eclipse.osgi" % "org.eclipse.osgi" % "3.7.1") + "org.eclipse.tycho" % "org.eclipse.osgi" % "3.10.100.v20150521-1310") def osgiTestProject(p: Project, framework: ModuleID) = p .dependsOn(library, reflect, compiler) @@ -583,19 +583,18 @@ def osgiTestProject(p: Project, framework: ModuleID) = p fork in Test := true, parallelExecution in Test := false, libraryDependencies ++= { - val paxExamVersion = "3.5.0" // Last version which supports Java 6 + val paxExamVersion = "4.5.0" // Last version which supports Java 6 Seq( junitDep, junitInterfaceDep, - "org.ops4j.pax.exam" % "pax-exam-container-native" % paxExamVersion - exclude("org.osgi", "org.osgi.core"), // Avoid dragging in a dependency which requires Java >6 - "org.osgi" % "org.osgi.core" % "4.2.0" % "provided", // The framework (Felix / Eclipse) provides the classes + "org.ops4j.pax.exam" % "pax-exam-container-native" % paxExamVersion, "org.ops4j.pax.exam" % "pax-exam-junit4" % paxExamVersion, "org.ops4j.pax.exam" % "pax-exam-link-assembly" % paxExamVersion, - "org.ops4j.pax.url" % "pax-url-aether" % "2.2.0", - "org.ops4j.pax.swissbox" % "pax-swissbox-tracker" % "1.8.0", - "ch.qos.logback" % "logback-core" % "1.1.2", - "ch.qos.logback" % "logback-classic" % "1.1.2", + "org.ops4j.pax.url" % "pax-url-aether" % "2.4.1", + "org.ops4j.pax.swissbox" % "pax-swissbox-tracker" % "1.8.1", + "ch.qos.logback" % "logback-core" % "1.1.3", + "ch.qos.logback" % "logback-classic" % "1.1.3", + "org.slf4j" % "slf4j-api" % "1.7.12", framework % "test" ) }, @@ -658,7 +657,7 @@ lazy val test = project // test sources are compiled in partest run, not here sources in IntegrationTest := Seq.empty, fork in IntegrationTest := true, - javaOptions in IntegrationTest += "-Xmx1G", + javaOptions in IntegrationTest += "-Xmx2G", testFrameworks += new TestFramework("scala.tools.partest.sbt.Framework"), testOptions in IntegrationTest += Tests.Argument("-Dpartest.java_opts=-Xmx1024M -Xms64M -XX:MaxPermSize=128M"), testOptions in IntegrationTest += Tests.Argument("-Dpartest.scalac_opts=" + (scalacOptions in Compile).value.mkString(" ")), diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index b6565803b49e..e5ff38617eab 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -25,6 +25,7 @@ object ScriptCommands { Project.extract(state).append(Seq( resolvers in Global += "scala-pr" at url, scalacOptions in Compile in ThisBuild += "-opt:l:classpath", + testOptions in IntegrationTest in LocalProject("test") ++= 
Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")), logLevel in ThisBuild := Level.Info, logLevel in update in ThisBuild := Level.Warn ), state) From d45a9a6a70a8180987c4b65cd36107955d20ec8f Mon Sep 17 00:00:00 2001 From: Martin Olsson Date: Sun, 26 Jun 2016 00:22:08 +0200 Subject: [PATCH 0138/2793] Fix typo in test comment --- test/files/run/t3326.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/files/run/t3326.scala b/test/files/run/t3326.scala index 4ac7ef9138d8..b6b4eac784bb 100644 --- a/test/files/run/t3326.scala +++ b/test/files/run/t3326.scala @@ -19,7 +19,7 @@ import scala.math.Ordering * This is why `collection.SortedMap` used to resort to the generic * `TraversableLike.++` which knows nothing about the ordering. * - * To avoid `collection.SortedMap`s resort to the more generic `TraverableLike.++`, + * To avoid `collection.SortedMap`s resort to the more generic `TraversableLike.++`, * we override the `MapLike.++` overload in `collection.SortedMap` to return * the proper type `SortedMap`. */ From 22dac3118e97b2a4707d42ef1f47ac292a8ed385 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 14 Jun 2016 16:54:08 +0200 Subject: [PATCH 0139/2793] Temporarily insource Scalacheck 1.11.6 This is a temporary measure until we release Scala 2.12.0. It means we are able to release milestones, and RCs of Scala without needing a public release of Scalacheck. While we've never had to wait very long for these in the past (Thanks, Rickard!) we'd like to spare the maintainer some work betwen now and 2.12.0. After we release Scala 2.12.0, we'll revert to a binary dependency on the standard Scalacheck. I have replaced the scala-parser-combinator based command line option parsing with a quick and dirty version. I've had to remove scalacheck as a SBT test framework in our build. We don't use it directly as such (instead, it is used indirectly through `partest --scalacheck`), and it's test discovery (which we expect to return nothing) fails after re-STARR-ing due to an unsolved problem with SBT's testLoader including either STARR or sbt-launch.jar on the classpath used to discover and spawn tests. For the record, I tried the following to no avail: ``` // Two modifications are needed from the stock SBT configuration in order to exclude STARR // from the classloader that performs test discovery. 
// - We make `isManagedVersion` hold by providing an explicit Scala version, in order to go into the desired // branch in `createTestLoader` // - We remove STARR from the classloader of the scala instance def fixTestLoader = testLoader := { val s = scalaInstance.value val scalaInstance1 = new ScalaInstance(s.version, appConfiguration.value.provider.scalaProvider.loader(), s.libraryJar, s.compilerJar, s.extraJars, Some(s.actualVersion)) assert(scalaInstance1.isManagedVersion) TestFramework.createTestLoader(Attributed.data(fullClasspath.value), scalaInstance1, IO.createUniqueDirectory(taskTemporaryDirectory.value)) } ``` f --- build.sbt | 5 +- build.xml | 21 +- doc/LICENSE.md | 1 + doc/licenses/bsd_scalacheck.txt | 32 + .../scala/org/scalacheck/Arbitrary.scala | 433 ++++++++ .../scala/org/scalacheck/Commands.scala | 146 +++ .../scala/org/scalacheck/Commands2.scala | 150 +++ .../scala/org/scalacheck/Gen.scala | 813 +++++++++++++++ .../scala/org/scalacheck/Prop.scala | 953 ++++++++++++++++++ .../scala/org/scalacheck/Properties.scala | 82 ++ .../org/scalacheck/ScalaCheckFramework.scala | 93 ++ .../scala/org/scalacheck/Shrink.scala | 215 ++++ .../scala/org/scalacheck/Test.scala | 372 +++++++ .../scala/org/scalacheck/util/Buildable.scala | 77 ++ .../org/scalacheck/util/CmdLineParser.scala | 41 + .../org/scalacheck/util/ConsoleReporter.scala | 44 + .../scala/org/scalacheck/util/FreqMap.scala | 65 ++ .../scala/org/scalacheck/util/Pretty.scala | 129 +++ versions.properties | 4 +- 19 files changed, 3653 insertions(+), 23 deletions(-) create mode 100644 doc/licenses/bsd_scalacheck.txt create mode 100644 src/partest-extras/scala/org/scalacheck/Arbitrary.scala create mode 100644 src/partest-extras/scala/org/scalacheck/Commands.scala create mode 100644 src/partest-extras/scala/org/scalacheck/Commands2.scala create mode 100644 src/partest-extras/scala/org/scalacheck/Gen.scala create mode 100644 src/partest-extras/scala/org/scalacheck/Prop.scala create mode 100644 src/partest-extras/scala/org/scalacheck/Properties.scala create mode 100644 src/partest-extras/scala/org/scalacheck/ScalaCheckFramework.scala create mode 100644 src/partest-extras/scala/org/scalacheck/Shrink.scala create mode 100644 src/partest-extras/scala/org/scalacheck/Test.scala create mode 100644 src/partest-extras/scala/org/scalacheck/util/Buildable.scala create mode 100644 src/partest-extras/scala/org/scalacheck/util/CmdLineParser.scala create mode 100644 src/partest-extras/scala/org/scalacheck/util/ConsoleReporter.scala create mode 100644 src/partest-extras/scala/org/scalacheck/util/FreqMap.scala create mode 100644 src/partest-extras/scala/org/scalacheck/util/Pretty.scala diff --git a/build.sbt b/build.sbt index 20ae42b3e6b1..1d4e208da2e3 100644 --- a/build.sbt +++ b/build.sbt @@ -60,7 +60,6 @@ val scalaParserCombinatorsDep = scalaDep("org.scala-lang.modules", "scala-par val scalaSwingDep = scalaDep("org.scala-lang.modules", "scala-swing") val scalaXmlDep = scalaDep("org.scala-lang.modules", "scala-xml") val partestDep = scalaDep("org.scala-lang.modules", "scala-partest", versionProp = "partest") -val scalacheckDep = scalaDep("org.scalacheck", "scalacheck", scope = "it") // Non-Scala dependencies: val junitDep = "junit" % "junit" % "4.11" @@ -562,6 +561,7 @@ lazy val junit = project.in(file("test") / "junit") fork in Test := true, libraryDependencies ++= Seq(junitDep, junitInterfaceDep, jolDep), testOptions += Tests.Argument(TestFrameworks.JUnit, "-a", "-v"), + testFrameworks -= new TestFramework("org.scalacheck.ScalaCheckFramework"), 
unmanagedSourceDirectories in Test := List(baseDirectory.value) ) @@ -642,7 +642,7 @@ lazy val test = project .settings(disablePublishing: _*) .settings(Defaults.itSettings: _*) .settings( - libraryDependencies ++= Seq(asmDep, partestDep, scalaXmlDep, scalacheckDep), + libraryDependencies ++= Seq(asmDep, partestDep, scalaXmlDep), libraryDependencies ++= { // Resolve the JARs for all test/files/lib/*.jar.desired.sha1 files through Ivy val baseDir = (baseDirectory in ThisBuild).value @@ -659,6 +659,7 @@ lazy val test = project fork in IntegrationTest := true, javaOptions in IntegrationTest += "-Xmx2G", testFrameworks += new TestFramework("scala.tools.partest.sbt.Framework"), + testFrameworks -= new TestFramework("org.scalacheck.ScalaCheckFramework"), testOptions in IntegrationTest += Tests.Argument("-Dpartest.java_opts=-Xmx1024M -Xms64M -XX:MaxPermSize=128M"), testOptions in IntegrationTest += Tests.Argument("-Dpartest.scalac_opts=" + (scalacOptions in Compile).value.mkString(" ")), testOptions in IntegrationTest += Tests.Setup { () => diff --git a/build.xml b/build.xml index 519d3597cc6c..6b2c9ade0da4 100644 --- a/build.xml +++ b/build.xml @@ -319,7 +319,6 @@ TODO: - @@ -339,11 +338,6 @@ TODO: - - - - - @@ -567,7 +561,6 @@ TODO: - @@ -577,7 +570,6 @@ TODO: - @@ -922,7 +914,7 @@ TODO: (but not scala-library, so we filter that one out...) so we provide them: scala-[library/reflect/compiler], scalap built here, scala-xml, scala-parser-combinators via external-modules-nocore, - scalacheck as part of `partest.classpath` --> + as part of `partest.classpath` --> @@ -933,17 +925,6 @@ TODO: - - - - - - - - - - - diff --git a/doc/LICENSE.md b/doc/LICENSE.md index a07ba32e0b07..0718c43e05df 100644 --- a/doc/LICENSE.md +++ b/doc/LICENSE.md @@ -46,6 +46,7 @@ This license is used by the following third-party libraries: This license is used by the following third-party libraries: * jline + * scalacheck ### [BSD 3-Clause License](http://opensource.org/licenses/BSD-3-Clause) This license is used by the following third-party libraries: diff --git a/doc/licenses/bsd_scalacheck.txt b/doc/licenses/bsd_scalacheck.txt new file mode 100644 index 000000000000..f1920752e0f6 --- /dev/null +++ b/doc/licenses/bsd_scalacheck.txt @@ -0,0 +1,32 @@ +ScalaCheck LICENSE + +Copyright (c) 2007-2013, Rickard Nilsson +All rights reserved. + +Permission to use, copy, modify, and distribute this software in source +or binary form for any purpose with or without fee is hereby granted, +provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of the author nor the names of its contributors + may be used to endorse or promote products derived from this + software without specific prior written permission. + + +THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. diff --git a/src/partest-extras/scala/org/scalacheck/Arbitrary.scala b/src/partest-extras/scala/org/scalacheck/Arbitrary.scala new file mode 100644 index 000000000000..1cbd668f0c34 --- /dev/null +++ b/src/partest-extras/scala/org/scalacheck/Arbitrary.scala @@ -0,0 +1,433 @@ +/*-------------------------------------------------------------------------*\ +** ScalaCheck ** +** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. ** +** http://www.scalacheck.org ** +** ** +** This software is released under the terms of the Revised BSD License. ** +** There is NO WARRANTY. See the file LICENSE for the full text. ** +\*------------------------------------------------------------------------ */ + +package org.scalacheck + +import util.{FreqMap, Buildable, Buildable2} + + +sealed abstract class Arbitrary[T] { + val arbitrary: Gen[T] +} + +/** Defines implicit [[org.scalacheck.Arbitrary]] instances for common types. + *

+ * ScalaCheck + * uses implicit [[org.scalacheck.Arbitrary]] instances when creating properties + * out of functions with the `Prop.property` method, and when + * the `Arbitrary.arbitrary` method is used. For example, the + * following code requires that there exists an implicit + * `Arbitrary[MyClass]` instance: + *

+ * + * {{{ + * val myProp = Prop.forAll { myClass: MyClass => + * ... + * } + * + * val myGen = Arbitrary.arbitrary[MyClass] + * }}} + * + *

+ * The required implicit definition could look like this: + *

+ * + * {{{ + * implicit val arbMyClass: Arbitrary[MyClass] = Arbitrary(...) + * }}} + * + *

+ * The factory method `Arbitrary(...)` takes a generator of type + * `Gen[T]` and returns an instance of `Arbitrary[T]`. + *

+ * + *

+ * The `Arbitrary` module defines implicit [[org.scalacheck.Arbitrary]] + * instances for common types, for convenient use in your properties and + * generators. + *

+ */ +object Arbitrary { + + import Gen.{const, choose, sized, frequency, oneOf, containerOf, resize} + import collection.{immutable, mutable} + import java.util.Date + + /** Creates an Arbitrary instance */ + def apply[T](g: => Gen[T]): Arbitrary[T] = new Arbitrary[T] { + lazy val arbitrary = g + } + + /** Returns an arbitrary generator for the type T. */ + def arbitrary[T](implicit a: Arbitrary[T]): Gen[T] = a.arbitrary + + /**** Arbitrary instances for each AnyVal ****/ + + /** Arbitrary AnyVal */ + implicit lazy val arbAnyVal: Arbitrary[AnyVal] = Arbitrary(oneOf( + arbitrary[Unit], arbitrary[Boolean], arbitrary[Char], arbitrary[Byte], + arbitrary[Short], arbitrary[Int], arbitrary[Long], arbitrary[Float], + arbitrary[Double] + )) + + /** Arbitrary instance of Boolean */ + implicit lazy val arbBool: Arbitrary[Boolean] = + Arbitrary(oneOf(true, false)) + + /** Arbitrary instance of Int */ + implicit lazy val arbInt: Arbitrary[Int] = Arbitrary( + Gen.chooseNum(Int.MinValue, Int.MaxValue) + ) + + /** Arbitrary instance of Long */ + implicit lazy val arbLong: Arbitrary[Long] = Arbitrary( + Gen.chooseNum(Long.MinValue, Long.MaxValue) + ) + + /** Arbitrary instance of Float */ + implicit lazy val arbFloat: Arbitrary[Float] = Arbitrary( + Gen.chooseNum( + Float.MinValue, Float.MaxValue + // I find that including these by default is a little TOO testy. + // Float.Epsilon, Float.NaN, Float.PositiveInfinity, Float.NegativeInfinity + ) + ) + + /** Arbitrary instance of Double */ + implicit lazy val arbDouble: Arbitrary[Double] = Arbitrary( + Gen.chooseNum( + Double.MinValue / 2, Double.MaxValue / 2 + // As above. Perhaps behind some option? + // Double.Epsilon, Double.NaN, Double.PositiveInfinity, Double.NegativeInfinity + ) + ) + + /** Arbitrary instance of Char */ + implicit lazy val arbChar: Arbitrary[Char] = Arbitrary( + Gen.frequency( + (0xD800-Char.MinValue, Gen.choose[Char](Char.MinValue,0xD800-1)), + (Char.MaxValue-0xDFFF, Gen.choose[Char](0xDFFF+1,Char.MaxValue)) + ) + ) + + /** Arbitrary instance of Byte */ + implicit lazy val arbByte: Arbitrary[Byte] = Arbitrary( + Gen.chooseNum(Byte.MinValue, Byte.MaxValue) + ) + + /** Arbitrary instance of Short */ + implicit lazy val arbShort: Arbitrary[Short] = Arbitrary( + Gen.chooseNum(Short.MinValue, Short.MaxValue) + ) + + /** Absolutely, totally, 100% arbitrarily chosen Unit. 
*/ + implicit lazy val arbUnit: Arbitrary[Unit] = Arbitrary(const(())) + + /**** Arbitrary instances of other common types ****/ + + /** Arbitrary instance of String */ + implicit lazy val arbString: Arbitrary[String] = + Arbitrary(arbitrary[List[Char]] map (_.mkString)) + + /** Arbitrary instance of Date */ + implicit lazy val arbDate: Arbitrary[Date] = Arbitrary(for { + l <- arbitrary[Long] + d = new Date + } yield new Date(d.getTime + l)) + + /** Arbitrary instance of Throwable */ + implicit lazy val arbThrowable: Arbitrary[Throwable] = + Arbitrary(oneOf(const(new Exception), const(new Error))) + + /** Arbitrary instance of Exception */ + implicit lazy val arbException: Arbitrary[Exception] = + Arbitrary(const(new Exception)) + + /** Arbitrary instance of Error */ + implicit lazy val arbError: Arbitrary[Error] = + Arbitrary(const(new Error)) + + /** Arbitrary BigInt */ + implicit lazy val arbBigInt: Arbitrary[BigInt] = { + def chooseBigInt: Gen[BigInt] = + sized((s: Int) => choose(-s, s)) map (x => BigInt(x)) + + def chooseReallyBigInt: Gen[BigInt] = for { + bi <- chooseBigInt + n <- choose(32,128) + } yield bi << n + + Arbitrary( + frequency( + (5, chooseBigInt), + (10, chooseReallyBigInt), + (1, BigInt(0)), + (1, BigInt(1)), + (1, BigInt(-1)), + (1, BigInt(Int.MaxValue) + 1), + (1, BigInt(Int.MinValue) - 1), + (1, BigInt(Long.MaxValue)), + (1, BigInt(Long.MinValue)), + (1, BigInt(Long.MaxValue) + 1), + (1, BigInt(Long.MinValue) - 1) + ) + ) + } + + /** Arbitrary BigDecimal */ + implicit lazy val arbBigDecimal: Arbitrary[BigDecimal] = { + import java.math.MathContext._ + val mcGen = oneOf(UNLIMITED, DECIMAL32, DECIMAL64, DECIMAL128) + val bdGen = for { + x <- arbBigInt.arbitrary + mc <- mcGen + limit <- const(if(mc == UNLIMITED) 0 else math.max(x.abs.toString.length - mc.getPrecision, 0)) + scale <- Gen.chooseNum(Int.MinValue + limit , Int.MaxValue) + } yield { + try { + BigDecimal(x, scale, mc) + } catch { + case ae: java.lang.ArithmeticException => BigDecimal(x, scale, UNLIMITED) // Handle the case where scale/precision conflict + } + } + Arbitrary(bdGen) + } + + /** Arbitrary java.lang.Number */ + implicit lazy val arbNumber: Arbitrary[Number] = { + val gen = Gen.oneOf( + arbitrary[Byte], arbitrary[Short], arbitrary[Int], arbitrary[Long], + arbitrary[Float], arbitrary[Double] + ) + Arbitrary(gen map (_.asInstanceOf[Number])) + // XXX TODO - restore BigInt and BigDecimal + // Arbitrary(oneOf(arbBigInt.arbitrary :: (arbs map (_.arbitrary) map toNumber) : _*)) + } + + /** Generates an arbitrary property */ + implicit lazy val arbProp: Arbitrary[Prop] = { + import Prop._ + val undecidedOrPassed = forAll { b: Boolean => + b ==> true + } + Arbitrary(frequency( + (4, falsified), + (4, passed), + (3, proved), + (3, undecidedOrPassed), + (2, undecided), + (1, exception(null)) + )) + } + + /** Arbitrary instance of test parameters */ + implicit lazy val arbTestParameters: Arbitrary[Test.Parameters] = + Arbitrary(for { + _minSuccTests <- choose(10,200) + _maxDiscardRatio <- choose(0.2f,10f) + _minSize <- choose(0,500) + sizeDiff <- choose(0,500) + _maxSize <- choose(_minSize, _minSize + sizeDiff) + _workers <- choose(1,4) + } yield new Test.Parameters.Default { + override val minSuccessfulTests = _minSuccTests + override val maxDiscardRatio = _maxDiscardRatio + override val minSize = _minSize + override val maxSize = _maxSize + override val workers = _workers + }) + + /** Arbitrary instance of gen params */ + implicit lazy val arbGenParams: Arbitrary[Gen.Parameters] = + Arbitrary(for { + sz <- 
arbitrary[Int] suchThat (_ >= 0) + } yield (new Gen.Parameters.Default { + override val size = sz + })) + + + // Higher-order types // + + /** Arbitrary instance of [[org.scalacheck.Gen]] */ + implicit def arbGen[T](implicit a: Arbitrary[T]): Arbitrary[Gen[T]] = + Arbitrary(frequency( + (5, arbitrary[T] map (const(_))), + (1, Gen.fail) + )) + + /** Arbitrary instance of the Option type */ + implicit def arbOption[T](implicit a: Arbitrary[T]): Arbitrary[Option[T]] = + Arbitrary(sized(n => + // When n is larger, make it less likely that we generate None, + // but still do it some of the time. When n is zero, we always + // generate None, since it's the smallest value. + frequency( + (n, resize(n / 2, arbitrary[T]).map(Some(_))), + (1, const(None))))) + + /** Arbitrary instance of the Either type */ + implicit def arbEither[T, U](implicit at: Arbitrary[T], au: Arbitrary[U]): Arbitrary[Either[T, U]] = + Arbitrary(oneOf(arbitrary[T].map(Left(_)), arbitrary[U].map(Right(_)))) + + /** Arbitrary instance of any [[org.scalacheck.util.Buildable]] container + * (such as lists, arrays, streams, etc). The maximum size of the container + * depends on the size generation parameter. */ + implicit def arbContainer[C[_],T](implicit + a: Arbitrary[T], b: Buildable[T,C], t: C[T] => Traversable[T] + ): Arbitrary[C[T]] = Arbitrary(containerOf[C,T](arbitrary[T])) + + /** Arbitrary instance of any [[org.scalacheck.util.Buildable2]] container + * (such as maps, etc). The maximum size of the container depends on the size + * generation parameter. */ + implicit def arbContainer2[C[_,_],T,U](implicit + a: Arbitrary[(T,U)], b: Buildable2[T,U,C], t: C[T,U] => Traversable[(T,U)] + ): Arbitrary[C[T,U]] = Arbitrary(containerOf[C,T,U](arbitrary[(T,U)])) + + // Functions // + + /** Arbitrary instance of Function1 */ + implicit def arbFunction1[T1,R](implicit a: Arbitrary[R] + ): Arbitrary[T1 => R] = Arbitrary( + for(r <- arbitrary[R]) yield (t1: T1) => r + ) + + /** Arbitrary instance of Function2 */ + implicit def arbFunction2[T1,T2,R](implicit a: Arbitrary[R] + ): Arbitrary[(T1,T2) => R] = Arbitrary( + for(r <- arbitrary[R]) yield (t1: T1, t2: T2) => r + ) + + /** Arbitrary instance of Function3 */ + implicit def arbFunction3[T1,T2,T3,R](implicit a: Arbitrary[R] + ): Arbitrary[(T1,T2,T3) => R] = Arbitrary( + for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3) => r + ) + + /** Arbitrary instance of Function4 */ + implicit def arbFunction4[T1,T2,T3,T4,R](implicit a: Arbitrary[R] + ): Arbitrary[(T1,T2,T3,T4) => R] = Arbitrary( + for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3, t4: T4) => r + ) + + /** Arbitrary instance of Function5 */ + implicit def arbFunction5[T1,T2,T3,T4,T5,R](implicit a: Arbitrary[R] + ): Arbitrary[(T1,T2,T3,T4,T5) => R] = Arbitrary( + for(r <- arbitrary[R]) yield (t1: T1, t2: T2, t3: T3, t4: T4, t5: T5) => r + ) + + + // Tuples // + + /** Arbitrary instance of 2-tuple */ + implicit def arbTuple2[T1,T2](implicit + a1: Arbitrary[T1], a2: Arbitrary[T2] + ): Arbitrary[(T1,T2)] = + Arbitrary(for { + t1 <- arbitrary[T1] + t2 <- arbitrary[T2] + } yield (t1,t2)) + + /** Arbitrary instance of 3-tuple */ + implicit def arbTuple3[T1,T2,T3](implicit + a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3] + ): Arbitrary[(T1,T2,T3)] = + Arbitrary(for { + t1 <- arbitrary[T1] + t2 <- arbitrary[T2] + t3 <- arbitrary[T3] + } yield (t1,t2,t3)) + + /** Arbitrary instance of 4-tuple */ + implicit def arbTuple4[T1,T2,T3,T4](implicit + a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4] + ): 
Arbitrary[(T1,T2,T3,T4)] = + Arbitrary(for { + t1 <- arbitrary[T1] + t2 <- arbitrary[T2] + t3 <- arbitrary[T3] + t4 <- arbitrary[T4] + } yield (t1,t2,t3,t4)) + + /** Arbitrary instance of 5-tuple */ + implicit def arbTuple5[T1,T2,T3,T4,T5](implicit + a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4], + a5: Arbitrary[T5] + ): Arbitrary[(T1,T2,T3,T4,T5)] = + Arbitrary(for { + t1 <- arbitrary[T1] + t2 <- arbitrary[T2] + t3 <- arbitrary[T3] + t4 <- arbitrary[T4] + t5 <- arbitrary[T5] + } yield (t1,t2,t3,t4,t5)) + + /** Arbitrary instance of 6-tuple */ + implicit def arbTuple6[T1,T2,T3,T4,T5,T6](implicit + a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4], + a5: Arbitrary[T5], a6: Arbitrary[T6] + ): Arbitrary[(T1,T2,T3,T4,T5,T6)] = + Arbitrary(for { + t1 <- arbitrary[T1] + t2 <- arbitrary[T2] + t3 <- arbitrary[T3] + t4 <- arbitrary[T4] + t5 <- arbitrary[T5] + t6 <- arbitrary[T6] + } yield (t1,t2,t3,t4,t5,t6)) + + /** Arbitrary instance of 7-tuple */ + implicit def arbTuple7[T1,T2,T3,T4,T5,T6,T7](implicit + a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4], + a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7] + ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7)] = + Arbitrary(for { + t1 <- arbitrary[T1] + t2 <- arbitrary[T2] + t3 <- arbitrary[T3] + t4 <- arbitrary[T4] + t5 <- arbitrary[T5] + t6 <- arbitrary[T6] + t7 <- arbitrary[T7] + } yield (t1,t2,t3,t4,t5,t6,t7)) + + /** Arbitrary instance of 8-tuple */ + implicit def arbTuple8[T1,T2,T3,T4,T5,T6,T7,T8](implicit + a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4], + a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8] + ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7,T8)] = + Arbitrary(for { + t1 <- arbitrary[T1] + t2 <- arbitrary[T2] + t3 <- arbitrary[T3] + t4 <- arbitrary[T4] + t5 <- arbitrary[T5] + t6 <- arbitrary[T6] + t7 <- arbitrary[T7] + t8 <- arbitrary[T8] + } yield (t1,t2,t3,t4,t5,t6,t7,t8)) + + /** Arbitrary instance of 9-tuple */ + implicit def arbTuple9[T1,T2,T3,T4,T5,T6,T7,T8,T9](implicit + a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4], + a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8], + a9: Arbitrary[T9] + ): Arbitrary[(T1,T2,T3,T4,T5,T6,T7,T8,T9)] = + Arbitrary(for { + t1 <- arbitrary[T1] + t2 <- arbitrary[T2] + t3 <- arbitrary[T3] + t4 <- arbitrary[T4] + t5 <- arbitrary[T5] + t6 <- arbitrary[T6] + t7 <- arbitrary[T7] + t8 <- arbitrary[T8] + t9 <- arbitrary[T9] + } yield (t1,t2,t3,t4,t5,t6,t7,t8,t9)) + +} diff --git a/src/partest-extras/scala/org/scalacheck/Commands.scala b/src/partest-extras/scala/org/scalacheck/Commands.scala new file mode 100644 index 000000000000..5ff3a397e557 --- /dev/null +++ b/src/partest-extras/scala/org/scalacheck/Commands.scala @@ -0,0 +1,146 @@ +/*-------------------------------------------------------------------------*\ +** ScalaCheck ** +** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. ** +** http://www.scalacheck.org ** +** ** +** This software is released under the terms of the Revised BSD License. ** +** There is NO WARRANTY. See the file LICENSE for the full text. ** +\*------------------------------------------------------------------------ */ + +package org.scalacheck + +/** See User Guide for usage examples */ +@deprecated("Will be replaced with a new implementation in 1.12.0", "1.11.4") +trait Commands extends Prop { + + /** The abstract state data type. This type must be immutable. 
+ * The state type that encodes the abstract state. The abstract state + * should model all the features we need from the real state, the system + * under test. We should leave out all details that aren't needed for + * specifying our pre- and postconditions. The state type must be called + * State and be immutable. */ + type State <: AnyRef + + class Binding(private val key: State) { + def get: Any = bindings.find(_._1 eq key) match { + case None => sys.error("No value bound") + case Some(x) => x._2 + } + } + + /** Abstract commands are defined as subtypes of the traits Command or SetCommand. + * Each command must have a run method and a method that returns the new abstract + * state, as it should look after the command has been run. + * A command can also define a precondition that states how the current + * abstract state must look if the command should be allowed to run. + * Finally, we can also define a postcondition which verifies that the + * system under test is in a correct state after the command exectution. */ + trait Command { + + /** Used internally. */ + protected[Commands] def run_(s: State) = run(s) + + def run(s: State): Any + def nextState(s: State): State + + /** Returns all preconditions merged into a single function */ + def preCondition: (State => Boolean) = + s => preConditions.toList.forall(_.apply(s)) + + /** A precondition is a function that + * takes the current abstract state as parameter and returns a boolean + * that says if the precondition is fulfilled or not. You can add several + * conditions to the precondition list */ + val preConditions = new collection.mutable.ListBuffer[State => Boolean] + + /** Returns all postconditions merged into a single function */ + def postCondition: (State,State,Any) => Prop = + (s0,s1,r) => Prop.all(postConditions.map(_.apply(s0,s1,r)): _*) + + /** A postcondition is a function that + * takes three parameters, s0, s1 and r. s0 is the abstract state before + * the command was run, s1 is the abstract state after the command was + * run, and r is the result from the command's run + * method. The postcondition function should return a Boolean (or + * a Prop instance) that says if the condition holds or not. You can add several + * conditions to the postConditions list. */ + val postConditions = new collection.mutable.ListBuffer[(State,State,Any) => Prop] + } + + /** A command that binds its result for later use */ + trait SetCommand extends Command { + /** Used internally. 
*/ + protected[Commands] final override def run_(s: State) = { + val r = run(s) + bindings += ((s,r)) + r + } + + final def nextState(s: State) = nextState(s, new Binding(s)) + def nextState(s: State, b: Binding): State + } + + private case class Cmds(cs: List[Command], ss: List[State]) { + override def toString = cs.map(_.toString).mkString(", ") + } + + private val bindings = new scala.collection.mutable.ListBuffer[(State,Any)] + + private def initState() = { + bindings.clear() + initialState() + } + + private def genCmds: Gen[Cmds] = { + def sizedCmds(s: State, sz: Int): Gen[Cmds] = { + if(sz <= 0) Gen.const(Cmds(Nil, Nil)) else for { + c <- genCommand(s) suchThat (_.preCondition(s)) + Cmds(cs,ss) <- sizedCmds(c.nextState(s), sz-1) + } yield Cmds(c::cs, s::ss) + } + + Gen.sized(sz => sizedCmds(initialState(), sz)) + } + + private def validCmds(s: State, cs: List[Command]): Option[Cmds] = + cs match { + case Nil => Some(Cmds(Nil, s::Nil)) + case c::_ if !c.preCondition(s) => None + case c::cmds => for { + Cmds(_, ss) <- validCmds(c.nextState(s), cmds) + } yield Cmds(cs, s::ss) + } + + private def runCommands(cmds: Cmds): Prop = Prop.all { + cmds.cs.indices.map { i => + val (c,s) = (cmds.cs(i), cmds.ss(i)) + c.postCondition(s,c.nextState(s),c.run_(s)) + } : _* + } + + private def commandsProp: Prop = { + def shrinkCmds(cmds: Cmds) = + Shrink.shrink(cmds.cs)(Shrink.shrinkContainer).flatMap { cs => + validCmds(initialState(), cs).toList + } + + Prop.forAllShrink(genCmds label "COMMANDS", shrinkCmds)(runCommands _) + } + + def apply(p: Gen.Parameters) = commandsProp(p) + + /** initialState should reset the system under test to a well defined + * initial state, and return the abstract version of that state. */ + def initialState(): State + + /** The command generator. Given an abstract state, the generator + * should return a command that is allowed to run in that state. Note that + * it is still neccessary to define preconditions on the commands if there + * are any. The generator is just giving a hint of which commands that are + * suitable for a given state, the preconditions will still be checked before + * a command runs. Sometimes you maybe want to adjust the distribution of + * your command generator according to the state, or do other calculations + * based on the state. */ + def genCommand(s: State): Gen[Command] + +} diff --git a/src/partest-extras/scala/org/scalacheck/Commands2.scala b/src/partest-extras/scala/org/scalacheck/Commands2.scala new file mode 100644 index 000000000000..67393a7a7055 --- /dev/null +++ b/src/partest-extras/scala/org/scalacheck/Commands2.scala @@ -0,0 +1,150 @@ +/*-------------------------------------------------------------------------*\ +** ScalaCheck ** +** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. ** +** http://www.scalacheck.org ** +** ** +** This software is released under the terms of the Revised BSD License. ** +** There is NO WARRANTY. See the file LICENSE for the full text. ** +\*------------------------------------------------------------------------ */ + +package org.scalacheck + +private[scalacheck] trait Commands2 { + + /** The abstract state type. Must be immutable. + * The [[Commands2.State]] type should model the state of the system under test (SUT). + * It should leave out all details that aren't needed for specifying our + * pre- and postconditions. */ + type State + + /** A type representing one instance of the system under test (SUT). 
+ * The [[Commands2.System]] type should be a proxy to the actual system under test. + * It is used in the postconditions to verify that the real system + * behaves according to specification. It should be possible to have + * up to [[Commands2.maxSystemInstanceCount]] co-existing instances of the System + * type, and each System instance should be a proxy to a distinct + * SUT instance. There should be no dependencies between the System + * instances, as they might be used in parallel by ScalaCheck. + * System instances are created by [[Commands2.newSystemInstance]] and destroyed by + * [[Commands2.destroySystemInstance]]. [[Commands2.newSystemInstance]] and + * [[Commands2.destroySystemInstance]] might be called at any time by ScalaCheck, + * as long as [[Commands2.maxSystemInstanceCount]] isn't violated. */ + type System + + /** The maximum number of concurrent [[Commands2.System]] instances allowed to exist. */ + def maxSystemInstanceCount: Int + + /** Should create a new [[Commands2.System]] instance with an internal state that + * corresponds to the provided abstract state instance. The provided state + * is guaranteed to fulfill [[Commands2.initialPreCondition]], and + * [[Commands2.newSystemInstance]] will never be called if there already + * is [[Commands2.maxSystemInstanceCount]] instances of [[Commands2.System]] */ + def newSystemInstance(state: State): System + + /** Should destroy the given SUT, so that a new [[Commands2.System]] instance can be + * created with [[Commands2.newSystemInstance]]. */ + def destroySystemInstance(system: System): Unit + + /** The precondition for the initial state, when no commands yet have + * run. This is used by ScalaCheck when command sequences are shrinked + * and the first state might differ from what is returned from + * [[Commands2.initialState]]. */ + def initialPreCondition(state: State): Boolean + + /** A generator that should produce an initial [[Commands2.State]] instance that is + * usable by [[Commands2.newSystemInstance]] to create a new system under test. + * The state returned by this generator is always checked with the + * [[Commands2.initialPreCondition]] method before it is used. */ + def genInitialState: Gen[State] + + /** A generator that, given the current abstract state, should produce + * a suitable Command instance. */ + def genCommand(state: State): Gen[Command] + + /** Abstract commands are defined as subtypes of the trait [[Commands2.Command]]. + * Each command must have a run method and a method + * that returns the new abstract state, as it is supposed to look after + * the command has been run. A command can also define a precondition + * that defines how the current abstract state must look if the command + * should be allowed to run. Finally, you can also define a postcondition + * that verifies that the system under test is in a correct state after + * the command execution. */ + trait Command { + /** Runs this command in the system under test, + * represented by the provided [[Commands2.System]] instance. This method + * can return any value as result. The returned value will be + * used by the postcondition to decide if the system behaves as + * expected. */ + def run(state: State, system: System): Any + + /** Returns a new abstract [[Commands2.State]] instance that represents the + * state of the system after this command has run. */ + def nextState(state: State): State + + /** The precondition that decides if this command is allowed to run + * when the system under test is in the specified (abstract) state. 
*/ + def preCondition(state: State): Boolean + + /** The postcondition that decides if the system under test behaved + * correctly when the command ran. + * @param s0 The abstract state as it looked before this command ran. + * @param s1 The abstract state as it looked after this command ran. + * @param system The proxy for the system under test. The postcondition + * can query the system for its current state, but care must be taken + * not to mutate the system under test in any way. + * @param result The result returned from the [[Command.run]] method. + */ + def postCondition(s0: State, s1: State, system: System, result: Any): Prop + } + +/* WIP + private case class Cmds(cs: List[Command], ss: List[State]) { + override def toString = cs.map(_.toString).mkString(", ") + } + + private val bindings = new scala.collection.mutable.ListBuffer[(State,Any)] + + private def initState() = { + bindings.clear() + initialState() + } + + private def genCmds: Gen[Cmds] = { + def sizedCmds(s: State, sz: Int): Gen[Cmds] = { + if(sz <= 0) Gen.const(Cmds(Nil, Nil)) else for { + c <- genCommand(s) suchThat (_.preCondition(s)) + Cmds(cs,ss) <- sizedCmds(c.nextState(s), sz-1) + } yield Cmds(c::cs, s::ss) + } + + Gen.sized(sz => sizedCmds(initialState(), sz)) + } + + private def validCmds(s: State, cs: List[Command]): Option[Cmds] = + cs match { + case Nil => Some(Cmds(Nil, s::Nil)) + case c::_ if !c.preCondition(s) => None + case c::cmds => for { + Cmds(_, ss) <- validCmds(c.nextState(s), cmds) + } yield Cmds(cs, s::ss) + } + + private def runCommands(cmds: Cmds): Prop = Prop.all { + cmds.cs.indices.map { i => + val (c,s) = (cmds.cs(i), cmds.ss(i)) + c.postCondition(s,c.nextState(s),c.run_(s)) + } : _* + } + + private def commandsProp: Prop = { + def shrinkCmds(cmds: Cmds) = + Shrink.shrink(cmds.cs)(Shrink.shrinkContainer).flatMap { cs => + validCmds(initialState(), cs).toList + } + + Prop.forAllShrink(genCmds label "COMMANDS", shrinkCmds)(runCommands _) + } + + def apply(p: Prop.Params) = commandsProp(p) +*/ +} diff --git a/src/partest-extras/scala/org/scalacheck/Gen.scala b/src/partest-extras/scala/org/scalacheck/Gen.scala new file mode 100644 index 000000000000..ba82c9ea95d5 --- /dev/null +++ b/src/partest-extras/scala/org/scalacheck/Gen.scala @@ -0,0 +1,813 @@ +/*-------------------------------------------------------------------------*\ +** ScalaCheck ** +** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. ** +** http://www.scalacheck.org ** +** ** +** This software is released under the terms of the Revised BSD License. ** +** There is NO WARRANTY. See the file LICENSE for the full text. ** +\*------------------------------------------------------------------------ */ + +package org.scalacheck + +import util.{Buildable, Buildable2} +import scala.collection.immutable.TreeMap + +sealed trait Gen[+T] { + + //// Private interface //// + + import Gen.{R, r, gen} + + /** Just an alias */ + private type P = Gen.Parameters + + /** Should be a copy of R.sieve. Used internally in Gen when some generators + * with suchThat-claues are created (when R is not available). This method + * actually breaks covariance, but since this method will only ever be + * called with a value of exactly type T, it is OK. */ + protected def sieveCopy(x: Any): Boolean = true + + private[scalacheck] def doApply(p: P): R[T] + + + //// Public interface //// + + /** A class supporting filtered operations. 
*/ + final class WithFilter(p: T => Boolean) { + def map[U](f: T => U): Gen[U] = Gen.this.suchThat(p).map(f) + def flatMap[U](f: T => Gen[U]): Gen[U] = Gen.this.suchThat(p).flatMap(f) + def withFilter(q: T => Boolean): WithFilter = Gen.this.withFilter(x => p(x) && q(x)) + } + + /** Evaluate this generator with the given parameters */ + def apply(p: Gen.Parameters): Option[T] = doApply(p).retrieve + + /** Create a new generator by mapping the result of this generator */ + def map[U](f: T => U): Gen[U] = gen { p => doApply(p).map(f) } + + /** Create a new generator by flat-mapping the result of this generator */ + def flatMap[U](f: T => Gen[U]): Gen[U] = gen { p => + doApply(p).flatMap(t => f(t).doApply(p)) + } + + /** Create a new generator that uses this generator to produce a value + * that fulfills the given condition. If the condition is not fulfilled, + * the generator fails (returns None). */ + def filter(p: T => Boolean): Gen[T] = suchThat(p) + + /** Creates a non-strict filtered version of this generator. */ + def withFilter(p: T => Boolean): WithFilter = new WithFilter(p) + + /** Create a new generator that uses this generator to produce a value + * that fulfills the given condition. If the condition is not fulfilled, + * the generator fails (returns None). This method is identical to + * [Gen.filter]. */ + def suchThat(f: T => Boolean): Gen[T] = new Gen[T] { + def doApply(p: P) = { + val res = Gen.this.doApply(p) + res.copy(s = { x:T => res.sieve(x) && f(x) }) + } + override def sieveCopy(x: Any) = + try Gen.this.sieveCopy(x) && f(x.asInstanceOf[T]) + catch { case _: java.lang.ClassCastException => false } + } + + /** Create a generator that calls this generator repeatedly until + * the given condition is fulfilled. The generated value is then + * returned. Use this combinator with care, since it may result + * in infinite loops. */ + def retryUntil(p: T => Boolean): Gen[T] = flatMap { t => + if (p(t)) Gen.const(t).suchThat(p) else retryUntil(p) + } + + def sample: Option[T] = doApply(Gen.Parameters.default).retrieve + + /** Returns a new property that holds if and only if both this + * and the given generator generates the same result, or both + * generators generate no result. 
*/ + def ==[U](g: Gen[U]) = Prop { prms => + (doApply(prms).retrieve, g.doApply(prms).retrieve) match { + case (None,None) => Prop.proved(prms) + case (Some(r1),Some(r2)) if r1 == r2 => Prop.proved(prms) + case _ => Prop.falsified(prms) + } + } + + def !=[U](g: Gen[U]) = Prop.forAll(this)(r => Prop.forAll(g)(_ != r)) + + def !==[U](g: Gen[U]) = Prop { prms => + (doApply(prms).retrieve, g.doApply(prms).retrieve) match { + case (None,None) => Prop.falsified(prms) + case (Some(r1),Some(r2)) if r1 == r2 => Prop.falsified(prms) + case _ => Prop.proved(prms) + } + } + + /** Put a label on the generator to make test reports clearer */ + def label(l: String) = new Gen[T] { + def doApply(p: P) = { + val r = Gen.this.doApply(p) + r.copy(l = r.labels + l) + } + override def sieveCopy(x: Any) = Gen.this.sieveCopy(x) + } + + /** Put a label on the generator to make test reports clearer */ + def :|(l: String) = label(l) + + /** Put a label on the generator to make test reports clearer */ + def |:(l: String) = label(l) + + /** Put a label on the generator to make test reports clearer */ + def :|(l: Symbol) = label(l.toString.drop(1)) + + /** Put a label on the generator to make test reports clearer */ + def |:(l: Symbol) = label(l.toString.drop(1)) + +} + +object Gen { + + //// Private interface //// + + import Arbitrary.arbitrary + + /** Just an alias */ + private type P = Parameters + + private[scalacheck] trait R[+T] { + def labels: Set[String] = Set() + def sieve[U >: T]: U => Boolean = _ => true + protected def result: Option[T] + + def retrieve = result.filter(sieve) + + def copy[U >: T]( + l: Set[String] = this.labels, + s: U => Boolean = this.sieve, + r: Option[U] = this.result + ): R[U] = new R[U] { + override val labels = l + override def sieve[V >: U] = { x:Any => + try s(x.asInstanceOf[U]) + catch { case _: java.lang.ClassCastException => false } + } + val result = r + } + + def map[U](f: T => U): R[U] = r(retrieve.map(f)).copy(l = labels) + + def flatMap[U](f: T => R[U]): R[U] = retrieve match { + case None => r(None).copy(l = labels) + case Some(t) => + val r = f(t) + r.copy(l = labels ++ r.labels) + } + } + + private[scalacheck] def r[T](r: Option[T]): R[T] = new R[T] { + val result = r + } + + /** Generator factory method */ + private[scalacheck] def gen[T](f: P => R[T]): Gen[T] = new Gen[T] { + def doApply(p: P) = f(p) + } + + //// Public interface //// + + /** Generator parameters, used by [[org.scalacheck.Gen.apply]] */ + trait Parameters { + + /** The size of the generated value. Generator implementations are allowed + * to freely interpret (or ignore) this value. During test execution, the + * value of this parameter is controlled by [[Test.Parameters.minSize]] and + * [[Test.Parameters.maxSize]]. */ + val size: Int + + /** Create a copy of this [[Gen.Parameters]] instance with + * [[Gen.Parameters.size]] set to the specified value. */ + def withSize(size: Int): Parameters = cp(size = size) + + /** The random number generator used. */ + val rng: scala.util.Random + + /** Create a copy of this [[Gen.Parameters]] instance with + * [[Gen.Parameters.rng]] set to the specified value. */ + def withRng(rng: scala.util.Random): Parameters = cp(rng = rng) + + /** Change the size parameter. + * @deprecated Use [[Gen.Parameters.withSize]] instead. 
*/ + @deprecated("Use withSize instead.", "1.11.2") + def resize(newSize: Int): Parameters = withSize(newSize) + + // private since we can't guarantee binary compatibility for this one + private case class cp( + size: Int = size, + rng: scala.util.Random = rng + ) extends Parameters + } + + /** Provides methods for creating [[org.scalacheck.Gen.Parameters]] values */ + object Parameters { + /** Default generator parameters trait. This can be overriden if you + * need to tweak the parameters. */ + trait Default extends Parameters { + val size: Int = 100 + val rng: scala.util.Random = scala.util.Random + } + + /** Default generator parameters instance. */ + val default: Parameters = new Default {} + } + + /** A wrapper type for range types */ + trait Choose[T] { + /** Creates a generator that returns a value in the given inclusive range */ + def choose(min: T, max: T): Gen[T] + } + + /** Provides implicit [[org.scalacheck.Gen.Choose]] instances */ + object Choose { + + private def chLng(l: Long, h: Long)(p: P): R[Long] = { + if (h < l) r(None) else { + val d = h - l + 1 + if (d <= 0) { + var n = p.rng.nextLong + while (n < l || n > h) { + n = p.rng.nextLong + } + r(Some(n)) + } else { + r(Some(l + math.abs(p.rng.nextLong % d))) + } + } + } + + private def chDbl(l: Double, h: Double)(p: P): R[Double] = { + val d = h-l + if (d < 0 || d > Double.MaxValue) r(None) + else if (d == 0) r(Some(l)) + else r(Some(p.rng.nextDouble * (h-l) + l)) + } + + implicit val chooseLong: Choose[Long] = new Choose[Long] { + def choose(low: Long, high: Long) = + gen(chLng(low,high)).suchThat(x => x >= low && x <= high) + } + implicit val chooseInt: Choose[Int] = new Choose[Int] { + def choose(low: Int, high: Int) = + gen(chLng(low,high)).map(_.toInt).suchThat(x => x >= low && x <= high) + } + implicit val chooseByte: Choose[Byte] = new Choose[Byte] { + def choose(low: Byte, high: Byte) = + gen(chLng(low,high)).map(_.toByte).suchThat(x => x >= low && x <= high) + } + implicit val chooseShort: Choose[Short] = new Choose[Short] { + def choose(low: Short, high: Short) = + gen(chLng(low,high)).map(_.toShort).suchThat(x => x >= low && x <= high) + } + implicit val chooseChar: Choose[Char] = new Choose[Char] { + def choose(low: Char, high: Char) = + gen(chLng(low,high)).map(_.toChar).suchThat(x => x >= low && x <= high) + } + implicit val chooseDouble: Choose[Double] = new Choose[Double] { + def choose(low: Double, high: Double) = + gen(chDbl(low,high)).suchThat(x => x >= low && x <= high) + } + implicit val chooseFloat: Choose[Float] = new Choose[Float] { + def choose(low: Float, high: Float) = + gen(chDbl(low,high)).map(_.toFloat).suchThat(x => x >= low && x <= high) + } + + /** Transform a Choose[T] to a Choose[U] where T and U are two isomorphic types + * whose relationship is described by the provided transformation functions. 
+ * (exponential functor map) */ + def xmap[T, U](from: T => U, to: U => T)(implicit c: Choose[T]): Choose[U] = new Choose[U] { + def choose(low: U, high: U) = + c.choose(to(low), to(high)).map(from) + } + } + + + //// Various Generator Combinators //// + + /** A generator that always generates the given value */ + @deprecated("Use Gen.const instead", "1.11.0") + def value[T](x: T): Gen[T] = const(x) + + /** A generator that always generates the given value */ + implicit def const[T](x: T): Gen[T] = gen(_ => r(Some(x))).suchThat(_ == x) + + /** A generator that never generates a value */ + def fail[T]: Gen[T] = gen(_ => r(None)).suchThat(_ => false) + + /** A generator that generates a random value in the given (inclusive) + * range. If the range is invalid, the generator will not generate + * any value. */ + def choose[T](min: T, max: T)(implicit c: Choose[T]): Gen[T] = + c.choose(min, max) + + /** Sequences generators. If any of the given generators fails, the + * resulting generator will also fail. */ + def sequence[C[_],T](gs: Traversable[Gen[T]])(implicit b: Buildable[T,C]): Gen[C[T]] = { + val g = gen { p => + gs.foldLeft(r(Some(collection.immutable.Vector.empty[T]))) { + case (rs,g) => g.doApply(p).flatMap(r => rs.map(_ :+ r)) + } + } + g.map(b.fromIterable) + } + + /** Sequences generators. If any of the given generators fails, the + * resulting generator will also fail. */ + def sequence[C[_,_],T,U](gs: Traversable[Gen[(T,U)]])(implicit b: Buildable2[T,U,C]): Gen[C[T,U]] = { + val g = gen { p => + gs.foldLeft(r(Some(collection.immutable.Vector.empty[(T,U)]))) { + case (rs,g) => g.doApply(p).flatMap(r => rs.map(_ :+ r)) + } + } + g.map(b.fromIterable) + } + + /** Wraps a generator lazily. The given parameter is only evaluated once, + * and not until the wrapper generator is evaluated. */ + def lzy[T](g: => Gen[T]): Gen[T] = { + lazy val h = g + gen { p => h.doApply(p) } + } + + /** Wraps a generator for later evaluation. The given parameter is + * evaluated each time the wrapper generator is evaluated. */ + def wrap[T](g: => Gen[T]) = gen { p => g.doApply(p) } + + /** Creates a generator that can access its generation parameters */ + def parameterized[T](f: Parameters => Gen[T]) = gen { p => f(p).doApply(p) } + + /** Creates a generator that can access its generation size */ + def sized[T](f: Int => Gen[T]) = gen { p => f(p.size).doApply(p) } + + /** A generator that returns the current generation size */ + lazy val size: Gen[Int] = sized { sz => sz } + + /** Creates a resized version of a generator */ + def resize[T](s: Int, g: Gen[T]) = gen(p => g.doApply(p.withSize(s))) + + /** Picks a random value from a list */ + def oneOf[T](xs: Seq[T]): Gen[T] = + choose(0, xs.size-1).map(xs(_)).suchThat(xs.contains) + + /** Picks a random value from a list */ + def oneOf[T](t0: T, t1: T, tn: T*): Gen[T] = oneOf(t0 +: t1 +: tn) + + /** Picks a random generator from a list */ + def oneOf[T](g0: Gen[T], g1: Gen[T], gn: Gen[T]*): Gen[T] = { + val gs = g0 +: g1 +: gn + choose(0,gs.size-1).flatMap(gs(_)).suchThat(x => gs.exists(_.sieveCopy(x))) + } + + /** Makes a generator result optional. Either `Some(T)` or `None` will be provided. 
*/ + def option[T](g: Gen[T]): Gen[Option[T]] = + oneOf[Option[T]](g.map(Some.apply), None) + + /** Chooses one of the given generators with a weighted random distribution */ + def frequency[T](gs: (Int,Gen[T])*): Gen[T] = { + gs.filter(_._1 > 0) match { + case Nil => fail + case filtered => + var tot = 0l + val tree: TreeMap[Long, Gen[T]] = { + val builder = TreeMap.newBuilder[Long, Gen[T]] + filtered.foreach { + case (f, v) => + tot += f + builder.+=((tot, v)) + } + builder.result() + } + choose(1L, tot).flatMap(r => tree.from(r).head._2).suchThat { x => + gs.exists(_._2.sieveCopy(x)) + } + } + } + + /** Implicit convenience method for using the `frequency` method + * like this: + * {{{ + * frequency((1, "foo"), (3, "bar")) + * }}} + */ + implicit def freqTuple[T](t: (Int,T)): (Int,Gen[T]) = (t._1, const(t._2)) + + + //// List Generators //// + + /** Generates a container of any Traversable type for which there exists an + * implicit [[org.scalacheck.util.Buildable]] instance. The elements in the + * container will be generated by the given generator. The size of the + * generated container is limited by `n`. Depending on what kind of container + * that is generated, the resulting container may contain fewer elements than + * `n`, but not more. If the given generator fails generating a value, the + * complete container generator will also fail. */ + def containerOfN[C[_],T](n: Int, g: Gen[T])(implicit + evb: Buildable[T,C], evt: C[T] => Traversable[T] + ): Gen[C[T]] = + sequence[C,T](Traversable.fill(n)(g)) suchThat { c => + // TODO: Can we guarantee c.size == n (See issue #89)? + c.forall(g.sieveCopy) + } + + /** Generates a container of any Traversable type for which there exists an + * implicit [[org.scalacheck.util.Buildable]] instance. The elements in the + * container will be generated by the given generator. The size of the + * container is bounded by the size parameter used when generating values. */ + def containerOf[C[_],T](g: Gen[T])(implicit + evb: Buildable[T,C], evt: C[T] => Traversable[T] + ): Gen[C[T]] = + sized(s => choose(0,s).flatMap(containerOfN[C,T](_,g))) suchThat { c => + c.forall(g.sieveCopy) + } + + /** Generates a non-empty container of any Traversable type for which there + * exists an implicit [[org.scalacheck.util.Buildable]] instance. The + * elements in the container will be generated by the given generator. The + * size of the container is bounded by the size parameter used when + * generating values. */ + def nonEmptyContainerOf[C[_],T](g: Gen[T])(implicit + evb: Buildable[T,C], evt: C[T] => Traversable[T] + ): Gen[C[T]] = + sized(s => choose(1,s).flatMap(containerOfN[C,T](_,g))) suchThat { c => + c.size > 0 && c.forall(g.sieveCopy) + } + + /** Generates a non-empty container of any Traversable type for which there + * exists an implicit [[org.scalacheck.util.Buildable]] instance. The + * elements in the container will be generated by the given generator. The + * size of the container is bounded by the size parameter used when + * generating values. */ + @deprecated("Use Gen.nonEmptyContainerOf instead", "1.11.0") + def containerOf1[C[_],T](g: Gen[T])(implicit + evb: Buildable[T,C], evt: C[T] => Traversable[T] + ): Gen[C[T]] = nonEmptyContainerOf[C,T](g) + + /** Generates a container of any Traversable type for which there exists an + * implicit [[org.scalacheck.util.Buildable2]] instance. The elements in + * container will be generated by the given generator. The size of the + * generated container is limited by `n`. 
Depending on what kind of container + * that is generated, the resulting container may contain fewer elements than + * `n`, but not more. If the given generator fails generating a value, the + * complete container generator will also fail. */ + def containerOfN[C[_,_],T,U](n: Int, g: Gen[(T,U)])(implicit + evb: Buildable2[T,U,C], evt: C[T,U] => Traversable[(T,U)] + ): Gen[C[T,U]] = + sequence[C,T,U](Traversable.fill(n)(g)).suchThat { c => + // TODO: Can we guarantee c.size == n (See issue #89)? + c.forall(g.sieveCopy) + } + + /** Generates a container of any Traversable type for which there exists + * an implicit Buildable2 instance. The elements in the + * container will be generated by the given generator. The size of the + * container is bounded by the size parameter used when generating values. */ + def containerOf[C[_,_],T,U](g: Gen[(T,U)])(implicit + evb: Buildable2[T,U,C], evt: C[T,U] => Traversable[(T,U)] + ): Gen[C[T,U]] = + sized(s => choose(0,s).flatMap(containerOfN[C,T,U](_,g))) suchThat { c => + c.forall(g.sieveCopy) + } + + /** Generates a non-empty container of any type for which there exists an + * implicit Buildable2 instance. The elements in the container + * will be generated by the given generator. The size of the container is + * bounded by the size parameter used when generating values. */ + def nonEmptyContainerOf[C[_,_],T,U](g: Gen[(T,U)])(implicit + evb: Buildable2[T,U,C], evt: C[T,U] => Traversable[(T,U)] + ): Gen[C[T,U]] = + sized(s => choose(1,s).flatMap(containerOfN[C,T,U](_,g))) suchThat { c => + c.size > 0 && c.forall(g.sieveCopy) + } + + /** Generates a list of random length. The maximum length depends on the + * size parameter. This method is equal to calling + * `containerOf[List,T](g)`. */ + def listOf[T](g: => Gen[T]) = containerOf[List,T](g) + + /** Generates a non-empty list of random length. The maximum length depends + * on the size parameter. This method is equal to calling + * `nonEmptyContainerOf[List,T](g)`. */ + def nonEmptyListOf[T](g: => Gen[T]) = nonEmptyContainerOf[List,T](g) + + /** Generates a non-empty list of random length. The maximum length depends + * on the size parameter. This method is equal to calling + * `nonEmptyContainerOf[List,T](g)`. */ + @deprecated("Use Gen.nonEmptyListOf instead", "1.11.0") + def listOf1[T](g: => Gen[T]) = nonEmptyListOf[T](g) + + /** Generates a list of the given length. This method is equal to calling + * `containerOfN[List,T](n,g)`. */ + def listOfN[T](n: Int, g: Gen[T]) = containerOfN[List,T](n,g) + + /** Generates a map of random length. The maximum length depends on the + * size parameter. This method is equal to calling + * containerOf[Map,T,U](g). */ + def mapOf[T,U](g: => Gen[(T,U)]) = containerOf[Map,T,U](g) + + /** Generates a non-empty map of random length. The maximum length depends + * on the size parameter. This method is equal to calling + * nonEmptyContainerOf[Map,T,U](g). */ + def nonEmptyMap[T,U](g: => Gen[(T,U)]) = nonEmptyContainerOf[Map,T,U](g) + + /** Generates a map of with at least the given number of elements. This method + * is equal to calling containerOfN[Map,T,U](n,g). 
*/ + def mapOfN[T,U](n: Int, g: Gen[(T,U)]) = containerOfN[Map,T,U](n,g) + + /** A generator that picks a random number of elements from a list */ + def someOf[T](l: Iterable[T]) = choose(0,l.size).flatMap(pick(_,l)) + + /** A generator that picks a random number of elements from a list */ + def someOf[T](g1: Gen[T], g2: Gen[T], gs: Gen[T]*) = + choose(0, gs.length+2).flatMap(pick(_, g1, g2, gs: _*)) + + /** A generator that picks a given number of elements from a list, randomly */ + def pick[T](n: Int, l: Iterable[T]): Gen[Seq[T]] = + if(n > l.size || n < 0) fail + else (gen { p => + val b = new collection.mutable.ListBuffer[T] + b ++= l + while(b.length > n) b.remove(choose(0, b.length-1).doApply(p).retrieve.get) + r(Some(b)) + }).suchThat(_.forall(x => l.exists(x == _))) + + /** A generator that picks a given number of elements from a list, randomly */ + def pick[T](n: Int, g1: Gen[T], g2: Gen[T], gn: Gen[T]*): Gen[Seq[T]] = { + val gs = g1 +: g2 +: gn + pick(n, 0 until gs.size).flatMap(idxs => + sequence[List,T](idxs.toList.map(gs(_))) + ).suchThat(_.forall(x => gs.exists(_.sieveCopy(x)))) + } + + + //// Character Generators //// + + /** Generates a numerical character */ + def numChar: Gen[Char] = choose(48.toChar, 57.toChar) + + /** Generates an upper-case alpha character */ + def alphaUpperChar: Gen[Char] = choose(65.toChar, 90.toChar) + + /** Generates a lower-case alpha character */ + def alphaLowerChar: Gen[Char] = choose(97.toChar, 122.toChar) + + /** Generates an alpha character */ + def alphaChar = frequency((1,alphaUpperChar), (9,alphaLowerChar)) + + /** Generates an alphanumerical character */ + def alphaNumChar = frequency((1,numChar), (9,alphaChar)) + + + //// String Generators //// + + /** Generates a string that starts with a lower-case alpha character, + * and only contains alphanumerical characters */ + def identifier: Gen[String] = (for { + c <- alphaLowerChar + cs <- listOf(alphaNumChar) + } yield (c::cs).mkString).suchThat(_.forall(c => c.isLetter || c.isDigit)) + + /** Generates a string of alpha characters */ + def alphaStr: Gen[String] = + listOf(alphaChar).map(_.mkString).suchThat(_.forall(_.isLetter)) + + /** Generates a string of digits */ + def numStr: Gen[String] = + listOf(numChar).map(_.mkString).suchThat(_.forall(_.isDigit)) + + + //// Number Generators //// + + /** Generates positive numbers of uniform distribution, with an + * upper bound of the generation size parameter. */ + def posNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = { + import num._ + sized(max => c.choose(one, fromInt(max))) + } + + /** Generates negative numbers of uniform distribution, with an + * lower bound of the negated generation size parameter. */ + def negNum[T](implicit num: Numeric[T], c: Choose[T]): Gen[T] = { + import num._ + sized(max => c.choose(-fromInt(max), -one)) + } + + /** Generates numbers within the given inclusive range, with + * extra weight on zero, +/- unity, both extremities, and any special + * numbers provided. The special numbers must lie within the given range, + * otherwise they won't be included. */ + def chooseNum[T](minT: T, maxT: T, specials: T*)( + implicit num: Numeric[T], c: Choose[T] + ): Gen[T] = { + import num._ + val basics = List(minT, maxT, zero, one, -one) + val basicsAndSpecials = for { + t <- specials ++ basics if t >= minT && t <= maxT + } yield (1, const(t)) + val allGens = basicsAndSpecials ++ List( + (basicsAndSpecials.length, c.choose(minT, maxT)) + ) + frequency(allGens: _*) + } + + /** Generates a version 4 (random) UUID. 
*/ + lazy val uuid: Gen[java.util.UUID] = for { + l1 <- Gen.choose(Long.MinValue, Long.MaxValue) + l2 <- Gen.choose(Long.MinValue, Long.MaxValue) + y <- Gen.oneOf('8', '9', 'a', 'b') + } yield java.util.UUID.fromString( + new java.util.UUID(l1,l2).toString.updated(14, '4').updated(19, y) + ) + + /** Combines the given generators into one generator that produces a + * tuple of their generated values. */ + def zip[T1,T2](g1: Gen[T1], g2: Gen[T2]): Gen[(T1,T2)] = { + val g = for { + t1 <- g1; t2 <- g2 + } yield (t1,t2) + g.suchThat { case (t1,t2) => g1.sieveCopy(t1) && g2.sieveCopy(t2) } + } + + /** Combines the given generators into one generator that produces a + * tuple of their generated values. */ + def zip[T1,T2,T3](g1: Gen[T1], g2: Gen[T2], g3: Gen[T3]): Gen[(T1,T2,T3)] = { + val g0 = zip(g1,g2) + val g = for { + (t1,t2) <- g0; t3 <- g3 + } yield (t1,t2,t3) + g.suchThat { case (t1,t2,t3) => g0.sieveCopy(t1,t2) && g3.sieveCopy(t3) } + } + + /** Combines the given generators into one generator that produces a + * tuple of their generated values. */ + def zip[T1,T2,T3,T4](g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4] + ): Gen[(T1,T2,T3,T4)] = { + val g0 = zip(g1,g2,g3) + val g = for { + (t1,t2,t3) <- g0; t4 <- g4 + } yield (t1,t2,t3,t4) + g.suchThat { case (t1,t2,t3,t4) => g0.sieveCopy(t1,t2,t3) && g4.sieveCopy(t4) } + } + + /** Combines the given generators into one generator that produces a + * tuple of their generated values. */ + def zip[T1,T2,T3,T4,T5](g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], + g5: Gen[T5] + ): Gen[(T1,T2,T3,T4,T5)] = { + val g0 = zip(g1,g2,g3,g4) + val g = for { + (t1,t2,t3,t4) <- g0; t5 <- g5 + } yield (t1,t2,t3,t4,t5) + g.suchThat { case (t1,t2,t3,t4,t5) => + g0.sieveCopy(t1,t2,t3,t4) && g5.sieveCopy(t5) + } + } + + /** Combines the given generators into one generator that produces a + * tuple of their generated values. */ + def zip[T1,T2,T3,T4,T5,T6](g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], + g5: Gen[T5], g6: Gen[T6] + ): Gen[(T1,T2,T3,T4,T5,T6)] = { + val g0 = zip(g1,g2,g3,g4,g5) + val g = for { + (t1,t2,t3,t4,t5) <- g0; t6 <- g6 + } yield (t1,t2,t3,t4,t5,t6) + g.suchThat { case (t1,t2,t3,t4,t5,t6) => + g0.sieveCopy(t1,t2,t3,t4,t5) && g6.sieveCopy(t6) + } + } + + /** Combines the given generators into one generator that produces a + * tuple of their generated values. */ + def zip[T1,T2,T3,T4,T5,T6,T7](g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], + g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7] + ): Gen[(T1,T2,T3,T4,T5,T6,T7)] = { + val g0 = zip(g1,g2,g3,g4,g5,g6) + val g = for { + (t1,t2,t3,t4,t5,t6) <- g0; t7 <- g7 + } yield (t1,t2,t3,t4,t5,t6,t7) + g.suchThat { case (t1,t2,t3,t4,t5,t6,t7) => + g0.sieveCopy(t1,t2,t3,t4,t5,t6) && g7.sieveCopy(t7) + } + } + + /** Combines the given generators into one generator that produces a + * tuple of their generated values. */ + def zip[T1,T2,T3,T4,T5,T6,T7,T8](g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], + g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8] + ): Gen[(T1,T2,T3,T4,T5,T6,T7,T8)] = { + val g0 = zip(g1,g2,g3,g4,g5,g6,g7) + val g = for { + (t1,t2,t3,t4,t5,t6,t7) <- g0; t8 <- g8 + } yield (t1,t2,t3,t4,t5,t6,t7,t8) + g.suchThat { case (t1,t2,t3,t4,t5,t6,t7,t8) => + g0.sieveCopy(t1,t2,t3,t4,t5,t6,t7) && g8.sieveCopy(t8) + } + } + + /** Combines the given generators into one generator that produces a + * tuple of their generated values. 
*/ + def zip[T1,T2,T3,T4,T5,T6,T7,T8,T9](g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], + g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8], g9: Gen[T9] + ): Gen[(T1,T2,T3,T4,T5,T6,T7,T8,T9)] = { + val g0 = zip(g1,g2,g3,g4,g5,g6,g7,g8) + val g = for { + (t1,t2,t3,t4,t5,t6,t7,t8) <- g0; t9 <- g9 + } yield (t1,t2,t3,t4,t5,t6,t7,t8,t9) + g.suchThat { case (t1,t2,t3,t4,t5,t6,t7,t8,t9) => + g0.sieveCopy(t1,t2,t3,t4,t5,t6,t7,t8) && g9.sieveCopy(t9) + } + } + + /** Takes a function and returns a generator that generates arbitrary + * results of that function by feeding it with arbitrarily generated input + * parameters. */ + def resultOf[T,R](f: T => R)(implicit a: Arbitrary[T]): Gen[R] = + arbitrary[T] map f + + /** Takes a function and returns a generator that generates arbitrary + * results of that function by feeding it with arbitrarily generated input + * parameters. */ + def resultOf[T1,T2,R](f: (T1,T2) => R)(implicit + a1: Arbitrary[T1], a2: Arbitrary[T2] + ): Gen[R] = arbitrary[T1] flatMap { t => resultOf(f(t, _:T2)) } + + /** Takes a function and returns a generator that generates arbitrary + * results of that function by feeding it with arbitrarily generated input + * parameters. */ + def resultOf[T1,T2,T3,R](f: (T1,T2,T3) => R)(implicit + a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3] + ): Gen[R] = arbitrary[T1] flatMap { t => resultOf(f(t, _:T2, _:T3)) } + + /** Takes a function and returns a generator that generates arbitrary + * results of that function by feeding it with arbitrarily generated input + * parameters. */ + def resultOf[T1,T2,T3,T4,R](f: (T1,T2,T3,T4) => R)(implicit + a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4] + ): Gen[R] = arbitrary[T1] flatMap { + t => resultOf(f(t, _:T2, _:T3, _:T4)) + } + + /** Takes a function and returns a generator that generates arbitrary + * results of that function by feeding it with arbitrarily generated input + * parameters. */ + def resultOf[T1,T2,T3,T4,T5,R](f: (T1,T2,T3,T4,T5) => R)(implicit + a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4], + a5: Arbitrary[T5] + ): Gen[R] = arbitrary[T1] flatMap { + t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5)) + } + + /** Takes a function and returns a generator that generates arbitrary + * results of that function by feeding it with arbitrarily generated input + * parameters. */ + def resultOf[T1,T2,T3,T4,T5,T6,R]( + f: (T1,T2,T3,T4,T5,T6) => R)(implicit + a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], + a4: Arbitrary[T4], a5: Arbitrary[T5], a6: Arbitrary[T6] + ): Gen[R] = arbitrary[T1] flatMap { + t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6)) + } + + /** Takes a function and returns a generator that generates arbitrary + * results of that function by feeding it with arbitrarily generated input + * parameters. */ + def resultOf[T1,T2,T3,T4,T5,T6,T7,R]( + f: (T1,T2,T3,T4,T5,T6,T7) => R)(implicit + a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], + a4: Arbitrary[T4], a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7] + ): Gen[R] = arbitrary[T1] flatMap { + t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7)) + } + + /** Takes a function and returns a generator that generates arbitrary + * results of that function by feeding it with arbitrarily generated input + * parameters. 
*/ + def resultOf[T1,T2,T3,T4,T5,T6,T7,T8,R]( + f: (T1,T2,T3,T4,T5,T6,T7,T8) => R)(implicit + a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4], + a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8] + ): Gen[R] = arbitrary[T1] flatMap { + t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8)) + } + + /** Takes a function and returns a generator that generates arbitrary + * results of that function by feeding it with arbitrarily generated input + * parameters. */ + def resultOf[T1,T2,T3,T4,T5,T6,T7,T8,T9,R]( + f: (T1,T2,T3,T4,T5,T6,T7,T8,T9) => R)(implicit + a1: Arbitrary[T1], a2: Arbitrary[T2], a3: Arbitrary[T3], a4: Arbitrary[T4], + a5: Arbitrary[T5], a6: Arbitrary[T6], a7: Arbitrary[T7], a8: Arbitrary[T8], + a9: Arbitrary[T9] + ): Gen[R] = arbitrary[T1] flatMap { + t => resultOf(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8, _:T9)) + } +} diff --git a/src/partest-extras/scala/org/scalacheck/Prop.scala b/src/partest-extras/scala/org/scalacheck/Prop.scala new file mode 100644 index 000000000000..6b607002fd22 --- /dev/null +++ b/src/partest-extras/scala/org/scalacheck/Prop.scala @@ -0,0 +1,953 @@ +/*-------------------------------------------------------------------------*\ +** ScalaCheck ** +** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. ** +** http://www.scalacheck.org ** +** ** +** This software is released under the terms of the Revised BSD License. ** +** There is NO WARRANTY. See the file LICENSE for the full text. ** +\*------------------------------------------------------------------------ */ + +package org.scalacheck + +import util.{Pretty, FreqMap, Buildable, ConsoleReporter} +import scala.annotation.tailrec + +trait Prop { + + import Prop.{Result, Proof, True, False, Exception, Undecided, + provedToTrue, secure, mergeRes} + import Gen.Parameters + + def apply(prms: Parameters): Result + + def map(f: Result => Result): Prop = Prop(prms => f(this(prms))) + + def flatMap(f: Result => Prop): Prop = Prop(prms => f(this(prms))(prms)) + + // TODO In 1.12.0, make p call-by-name, and remove the calls to secure() + // in the methods that use combine() + def combine(p: Prop)(f: (Result, Result) => Result) = + for(r1 <- this; r2 <- p) yield f(r1,r2) + + /** Convenience method that checks this property with the given parameters + * and reports the result on the console. */ + def check(prms: Test.Parameters): Unit = Test.check( + if(prms.testCallback.isInstanceOf[ConsoleReporter]) prms + else prms.withTestCallback(prms.testCallback.chain(ConsoleReporter(1))), + this + ) + + /** Convenience method that checks this property and reports the + * result on the console. The default test parameters + * ([[Test.Parameters.default]]) are used for the check. */ + def check: Unit = check(Test.Parameters.default) + + /** Convenience method that checks this property and reports the result + * on the console. The provided argument should be a function that takes + * the default test parameters ([[Test.Parameters.default]]) + * as input and outputs a modified [[Test.Parameters]] instance that + * Example use: + * + * {{{ + * p.check(_.withMinSuccessfulTests(500)) + + * p.check { _. + * withMinSuccessfulTests(80000). 
+ * withWorkers(4) + * } + * }}} + */ + def check(paramFun: Test.Parameters => Test.Parameters): Unit = check( + paramFun(Test.Parameters.default) + ) + + /** Convenience method that checks this property with specified minimal + * number of successful test and the given testing parameters, and + * reports the result on the console. If you need to get the results + * from the test use the `check` methods in [[org.scalacheck.Test]] + * instead. */ + @deprecated("Use check(prms.withMinSuccessfulTests(n)) instead", "1.11.2") + def check(minSuccessfulTests: Int, prms: Test.Parameters): Unit = check( + prms.withMinSuccessfulTests(minSuccessfulTests) + ) + + /** Convenience method that checks this property with specified minimal + * number of successful test and reports the result on the console. + * If you need to get the results from the test use + * the `check` methods in [[org.scalacheck.Test]] instead. */ + @deprecated("Use check(_.withMinSuccessfulTests(n)) instead", "1.11.2") + def check(minSuccessfulTests: Int): Unit = check( + _.withMinSuccessfulTests(minSuccessfulTests) + ) + + /** The logic for main, separated out to make it easier to + * avoid System.exit calls. Returns exit code. + */ + def mainRunner(args: Array[String]): Int = { + Test.parseParams(args) match { + case Some(params) => + if (Test.check(params, this).passed) 0 + else 1 + case None => + println("Incorrect options") + -1 + } + } + + /** Whether main should call System.exit with an exit code. + * Defaults to true; override to change. */ + def mainCallsExit = true + + /** Convenience method that makes it possible to use this property + * as an application that checks itself on execution */ + def main(args: Array[String]): Unit = { + val code = mainRunner(args) + if (mainCallsExit && code != 0) + System exit code + } + + /** Returns a new property that holds if and only if both this + * and the given property hold. If one of the properties doesn't + * generate a result, the new property will generate false. */ + def &&(p: => Prop) = combine(secure(p))(_ && _) + + /** Returns a new property that holds if either this + * or the given property (or both) hold. */ + def ||(p: => Prop) = combine(secure(p))(_ || _) + + /** Returns a new property that holds if and only if both this + * and the given property hold. If one of the properties doesn't + * generate a result, the new property will generate the same result + * as the other property. */ + def ++(p: => Prop): Prop = combine(secure(p))(_ ++ _) + + /** Combines two properties through implication */ + def ==>(p: => Prop): Prop = flatMap { r1 => + if(r1.proved) p map { r2 => mergeRes(r1,r2,r2.status) } + else if(!r1.success) Prop(r1.copy(status = Undecided)) + else p map { r2 => provedToTrue(mergeRes(r1,r2,r2.status)) } + } + + /** Returns a new property that holds if and only if both this + * and the given property generates a result with the exact + * same status. Note that this means that if one of the properties is + * proved, and the other one passed, then the resulting property + * will fail. 
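A minimal sketch of the combinators and the parameterised `check` described above (illustrative only; the property bodies are arbitrary examples):

```
import org.scalacheck.Prop.{forAll, BooleanOperators}

val p1 = forAll { (n: Int) => (n >= 0) ==> (math.abs(n) == n) }
val p2 = forAll { (s: String) => (s + s).length == 2 * s.length }

// Combine the properties and check them with tweaked parameters.
(p1 && p2).check(_.withMinSuccessfulTests(500).withWorkers(2))
```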
*/ + def ==(p: => Prop) = this.flatMap { r1 => + p.map { r2 => + mergeRes(r1, r2, if(r1.status == r2.status) True else False) + } + } + + override def toString = "Prop" + + /** Put a label on the property to make test reports clearer */ + def label(l: String) = map(_.label(l)) + + /** Put a label on the property to make test reports clearer */ + def :|(l: String) = label(l) + + /** Put a label on the property to make test reports clearer */ + def |:(l: String) = label(l) + + /** Put a label on the property to make test reports clearer */ + def :|(l: Symbol) = label(l.toString.drop(1)) + + /** Put a label on the property to make test reports clearer */ + def |:(l: Symbol) = label(l.toString.drop(1)) + +} + +object Prop { + + import Gen.{value, fail, frequency, oneOf, Parameters} + import Arbitrary.{arbitrary} + import Shrink.{shrink} + + // Types + + /** A property argument */ + case class Arg[+T]( + label: String, + arg: T, + shrinks: Int, + origArg: T, + prettyArg: Pretty, + prettyOrigArg: Pretty + ) + + object Result { + @deprecated("Will be removed in 1.12.0", "1.11.2") + def apply(st: Status): Result = Result(status = st) + @deprecated("Will be removed in 1.12.0", "1.11.2") + def merge(x: Result, y: Result, status: Status) = mergeRes(x,y,status) + } + + private[scalacheck] def mergeRes(x: Result, y: Result, st: Status) = Result( + status = st, + args = x.args ++ y.args, + collected = x.collected ++ y.collected, + labels = x.labels ++ y.labels + ) + + /** The result of evaluating a property */ + case class Result( + status: Status, + args: List[Arg[Any]] = Nil, + collected: Set[Any] = Set.empty, + labels: Set[String] = Set.empty + ) { + def success = status match { + case True => true + case Proof => true + case _ => false + } + + def failure = status match { + case False => true + case Exception(_) => true + case _ => false + } + + def proved = status == Proof + + def addArg(a: Arg[Any]) = copy(args = a::args) + + def collect(x: Any) = copy(collected = collected+x) + + def label(l: String) = copy(labels = labels+l) + + def &&(r: Result) = (this.status, r.status) match { + case (Exception(_),_) => this + case (_,Exception(_)) => r + + case (False,_) => this + case (_,False) => r + + case (Undecided,_) => this + case (_,Undecided) => r + + case (_,Proof) => mergeRes(this, r, this.status) + case (Proof,_) => mergeRes(this, r, r.status) + + case (True,True) => mergeRes(this, r, True) + } + + def ||(r: Result) = (this.status, r.status) match { + case (Exception(_),_) => this + case (_,Exception(_)) => r + + case (False,False) => mergeRes(this, r, False) + case (False,_) => r + case (_,False) => this + + case (Proof,_) => this + case (_,Proof) => r + + case (True,_) => this + case (_,True) => r + + case (Undecided,Undecided) => mergeRes(this, r, Undecided) + } + + def ++(r: Result) = (this.status, r.status) match { + case (Exception(_),_) => this + case (_,Exception(_)) => r + + case (_, Undecided) => this + case (Undecided, _) => r + + case (_, Proof) => this + case (Proof, _) => r + + case (_, True) => this + case (True, _) => r + + case (False, _) => this + case (_, False) => r + } + + def ==>(r: Result) = (this.status, r.status) match { + case (Exception(_),_) => this + case (_,Exception(_)) => r + + case (False,_) => mergeRes(this, r, Undecided) + + case (Undecided,_) => this + + case (Proof,_) => mergeRes(this, r, r.status) + case (True,_) => mergeRes(this, r, r.status) + } + } + + sealed trait Status + + /** The property was proved */ + case object Proof extends Status + + /** The 
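A small sketch of the labelling operators defined above (illustrative only; `AnyOperators` supplies `?=` on plain values):

```
import org.scalacheck.Prop.{forAll, AnyOperators}

// Labels make it clear in the report which clause failed.
val labelled = forAll { (n: Int) =>
  ("doubling" |: (n + n ?= 2 * n)) && ("identity" |: (n + 0 ?= n))
}
labelled.check
```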
property was true */ + case object True extends Status + + /** The property was false */ + case object False extends Status + + /** The property could not be falsified or proved */ + case object Undecided extends Status + + /** Evaluating the property raised an exception */ + sealed case class Exception(e: Throwable) extends Status { + override def equals(o: Any) = o match { + case Exception(_) => true + case _ => false + } + } + + /** Create a new property from the given function. */ + def apply(f: Parameters => Result): Prop = new Prop { + def apply(prms: Parameters) = try f(prms) catch { + case e: Throwable => Result(status = Exception(e)) + } + } + + /** Create a property that returns the given result */ + def apply(r: Result): Prop = Prop.apply(prms => r) + + /** Create a property from a boolean value */ + def apply(b: Boolean): Prop = if(b) proved else falsified + + + // Implicits + + /** A collection of property operators on `Any` values. + * Import [[Prop.AnyOperators]] to make the operators available. */ + class ExtendedAny[T <% Pretty](x: => T) { + /** See [[Prop.imply]] */ + def imply(f: PartialFunction[T,Prop]) = Prop.imply(x,f) + /** See [[Prop.iff]] */ + def iff(f: PartialFunction[T,Prop]) = Prop.iff(x,f) + /** See [[Prop.?=]] */ + def ?=(y: T) = Prop.?=(x, y) + /** See [[Prop.=?]] */ + def =?(y: T) = Prop.=?(x, y) + } + + /** A collection of property operators on `Boolean` values. + * Import [[Prop.BooleanOperators]] to make the operators available. */ + class ExtendedBoolean(b: => Boolean) { + /** See the documentation for [[org.scalacheck.Prop]] */ + def ==>(p: => Prop) = Prop(b) ==> p + /** See the documentation for [[org.scalacheck.Prop]] */ + def :|(l: String) = Prop(b) :| l + /** See the documentation for [[org.scalacheck.Prop]] */ + def |:(l: String) = l |: Prop(b) + /** See the documentation for [[org.scalacheck.Prop]] */ + def :|(l: Symbol) = Prop(b) :| l + /** See the documentation for [[org.scalacheck.Prop]] */ + def |:(l: Symbol) = l |: Prop(b) + } + + /** Implicit method that makes a number of property operators on values of + * type `Any` available in the current scope. + * See [[Prop.ExtendedAny]] for documentation on the operators. */ + implicit def AnyOperators[T <% Pretty](x: => T) = new ExtendedAny[T](x) + + /** Implicit method that makes a number of property operators on boolean + * values available in the current scope. See [[Prop.ExtendedBoolean]] for + * documentation on the operators. */ + implicit def BooleanOperators(b: => Boolean) = new ExtendedBoolean(b) + + /** Implicit conversion of Boolean values to Prop values. */ + implicit def propBoolean(b: Boolean): Prop = Prop(b) + + + // Private support functions + + private def provedToTrue(r: Result) = r.status match { + case Proof => r.copy(status = True) + case _ => r + } + + + // Property combinators + + /** A property that never is proved or falsified */ + lazy val undecided = Prop(Result(status = Undecided)) + + /** A property that always is false */ + lazy val falsified = Prop(Result(status = False)) + + /** A property that always is proved */ + lazy val proved = Prop(Result(status = Proof)) + + /** A property that always is passed */ + lazy val passed = Prop(Result(status = True)) + + /** A property that denotes an exception */ + def exception(e: Throwable): Prop = Prop(Result(status = Exception(e))) + + /** A property that denotes an exception */ + lazy val exception: Prop = exception(null) + + /** Create a property that compares to values. 
If the values aren't equal, + * the property will fail and report that first value doesn't match the + * expected (second) value. */ + def ?=[T](x: T, y: T)(implicit pp: T => Pretty): Prop = + if(x == y) proved else falsified :| { + val exp = Pretty.pretty[T](y, Pretty.Params(0)) + val act = Pretty.pretty[T](x, Pretty.Params(0)) + "Expected "+exp+" but got "+act + } + + /** Create a property that compares to values. If the values aren't equal, + * the property will fail and report that second value doesn't match the + * expected (first) value. */ + def =?[T](x: T, y: T)(implicit pp: T => Pretty): Prop = ?=(y, x) + + /** A property that depends on the generator size */ + def sizedProp(f: Int => Prop): Prop = Prop { prms => + // provedToTrue since if the property is proved for + // one size, it shouldn't be regarded as proved for + // all sizes. + provedToTrue(f(prms.size)(prms)) + } + + /** Implication with several conditions */ + def imply[T](x: T, f: PartialFunction[T,Prop]): Prop = secure { + if(f.isDefinedAt(x)) f(x) else undecided + } + + /** Property holds only if the given partial function is defined at + * `x`, and returns a property that holds */ + def iff[T](x: T, f: PartialFunction[T,Prop]): Prop = secure { + if(f.isDefinedAt(x)) f(x) else falsified + } + + /** Combines properties into one, which is true if and only if all the + * properties are true */ + def all(ps: Prop*) = if(ps.isEmpty) proved else Prop(prms => + ps.map(p => p(prms)).reduceLeft(_ && _) + ) + + /** Combines properties into one, which is true if at least one of the + * properties is true */ + def atLeastOne(ps: Prop*) = if(ps.isEmpty) falsified else Prop(prms => + ps.map(p => p(prms)).reduceLeft(_ || _) + ) + + /** A property that holds if at least one of the given generators + * fails generating a value */ + def someFailing[T](gs: Seq[Gen[T]]) = atLeastOne(gs.map(_ == fail):_*) + + /** A property that holds iff none of the given generators + * fails generating a value */ + def noneFailing[T](gs: Seq[Gen[T]]) = all(gs.map(_ !== fail):_*) + + /** Returns true if the given statement throws an exception + * of the specified type */ + def throws[T <: Throwable](c: Class[T])(x: => Any): Boolean = + try { x; false } catch { case e if c.isInstance(e) => true } + + /** Collect data for presentation in test report */ + def collect[T, P <% Prop](f: T => P): T => Prop = t => Prop { prms => + val prop = f(t) + prop(prms).collect(t) + } + + /** Collect data for presentation in test report */ + def collect[T](t: T)(prop: Prop) = Prop { prms => + prop(prms).collect(t) + } + + /** Collect data for presentation in test report */ + def classify(c: => Boolean, ifTrue: Any)(prop: Prop): Prop = + if(c) collect(ifTrue)(prop) else collect(())(prop) + + /** Collect data for presentation in test report */ + def classify(c: => Boolean, ifTrue: Any, ifFalse: Any)(prop: Prop): Prop = + if(c) collect(ifTrue)(prop) else collect(ifFalse)(prop) + + /** Wraps and protects a property */ + def secure[P <% Prop](p: => P): Prop = + try (p: Prop) catch { case e: Throwable => exception(e) } + + /** Existential quantifier for an explicit generator. */ + def exists[A,P](f: A => P)(implicit + pv: P => Prop, + pp: A => Pretty, + aa: Arbitrary[A] + ): Prop = exists(aa.arbitrary)(f) + + /** Existential quantifier for an explicit generator. 
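An illustrative sketch of `classify` as defined above (the property itself is an arbitrary example):

```
import org.scalacheck.Prop.{forAll, classify}

// Classify inputs so the distribution of generated cases shows up in the report.
val sortsDescending = forAll { (xs: List[Int]) =>
  classify(xs.isEmpty, "empty", "non-empty") {
    classify(xs.length > 10, "long", "short") {
      xs.sorted.reverse == xs.sorted(Ordering[Int].reverse)
    }
  }
}
```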
*/ + def exists[A,P](g: Gen[A])(f: A => P)(implicit + pv: P => Prop, + pp: A => Pretty + ): Prop = Prop { prms => + val gr = g.doApply(prms) + gr.retrieve match { + case None => undecided(prms) + case Some(x) => + val p = secure(f(x)) + val labels = gr.labels.mkString(",") + val r = p(prms).addArg(Arg(labels,x,0,x,pp(x),pp(x))) + r.status match { + case True => r.copy(status = Proof) + case False => r.copy(status = Undecided) + case _ => r + } + } + } + + /** Universal quantifier for an explicit generator. Does not shrink failed + * test cases. */ + def forAllNoShrink[T1,P]( + g1: Gen[T1])( + f: T1 => P)(implicit + pv: P => Prop, + pp1: T1 => Pretty + ): Prop = Prop { prms => + val gr = g1.doApply(prms) + gr.retrieve match { + case None => undecided(prms) + case Some(x) => + val p = secure(f(x)) + val labels = gr.labels.mkString(",") + provedToTrue(p(prms)).addArg(Arg(labels,x,0,x,pp1(x),pp1(x))) + } + } + + /** Universal quantifier for two explicit generators. + * Does not shrink failed test cases. */ + def forAllNoShrink[T1,T2,P]( + g1: Gen[T1], g2: Gen[T2])( + f: (T1,T2) => P)(implicit + p: P => Prop, + pp1: T1 => Pretty, + pp2: T2 => Pretty + ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2)(f(t, _:T2))) + + /** Universal quantifier for three explicit generators. + * Does not shrink failed test cases. */ + def forAllNoShrink[T1,T2,T3,P]( + g1: Gen[T1], g2: Gen[T2], g3: Gen[T3])( + f: (T1,T2,T3) => P)(implicit + p: P => Prop, + pp1: T1 => Pretty, + pp2: T2 => Pretty, + pp3: T3 => Pretty + ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3)(f(t, _:T2, _:T3))) + + /** Universal quantifier for four explicit generators. + * Does not shrink failed test cases. */ + def forAllNoShrink[T1,T2,T3,T4,P]( + g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4])( + f: (T1,T2,T3,T4) => P)(implicit + p: P => Prop, + pp1: T1 => Pretty, + pp2: T2 => Pretty, + pp3: T3 => Pretty, + pp4: T4 => Pretty + ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4)(f(t, _:T2, _:T3, _:T4))) + + /** Universal quantifier for five explicit generators. + * Does not shrink failed test cases. */ + def forAllNoShrink[T1,T2,T3,T4,T5,P]( + g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5])( + f: (T1,T2,T3,T4,T5) => P)(implicit + p: P => Prop, + pp1: T1 => Pretty, + pp2: T2 => Pretty, + pp3: T3 => Pretty, + pp4: T4 => Pretty, + pp5: T5 => Pretty + ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5)(f(t, _:T2, _:T3, _:T4, _:T5))) + + /** Universal quantifier for six explicit generators. + * Does not shrink failed test cases. */ + def forAllNoShrink[T1,T2,T3,T4,T5,T6,P]( + g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6])( + f: (T1,T2,T3,T4,T5,T6) => P)(implicit + p: P => Prop, + pp1: T1 => Pretty, + pp2: T2 => Pretty, + pp3: T3 => Pretty, + pp4: T4 => Pretty, + pp5: T5 => Pretty, + pp6: T6 => Pretty + ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6))) + + /** Universal quantifier for seven explicit generators. + * Does not shrink failed test cases. 
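A sketch of `forAllNoShrink` with an explicit generator, as described above (illustrative only):

```
import org.scalacheck.{Gen, Prop}

// No shrinking here: shrunk values could break the generator's invariant (evenness).
val evens: Gen[Int] = Gen.choose(0, 1000).map(_ * 2)
val stillEven = Prop.forAllNoShrink(evens) { n => n % 2 == 0 }
```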
*/ + def forAllNoShrink[T1,T2,T3,T4,T5,T6,T7,P]( + g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7])( + f: (T1,T2,T3,T4,T5,T6,T7) => P)(implicit + p: P => Prop, + pp1: T1 => Pretty, + pp2: T2 => Pretty, + pp3: T3 => Pretty, + pp4: T4 => Pretty, + pp5: T5 => Pretty, + pp6: T6 => Pretty, + pp7: T7 => Pretty + ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6,g7)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7))) + + /** Universal quantifier for eight explicit generators. + * Does not shrink failed test cases. */ + def forAllNoShrink[T1,T2,T3,T4,T5,T6,T7,T8,P]( + g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8])( + f: (T1,T2,T3,T4,T5,T6,T7,T8) => P)(implicit + p: P => Prop, + pp1: T1 => Pretty, + pp2: T2 => Pretty, + pp3: T3 => Pretty, + pp4: T4 => Pretty, + pp5: T5 => Pretty, + pp6: T6 => Pretty, + pp7: T7 => Pretty, + pp8: T8 => Pretty + ): Prop = forAllNoShrink(g1)(t => forAllNoShrink(g2,g3,g4,g5,g6,g7,g8)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8))) + + /** Converts a function into a universally quantified property */ + def forAllNoShrink[A1,P]( + f: A1 => P)(implicit + pv: P => Prop, + a1: Arbitrary[A1], pp1: A1 => Pretty + ): Prop = forAllNoShrink(arbitrary[A1])(f) + + /** Converts a function into a universally quantified property */ + def forAllNoShrink[A1,A2,P]( + f: (A1,A2) => P)(implicit + pv: P => Prop, + a1: Arbitrary[A1], pp1: A1 => Pretty, + a2: Arbitrary[A2], pp2: A2 => Pretty + ): Prop = forAllNoShrink(arbitrary[A1], arbitrary[A2])(f) + + /** Converts a function into a universally quantified property */ + def forAllNoShrink[A1,A2,A3,P]( + f: (A1,A2,A3) => P)(implicit + pv: P => Prop, + a1: Arbitrary[A1], pp1: A1 => Pretty, + a2: Arbitrary[A2], pp2: A2 => Pretty, + a3: Arbitrary[A3], pp3: A3 => Pretty + ): Prop = forAllNoShrink(arbitrary[A1], arbitrary[A2], arbitrary[A3])(f) + + /** Converts a function into a universally quantified property */ + def forAllNoShrink[A1,A2,A3,A4,P]( + f: (A1,A2,A3,A4) => P)(implicit + pv: P => Prop, + a1: Arbitrary[A1], pp1: A1 => Pretty, + a2: Arbitrary[A2], pp2: A2 => Pretty, + a3: Arbitrary[A3], pp3: A3 => Pretty, + a4: Arbitrary[A4], pp4: A4 => Pretty + ): Prop = forAllNoShrink(arbitrary[A1], arbitrary[A2], arbitrary[A3], arbitrary[A4])(f) + + /** Converts a function into a universally quantified property */ + def forAllNoShrink[A1,A2,A3,A4,A5,P]( + f: (A1,A2,A3,A4,A5) => P)(implicit + pv: P => Prop, + a1: Arbitrary[A1], pp1: A1 => Pretty, + a2: Arbitrary[A2], pp2: A2 => Pretty, + a3: Arbitrary[A3], pp3: A3 => Pretty, + a4: Arbitrary[A4], pp4: A4 => Pretty, + a5: Arbitrary[A5], pp5: A5 => Pretty + ): Prop = forAllNoShrink(arbitrary[A1], arbitrary[A2], arbitrary[A3], arbitrary[A4], arbitrary[A5])(f) + + /** Converts a function into a universally quantified property */ + def forAllNoShrink[A1,A2,A3,A4,A5,A6,P]( + f: (A1,A2,A3,A4,A5,A6) => P)(implicit + pv: P => Prop, + a1: Arbitrary[A1], pp1: A1 => Pretty, + a2: Arbitrary[A2], pp2: A2 => Pretty, + a3: Arbitrary[A3], pp3: A3 => Pretty, + a4: Arbitrary[A4], pp4: A4 => Pretty, + a5: Arbitrary[A5], pp5: A5 => Pretty, + a6: Arbitrary[A6], pp6: A6 => Pretty + ): Prop = forAllNoShrink(arbitrary[A1], arbitrary[A2], arbitrary[A3], arbitrary[A4], arbitrary[A5], arbitrary[A6])(f) + + /** Converts a function into a universally quantified property */ + def forAllNoShrink[A1,A2,A3,A4,A5,A6,A7,P]( + f: (A1,A2,A3,A4,A5,A6,A7) => P)(implicit + pv: P => Prop, + a1: Arbitrary[A1], pp1: A1 => Pretty, + a2: 
Arbitrary[A2], pp2: A2 => Pretty, + a3: Arbitrary[A3], pp3: A3 => Pretty, + a4: Arbitrary[A4], pp4: A4 => Pretty, + a5: Arbitrary[A5], pp5: A5 => Pretty, + a6: Arbitrary[A6], pp6: A6 => Pretty, + a7: Arbitrary[A7], pp7: A7 => Pretty + ): Prop = { + forAllNoShrink(arbitrary[A1], arbitrary[A2], arbitrary[A3], arbitrary[A4], arbitrary[A5], arbitrary[A6], + arbitrary[A7])(f) + } + + /** Converts a function into a universally quantified property */ + def forAllNoShrink[A1,A2,A3,A4,A5,A6,A7,A8,P]( + f: (A1,A2,A3,A4,A5,A6,A7,A8) => P)(implicit + pv: P => Prop, + a1: Arbitrary[A1], pp1: A1 => Pretty, + a2: Arbitrary[A2], pp2: A2 => Pretty, + a3: Arbitrary[A3], pp3: A3 => Pretty, + a4: Arbitrary[A4], pp4: A4 => Pretty, + a5: Arbitrary[A5], pp5: A5 => Pretty, + a6: Arbitrary[A6], pp6: A6 => Pretty, + a7: Arbitrary[A7], pp7: A7 => Pretty, + a8: Arbitrary[A8], pp8: A8 => Pretty + ): Prop = { + forAllNoShrink(arbitrary[A1], arbitrary[A2], arbitrary[A3], arbitrary[A4], arbitrary[A5], arbitrary[A6], + arbitrary[A7], arbitrary[A8])(f) + } + + /** Universal quantifier for an explicit generator. Shrinks failed arguments + * with the given shrink function */ + def forAllShrink[T, P](g: Gen[T], + shrink: T => Stream[T])(f: T => P + )(implicit pv: P => Prop, pp: T => Pretty + ): Prop = Prop { prms => + + val gr = g.doApply(prms) + val labels = gr.labels.mkString(",") + + def result(x: T) = { + val p = secure(pv(f(x))) + provedToTrue(p(prms)) + } + + /** Returns the first failed result in Left or success in Right */ + def getFirstFailure(xs: Stream[T]): Either[(T,Result),(T,Result)] = { + assert(!xs.isEmpty, "Stream cannot be empty") + val results = xs.map(x => (x, result(x))) + results.dropWhile(!_._2.failure).headOption match { + case None => Right(results.head) + case Some(xr) => Left(xr) + } + } + + def shrinker(x: T, r: Result, shrinks: Int, orig: T): Result = { + val xs = shrink(x).filter(gr.sieve) + val res = r.addArg(Arg(labels,x,shrinks,orig,pp(x),pp(orig))) + if(xs.isEmpty) res else getFirstFailure(xs) match { + case Right((x2,r2)) => res + case Left((x2,r2)) => shrinker(x2, replOrig(r,r2), shrinks+1, orig) + } + } + + def replOrig(r0: Result, r1: Result) = (r0.args,r1.args) match { + case (a0::_,a1::as) => + r1.copy( + args = a1.copy( + origArg = a0.origArg, + prettyOrigArg = a0.prettyOrigArg + ) :: as + ) + case _ => r1 + } + + gr.retrieve match { + case None => undecided(prms) + case Some(x) => + val r = result(x) + if (!r.failure) r.addArg(Arg(labels,x,0,x,pp(x),pp(x))) + else shrinker(x,r,0,x) + } + + } + + /** Universal quantifier for an explicit generator. Shrinks failed arguments + * with the default shrink function for the type */ + def forAll[T1,P]( + g1: Gen[T1])( + f: T1 => P)(implicit + p: P => Prop, + s1: Shrink[T1], + pp1: T1 => Pretty + ): Prop = forAllShrink[T1,P](g1, shrink[T1])(f) + + /** Universal quantifier for two explicit generators. Shrinks failed arguments + * with the default shrink function for the type */ + def forAll[T1,T2,P]( + g1: Gen[T1], g2: Gen[T2])( + f: (T1,T2) => P)(implicit + p: P => Prop, + s1: Shrink[T1], pp1: T1 => Pretty, + s2: Shrink[T2], pp2: T2 => Pretty + ): Prop = forAll(g1)(t => forAll(g2)(f(t, _:T2))) + + /** Universal quantifier for three explicit generators. 
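A sketch of `forAll` with explicit generators, as described above (illustrative only; failing cases still shrink via `Shrink[Int]`):

```
import org.scalacheck.{Gen, Prop}

// Explicit generators restrict the domain; shrinking uses the default Shrink instance.
val nonNegativeDiff = Prop.forAll(Gen.choose(0, 1000), Gen.choose(0, 1000)) {
  (a: Int, b: Int) => math.max(a, b) - math.min(a, b) >= 0
}
```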
Shrinks failed arguments + * with the default shrink function for the type */ + def forAll[T1,T2,T3,P]( + g1: Gen[T1], g2: Gen[T2], g3: Gen[T3])( + f: (T1,T2,T3) => P)(implicit + p: P => Prop, + s1: Shrink[T1], pp1: T1 => Pretty, + s2: Shrink[T2], pp2: T2 => Pretty, + s3: Shrink[T3], pp3: T3 => Pretty + ): Prop = forAll(g1)(t => forAll(g2,g3)(f(t, _:T2, _:T3))) + + /** Universal quantifier for four explicit generators. Shrinks failed arguments + * with the default shrink function for the type */ + def forAll[T1,T2,T3,T4,P]( + g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4])( + f: (T1,T2,T3,T4) => P)(implicit + p: P => Prop, + s1: Shrink[T1], pp1: T1 => Pretty, + s2: Shrink[T2], pp2: T2 => Pretty, + s3: Shrink[T3], pp3: T3 => Pretty, + s4: Shrink[T4], pp4: T4 => Pretty + ): Prop = forAll(g1)(t => forAll(g2,g3,g4)(f(t, _:T2, _:T3, _:T4))) + + /** Universal quantifier for five explicit generators. Shrinks failed arguments + * with the default shrink function for the type */ + def forAll[T1,T2,T3,T4,T5,P]( + g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5])( + f: (T1,T2,T3,T4,T5) => P)(implicit + p: P => Prop, + s1: Shrink[T1], pp1: T1 => Pretty, + s2: Shrink[T2], pp2: T2 => Pretty, + s3: Shrink[T3], pp3: T3 => Pretty, + s4: Shrink[T4], pp4: T4 => Pretty, + s5: Shrink[T5], pp5: T5 => Pretty + ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5)(f(t, _:T2, _:T3, _:T4, _:T5))) + + /** Universal quantifier for six explicit generators. Shrinks failed arguments + * with the default shrink function for the type */ + def forAll[T1,T2,T3,T4,T5,T6,P]( + g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6])( + f: (T1,T2,T3,T4,T5,T6) => P)(implicit + p: P => Prop, + s1: Shrink[T1], pp1: T1 => Pretty, + s2: Shrink[T2], pp2: T2 => Pretty, + s3: Shrink[T3], pp3: T3 => Pretty, + s4: Shrink[T4], pp4: T4 => Pretty, + s5: Shrink[T5], pp5: T5 => Pretty, + s6: Shrink[T6], pp6: T6 => Pretty + ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6))) + + /** Universal quantifier for seven explicit generators. Shrinks failed arguments + * with the default shrink function for the type */ + def forAll[T1,T2,T3,T4,T5,T6,T7,P]( + g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7])( + f: (T1,T2,T3,T4,T5,T6,T7) => P)(implicit + p: P => Prop, + s1: Shrink[T1], pp1: T1 => Pretty, + s2: Shrink[T2], pp2: T2 => Pretty, + s3: Shrink[T3], pp3: T3 => Pretty, + s4: Shrink[T4], pp4: T4 => Pretty, + s5: Shrink[T5], pp5: T5 => Pretty, + s6: Shrink[T6], pp6: T6 => Pretty, + s7: Shrink[T7], pp7: T7 => Pretty + ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6,g7)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7))) + + /** Universal quantifier for eight explicit generators. 
Shrinks failed arguments + * with the default shrink function for the type */ + def forAll[T1,T2,T3,T4,T5,T6,T7,T8,P]( + g1: Gen[T1], g2: Gen[T2], g3: Gen[T3], g4: Gen[T4], g5: Gen[T5], g6: Gen[T6], g7: Gen[T7], g8: Gen[T8])( + f: (T1,T2,T3,T4,T5,T6,T7,T8) => P)(implicit + p: P => Prop, + s1: Shrink[T1], pp1: T1 => Pretty, + s2: Shrink[T2], pp2: T2 => Pretty, + s3: Shrink[T3], pp3: T3 => Pretty, + s4: Shrink[T4], pp4: T4 => Pretty, + s5: Shrink[T5], pp5: T5 => Pretty, + s6: Shrink[T6], pp6: T6 => Pretty, + s7: Shrink[T7], pp7: T7 => Pretty, + s8: Shrink[T8], pp8: T8 => Pretty + ): Prop = forAll(g1)(t => forAll(g2,g3,g4,g5,g6,g7,g8)(f(t, _:T2, _:T3, _:T4, _:T5, _:T6, _:T7, _:T8))) + + /** Converts a function into a universally quantified property */ + def forAll[A1,P] ( + f: A1 => P)(implicit + p: P => Prop, + a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty + ): Prop = forAllShrink(arbitrary[A1],shrink[A1])(f andThen p) + + /** Converts a function into a universally quantified property */ + def forAll[A1,A2,P] ( + f: (A1,A2) => P)(implicit + p: P => Prop, + a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty, + a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty + ): Prop = forAll((a: A1) => forAll(f(a, _:A2))) + + /** Converts a function into a universally quantified property */ + def forAll[A1,A2,A3,P] ( + f: (A1,A2,A3) => P)(implicit + p: P => Prop, + a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty, + a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty, + a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty + ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3))) + + /** Converts a function into a universally quantified property */ + def forAll[A1,A2,A3,A4,P] ( + f: (A1,A2,A3,A4) => P)(implicit + p: P => Prop, + a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty, + a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty, + a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty, + a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty + ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4))) + + /** Converts a function into a universally quantified property */ + def forAll[A1,A2,A3,A4,A5,P] ( + f: (A1,A2,A3,A4,A5) => P)(implicit + p: P => Prop, + a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty, + a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty, + a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty, + a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty, + a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty + ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5))) + + /** Converts a function into a universally quantified property */ + def forAll[A1,A2,A3,A4,A5,A6,P] ( + f: (A1,A2,A3,A4,A5,A6) => P)(implicit + p: P => Prop, + a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty, + a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty, + a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty, + a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty, + a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty, + a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty + ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6))) + + /** Converts a function into a universally quantified property */ + def forAll[A1,A2,A3,A4,A5,A6,A7,P] ( + f: (A1,A2,A3,A4,A5,A6,A7) => P)(implicit + p: P => Prop, + a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty, + a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty, + a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty, + a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty, + a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => 
Pretty, + a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty, + a7: Arbitrary[A7], s7: Shrink[A7], pp7: A7 => Pretty + ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7))) + + /** Converts a function into a universally quantified property */ + def forAll[A1,A2,A3,A4,A5,A6,A7,A8,P] ( + f: (A1,A2,A3,A4,A5,A6,A7,A8) => P)(implicit + p: P => Prop, + a1: Arbitrary[A1], s1: Shrink[A1], pp1: A1 => Pretty, + a2: Arbitrary[A2], s2: Shrink[A2], pp2: A2 => Pretty, + a3: Arbitrary[A3], s3: Shrink[A3], pp3: A3 => Pretty, + a4: Arbitrary[A4], s4: Shrink[A4], pp4: A4 => Pretty, + a5: Arbitrary[A5], s5: Shrink[A5], pp5: A5 => Pretty, + a6: Arbitrary[A6], s6: Shrink[A6], pp6: A6 => Pretty, + a7: Arbitrary[A7], s7: Shrink[A7], pp7: A7 => Pretty, + a8: Arbitrary[A8], s8: Shrink[A8], pp8: A8 => Pretty + ): Prop = forAll((a: A1) => forAll(f(a, _:A2, _:A3, _:A4, _:A5, _:A6, _:A7, _:A8))) + + /** Ensures that the property expression passed in completes within the given + * space of time. */ + def within(maximumMs: Long)(wrappedProp: => Prop): Prop = new Prop { + @tailrec private def attempt(prms: Parameters, endTime: Long): Result = { + val result = wrappedProp.apply(prms) + if (System.currentTimeMillis > endTime) { + (if(result.failure) result else Result(status = False)).label("Timeout") + } else { + if (result.success) result + else attempt(prms, endTime) + } + } + def apply(prms: Parameters) = attempt(prms, System.currentTimeMillis + maximumMs) + } +} diff --git a/src/partest-extras/scala/org/scalacheck/Properties.scala b/src/partest-extras/scala/org/scalacheck/Properties.scala new file mode 100644 index 000000000000..abaac61c7f09 --- /dev/null +++ b/src/partest-extras/scala/org/scalacheck/Properties.scala @@ -0,0 +1,82 @@ +/*-------------------------------------------------------------------------*\ +** ScalaCheck ** +** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. ** +** http://www.scalacheck.org ** +** ** +** This software is released under the terms of the Revised BSD License. ** +** There is NO WARRANTY. See the file LICENSE for the full text. ** +\*------------------------------------------------------------------------ */ + +package org.scalacheck + +import util.ConsoleReporter + +/** Represents a collection of properties, with convenient methods + * for checking all properties at once. This class is itself a property, which + * holds if and only if all of the contained properties hold. + *

+ * Properties are added in the following way:

+ * + * {{{ + * object MyProps extends Properties("MyProps") { + * property("myProp1") = forAll { (n:Int, m:Int) => + * n+m == m+n + * } + * } + * }}} + */ +class Properties(val name: String) extends Prop { + + private val props = new scala.collection.mutable.ListBuffer[(String,Prop)] + + /** Returns one property which holds if and only if all of the + * properties in this property collection hold */ + private def oneProperty: Prop = Prop.all((properties map (_._2)):_*) + + /** Returns all properties of this collection in a list of name/property + * pairs. */ + def properties: Seq[(String,Prop)] = props + + def apply(p: Gen.Parameters) = oneProperty(p) + + /** Convenience method that checks the properties with the given parameters + * and reports the result on the console. If you need to get the results + * from the test use the `check` methods in [[org.scalacheck.Test]] + * instead. */ + override def check(prms: Test.Parameters): Unit = Test.checkProperties( + prms.withTestCallback(ConsoleReporter(1) chain prms.testCallback), this + ) + + /** Convenience method that checks the properties and reports the + * result on the console. If you need to get the results from the test use + * the `check` methods in [[org.scalacheck.Test]] instead. */ + override def check: Unit = check(Test.Parameters.default) + + /** The logic for main, separated out to make it easier to + * avoid System.exit calls. Returns exit code. + */ + override def mainRunner(args: Array[String]): Int = { + Test.parseParams(args) match { + case Some(params) => + val res = Test.checkProperties(params, this) + val failed = res.filter(!_._2.passed).size + failed + case None => + println("Incorrect options") + -1 + } + } + + /** Adds all properties from another property collection to this one. */ + def include(ps: Properties) = for((n,p) <- ps.properties) property(n) = p + + /** Used for specifying properties. Usage: + * {{{ + * property("myProp") = ... + * }}} + */ + class PropertySpecifier() { + def update(propName: String, p: Prop) = props += ((name+"."+propName, p)) + } + + lazy val property = new PropertySpecifier() +} diff --git a/src/partest-extras/scala/org/scalacheck/ScalaCheckFramework.scala b/src/partest-extras/scala/org/scalacheck/ScalaCheckFramework.scala new file mode 100644 index 000000000000..754b67764de3 --- /dev/null +++ b/src/partest-extras/scala/org/scalacheck/ScalaCheckFramework.scala @@ -0,0 +1,93 @@ +/*-------------------------------------------------------------------------*\ +** ScalaCheck ** +** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. ** +** http://www.scalacheck.org ** +** ** +** This software is released under the terms of the Revised BSD License. ** +** There is NO WARRANTY. See the file LICENSE for the full text. 
** +\*------------------------------------------------------------------------ */ + +package org.scalacheck + +import util.Pretty + +import org.scalatools.testing._ + +class ScalaCheckFramework extends Framework { + + private def mkFP(mod: Boolean, cname: String) = + new SubclassFingerprint { + val superClassName = cname + val isModule = mod + } + + val name = "ScalaCheck" + + val tests = Array[Fingerprint]( + mkFP(true, "org.scalacheck.Properties"), + mkFP(false, "org.scalacheck.Prop"), + mkFP(false, "org.scalacheck.Properties"), + mkFP(true, "org.scalacheck.Prop") + ) + + def testRunner(loader: ClassLoader, loggers: Array[Logger]) = new Runner2 { + + private def asEvent(nr: (String, Test.Result)) = nr match { + case (n: String, r: Test.Result) => new Event { + val testName = n + val description = n + val result = r.status match { + case Test.Passed => Result.Success + case _:Test.Proved => Result.Success + case _:Test.Failed => Result.Failure + case Test.Exhausted => Result.Skipped + case _:Test.PropException | _:Test.GenException => Result.Error + } + val error = r.status match { + case Test.PropException(_, e, _) => e + case _:Test.Failed => new Exception(Pretty.pretty(r,Pretty.Params(0))) + case _ => null + } + } + } + + def run(testClassName: String, fingerprint: Fingerprint, handler: EventHandler, args: Array[String]) { + + val testCallback = new Test.TestCallback { + override def onPropEval(n: String, w: Int, s: Int, d: Int) = {} + + override def onTestResult(n: String, r: Test.Result) = { + for (l <- loggers) { + import Pretty._ + val verbosityOpts = Set("-verbosity", "-v") + val verbosity = args.grouped(2).filter(twos => verbosityOpts(twos.head)).toSeq.headOption.map(_.last).map(_.toInt).getOrElse(0) + l.info( + (if (r.passed) "+ " else "! ") + n + ": " + pretty(r, Params(verbosity)) + ) + } + handler.handle(asEvent((n,r))) + } + } + + val prms = Test.parseParams(args) match { + case Some(params) => + params.withTestCallback(testCallback).withCustomClassLoader(Some(loader)) + // TODO: Maybe handle this a bit better than throwing exception? + case None => throw new Exception() + } + + fingerprint match { + case fp: SubclassFingerprint => + val obj = + if(fp.isModule) Class.forName(testClassName + "$", true, loader).getField("MODULE$").get(null) + else Class.forName(testClassName, true, loader).newInstance + if(obj.isInstanceOf[Properties]) + Test.checkProperties(prms, obj.asInstanceOf[Properties]) + else + handler.handle(asEvent((testClassName, Test.check(prms, obj.asInstanceOf[Prop])))) + } + } + + } + +} diff --git a/src/partest-extras/scala/org/scalacheck/Shrink.scala b/src/partest-extras/scala/org/scalacheck/Shrink.scala new file mode 100644 index 000000000000..8ec28f4c4b28 --- /dev/null +++ b/src/partest-extras/scala/org/scalacheck/Shrink.scala @@ -0,0 +1,215 @@ +/*-------------------------------------------------------------------------*\ +** ScalaCheck ** +** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. ** +** http://www.scalacheck.org ** +** ** +** This software is released under the terms of the Revised BSD License. ** +** There is NO WARRANTY. See the file LICENSE for the full text. 
** +\*------------------------------------------------------------------------ */ + +package org.scalacheck + +import util.{Buildable,Buildable2} +import scala.collection.{ JavaConversions => jcl } + +sealed abstract class Shrink[T] { + def shrink(x: T): Stream[T] +} + +object Shrink { + + import Stream.{cons, empty} + import scala.collection._ + import java.util.ArrayList + + /** Interleaves two streams */ + private def interleave[T](xs: Stream[T], ys: Stream[T]): Stream[T] = + if(xs.isEmpty) ys + else if(ys.isEmpty) xs + else cons(xs.head, cons(ys.head, interleave(xs.tail, ys.tail))) + + /** Shrink instance factory */ + def apply[T](s: T => Stream[T]): Shrink[T] = new Shrink[T] { + override def shrink(x: T) = s(x) + } + + /** Shrink a value */ + def shrink[T](x: T)(implicit s: Shrink[T]): Stream[T] = s.shrink(x) + + /** Default shrink instance */ + implicit def shrinkAny[T]: Shrink[T] = Shrink(x => empty) + + /** Shrink instance of container */ + implicit def shrinkContainer[C[_],T](implicit v: C[T] => Traversable[T], s: Shrink[T], + b: Buildable[T,C] + ): Shrink[C[T]] = Shrink { xs: C[T] => + val ys = v(xs) + val zs = ys.toStream + removeChunks(ys.size,zs).append(shrinkOne(zs)).map(b.fromIterable) + } + + /** Shrink instance of container2 */ + implicit def shrinkContainer2[C[_,_],T,U](implicit v: C[T,U] => Traversable[(T,U)], s: Shrink[(T,U)], + b: Buildable2[T,U,C] + ): Shrink[C[T,U]] = Shrink { xs: C[T,U] => + val ys = v(xs) + val zs = ys.toStream + removeChunks(ys.size,zs).append(shrinkOne(zs)).map(b.fromIterable) + } + + private def removeChunks[T](n: Int, xs: Stream[T]): Stream[Stream[T]] = + if (xs.isEmpty) empty + else if (xs.tail.isEmpty) cons(empty, empty) + else { + val n1 = n / 2 + val n2 = n - n1 + lazy val xs1 = xs.take(n1) + lazy val xs2 = xs.drop(n1) + lazy val xs3 = + for (ys1 <- removeChunks(n1, xs1) if !ys1.isEmpty) yield ys1 append xs2 + lazy val xs4 = + for (ys2 <- removeChunks(n2, xs2) if !ys2.isEmpty) yield xs1 append ys2 + + cons(xs1, cons(xs2, interleave(xs3, xs4))) + } + + private def shrinkOne[T : Shrink](zs: Stream[T]): Stream[Stream[T]] = + if (zs.isEmpty) empty + else { + val x = zs.head + val xs = zs.tail + shrink(x).map(cons(_,xs)).append(shrinkOne(xs).map(cons(x,_))) + } + + /** Shrink instance of integer */ + implicit lazy val shrinkInt: Shrink[Int] = Shrink { n => + + def halfs(n: Int): Stream[Int] = + if(n == 0) empty else cons(n, halfs(n/2)) + + if(n == 0) empty else { + val ns = halfs(n/2).map(n - _) + cons(0, interleave(ns, ns.map(-1 * _))) + } + } + + /** Shrink instance of String */ + implicit lazy val shrinkString: Shrink[String] = Shrink { s => + shrinkContainer[List,Char].shrink(s.toList).map(_.mkString) + } + + /** Shrink instance of Option */ + implicit def shrinkOption[T : Shrink]: Shrink[Option[T]] = Shrink { + case None => empty + case Some(x) => cons(None, for(y <- shrink(x)) yield Some(y)) + } + + /** Shrink instance of 2-tuple */ + implicit def shrinkTuple2[ + T1:Shrink, T2:Shrink + ]: Shrink[(T1,T2)] = + Shrink { case (t1,t2) => + shrink(t1).map((_,t2)) append + shrink(t2).map((t1,_)) + } + + /** Shrink instance of 3-tuple */ + implicit def shrinkTuple3[ + T1:Shrink, T2:Shrink, T3:Shrink + ]: Shrink[(T1,T2,T3)] = + Shrink { case (t1,t2,t3) => + shrink(t1).map((_, t2, t3)) append + shrink(t2).map((t1, _, t3)) append + shrink(t3).map((t1, t2, _)) + } + + /** Shrink instance of 4-tuple */ + implicit def shrinkTuple4[ + T1:Shrink, T2:Shrink, T3:Shrink, T4:Shrink + ]: Shrink[(T1,T2,T3,T4)] = + Shrink { case (t1,t2,t3,t4) => + 
shrink(t1).map((_, t2, t3, t4)) append + shrink(t2).map((t1, _, t3, t4)) append + shrink(t3).map((t1, t2, _, t4)) append + shrink(t4).map((t1, t2, t3, _)) + } + + /** Shrink instance of 5-tuple */ + implicit def shrinkTuple5[ + T1:Shrink, T2:Shrink, T3:Shrink, T4:Shrink, T5:Shrink + ]: Shrink[(T1,T2,T3,T4,T5)] = + Shrink { case (t1,t2,t3,t4,t5) => + shrink(t1).map((_, t2, t3, t4, t5)) append + shrink(t2).map((t1, _, t3, t4, t5)) append + shrink(t3).map((t1, t2, _, t4, t5)) append + shrink(t4).map((t1, t2, t3, _, t5)) append + shrink(t5).map((t1, t2, t3, t4, _)) + } + + /** Shrink instance of 6-tuple */ + implicit def shrinkTuple6[ + T1:Shrink, T2:Shrink, T3:Shrink, T4:Shrink, T5:Shrink, T6:Shrink + ]: Shrink[(T1,T2,T3,T4,T5,T6)] = + Shrink { case (t1,t2,t3,t4,t5,t6) => + shrink(t1).map((_, t2, t3, t4, t5, t6)) append + shrink(t2).map((t1, _, t3, t4, t5, t6)) append + shrink(t3).map((t1, t2, _, t4, t5, t6)) append + shrink(t4).map((t1, t2, t3, _, t5, t6)) append + shrink(t5).map((t1, t2, t3, t4, _, t6)) append + shrink(t6).map((t1, t2, t3, t4, t5, _)) + } + + /** Shrink instance of 7-tuple */ + implicit def shrinkTuple7[ + T1:Shrink, T2:Shrink, T3:Shrink, T4:Shrink, T5:Shrink, T6:Shrink, T7:Shrink + ]: Shrink[(T1,T2,T3,T4,T5,T6,T7)] = + Shrink { case (t1,t2,t3,t4,t5,t6,t7) => + shrink(t1).map((_, t2, t3, t4, t5, t6, t7)) append + shrink(t2).map((t1, _, t3, t4, t5, t6, t7)) append + shrink(t3).map((t1, t2, _, t4, t5, t6, t7)) append + shrink(t4).map((t1, t2, t3, _, t5, t6, t7)) append + shrink(t5).map((t1, t2, t3, t4, _, t6, t7)) append + shrink(t6).map((t1, t2, t3, t4, t5, _, t7)) append + shrink(t7).map((t1, t2, t3, t4, t5, t6, _)) + } + + /** Shrink instance of 8-tuple */ + implicit def shrinkTuple8[ + T1:Shrink, T2:Shrink, T3:Shrink, T4:Shrink, T5:Shrink, T6:Shrink, + T7:Shrink, T8:Shrink + ]: Shrink[(T1,T2,T3,T4,T5,T6,T7,T8)] = + Shrink { case (t1,t2,t3,t4,t5,t6,t7,t8) => + shrink(t1).map((_, t2, t3, t4, t5, t6, t7, t8)) append + shrink(t2).map((t1, _, t3, t4, t5, t6, t7, t8)) append + shrink(t3).map((t1, t2, _, t4, t5, t6, t7, t8)) append + shrink(t4).map((t1, t2, t3, _, t5, t6, t7, t8)) append + shrink(t5).map((t1, t2, t3, t4, _, t6, t7, t8)) append + shrink(t6).map((t1, t2, t3, t4, t5, _, t7, t8)) append + shrink(t7).map((t1, t2, t3, t4, t5, t6, _, t8)) append + shrink(t8).map((t1, t2, t3, t4, t5, t6, t7, _)) + } + + /** Shrink instance of 9-tuple */ + implicit def shrinkTuple9[ + T1:Shrink, T2:Shrink, T3:Shrink, T4:Shrink, T5:Shrink, T6:Shrink, + T7:Shrink, T8:Shrink, T9:Shrink + ]: Shrink[(T1,T2,T3,T4,T5,T6,T7,T8,T9)] = + Shrink { case (t1,t2,t3,t4,t5,t6,t7,t8,t9) => + shrink(t1).map((_, t2, t3, t4, t5, t6, t7, t8, t9)) append + shrink(t2).map((t1, _, t3, t4, t5, t6, t7, t8, t9)) append + shrink(t3).map((t1, t2, _, t4, t5, t6, t7, t8, t9)) append + shrink(t4).map((t1, t2, t3, _, t5, t6, t7, t8, t9)) append + shrink(t5).map((t1, t2, t3, t4, _, t6, t7, t8, t9)) append + shrink(t6).map((t1, t2, t3, t4, t5, _, t7, t8, t9)) append + shrink(t7).map((t1, t2, t3, t4, t5, t6, _, t8, t9)) append + shrink(t8).map((t1, t2, t3, t4, t5, t6, t7, _, t9)) append + shrink(t9).map((t1, t2, t3, t4, t5, t6, t7, t8, _)) + } + + /** Transform a Shrink[T] to a Shrink[U] where T and U are two isomorphic types + * whose relationship is described by the provided transformation functions. 
+ * (exponential functor map) */ + def xmap[T, U](from: T => U, to: U => T)(implicit st: Shrink[T]): Shrink[U] = Shrink[U] { u: U ⇒ + st.shrink(to(u)).map(from) + } +} diff --git a/src/partest-extras/scala/org/scalacheck/Test.scala b/src/partest-extras/scala/org/scalacheck/Test.scala new file mode 100644 index 000000000000..9a9c62b93f94 --- /dev/null +++ b/src/partest-extras/scala/org/scalacheck/Test.scala @@ -0,0 +1,372 @@ +/*-------------------------------------------------------------------------*\ +** ScalaCheck ** +** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. ** +** http://www.scalacheck.org ** +** ** +** This software is released under the terms of the Revised BSD License. ** +** There is NO WARRANTY. See the file LICENSE for the full text. ** +\*------------------------------------------------------------------------ */ + +package org.scalacheck + +import Prop.Arg + +object Test { + + import util.{FreqMap, ConsoleReporter} + + /** Test parameters used by the check methods. Default + * parameters are defined by [[Test.Parameters.Default]]. */ + trait Parameters { + /** The minimum number of tests that must succeed for ScalaCheck to + * consider a property passed. */ + val minSuccessfulTests: Int + + /** Create a copy of this [[Test.Parameters]] instance with + * [[Test.Parameters.minSuccessfulTests]] set to the specified value. */ + def withMinSuccessfulTests(minSuccessfulTests: Int): Parameters = cp( + minSuccessfulTests = minSuccessfulTests + ) + + /** The starting size given as parameter to the generators. */ + val minSize: Int + + /** Create a copy of this [[Test.Parameters]] instance with + * [[Test.Parameters.minSize]] set to the specified value. */ + def withMinSize(minSize: Int): Parameters = cp( + minSize = minSize + ) + + /** The maximum size given as parameter to the generators. */ + val maxSize: Int + + /** Create a copy of this [[Test.Parameters]] instance with + * [[Test.Parameters.maxSize]] set to the specified value. */ + def withMaxSize(maxSize: Int): Parameters = cp( + maxSize = maxSize + ) + + /** The random number generator used. */ + val rng: scala.util.Random + + /** Create a copy of this [[Test.Parameters]] instance with + * [[Test.Parameters.rng]] set to the specified value. */ + def withRng(rng: scala.util.Random): Parameters = cp( + rng = rng + ) + + /** The number of tests to run in parallel. */ + val workers: Int + + /** Create a copy of this [[Test.Parameters]] instance with + * [[Test.Parameters.workers]] set to the specified value. */ + def withWorkers(workers: Int): Parameters = cp( + workers = workers + ) + + /** A callback that ScalaCheck calls each time a test is executed. */ + val testCallback: TestCallback + + /** Create a copy of this [[Test.Parameters]] instance with + * [[Test.Parameters.testCallback]] set to the specified value. */ + def withTestCallback(testCallback: TestCallback): Parameters = cp( + testCallback = testCallback + ) + + /** The maximum ratio between discarded and passed tests allowed before + * ScalaCheck gives up and discards the property. At least + * `minSuccesfulTests` will always be run, though. */ + val maxDiscardRatio: Float + + /** Create a copy of this [[Test.Parameters]] instance with + * [[Test.Parameters.maxDiscardRatio]] set to the specified value. */ + def withMaxDiscardRatio(maxDiscardRatio: Float): Parameters = cp( + maxDiscardRatio = maxDiscardRatio + ) + + /** A custom class loader that should be used during test execution. 
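A sketch of the `Shrink.xmap` helper defined above (illustrative only; the `Age` wrapper is hypothetical):

```
import org.scalacheck.Shrink

// Derive shrinking for a simple wrapper type from Shrink[Int] via the isomorphism.
case class Age(years: Int)
implicit val shrinkAge: Shrink[Age] = Shrink.xmap[Int, Age](Age(_), _.years)
```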
*/ + val customClassLoader: Option[ClassLoader] + + /** Create a copy of this [[Test.Parameters]] instance with + * [[Test.Parameters.customClassLoader]] set to the specified value. */ + def withCustomClassLoader(customClassLoader: Option[ClassLoader] + ): Parameters = cp( + customClassLoader = customClassLoader + ) + + // private since we can't guarantee binary compatibility for this one + private case class cp( + minSuccessfulTests: Int = minSuccessfulTests, + minSize: Int = minSize, + maxSize: Int = maxSize, + rng: scala.util.Random = rng, + workers: Int = workers, + testCallback: TestCallback = testCallback, + maxDiscardRatio: Float = maxDiscardRatio, + customClassLoader: Option[ClassLoader] = customClassLoader + ) extends Parameters + } + + /** Test parameters used by the check methods. Default + * parameters are defined by [[Test.Parameters.Default]]. */ + object Parameters { + /** Default test parameters trait. This can be overriden if you need to + * tweak the parameters: + * + * {{{ + * val myParams = new Parameters.Default { + * override val minSuccesfulTests = 600 + * override val maxDiscardRatio = 8 + * } + * }}} + * + * You can also use the withXXX-methods in + * [[org.scalacheck.Test.Parameters]] to achieve + * the same thing: + * + * {{{ + * val myParams = Parameters.default + * .withMinSuccessfulTests(600) + * .withMaxDiscardRatio(8) + * }}} */ + trait Default extends Parameters { + val minSuccessfulTests: Int = 100 + val minSize: Int = 0 + val maxSize: Int = Gen.Parameters.default.size + val rng: scala.util.Random = Gen.Parameters.default.rng + val workers: Int = 1 + val testCallback: TestCallback = new TestCallback {} + val maxDiscardRatio: Float = 5 + val customClassLoader: Option[ClassLoader] = None + } + + /** Default test parameters instance. */ + val default: Parameters = new Default {} + + /** Verbose console reporter test parameters instance. */ + val defaultVerbose: Parameters = new Default { + override val testCallback = ConsoleReporter(2) + } + } + + /** Test statistics */ + case class Result( + status: Status, + succeeded: Int, + discarded: Int, + freqMap: FreqMap[Set[Any]], + time: Long = 0 + ) { + def passed = status match { + case Passed => true + case Proved(_) => true + case _ => false + } + } + + /** Test status */ + sealed trait Status + + /** ScalaCheck found enough cases for which the property holds, so the + * property is considered correct. (It is not proved correct, though). */ + case object Passed extends Status + + /** ScalaCheck managed to prove the property correct */ + sealed case class Proved(args: List[Arg[Any]]) extends Status + + /** The property was proved wrong with the given concrete arguments. */ + sealed case class Failed(args: List[Arg[Any]], labels: Set[String]) extends Status + + /** The property test was exhausted, it wasn't possible to generate enough + * concrete arguments satisfying the preconditions to get enough passing + * property evaluations. */ + case object Exhausted extends Status + + /** An exception was raised when trying to evaluate the property with the + * given concrete arguments. If an exception was raised before or during + * argument generation, the argument list will be empty. */ + sealed case class PropException(args: List[Arg[Any]], e: Throwable, + labels: Set[String]) extends Status + + /** An exception was raised when trying to generate concrete arguments + * for evaluating the property. + * @deprecated Not used. The type PropException is used for all exceptions. + */ + @deprecated("Not used. 
The type PropException is used for all exceptions.", "1.11.2") + sealed case class GenException(e: Throwable) extends Status + + trait TestCallback { self => + /** Called each time a property is evaluated */ + def onPropEval(name: String, threadIdx: Int, succeeded: Int, + discarded: Int): Unit = () + + /** Called whenever a property has finished testing */ + def onTestResult(name: String, result: Result): Unit = () + + def chain(testCallback: TestCallback) = new TestCallback { + override def onPropEval(name: String, threadIdx: Int, + succeeded: Int, discarded: Int + ): Unit = { + self.onPropEval(name,threadIdx,succeeded,discarded) + testCallback.onPropEval(name,threadIdx,succeeded,discarded) + } + + override def onTestResult(name: String, result: Result): Unit = { + self.onTestResult(name,result) + testCallback.onTestResult(name,result) + } + } + } + + private def assertParams(prms: Parameters) = { + import prms._ + if( + minSuccessfulTests <= 0 || + maxDiscardRatio <= 0 || + minSize < 0 || + maxSize < minSize || + workers <= 0 + ) throw new IllegalArgumentException("Invalid test parameters") + } + + private def secure[T](x: => T): Either[T,Throwable] = + try { Left(x) } catch { case e: Throwable => Right(e) } + + def parseParams(args: Array[String]): Option[Parameters] = { + var params = Parameters.default + args.grouped(2).filter(_.size > 1).map(a => (a(0), a(1))).foreach { + case ("-workers" | "-w", n) => params = params.withWorkers(n.toInt) + case ("-minSize" | "-n", n) => params = params.withMinSize(n.toInt) + case ("-maxSize" | "-x", n) => params = params.withMaxSize(n.toInt) + case ("-verbosity" | "-v", n) => params = params.withTestCallback(ConsoleReporter(n.toInt)) + case ("-maxDiscardRatio" | "-r", n) => params = params.withMaxDiscardRatio(n.toFloat) + case ("-minSuccessfulTests" | "-s", n) => params = params.withMinSuccessfulTests(n.toInt) + case _ => + } + Some(params) + } + + /** Tests a property with parameters that are calculated by applying + * the provided function to [[Test.Parameters.default]]. + * Example use: + * + * {{{ + * Test.check(p) { _. + * withMinSuccessfulTests(80000). + * withWorkers(4) + * } + * }}} + */ + def check(p: Prop)(f: Parameters => Parameters): Result = + check(f(Parameters.default), p) + + /** Tests a property with the given testing parameters, and returns + * the test results. */ + def check(params: Parameters, p: Prop): Result = { + import params._ + import concurrent._ + + assertParams(params) + if(workers > 1) { + assert(!p.isInstanceOf[Commands], "Commands cannot be checked multi-threaded") + } + + val iterations = math.ceil(minSuccessfulTests / (workers: Double)) + val sizeStep = (maxSize-minSize) / (iterations*workers) + var stop = false + val genPrms = new Gen.Parameters.Default { override val rng = params.rng } + val tp = java.util.concurrent.Executors.newFixedThreadPool(workers) + implicit val ec = ExecutionContext.fromExecutor(tp) + + def workerFun(workerIdx: Int): Result = { + var n = 0 // passed tests + var d = 0 // discarded tests + var res: Result = null + var fm = FreqMap.empty[Set[Any]] + while(!stop && res == null && n < iterations) { + val size = (minSize: Double) + (sizeStep * (workerIdx + (workers*(n+d)))) + val propRes = p(genPrms.withSize(size.round.toInt)) + fm = if(propRes.collected.isEmpty) fm else fm + propRes.collected + propRes.status match { + case Prop.Undecided => + d += 1 + testCallback.onPropEval("", workerIdx, n, d) + // The below condition is kind of hacky. 
We have to have + // some margin, otherwise workers might stop testing too + // early because they have been exhausted, but the overall + // test has not. + if (n+d > minSuccessfulTests && 1+workers*maxDiscardRatio*n < d) + res = Result(Exhausted, n, d, fm) + case Prop.True => + n += 1 + testCallback.onPropEval("", workerIdx, n, d) + case Prop.Proof => + n += 1 + res = Result(Proved(propRes.args), n, d, fm) + stop = true + case Prop.False => + res = Result(Failed(propRes.args,propRes.labels), n, d, fm) + stop = true + case Prop.Exception(e) => + res = Result(PropException(propRes.args,e,propRes.labels), n, d, fm) + stop = true + } + } + if (res == null) { + if (maxDiscardRatio*n > d) Result(Passed, n, d, fm) + else Result(Exhausted, n, d, fm) + } else res + } + + def mergeResults(r1: Result, r2: Result): Result = { + val Result(st1, s1, d1, fm1, _) = r1 + val Result(st2, s2, d2, fm2, _) = r2 + if (st1 != Passed && st1 != Exhausted) + Result(st1, s1+s2, d1+d2, fm1++fm2, 0) + else if (st2 != Passed && st2 != Exhausted) + Result(st2, s1+s2, d1+d2, fm1++fm2, 0) + else { + if (s1+s2 >= minSuccessfulTests && maxDiscardRatio*(s1+s2) >= (d1+d2)) + Result(Passed, s1+s2, d1+d2, fm1++fm2, 0) + else + Result(Exhausted, s1+s2, d1+d2, fm1++fm2, 0) + } + } + + try { + val start = System.currentTimeMillis + val r = + if(workers < 2) workerFun(0) + else { + val fs = List.range(0,workers) map (idx => Future { + params.customClassLoader.map( + Thread.currentThread.setContextClassLoader(_) + ) + blocking { workerFun(idx) } + }) + val zeroRes = Result(Passed,0,0,FreqMap.empty[Set[Any]],0) + val res = Future.fold(fs)(zeroRes)(mergeResults) + Await.result(res, concurrent.duration.Duration.Inf) + } + val timedRes = r.copy(time = System.currentTimeMillis-start) + params.testCallback.onTestResult("", timedRes) + timedRes + } finally { + stop = true + tp.shutdown() + } + } + + /** Check a set of properties. */ + def checkProperties(prms: Parameters, ps: Properties): Seq[(String,Result)] = + ps.properties.map { case (name,p) => + val testCallback = new TestCallback { + override def onPropEval(n: String, t: Int, s: Int, d: Int) = + prms.testCallback.onPropEval(name,t,s,d) + override def onTestResult(n: String, r: Result) = + prms.testCallback.onTestResult(name,r) + } + val res = check(prms.withTestCallback(testCallback), p) + (name,res) + } +} diff --git a/src/partest-extras/scala/org/scalacheck/util/Buildable.scala b/src/partest-extras/scala/org/scalacheck/util/Buildable.scala new file mode 100644 index 000000000000..6a275b05c282 --- /dev/null +++ b/src/partest-extras/scala/org/scalacheck/util/Buildable.scala @@ -0,0 +1,77 @@ +/*-------------------------------------------------------------------------*\ +** ScalaCheck ** +** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. ** +** http://www.scalacheck.org ** +** ** +** This software is released under the terms of the Revised BSD License. ** +** There is NO WARRANTY. See the file LICENSE for the full text. 
** +\*------------------------------------------------------------------------ */ + +package org.scalacheck.util + +import collection._ + +trait Buildable[T,C[_]] { + def builder: mutable.Builder[T,C[T]] + def fromIterable(it: Traversable[T]): C[T] = { + val b = builder + b ++= it + b.result() + } +} + +trait Buildable2[T,U,C[_,_]] { + def builder: mutable.Builder[(T,U),C[T,U]] + def fromIterable(it: Traversable[(T,U)]): C[T,U] = { + val b = builder + b ++= it + b.result() + } +} + +object Buildable { + import generic.CanBuildFrom + + implicit def buildableCanBuildFrom[T, C[_]](implicit c: CanBuildFrom[C[_], T, C[T]]) = + new Buildable[T, C] { + def builder = c.apply + } + + import java.util.ArrayList + implicit def buildableArrayList[T] = new Buildable[T,ArrayList] { + def builder = new mutable.Builder[T,ArrayList[T]] { + val al = new ArrayList[T] + def +=(x: T) = { + al.add(x) + this + } + def clear() = al.clear() + def result() = al + } + } + +} + +object Buildable2 { + + implicit def buildableMutableMap[T,U] = new Buildable2[T,U,mutable.Map] { + def builder = mutable.Map.newBuilder + } + + implicit def buildableImmutableMap[T,U] = new Buildable2[T,U,immutable.Map] { + def builder = immutable.Map.newBuilder + } + + implicit def buildableMap[T,U] = new Buildable2[T,U,Map] { + def builder = Map.newBuilder + } + + implicit def buildableImmutableSortedMap[T: Ordering, U] = new Buildable2[T,U,immutable.SortedMap] { + def builder = immutable.SortedMap.newBuilder + } + + implicit def buildableSortedMap[T: Ordering, U] = new Buildable2[T,U,SortedMap] { + def builder = SortedMap.newBuilder + } + +} diff --git a/src/partest-extras/scala/org/scalacheck/util/CmdLineParser.scala b/src/partest-extras/scala/org/scalacheck/util/CmdLineParser.scala new file mode 100644 index 000000000000..45b6ac6948ee --- /dev/null +++ b/src/partest-extras/scala/org/scalacheck/util/CmdLineParser.scala @@ -0,0 +1,41 @@ +/*-------------------------------------------------------------------------*\ +** ScalaCheck ** +** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. ** +** http://www.scalacheck.org ** +** ** +** This software is released under the terms of the Revised BSD License. ** +** There is NO WARRANTY. See the file LICENSE for the full text. ** +\*------------------------------------------------------------------------ */ + +package org.scalacheck.util + +import scala.collection.Set +import org.scalacheck.Test + +private[scalacheck] trait CmdLineParser { + + type Elem = String + + trait Opt[+T] { + val default: T + val names: Set[String] + val help: String + } + trait Flag extends Opt[Unit] + trait IntOpt extends Opt[Int] + trait FloatOpt extends Opt[Float] + trait StrOpt extends Opt[String] + + class OptMap { + private val opts = new collection.mutable.HashMap[Opt[_], Any] + def apply(flag: Flag): Boolean = opts.contains(flag) + def apply[T](opt: Opt[T]): T = opts.get(opt) match { + case None => opt.default + case Some(v) => v.asInstanceOf[T] + } + def update[T](opt: Opt[T], optVal: T) = opts.update(opt, optVal) + } + + val opts: Set[Opt[_]] + +} diff --git a/src/partest-extras/scala/org/scalacheck/util/ConsoleReporter.scala b/src/partest-extras/scala/org/scalacheck/util/ConsoleReporter.scala new file mode 100644 index 000000000000..89858dfb64e1 --- /dev/null +++ b/src/partest-extras/scala/org/scalacheck/util/ConsoleReporter.scala @@ -0,0 +1,44 @@ +/*-------------------------------------------------------------------------*\ +** ScalaCheck ** +** Copyright (c) 2007-2014 Rickard Nilsson. 
All rights reserved. ** +** http://www.scalacheck.org ** +** ** +** This software is released under the terms of the Revised BSD License. ** +** There is NO WARRANTY. See the file LICENSE for the full text. ** +\*------------------------------------------------------------------------ */ + +package org.scalacheck.util + +import Pretty.{Params, pretty, format} +import org.scalacheck.{Prop, Properties, Test} + +/** A [[org.scalacheck.Test.TestCallback]] implementation that prints + * test results directly to the console. This is the callback used + * by ScalaCheck's command line test runner, and when you run [[org.scalacheck.Prop!.check:Unit*]] + */ +class ConsoleReporter(val verbosity: Int) extends Test.TestCallback { + + private val prettyPrms = Params(verbosity) + + override def onTestResult(name: String, res: Test.Result) = { + if(verbosity > 0) { + if(name == "") { + val s = (if(res.passed) "+ " else "! ") + pretty(res, prettyPrms) + printf("\r%s\n", format(s, "", "", 75)) + } else { + val s = (if(res.passed) "+ " else "! ") + name + ": " + + pretty(res, prettyPrms) + printf("\r%s\n", format(s, "", "", 75)) + } + } + } + +} + +object ConsoleReporter { + + /** Factory method, creates a ConsoleReporter with the + * the given verbosity */ + def apply(verbosity: Int = 0) = new ConsoleReporter(verbosity) + +} diff --git a/src/partest-extras/scala/org/scalacheck/util/FreqMap.scala b/src/partest-extras/scala/org/scalacheck/util/FreqMap.scala new file mode 100644 index 000000000000..2a9f36f1e540 --- /dev/null +++ b/src/partest-extras/scala/org/scalacheck/util/FreqMap.scala @@ -0,0 +1,65 @@ +/*-------------------------------------------------------------------------*\ +** ScalaCheck ** +** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. ** +** http://www.scalacheck.org ** +** ** +** This software is released under the terms of the Revised BSD License. ** +** There is NO WARRANTY. See the file LICENSE for the full text. 
** +\*------------------------------------------------------------------------ */ + +package org.scalacheck.util + +trait FreqMap[T] { + protected val underlying: scala.collection.immutable.Map[T,Int] + val total: Int + + def +(t: T) = new FreqMap[T] { + private val n = FreqMap.this.underlying.get(t) match { + case None => 1 + case Some(n) => n+1 + } + val underlying = FreqMap.this.underlying + (t -> n) + val total = FreqMap.this.total + 1 + } + + def -(t: T) = new FreqMap[T] { + val underlying = FreqMap.this.underlying.get(t) match { + case None => FreqMap.this.underlying + case Some(n) => FreqMap.this.underlying + (t -> (n-1)) + } + val total = FreqMap.this.total + 1 + } + + def ++(fm: FreqMap[T]) = new FreqMap[T] { + private val keys = FreqMap.this.underlying.keySet ++ fm.underlying.keySet + private val mappings = keys.toStream.map { x => + (x, fm.getCount(x).getOrElse(0) + FreqMap.this.getCount(x).getOrElse(0)) + } + val underlying = scala.collection.immutable.Map(mappings: _*) + val total = FreqMap.this.total + fm.total + } + + def --(fm: FreqMap[T]) = new FreqMap[T] { + val underlying = FreqMap.this.underlying transform { + case (x,n) => n - fm.getCount(x).getOrElse(0) + } + lazy val total = (0 /: underlying.valuesIterator) (_ + _) + } + + def getCount(t: T) = underlying.get(t) + + def getCounts: List[(T,Int)] = underlying.toList.sortBy(-_._2) + + def getRatio(t: T) = for(c <- getCount(t)) yield (c: Float)/total + + def getRatios = for((t,c) <- getCounts) yield (t, (c: Float)/total) + + override def toString = underlying.toString +} + +object FreqMap { + def empty[T] = new FreqMap[T] { + val underlying = scala.collection.immutable.Map.empty[T,Int] + val total = 0 + } +} diff --git a/src/partest-extras/scala/org/scalacheck/util/Pretty.scala b/src/partest-extras/scala/org/scalacheck/util/Pretty.scala new file mode 100644 index 000000000000..13a1b44b51d7 --- /dev/null +++ b/src/partest-extras/scala/org/scalacheck/util/Pretty.scala @@ -0,0 +1,129 @@ +/*-------------------------------------------------------------------------*\ +** ScalaCheck ** +** Copyright (c) 2007-2014 Rickard Nilsson. All rights reserved. ** +** http://www.scalacheck.org ** +** ** +** This software is released under the terms of the Revised BSD License. ** +** There is NO WARRANTY. See the file LICENSE for the full text. 
** +\*------------------------------------------------------------------------ */ + +package org.scalacheck.util + +import org.scalacheck.Prop.Arg +import org.scalacheck.Test + +import math.round + + +sealed trait Pretty { + def apply(prms: Pretty.Params): String + + def map(f: String => String) = Pretty(prms => f(Pretty.this(prms))) + + def flatMap(f: String => Pretty) = Pretty(prms => f(Pretty.this(prms))(prms)) +} + +object Pretty { + + case class Params(verbosity: Int) + + val defaultParams = Params(0) + + def apply(f: Params => String) = new Pretty { def apply(p: Params) = f(p) } + + def pretty[T <% Pretty](t: T, prms: Params): String = t(prms) + + def pretty[T <% Pretty](t: T): String = t(defaultParams) + + implicit def strBreak(s1: String) = new { + def /(s2: String) = if(s2 == "") s1 else s1+"\n"+s2 + } + + def pad(s: String, c: Char, length: Int) = + if(s.length >= length) s + else s + List.fill(length-s.length)(c).mkString + + def break(s: String, lead: String, length: Int): String = + if(s.length <= length) s + else s.substring(0, length) / break(lead+s.substring(length), lead, length) + + def format(s: String, lead: String, trail: String, width: Int) = + s.lines.map(l => break(lead+l+trail, " ", width)).mkString("\n") + + implicit def prettyAny(t: Any) = Pretty { p => t.toString } + + implicit def prettyString(t: String) = Pretty { p => "\""++t++"\"" } + + implicit def prettyList(l: List[Any]) = Pretty { p => + l.map("\""+_+"\"").mkString("List(", ", ", ")") + } + + implicit def prettyThrowable(e: Throwable) = Pretty { prms => + val strs = e.getStackTrace.map { st => + import st._ + getClassName+"."+getMethodName + "("+getFileName+":"+getLineNumber+")" + } + + val strs2 = + if(prms.verbosity <= 0) Array[String]() + else if(prms.verbosity <= 1) strs.take(5) + else strs + + e.getClass.getName + ": " + e.getMessage / strs2.mkString("\n") + } + + def prettyArgs(args: Seq[Arg[Any]]): Pretty = Pretty { prms => + if(args.isEmpty) "" else { + for((a,i) <- args.zipWithIndex) yield { + val l = "> "+(if(a.label == "") "ARG_"+i else a.label) + val s = + if(a.shrinks == 0) "" + else "\n"+l+"_ORIGINAL: "+a.prettyOrigArg(prms) + l+": "+a.prettyArg(prms)+""+s + } + }.mkString("\n") + } + + implicit def prettyFreqMap(fm: FreqMap[Set[Any]]) = Pretty { prms => + if(fm.total == 0) "" + else { + "> Collected test data: " / { + for { + (xs,r) <- fm.getRatios + ys = xs - (()) + if !ys.isEmpty + } yield round(r*100)+"% " + ys.mkString(", ") + }.mkString("\n") + } + } + + implicit def prettyTestRes(res: Test.Result) = Pretty { prms => + def labels(ls: collection.immutable.Set[String]) = + if(ls.isEmpty) "" + else "> Labels of failing property: " / ls.mkString("\n") + val s = res.status match { + case Test.Proved(args) => "OK, proved property."/prettyArgs(args)(prms) + case Test.Passed => "OK, passed "+res.succeeded+" tests." + case Test.Failed(args, l) => + "Falsified after "+res.succeeded+" passed tests."/labels(l)/prettyArgs(args)(prms) + case Test.Exhausted => + "Gave up after only "+res.succeeded+" passed tests. " + + res.discarded+" tests were discarded." 
+      case Test.PropException(args,e,l) =>
+        "Exception raised on property evaluation."/labels(l)/prettyArgs(args)(prms)/
+          "> Exception: "+pretty(e,prms)
+      case Test.GenException(e) =>
+        "Exception raised on argument generation."/
+          "> Exception: "+pretty(e,prms)
+    }
+    val t = if(prms.verbosity <= 1) "" else "Elapsed time: "+prettyTime(res.time)
+    s/t/pretty(res.freqMap,prms)
+  }
+
+  def prettyTime(millis: Long): String = {
+    val min = millis/(60*1000)
+    val sec = (millis-(60*1000*min)) / 1000d
+    if(min <= 0) "%.3f sec ".format(sec)
+    else "%d min %.3f sec ".format(min, sec)
+  }
+}
diff --git a/versions.properties b/versions.properties
index 488535c9914f..3b8077ab885b 100644
--- a/versions.properties
+++ b/versions.properties
@@ -31,7 +31,9 @@ scala-asm.version=5.0.4-scala-3
 
 # external modules, used internally (not shipped)
 partest.version.number=1.0.16
-scalacheck.version.number=1.11.6
+# We've embedded these sources in partest-extras for now. After 2.12.0 is released
+# we can switch to a public release.
+# scalacheck.version.number=1.11.6
 
 # TODO: modularize the compiler
 #scala-compiler-doc.version.number=1.0.0-RC1

From 1ae80e868039e4e18843ec67768002085decb763 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Mon, 27 Jun 2016 13:44:52 +1000
Subject: [PATCH 0140/2793] Fix ParVector#padTo

This was throwing a UnsupportedOperationError for small operations.

The parallel collections test suite sets `-minSuccessfulTests 5` in
test/files/scalacheck/parallel-collections/pc.scala, which is far lower
than the default of 100, and means that we are less likely to falsify
properties.

This parameter seems to have been added in #2476, assuming I'm reading
it correctly. Not sure of the motivation, perhaps just to make the
slowest part of the scalacheck test suite run faster?

I haven't changed the parameter for now, but instead have included a
one-element collection in the generator.

I also found that when the test failed, ScalaCheck would try to
minimize the example, but did so assuming that the elements of the
tuple of test data could be independently shrunk. This breaks the
invariant that the two collections contain equal elements, and led to
spurious error reports.

I have disabled shrinking in all tests affected by this.
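(Illustration only, not part of this change; the object and property names
below are made up.) A minimal ScalaCheck sketch of the shrinking problem
described above: one generator builds both collections from the same
elements, so shrinking each tuple element independently would report pairs
that no longer hold the same data, while `forAllNoShrink` reports the
original failing input.

```
import org.scalacheck.{Gen, Properties}
import org.scalacheck.Prop.forAllNoShrink

object PadToSketch extends Properties("padTo-sketch") {
  // Both halves of the pair come from the same list, so the pair is only
  // meaningful as a whole and must not be shrunk element-wise.
  val pairs = for (s <- Gen.listOf(Gen.choose(0, 100))) yield (s, s.par)

  property("padTo agrees") = forAllNoShrink(pairs) { case (s, coll) =>
    s.padTo(s.length + 3, -1) == coll.padTo(coll.length + 3, -1).seq
  }
}
```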
--- .../parallel/immutable/package.scala | 7 ++- .../ParallelArrayCheck.scala | 2 +- .../ParallelArrayViewCheck.scala | 2 +- .../ParallelIterableCheck.scala | 44 +++++++++---------- .../ParallelMapCheck1.scala | 2 +- .../ParallelSeqCheck.scala | 33 +++++++------- .../ParallelSetCheck.scala | 2 +- 7 files changed, 49 insertions(+), 43 deletions(-) diff --git a/src/library/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala index 8fd84eaf4d5a..3cafdba5f702 100644 --- a/src/library/scala/collection/parallel/immutable/package.scala +++ b/src/library/scala/collection/parallel/immutable/package.scala @@ -20,7 +20,12 @@ package immutable { self => def apply(idx: Int) = if (0 <= idx && idx < length) elem else throw new IndexOutOfBoundsException("" + idx) - override def seq = throw new UnsupportedOperationException + override def seq: collection.immutable.Seq[T] = new collection.AbstractSeq[T] with collection.immutable.Seq[T] { + override def length: Int = self.length + override def apply(idx: Int): T = self.apply(idx) + override def iterator: Iterator[T] = Iterator.continually(elem).take(length) + override def par: ParSeq[T] = self + } def update(idx: Int, elem: T) = throw new UnsupportedOperationException class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends SeqSplitter[T] { diff --git a/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala b/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala index 691a3e961eda..605c16857a40 100644 --- a/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala +++ b/test/files/scalacheck/parallel-collections/ParallelArrayCheck.scala @@ -44,7 +44,7 @@ abstract class ParallelArrayCheck[T](tp: String) extends ParallelSeqCheck[T]("Pa pa } - property("array mappings must be equal") = forAll(collectionPairs) { case (t, coll) => + property("array mappings must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => val results = for ((f, ind) <- mapFunctions.zipWithIndex) yield ("op index: " + ind) |: t.map(f) == coll.map(f) results.reduceLeft(_ && _) diff --git a/test/files/scalacheck/parallel-collections/ParallelArrayViewCheck.scala b/test/files/scalacheck/parallel-collections/ParallelArrayViewCheck.scala index 9805e2644f74..fb09a5bbb724 100644 --- a/test/files/scalacheck/parallel-collections/ParallelArrayViewCheck.scala +++ b/test/files/scalacheck/parallel-collections/ParallelArrayViewCheck.scala @@ -46,7 +46,7 @@ // pa.view // } -// property("forces must be equal") = forAll(collectionPairs) { case (s, coll) => +// property("forces must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) => // val smodif = (s ++ s).reverse.take(s.length).reverse.zip(s).drop(s.length / 2) // val cmodif = (coll ++ s).reverse.take(s.length).reverse.zip(s).drop(s.length / 2).force // smodif == cmodif diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala index 468bcb6dd117..7e7ef2ce1bcd 100644 --- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala +++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala @@ -109,7 +109,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col println("cf == tf - " + (cf == tf)) } - property("reductions must be equal for assoc. operators") = forAll(collectionPairs) { case (t, coll) => + property("reductions must be equal for assoc. 
operators") = forAllNoShrink(collectionPairs) { case (t, coll) => if (t.size != 0) { val results = for ((op, ind) <- reduceOperators.zipWithIndex) yield { val tr = t.reduceLeft(op) @@ -127,7 +127,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col } else "has size 0" |: true } - property("counts must be equal") = forAll(collectionPairs) { case (t, coll) => + property("counts must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => val results = for ((pred, ind) <- countPredicates.zipWithIndex) yield { val tc = t.count(pred) val cc = coll.count(pred) @@ -143,19 +143,19 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col results.reduceLeft(_ && _) } - property("forall must be equal") = forAll(collectionPairs) { case (t, coll) => + property("forall must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => val results = for ((pred, ind) <- forallPredicates.zipWithIndex) yield ("op index: " + ind) |: t.forall(pred) == coll.forall(pred) results.reduceLeft(_ && _) } - property("exists must be equal") = forAll(collectionPairs) { case (t, coll) => + property("exists must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => val results = for ((pred, ind) <- existsPredicates.zipWithIndex) yield ("op index: " + ind) |: t.exists(pred) == coll.exists(pred) results.reduceLeft(_ && _) } - property("both must find or not find an element") = forAll(collectionPairs) { case (t, coll) => + property("both must find or not find an element") = forAllNoShrink(collectionPairs) { case (t, coll) => val results = for ((pred, ind) <- findPredicates.zipWithIndex) yield { val ft = t.find(pred) val fcoll = coll.find(pred) @@ -164,7 +164,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col results.reduceLeft(_ && _) } - property("mappings must be equal") = forAll(collectionPairs) { case (t, coll) => + property("mappings must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => val results = for ((f, ind) <- mapFunctions.zipWithIndex) yield { val ms = t.map(f) val mp = coll.map(f) @@ -185,7 +185,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col results.reduceLeft(_ && _) } - property("collects must be equal") = forAll(collectionPairs) { case (t, coll) => + property("collects must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => val results = for ((f, ind) <- partialMapFunctions.zipWithIndex) yield { val ps = t.collect(f) val pp = coll.collect(f) @@ -201,12 +201,12 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col results.reduceLeft(_ && _) } - property("flatMaps must be equal") = forAll(collectionPairs) { case (t, coll) => + property("flatMaps must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => (for ((f, ind) <- flatMapFunctions.zipWithIndex) yield ("op index: " + ind) |: areEqual(t.flatMap(f), coll.flatMap(f))).reduceLeft(_ && _) } - property("filters must be equal") = forAll(collectionPairs) { case (t, coll) => + property("filters must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => (for ((p, ind) <- filterPredicates.zipWithIndex) yield { val tf = t.filter(p) val cf = coll.filter(p) @@ -235,7 +235,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col }).reduceLeft(_ && _) } - property("filterNots must be equal") = forAll(collectionPairs) { case (t, coll) => + property("filterNots must be equal") = 
forAllNoShrink(collectionPairs) { case (t, coll) => (for ((p, ind) <- filterNotPredicates.zipWithIndex) yield { val tf = t.filterNot(p) val cf = coll.filterNot(p) @@ -244,7 +244,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col }).reduceLeft(_ && _) } - if (!isCheckingViews) property("partitions must be equal") = forAll(collectionPairs) { case (t, coll) => + if (!isCheckingViews) property("partitions must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => (for ((p, ind) <- partitionPredicates.zipWithIndex) yield { val tpart = t.partition(p) val cpart = coll.partition(p) @@ -258,15 +258,15 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col }).reduceLeft(_ && _) } - if (hasStrictOrder) property("takes must be equal") = forAll(collectionPairsWithLengths) { case (t, coll, n) => + if (hasStrictOrder) property("takes must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (t, coll, n) => ("take " + n + " elements") |: t.take(n) == coll.take(n) } - if (hasStrictOrder) property("drops must be equal") = forAll(collectionPairsWithLengths) { case (t, coll, n) => + if (hasStrictOrder) property("drops must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (t, coll, n) => ("drop " + n + " elements") |: t.drop(n) == coll.drop(n) } - if (hasStrictOrder) property("slices must be equal") = forAll(collectionPairsWith2Indices) + if (hasStrictOrder) property("slices must be equal") = forAllNoShrink(collectionPairsWith2Indices) { case (t, coll, fr, slicelength) => val from = if (fr < 0) 0 else fr val until = if (from + slicelength > t.size) t.size else from + slicelength @@ -290,7 +290,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col ("slice from " + from + " until " + until) |: tsl == collsl } - if (hasStrictOrder) property("splits must be equal") = forAll(collectionPairsWithLengths) { case (t, coll, n) => + if (hasStrictOrder) property("splits must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (t, coll, n) => val tspl = t.splitAt(n) val cspl = coll.splitAt(n) if (tspl != cspl) { @@ -303,7 +303,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col ("splitAt " + n) |: tspl == cspl } - if (hasStrictOrder) property("takeWhiles must be equal") = forAll(collectionPairs) { case (t, coll) => + if (hasStrictOrder) property("takeWhiles must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => (for ((pred, ind) <- takeWhilePredicates.zipWithIndex) yield { val tt = t.takeWhile(pred) val ct = coll.takeWhile(pred) @@ -318,7 +318,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col }).reduceLeft(_ && _) } - if (hasStrictOrder) property("spans must be equal") = forAll(collectionPairs) { case (t, coll) => + if (hasStrictOrder) property("spans must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => (for ((pred, ind) <- spanPredicates.zipWithIndex) yield { val tsp = t.span(pred) val csp = coll.span(pred) @@ -336,13 +336,13 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col }).reduceLeft(_ && _) } - if (hasStrictOrder) property("dropWhiles must be equal") = forAll(collectionPairs) { case (t, coll) => + if (hasStrictOrder) property("dropWhiles must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => (for ((pred, ind) <- dropWhilePredicates.zipWithIndex) yield { ("operator " + ind) |: t.dropWhile(pred) == coll.dropWhile(pred) 
}).reduceLeft(_ && _) } - property("folds must be equal for assoc. operators") = forAll(collectionPairs) { case (t, coll) => + property("folds must be equal for assoc. operators") = forAllNoShrink(collectionPairs) { case (t, coll) => (for (((first, op), ind) <- foldArguments.zipWithIndex) yield { val tres = t.foldLeft(first)(op) val cres = coll.fold(first)(op) @@ -389,7 +389,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col } } - if (hasStrictOrder) property("copies to array must be equal") = forAll(collectionPairs) { case (t, coll) => + if (hasStrictOrder) property("copies to array must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => val tarr = newArray(t.size) val collarr = newArray(coll.size) t.copyToArray(tarr, 0, t.size) @@ -403,7 +403,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col tarr.toSeq == collarr.toSeq } - if (hasStrictOrder) property("scans must be equal") = forAll(collectionPairs) { + if (hasStrictOrder) property("scans must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => (for (((first, op), ind) <- foldArguments.zipWithIndex) yield { val tscan = t.scanLeft(first)(op) @@ -419,7 +419,7 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col }).reduceLeft(_ && _) } - property("groupBy must be equal") = forAll(collectionPairs) { + property("groupBy must be equal") = forAllNoShrink(collectionPairs) { case (t, coll) => (for ((f, ind) <- groupByFunctions.zipWithIndex) yield { val tgroup = t.groupBy(f) diff --git a/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala b/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala index d4643e7f2c74..50aa4ad0c776 100644 --- a/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala +++ b/test/files/scalacheck/parallel-collections/ParallelMapCheck1.scala @@ -17,7 +17,7 @@ import scala.collection.parallel._ abstract class ParallelMapCheck[K, V](collname: String) extends ParallelIterableCheck[(K, V)](collname) { type CollType <: ParMap[K, V] - property("gets iterated keys") = forAll(collectionPairs) { + property("gets iterated keys") = forAllNoShrink(collectionPairs) { case (t, coll) => val containsT = for ((k, v) <- t) yield (coll.get(k) == Some(v)) val containsSelf = coll.map { case (k, v) => coll.get(k) == Some(v) } diff --git a/test/files/scalacheck/parallel-collections/ParallelSeqCheck.scala b/test/files/scalacheck/parallel-collections/ParallelSeqCheck.scala index 3f8a8ad4f555..48c3d3f74504 100644 --- a/test/files/scalacheck/parallel-collections/ParallelSeqCheck.scala +++ b/test/files/scalacheck/parallel-collections/ParallelSeqCheck.scala @@ -24,6 +24,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe def fromSeq(s: Seq[T]): CollType override def instances(vals: Seq[Gen[T]]): Gen[Seq[T]] = oneOf( + Gen.const(ofSize(vals, 1)), sized( sz => ofSize(vals, sz) @@ -74,7 +75,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe coll.patch(updateStart, coll, howMany) } - property("segmentLengths must be equal") = forAll(collectionPairsWithLengths) { case (s, coll, len) => + property("segmentLengths must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) => (for ((pred, ind) <- segmentLengthPredicates.zipWithIndex) yield { val slen = s.segmentLength(pred, if (len < 0) 0 else len) val clen = coll.segmentLength(pred, len) @@ -88,13 +89,13 @@ abstract class ParallelSeqCheck[T](collName: 
String) extends ParallelIterableChe }).reduceLeft(_ && _) } - property("prefixLengths must be equal") = forAll(collectionPairs) { case (s, coll) => + property("prefixLengths must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) => (for ((pred, ind) <- segmentLengthPredicates.zipWithIndex) yield { ("operator " + ind) |: s.prefixLength(pred) == coll.prefixLength(pred) }).reduceLeft(_ && _) } - property("indexWheres must be equal") = forAll(collectionPairsWithLengths) { case (s, coll, len) => + property("indexWheres must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) => (for ((pred, ind) <- indexWherePredicates.zipWithIndex) yield { val sind = s.indexWhere(pred, len) val cind = coll.indexWhere(pred, len) @@ -109,7 +110,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe }).reduceLeft(_ && _) } - property("lastIndexWheres must be equal") = forAll(collectionPairsWithLengths) { case (s, coll, len) => + property("lastIndexWheres must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) => (for ((pred, ind) <- lastIndexWherePredicates.zipWithIndex) yield { val end = if (len >= s.size) s.size - 1 else len val sind = s.lastIndexWhere(pred, end) @@ -118,7 +119,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe }).reduceLeft(_ && _) } - property("reverses must be equal") = forAll(collectionPairs) { case (s, coll) => + property("reverses must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) => (s.length == 0 && s.getClass == classOf[collection.immutable.Range]) || { val sr = s.reverse @@ -133,13 +134,13 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe } } - property("reverseMaps must be equal") = forAll(collectionPairs) { case (s, coll) => + property("reverseMaps must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) => (for ((f, ind) <- reverseMapFunctions.zipWithIndex) yield { ("operator " + ind) |: s.reverseMap(f) == coll.reverseMap(f) }).reduceLeft(_ && _) } - property("sameElements must be equal") = forAll(collectionPairsWithModifiedWithLengths) { + property("sameElements must be equal") = forAllNoShrink(collectionPairsWithModifiedWithLengths) { case (s, coll, collmodif, len) => val pos = if (len < 0) 0 else len val scm = s.sameElements(collmodif) @@ -171,7 +172,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe }).reduceLeft(_ && _) } - property("startsWiths must be equal") = forAll(collectionPairsWithModifiedWithLengths) { + property("startsWiths must be equal") = forAllNoShrink(collectionPairsWithModifiedWithLengths) { case (s, coll, collmodif, len) => val pos = if (len < 0) 0 else len ("start with self" |: s.startsWith(s) == coll.startsWith(coll)) && @@ -195,7 +196,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe }).reduceLeft(_ && _) } - property("endsWiths must be equal") = forAll(collectionPairsWithModified) { + property("endsWiths must be equal") = forAllNoShrink(collectionPairsWithModified) { case (s, coll, collmodif) => ("ends with self" |: s.endsWith(s) == coll.endsWith(s)) && ("ends with tail" |: (s.length == 0 || s.endsWith(s.tail) == coll.endsWith(coll.tail))) && @@ -214,7 +215,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe }).reduceLeft(_ && _) } - property("unions must be equal") = forAll(collectionPairsWithModified) { case (s, coll, collmodif) => + property("unions must be 
equal") = forAllNoShrink(collectionPairsWithModified) { case (s, coll, collmodif) => ("modified" |: s.union(collmodif.seq) == coll.union(collmodif)) && ("empty" |: s.union(Nil) == coll.union(fromSeq(Nil))) } @@ -233,7 +234,7 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe ("with one" |: (s.length == 0 || s.patch(from, List(s(0)), 1) == coll.patch(from, fromSeq(List(coll(0))), 1))) } - if (!isCheckingViews) property("updates must be equal") = forAll(collectionPairsWithLengths) { case (s, coll, len) => + if (!isCheckingViews) property("updates must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) => val pos = if (len >= s.length) s.length - 1 else len if (s.length > 0) { val supd = s.updated(pos, s(0)) @@ -248,15 +249,15 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe } else "trivially" |: true } - property("prepends must be equal") = forAll(collectionPairs) { case (s, coll) => + property("prepends must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) => s.length == 0 || s(0) +: s == coll(0) +: coll } - property("appends must be equal") = forAll(collectionPairs) { case (s, coll) => + property("appends must be equal") = forAllNoShrink(collectionPairs) { case (s, coll) => s.length == 0 || s :+ s(0) == coll :+ coll(0) } - property("padTos must be equal") = forAll(collectionPairsWithLengths) { case (s, coll, len) => + property("padTos must be equal") = forAllNoShrink(collectionPairsWithLengths) { case (s, coll, len) => val someValue = sampleValue val sdoub = s.padTo(len * 2, someValue) val cdoub = coll.padTo(len * 2, someValue) @@ -267,10 +268,10 @@ abstract class ParallelSeqCheck[T](collName: String) extends ParallelIterableChe println(cdoub) } ("smaller" |: s.padTo(len / 2, someValue) == coll.padTo(len / 2, someValue)) && - ("bigger" |: sdoub == cdoub) + ("bigger" |: sdoub == cdoub) } - property("corresponds must be equal") = forAll(collectionPairsWithModified) { case (s, coll, modified) => + property("corresponds must be equal") = forAllNoShrink(collectionPairsWithModified) { case (s, coll, modified) => val modifcut = modified.toSeq.slice(0, modified.length) ("self" |: s.corresponds(s)(_ == _) == coll.corresponds(coll)(_ == _)) && ("modified" |: s.corresponds(modified.seq)(_ == _) == coll.corresponds(modified)(_ == _)) && diff --git a/test/files/scalacheck/parallel-collections/ParallelSetCheck.scala b/test/files/scalacheck/parallel-collections/ParallelSetCheck.scala index 56f7832fed0e..c22dddf96d80 100644 --- a/test/files/scalacheck/parallel-collections/ParallelSetCheck.scala +++ b/test/files/scalacheck/parallel-collections/ParallelSetCheck.scala @@ -17,7 +17,7 @@ import scala.collection.parallel._ abstract class ParallelSetCheck[T](collname: String) extends ParallelIterableCheck[T](collname) { type CollType <: ParSet[T] - property("gets iterated keys") = forAll(collectionPairs) { + property("gets iterated keys") = forAllNoShrink(collectionPairs) { case (t, coll) => val containsT = for (elem <- t) yield (coll.contains(elem)) val containsSelf = for (elem <- coll) yield (coll.contains(elem)) From add83b8a8fea641c3ba96ed963c25c84ffe137fb Mon Sep 17 00:00:00 2001 From: Dima Tkach Date: Tue, 28 Jun 2016 07:26:45 -0400 Subject: [PATCH 0141/2793] Override `.slice` in ArrayOps to use arraycopy. This makes it ~10x faster when copying large chunks arround. 
My benchmark: def bm(duration: Long)(f: => Unit): Int = { val end = System.currentTimeMillis + duration var count = 0 while(System.currentTimeMillis < end) { f count += 1 } count } def measure(seconds: Int)(f: => Unit) = (1 to seconds).map { _ => bm(1000)(f) }.sum / seconds val array = scala.util.Random.alphanumeric.take(1000).toArray measure(20) { array.slice(100, 500) } // ~5 million measure(20) { scala.collection.WrappedArray(array).slice(100, 500) } // ~300K --- src/library/scala/collection/mutable/ArrayOps.scala | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala index 507585b9cf7b..b384decbfb11 100644 --- a/src/library/scala/collection/mutable/ArrayOps.scala +++ b/src/library/scala/collection/mutable/ArrayOps.scala @@ -41,6 +41,17 @@ sealed trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomPara if (l > 0) Array.copy(repr, 0, xs, start, l) } + override def slice(from: Int, until: Int): Array[T] = { + val lo = math.max(from, 0) + val hi = math.min(math.max(until, 0), repr.length) + val size = math.max(hi-lo, 0) + val result = java.lang.reflect.Array.newInstance(elementClass, size) + if(size > 0) { + Array.copy(repr, lo, result, 0, size) + } + result.asInstanceOf[Array[T]] + } + override def toArray[U >: T : ClassTag]: Array[U] = { val thatElementClass = implicitly[ClassTag[U]].runtimeClass if (elementClass eq thatElementClass) From 367bab451415d8d4c78253e1aa60cb5301ac0a93 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 6 Jun 2016 14:24:38 +1000 Subject: [PATCH 0142/2793] Remove nonsensical body for trait getter This corrects an error in the change to the trait encoding in #5003: getters in traits should have empty bodies and be emitted as abstract. 
``` % ~/scala/2.12.0-M4/bin/scalac sandbox/test.scala && javap -c T Compiled from "test.scala" public interface T { public abstract void T$_setter_$x_$eq(int); public int x(); Code: 0: aload_0 1: invokeinterface #15, 1 // InterfaceMethod x:()I 6: ireturn public int y(); Code: 0: aload_0 1: invokeinterface #20, 1 // InterfaceMethod y:()I 6: ireturn public void y_$eq(int); Code: 0: aload_0 1: iload_1 2: invokeinterface #24, 2 // InterfaceMethod y_$eq:(I)V 7: return public void $init$(); Code: 0: aload_0 1: bipush 42 3: invokeinterface #29, 2 // InterfaceMethod T$_setter_$x_$eq:(I)V 8: aload_0 9: bipush 24 11: invokeinterface #24, 2 // InterfaceMethod y_$eq:(I)V 16: return } % qscalac sandbox/test.scala && javap -c T Compiled from "test.scala" public interface T { public abstract void T$_setter_$x_$eq(int); public abstract int x(); public abstract int y(); public abstract void y_$eq(int); public static void $init$(T); Code: 0: aload_0 1: bipush 42 3: invokeinterface #21, 2 // InterfaceMethod T$_setter_$x_$eq:(I)V 8: aload_0 9: bipush 24 11: invokeinterface #23, 2 // InterfaceMethod y_$eq:(I)V 16: return public void $init$(); Code: 0: aload_0 1: invokestatic #27 // Method $init$:(LT;)V 4: return } ``` --- src/compiler/scala/tools/nsc/transform/Mixin.scala | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 19ba9345faea..6df0b992ed49 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -1001,8 +1001,13 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { val parents1 = currentOwner.info.parents map (t => TypeTree(t) setPos tree.pos) // mark fields which can be nulled afterward lazyValNullables = nullableFields(templ) withDefaultValue Set() + val bodyEmptyAccessors = if (!sym.enclClass.isTrait) body else body mapConserve { + case dd: DefDef if dd.symbol.isAccessor && !dd.symbol.isLazy => + deriveDefDef(dd)(_ => EmptyTree) + case tree => tree + } // add all new definitions to current class or interface - treeCopy.Template(tree, parents1, self, addNewDefs(currentOwner, body)) + treeCopy.Template(tree, parents1, self, addNewDefs(currentOwner, bodyEmptyAccessors)) case Select(qual, name) if sym.owner.isTrait && !sym.isMethod => // refer to fields in some trait an abstract getter in the interface. 
From 9f54af3270396724928b335ba8d14282973898fd Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 6 Jun 2016 14:24:38 +1000 Subject: [PATCH 0143/2793] Better diagnostic for optimizer crashes --- .../nsc/backend/jvm/analysis/BackendUtils.scala | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index 9abd1d80067d..513c71fe2eeb 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -6,11 +6,12 @@ import scala.annotation.switch import scala.tools.asm.{Handle, Type} import scala.tools.asm.Opcodes._ import scala.tools.asm.tree._ -import scala.tools.asm.tree.analysis.{Frame, BasicInterpreter, Analyzer, Value} +import scala.tools.asm.tree.analysis._ import GenBCode._ import scala.tools.nsc.backend.jvm.BTypes._ import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ import java.lang.invoke.LambdaMetafactory + import scala.collection.mutable import scala.collection.JavaConverters._ @@ -32,8 +33,13 @@ class BackendUtils[BT <: BTypes](val btypes: BT) { */ class AsmAnalyzer[V <: Value](methodNode: MethodNode, classInternalName: InternalName, val analyzer: Analyzer[V] = new Analyzer(new BasicInterpreter)) { computeMaxLocalsMaxStack(methodNode) - analyzer.analyze(classInternalName, methodNode) - def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.frameAt(instruction, methodNode) + try { + analyzer.analyze(classInternalName, methodNode) + } catch { + case ae: AnalyzerException => + throw new AnalyzerException(null, "While processing " + classInternalName + "." + methodNode.name, ae) + } + def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.frameAt(instruction, methodNode) } /** From a9bebf80aae7ab2e30d84c7cd02abef2349d59d5 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 6 Jun 2016 14:24:38 +1000 Subject: [PATCH 0144/2793] Remove stray .class file from version control --- test/files/run/t8601e/StaticInit.class | Bin 417 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 test/files/run/t8601e/StaticInit.class diff --git a/test/files/run/t8601e/StaticInit.class b/test/files/run/t8601e/StaticInit.class deleted file mode 100644 index 99a0e2a643d76e1528a256cbcaad359d2bfbc85d..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 417 zcmY*VO-sW-5Pj36O`4`nY_)2&f;a8Kg6E$KgFLRf&~S= z`=i83C>HlHJM;FvH#7VF^Zp57A6pho)J!Z}SV7&uss$ajA`GkE6(P@NJY}6S(&&Ej$SBf_@$h zMX3Hko|?#bKrrNecQjT{*&a98=6)uB2C*Sqa+c$qC*fG;s>wGGg?wuUddY|tj)e^5 zW#Rl$4_TU?dNub3?HQ#H=+`skzueX<3aK`SR9k_@QDdzRm)pBpMJ`(|Wu)pa(sfEe From d3c0885a1a88e9d52902a527650319010d7ced67 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 6 Jun 2016 14:24:38 +1000 Subject: [PATCH 0145/2793] Don't minimize parents of java defined syms. 
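(Illustrative note, not from the original commit message; the names below are
invented.) The change that follows keeps the full declared parent list when
building the ClassBType of a Java-defined symbol, instead of the minimized
one. A Scala rendering of the kind of hierarchy this concerns:

```
trait A
trait B extends A
class J extends B with A // A is implied by B, i.e. redundant

// For Scala-defined classes, erasure.minimizeParents drops A from the emitted
// interface list; for Java-defined symbols the declared parents are now kept
// exactly as the classfile lists them.
```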
---
 .../scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
index d83b4a1d856e..21ea351a998f 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala
@@ -234,12 +234,13 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes {
 
     val allParents = classParents ++ classSym.annotations.flatMap(newParentForAnnotation)
 
+    val minimizedParents = if (classSym.isJavaDefined) allParents else erasure.minimizeParents(allParents)
     // We keep the superClass when computing minimizeParents to eliminate more interfaces.
     // Example: T can be eliminated from D
     //   trait T
     //   class C extends T
     //   class D extends C with T
-    val interfaces = erasure.minimizeParents(allParents) match {
+    val interfaces = minimizedParents match {
       case superClass :: ifs if !isInterfaceOrTrait(superClass.typeSymbol) =>
         ifs
       case ifs =>

From 91b066aac5edf53ca18603f8486eb255514b3118 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Mon, 6 Jun 2016 14:24:38 +1000
Subject: [PATCH 0146/2793] Revert pruning of redundant Java parents

This partially reverts the fix for SI-5278 made in 7a99c03da.

The original motivation for this case was to avoid bytecode that
stretched platform limitations in Android.

For super calls to Scala defined trait methods, we won't use
`invokespecial`, but rather use `invokestatic` to a static trait
implementation method. As such, we can continue to prune redundant
Scala interfaces.

It might be worth considering removing the pruning of redundant parents
altogether, though:

  - We no longer include `ScalaObject` as a parent of every class,
    which was mentioned as a problem in SI-5728.
  - Scala 2.12 has left Android behind for the time being due to use of
    Java 8 facilities.
  - javac doesn't do this, so why should we?
---
 .../scala/tools/nsc/transform/Erasure.scala | 15 ++++++++++-----
 1 file changed, 10 insertions(+), 5 deletions(-)

diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala
index 5e903946c1c4..db8e203c1cdf 100644
--- a/src/compiler/scala/tools/nsc/transform/Erasure.scala
+++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala
@@ -189,18 +189,23 @@ abstract class Erasure extends AddInterfaces
 
   /* Drop redundant types (ones which are implemented by some other parent) from the immediate parents.
    * This is important on Android because there is otherwise an interface explosion.
+   * This is now restricted to Scala defined ancestors: a Java defined ancestor may need to be listed
+   * as an immediate parent to support an `invokespecial`.
   */
  def minimizeParents(parents: List[Type]): List[Type] = if (parents.isEmpty) parents else {
-    def isInterfaceOrTrait(sym: Symbol) = sym.isInterface || sym.isTrait
+    def isRedundantParent(sym: Symbol) = sym.isInterface || sym.isTrait
     var rest   = parents.tail
     var leaves = collection.mutable.ListBuffer.empty[Type] += parents.head
     while(rest.nonEmpty) {
       val candidate = rest.head
-      val nonLeaf = leaves exists { t => t.typeSymbol isSubClass candidate.typeSymbol }
-      if(!nonLeaf) {
-        leaves = leaves filterNot { t => isInterfaceOrTrait(t.typeSymbol) && (candidate.typeSymbol isSubClass t.typeSymbol) }
-        leaves += candidate
+      if (candidate.typeSymbol.isJavaDefined && candidate.typeSymbol.isInterface) leaves += candidate
+      else {
+        val nonLeaf = leaves exists { t => t.typeSymbol isSubClass candidate.typeSymbol }
+        if (!nonLeaf) {
+          leaves = leaves filterNot { t => isRedundantParent(t.typeSymbol) && (candidate.typeSymbol isSubClass t.typeSymbol) }
+          leaves += candidate
+        }
       }
       rest = rest.tail
     }

From 7d51b3fd1569917cb804363bd418466a306f5c89 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Mon, 6 Jun 2016 14:24:38 +1000
Subject: [PATCH 0147/2793] Emit trait method bodies in statics

And use this as the target of the default methods or statically
resolved super or $init calls.

The call-site change is predicated on `-Yuse-trait-statics` as a
stepping stone for experimentation / bootstrapping.

I have performed this transformation in the backend, rather than trying
to reflect this in the view from Scala symbols + ASTs.

We also need to add a restriction related to invokespecial to Java
parents: to support a super call to one of these to implement a super
accessor, the interface must be listed as a direct parent of the class.

The static method names have a trailing $ added to avoid duplicate name
and signature errors in classfiles.
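(Sketch, not part of the commit message; `Probe` and `foo` are hypothetical
names.) A minimal input one could compile, with the `-Yuse-trait-statics`
flag mentioned above, and inspect with `javap -c T`: the method body is
expected in a static `foo$` taking the trait instance as its first argument,
with the default method forwarding to it.

```
trait T {
  def foo(x: Int): Int = x + 1 // body expected in a static `foo$(T, int)`,
                               // with a default `foo(int)` forwarding to it
}

object Probe {
  def main(args: Array[String]): Unit = {
    val t = new T {}   // anonymous subclass inherits the default method
    println(t.foo(41)) // prints 42, dispatched through the forwarder
  }
}
```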
--- .../scala/tools/nsc/ast/TreeGen.scala | 3 +- .../nsc/backend/jvm/BCodeBodyBuilder.scala | 49 +++++++----- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 71 +++------------- .../nsc/backend/jvm/BCodeIdiomatic.scala | 22 +++-- .../nsc/backend/jvm/BCodeSkelBuilder.scala | 17 +++- .../scala/tools/nsc/backend/jvm/BTypes.scala | 22 +---- .../nsc/backend/jvm/BTypesFromSymbols.scala | 80 ++++++++++++++++++- .../backend/jvm/analysis/BackendUtils.scala | 14 ++-- .../nsc/backend/jvm/opt/BytecodeUtils.scala | 9 +++ .../tools/nsc/backend/jvm/opt/CallGraph.scala | 7 +- .../backend/jvm/opt/InlineInfoAttribute.scala | 13 --- .../tools/nsc/backend/jvm/opt/Inliner.scala | 28 ++++--- .../backend/jvm/opt/InlinerHeuristics.scala | 28 ++++++- .../tools/nsc/transform/Delambdafy.scala | 6 +- .../scala/tools/nsc/transform/Mixin.scala | 28 ++++++- .../instrumented/InstrumentationTest.check | 2 +- test/files/neg/trait-defaults-super.check | 4 + test/files/neg/trait-defaults-super.scala | 21 +++++ test/files/pos/trait-defaults-super.scala | 21 +++++ test/files/run/t4891.check | 3 +- test/files/run/t5652.check | 3 +- test/files/run/t7700.check | 5 +- test/files/run/t7700.scala | 16 ++-- test/files/run/t7932.check | 4 + test/files/run/t7932.scala | 10 ++- test/files/run/trait-static-clash.scala | 10 +++ .../scala/lang/traits/BytecodeTest.scala | 9 ++- .../nsc/backend/jvm/DefaultMethodTest.scala | 5 +- .../nsc/backend/jvm/DirectCompileTest.scala | 4 +- .../opt/InlinerSeparateCompilationTest.scala | 4 +- .../nsc/backend/jvm/opt/InlinerTest.scala | 35 +++++--- .../backend/jvm/opt/ScalaInlineInfoTest.scala | 23 +++++- .../scala/tools/testing/BytecodeTesting.scala | 15 +++- 33 files changed, 385 insertions(+), 206 deletions(-) create mode 100644 test/files/neg/trait-defaults-super.check create mode 100644 test/files/neg/trait-defaults-super.scala create mode 100644 test/files/pos/trait-defaults-super.scala create mode 100644 test/files/run/trait-static-clash.scala diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 14ee7d7a7897..bc89609a596c 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -336,12 +336,13 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { * - are associating the RHS with a cloned symbol, but intend for the original * method to remain and for recursive calls to target it. 
*/ - final def mkStatic(orig: DefDef, maybeClone: Symbol => Symbol): DefDef = { + final def mkStatic(orig: DefDef, newName: Name, maybeClone: Symbol => Symbol): DefDef = { assert(phase.erasedTypes, phase) assert(!orig.symbol.hasFlag(SYNCHRONIZED), orig.symbol.defString) val origSym = orig.symbol val origParams = orig.symbol.info.params val newSym = maybeClone(orig.symbol) + newSym.setName(newName) newSym.setFlag(STATIC) // Add an explicit self parameter val selfParamSym = newSym.newSyntheticValueParam(newSym.owner.typeConstructor, nme.SELF).setFlag(ARTIFACT) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index d7106ae908ff..55fe47bde61f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -11,10 +11,10 @@ package jvm import scala.annotation.switch import scala.reflect.internal.Flags - import scala.tools.asm import GenBCode._ import BackendReporting._ +import scala.tools.asm.Opcodes import scala.tools.asm.tree.MethodInsnNode import scala.tools.nsc.backend.jvm.BCodeHelpers.{InvokeStyle, TestOp} @@ -637,7 +637,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { val nativeKind = tpeTK(expr) genLoad(expr, nativeKind) val MethodNameAndType(mname, methodType) = srBoxesRuntimeBoxToMethods(nativeKind) - bc.invokestatic(srBoxesRunTimeRef.internalName, mname, methodType.descriptor, app.pos) + bc.invokestatic(srBoxesRunTimeRef.internalName, mname, methodType.descriptor, itf = false, app.pos) generatedType = boxResultType(fun.symbol) case Apply(fun, List(expr)) if currentRun.runDefinitions.isUnbox(fun.symbol) => @@ -645,7 +645,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { val boxType = unboxResultType(fun.symbol) generatedType = boxType val MethodNameAndType(mname, methodType) = srBoxesRuntimeUnboxToMethods(boxType) - bc.invokestatic(srBoxesRunTimeRef.internalName, mname, methodType.descriptor, app.pos) + bc.invokestatic(srBoxesRunTimeRef.internalName, mname, methodType.descriptor, itf = false, app.pos) case app @ Apply(fun, args) => val sym = fun.symbol @@ -1058,31 +1058,40 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } receiverClass.info // ensure types the type is up to date; erasure may add lateINTERFACE to traits - val receiverName = internalName(receiverClass) - - // super calls are only allowed to direct parents - if (style.isSuper && receiverClass.isTraitOrInterface && !cnode.interfaces.contains(receiverName)) { - thisBType.info.get.inlineInfo.lateInterfaces += receiverName - cnode.interfaces.add(receiverName) - } + val receiverBType = classBTypeFromSymbol(receiverClass) + val receiverName = receiverBType.internalName def needsInterfaceCall(sym: Symbol) = { sym.isTraitOrInterface || sym.isJavaDefined && sym.isNonBottomSubClass(definitions.ClassfileAnnotationClass) } - val jname = method.javaSimpleName.toString - val bmType = methodBTypeFromSymbol(method) - val mdescr = bmType.descriptor + val jname = method.javaSimpleName.toString + val bmType = methodBTypeFromSymbol(method) + val mdescr = bmType.descriptor + val isInterface = receiverBType.isInterface.get import InvokeStyle._ - style match { - case Static => bc.invokestatic (receiverName, jname, mdescr, pos) - case Special => bc.invokespecial (receiverName, jname, mdescr, pos) - case Virtual => - if (needsInterfaceCall(receiverClass)) bc.invokeinterface(receiverName, jname, mdescr, pos) - else 
bc.invokevirtual (receiverName, jname, mdescr, pos) - case Super => bc.invokespecial (receiverName, jname, mdescr, pos) + if (style == Super) { + assert(receiverClass == methodOwner, s"for super call, expecting $receiverClass == $methodOwner") + if (receiverClass.isTrait && !receiverClass.isJavaDefined) { + val staticDesc = MethodBType(typeToBType(method.owner.info) :: bmType.argumentTypes, bmType.returnType).descriptor + val staticName = traitImplMethodName(method).toString + bc.invokestatic(receiverName, staticName, staticDesc, isInterface, pos) + } else { + if (receiverClass.isTraitOrInterface) { + // An earlier check in Mixin reports an error in this case, so it doesn't reach the backend + assert(cnode.interfaces.contains(receiverName), s"cannot invokespecial $receiverName.$jname, the interface is not a direct parent.") + } + bc.invokespecial(receiverName, jname, mdescr, isInterface, pos) + } + } else { + val opc = style match { + case Static => Opcodes.INVOKESTATIC + case Special => Opcodes.INVOKESPECIAL + case Virtual => if (isInterface) Opcodes.INVOKEINTERFACE else Opcodes.INVOKEVIRTUAL + } + bc.emitInvoke(opc, receiverName, jname, mdescr, isInterface, pos) } bmType.returnType diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 5a5747c81f65..df3c2cb3d5ef 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -11,6 +11,7 @@ import scala.tools.asm import scala.tools.nsc.io.AbstractFile import GenBCode._ import BackendReporting._ +import scala.reflect.internal.Flags /* * Traits encapsulating functionality to convert Scala AST Trees into ASM ClassNodes. @@ -49,6 +50,14 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { } } + def needsStaticImplMethod(sym: Symbol) = sym.hasAttachment[global.mixer.NeedStaticImpl.type] + + final def traitImplMethodName(sym: Symbol): Name = { + val name = sym.javaSimpleName + if (sym.isMixinConstructor) name + else name.append(nme.NAME_JOIN_STRING) + } + /** * True if `classSym` is an anonymous class or a local class. I.e., false if `classSym` is a * member class. This method is used to decide if we should emit an EnclosingMethod attribute. @@ -230,58 +239,6 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { sym.isErroneous } - /** - * Build the [[InlineInfo]] for a class symbol. - */ - def buildInlineInfoFromClassSymbol(classSym: Symbol, classSymToInternalName: Symbol => InternalName, methodSymToDescriptor: Symbol => String): InlineInfo = { - val isEffectivelyFinal = classSym.isEffectivelyFinal - - val sam = { - if (classSym.isEffectivelyFinal) None - else { - // Phase travel necessary. For example, nullary methods (getter of an abstract val) get an - // empty parameter list in later phases and would therefore be picked as SAM. - val samSym = exitingPickler(definitions.samOf(classSym.tpe)) - if (samSym == NoSymbol) None - else Some(samSym.javaSimpleName.toString + methodSymToDescriptor(samSym)) - } - } - - var warning = Option.empty[ClassSymbolInfoFailureSI9111] - - // Primitive methods cannot be inlined, so there's no point in building a MethodInlineInfo. Also, some - // primitive methods (e.g., `isInstanceOf`) have non-erased types, which confuses [[typeToBType]]. 
- val methodInlineInfos = classSym.info.decls.iterator.filter(m => m.isMethod && !scalaPrimitives.isPrimitive(m)).flatMap({ - case methodSym => - if (completeSilentlyAndCheckErroneous(methodSym)) { - // Happens due to SI-9111. Just don't provide any MethodInlineInfo for that method, we don't need fail the compiler. - if (!classSym.isJavaDefined) devWarning("SI-9111 should only be possible for Java classes") - warning = Some(ClassSymbolInfoFailureSI9111(classSym.fullName)) - None - } else { - val name = methodSym.javaSimpleName.toString // same as in genDefDef - val signature = name + methodSymToDescriptor(methodSym) - - // In `trait T { object O }`, `oSym.isEffectivelyFinalOrNotOverridden` is true, but the - // method is abstract in bytecode, `defDef.rhs.isEmpty`. Abstract methods are excluded - // so they are not marked final in the InlineInfo attribute. - // - // However, due to https://github.com/scala/scala-dev/issues/126, this currently does not - // work, the abstract accessor for O will be marked effectivelyFinal. - val effectivelyFinal = methodSym.isEffectivelyFinalOrNotOverridden && !methodSym.isDeferred - - val info = MethodInlineInfo( - effectivelyFinal = effectivelyFinal, - annotatedInline = methodSym.hasAnnotation(ScalaInlineClass), - annotatedNoInline = methodSym.hasAnnotation(ScalaNoInlineClass) - ) - Some((signature, info)) - } - }).toMap - - InlineInfo(isEffectivelyFinal, sam, methodInlineInfos, warning) - } - /* * must-single-thread */ @@ -568,15 +525,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { /** * The class internal name for a given class symbol. */ - final def internalName(sym: Symbol): String = { - // For each java class, the scala compiler creates a class and a module (thus a module class). - // If the `sym` is a java module class, we use the java class instead. This ensures that the - // ClassBType is created from the main class (instead of the module class). - // The two symbols have the same name, so the resulting internalName is the same. 
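A note on the `itf` flag now threaded through the invoke helpers (see the `BCodeIdiomatic` changes further down): whether the owner is an interface is part of the method reference, so static and special calls into a trait interface must carry `itf = true`, otherwise the JVM typically rejects the reference at link time. A minimal ASM sketch, assuming the illustrative names `T` and `f$`:

```
import scala.tools.asm.Opcodes
import scala.tools.asm.tree.{InsnNode, MethodInsnNode, MethodNode, VarInsnNode}

object ItfFlagSketch {
  // Hand-building a forwarder body: ALOAD 0; INVOKESTATIC T.f$(LT;)I; IRETURN.
  // The last MethodInsnNode argument records that the owner T is an interface.
  def forwarderBody(): MethodNode = {
    val m = new MethodNode(Opcodes.ACC_PUBLIC, "f", "()I", null, null)
    m.instructions.add(new VarInsnNode(Opcodes.ALOAD, 0))
    m.instructions.add(new MethodInsnNode(Opcodes.INVOKESTATIC, "T", "f$", "(LT;)I", /* itf = */ true))
    m.instructions.add(new InsnNode(Opcodes.IRETURN))
    m
  }
}
```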
- // Phase travel (exitingPickler) required for SI-6613 - linkedCoC is only reliable in early phases (nesting) - val classSym = if (sym.isJavaDefined && sym.isModuleClass) exitingPickler(sym.linkedClassOfClass) else sym - classBTypeFromSymbol(classSym).internalName - } + final def internalName(sym: Symbol): String = classBTypeFromSymbol(sym).internalName } // end of trait BCInnerClassGen trait BCAnnotGen extends BCInnerClassGen { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala index ed1b4ec3255e..e3d45a9b3e90 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala @@ -190,6 +190,7 @@ abstract class BCodeIdiomatic extends SubComponent { JavaStringBuilderClassName, INSTANCE_CONSTRUCTOR_NAME, "()V", + itf = false, pos ) } @@ -373,30 +374,27 @@ abstract class BCodeIdiomatic extends SubComponent { final def rem(tk: BType) { emitPrimitive(JCodeMethodN.remOpcodes, tk) } // can-multi-thread // can-multi-thread - final def invokespecial(owner: String, name: String, desc: String, pos: Position) { - addInvoke(Opcodes.INVOKESPECIAL, owner, name, desc, false, pos) + final def invokespecial(owner: String, name: String, desc: String, itf: Boolean, pos: Position): Unit = { + emitInvoke(Opcodes.INVOKESPECIAL, owner, name, desc, itf, pos) } // can-multi-thread - final def invokestatic(owner: String, name: String, desc: String, pos: Position) { - addInvoke(Opcodes.INVOKESTATIC, owner, name, desc, false, pos) + final def invokestatic(owner: String, name: String, desc: String, itf: Boolean, pos: Position): Unit = { + emitInvoke(Opcodes.INVOKESTATIC, owner, name, desc, itf, pos) } // can-multi-thread - final def invokeinterface(owner: String, name: String, desc: String, pos: Position) { - addInvoke(Opcodes.INVOKEINTERFACE, owner, name, desc, true, pos) + final def invokeinterface(owner: String, name: String, desc: String, pos: Position): Unit = { + emitInvoke(Opcodes.INVOKEINTERFACE, owner, name, desc, itf = true, pos) } // can-multi-thread - final def invokevirtual(owner: String, name: String, desc: String, pos: Position) { - addInvoke(Opcodes.INVOKEVIRTUAL, owner, name, desc, false, pos) + final def invokevirtual(owner: String, name: String, desc: String, pos: Position): Unit = { + emitInvoke(Opcodes.INVOKEVIRTUAL, owner, name, desc, itf = false, pos) } - private def addInvoke(opcode: Int, owner: String, name: String, desc: String, itf: Boolean, pos: Position) = { + def emitInvoke(opcode: Int, owner: String, name: String, desc: String, itf: Boolean, pos: Position): Unit = { val node = new MethodInsnNode(opcode, owner, name, desc, itf) jmethod.instructions.add(node) if (settings.optInlinerEnabled) callsitePositions(node) = pos } - final def invokedynamic(owner: String, name: String, desc: String) { - jmethod.visitMethodInsn(Opcodes.INVOKEDYNAMIC, owner, name, desc) - } // can-multi-thread final def goTo(label: asm.Label) { jmethod.visitJumpInsn(Opcodes.GOTO, label) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index bddc41e5c6ac..1bff8519eca3 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -488,7 +488,22 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { case ValDef(mods, name, tpt, rhs) => () // fields are added in 
`genPlainClass()`, via `addClassFields()` - case dd : DefDef => genDefDef(dd) + case dd : DefDef => + val sym = dd.symbol + if (needsStaticImplMethod(sym)) { + val staticDefDef = global.gen.mkStatic(dd, traitImplMethodName(sym), _.cloneSymbol) + val forwarderDefDef = { + val forwarderBody = Apply(global.gen.mkAttributedRef(staticDefDef.symbol), This(sym.owner).setType(sym.owner.typeConstructor) :: dd.vparamss.head.map(p => global.gen.mkAttributedIdent(p.symbol))).setType(sym.info.resultType) + // we don't want to the optimizer to inline the static method into the forwarder. Instead, + // the backend has a special case to transitively inline into a callsite of the forwarder + // when the forwarder itself is inlined. + forwarderBody.updateAttachment(NoInlineCallsiteAttachment) + deriveDefDef(dd)(_ => global.atPos(dd.pos)(forwarderBody)) + } + genDefDef(staticDefDef) + if (!sym.isMixinConstructor) + genDefDef(forwarderDefDef) + } else genDefDef(dd) case Template(_, _, body) => body foreach gen diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index a708feb0a7b8..7b2686e7a9be 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -225,8 +225,7 @@ abstract class BTypes { val inlineInfo = inlineInfoFromClassfile(classNode) - val classfileInterfaces: List[ClassBType] = classNode.interfaces.asScala.map(classBTypeFromParsedClassfile)(collection.breakOut) - val interfaces = classfileInterfaces.filterNot(i => inlineInfo.lateInterfaces.contains(i.internalName)) + val interfaces: List[ClassBType] = classNode.interfaces.asScala.map(classBTypeFromParsedClassfile)(collection.breakOut) classBType.info = Right(ClassInfo(superClass, interfaces, flags, nestedClasses, nestedInfo, inlineInfo)) classBType @@ -1147,25 +1146,6 @@ object BTypes { sam: Option[String], methodInfos: Map[String, MethodInlineInfo], warning: Option[ClassInlineInfoWarning]) { - /** - * A super call (invokespecial) to a default method T.m is only allowed if the interface T is - * a direct parent of the class. Super calls are introduced for example in Mixin when generating - * forwarder methods: - * - * trait T { override def clone(): Object = "hi" } - * trait U extends T - * class C extends U - * - * The class C gets a forwarder that invokes T.clone(). During code generation the interface T - * is added as direct parent to class C. Note that T is not a (direct) parent in the frontend - * type of class C. - * - * All interfaces that are added to a class during code generation are added to this buffer and - * stored in the InlineInfo classfile attribute. This ensures that the ClassBTypes for a - * specific class is the same no matter if it's constructed from a Symbol or from a classfile. - * This is tested in BTypesFromClassfileTest. - */ - val lateInterfaces: ListBuffer[InternalName] = ListBuffer.empty } val EmptyInlineInfo = InlineInfo(false, None, Map.empty, None) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 21ea351a998f..1a4590e7d175 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -97,11 +97,19 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { * for the Nothing / Null. 
If used for example as a parameter type, we use the runtime classes * in the classfile method signature. */ - final def classBTypeFromSymbol(classSym: Symbol): ClassBType = { + final def classBTypeFromSymbol(sym: Symbol): ClassBType = { + // For each java class, the scala compiler creates a class and a module (thus a module class). + // If the `sym` is a java module class, we use the java class instead. This ensures that the + // ClassBType is created from the main class (instead of the module class). + // The two symbols have the same name, so the resulting internalName is the same. + // Phase travel (exitingPickler) required for SI-6613 - linkedCoC is only reliable in early phases (nesting) + val classSym = if (sym.isJavaDefined && sym.isModuleClass) exitingPickler(sym.linkedClassOfClass) else sym + assert(classSym != NoSymbol, "Cannot create ClassBType from NoSymbol") assert(classSym.isClass, s"Cannot create ClassBType from non-class symbol $classSym") assertClassNotArrayNotPrimitive(classSym) assert(!primitiveTypeToBType.contains(classSym) || isCompilingPrimitive, s"Cannot create ClassBType for primitive class symbol $classSym") + if (classSym == NothingClass) srNothingRef else if (classSym == NullClass) srNullRef else { @@ -509,7 +517,7 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { * classfile attribute. */ private def buildInlineInfo(classSym: Symbol, internalName: InternalName): InlineInfo = { - def buildFromSymbol = buildInlineInfoFromClassSymbol(classSym, classBTypeFromSymbol(_).internalName, methodBTypeFromSymbol(_).descriptor) + def buildFromSymbol = buildInlineInfoFromClassSymbol(classSym) // phase travel required, see implementation of `compiles`. for nested classes, it checks if the // enclosingTopLevelClass is being compiled. after flatten, all classes are considered top-level, @@ -530,6 +538,74 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { } } + /** + * Build the [[InlineInfo]] for a class symbol. + */ + def buildInlineInfoFromClassSymbol(classSym: Symbol): InlineInfo = { + val isEffectivelyFinal = classSym.isEffectivelyFinal + + val sam = { + if (classSym.isEffectivelyFinal) None + else { + // Phase travel necessary. For example, nullary methods (getter of an abstract val) get an + // empty parameter list in later phases and would therefore be picked as SAM. + val samSym = exitingPickler(definitions.samOf(classSym.tpe)) + if (samSym == NoSymbol) None + else Some(samSym.javaSimpleName.toString + methodBTypeFromSymbol(samSym).descriptor) + } + } + + var warning = Option.empty[ClassSymbolInfoFailureSI9111] + + // Primitive methods cannot be inlined, so there's no point in building a MethodInlineInfo. Also, some + // primitive methods (e.g., `isInstanceOf`) have non-erased types, which confuses [[typeToBType]]. + val methodInlineInfos = classSym.info.decls.iterator.filter(m => m.isMethod && !scalaPrimitives.isPrimitive(m)).flatMap({ + case methodSym => + if (completeSilentlyAndCheckErroneous(methodSym)) { + // Happens due to SI-9111. Just don't provide any MethodInlineInfo for that method, we don't need fail the compiler. 
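To make the entries built here concrete: for a trait along the lines of `trait T { @inline def f: Int = 1 }`, the map is expected to contain a pair of entries per concrete member, one for the default method and one for the static impl method whose descriptor takes the self parameter first. A sketch using a local stand-in for `MethodInlineInfo` (names and values illustrative):

```
object InlineInfoShapeSketch {
  // Stand-in mirroring BTypes.MethodInlineInfo, kept local so the sketch is self-contained.
  case class Info(effectivelyFinal: Boolean, annotatedInline: Boolean, annotatedNoInline: Boolean)

  val methodInfosForT = Map(
    "f()I"         -> Info(effectivelyFinal = false, annotatedInline = true,  annotatedNoInline = false),
    "f$(LT;)I"     -> Info(effectivelyFinal = true,  annotatedInline = true,  annotatedNoInline = false), // static impl
    "$init$(LT;)V" -> Info(effectivelyFinal = true,  annotatedInline = false, annotatedNoInline = false)  // trait init
  )
}
```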
+ if (!classSym.isJavaDefined) devWarning("SI-9111 should only be possible for Java classes") + warning = Some(ClassSymbolInfoFailureSI9111(classSym.fullName)) + Nil + } else { + val name = methodSym.javaSimpleName.toString // same as in genDefDef + val signature = name + methodBTypeFromSymbol(methodSym).descriptor + + // In `trait T { object O }`, `oSym.isEffectivelyFinalOrNotOverridden` is true, but the + // method is abstract in bytecode, `defDef.rhs.isEmpty`. Abstract methods are excluded + // so they are not marked final in the InlineInfo attribute. + // + // However, due to https://github.com/scala/scala-dev/issues/126, this currently does not + // work, the abstract accessor for O will be marked effectivelyFinal. + val effectivelyFinal = methodSym.isEffectivelyFinalOrNotOverridden && !methodSym.isDeferred + + val info = MethodInlineInfo( + effectivelyFinal = effectivelyFinal, + annotatedInline = methodSym.hasAnnotation(ScalaInlineClass), + annotatedNoInline = methodSym.hasAnnotation(ScalaNoInlineClass)) + + if (needsStaticImplMethod(methodSym)) { + val staticName = traitImplMethodName(methodSym).toString + val selfParam = methodSym.newSyntheticValueParam(methodSym.owner.typeConstructor, nme.SELF) + val staticMethodType = methodSym.info match { + case mt @ MethodType(params, res) => copyMethodType(mt, selfParam :: params, res) + } + val staticMethodSignature = staticName + methodBTypeFromMethodType(staticMethodType, isConstructor = false) + val staticMethodInfo = MethodInlineInfo( + effectivelyFinal = true, + annotatedInline = info.annotatedInline, + annotatedNoInline = info.annotatedNoInline) + if (methodSym.isMixinConstructor) + List((staticMethodSignature, staticMethodInfo)) + else + List((signature, info), (staticMethodSignature, staticMethodInfo)) + } else + List((signature, info)) + } + }).toMap + + InlineInfo(isEffectivelyFinal, sam, methodInlineInfos, warning) + } + /** * For top-level objects without a companion class, the compiler generates a mirror class with * static forwarders (Java compat). There's no symbol for the mirror class, but we still need a diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index 513c71fe2eeb..539435a32604 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -2,18 +2,18 @@ package scala.tools.nsc package backend.jvm package analysis +import java.lang.invoke.LambdaMetafactory + import scala.annotation.switch -import scala.tools.asm.{Handle, Type} +import scala.collection.JavaConverters._ +import scala.collection.mutable import scala.tools.asm.Opcodes._ import scala.tools.asm.tree._ import scala.tools.asm.tree.analysis._ -import GenBCode._ +import scala.tools.asm.{Handle, Type} import scala.tools.nsc.backend.jvm.BTypes._ +import scala.tools.nsc.backend.jvm.GenBCode._ import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ -import java.lang.invoke.LambdaMetafactory - -import scala.collection.mutable -import scala.collection.JavaConverters._ /** * This component hosts tools and utilities used in the backend that require access to a `BTypes` @@ -39,7 +39,7 @@ class BackendUtils[BT <: BTypes](val btypes: BT) { case ae: AnalyzerException => throw new AnalyzerException(null, "While processing " + classInternalName + "." 
+ methodNode.name, ae) } - def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.frameAt(instruction, methodNode) + def frameAt(instruction: AbstractInsnNode): Frame[V] = analyzer.frameAt(instruction, methodNode) } /** diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala index 63906d80e50b..e21c46dbe99e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -93,6 +93,15 @@ object BytecodeUtils { op == INVOKESPECIAL || op == INVOKESTATIC } + def isVirtualCall(instruction: AbstractInsnNode): Boolean = { + val op = instruction.getOpcode + op == INVOKEVIRTUAL || op == INVOKEINTERFACE + } + + def isCall(instruction: AbstractInsnNode): Boolean = { + isNonVirtualCall(instruction) || isVirtualCall(instruction) + } + def isExecutable(instruction: AbstractInsnNode): Boolean = instruction.getOpcode >= 0 def isConstructor(methodNode: MethodNode): Boolean = { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala index 40344809bff6..d6942d9ff99b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala @@ -9,11 +9,11 @@ package opt import scala.collection.immutable.IntMap import scala.reflect.internal.util.{NoPosition, Position} -import scala.tools.asm.{Opcodes, Type, Handle} +import scala.tools.asm.{Handle, Opcodes, Type} import scala.tools.asm.tree._ import scala.collection.{concurrent, mutable} import scala.collection.JavaConverters._ -import scala.tools.nsc.backend.jvm.BTypes.InternalName +import scala.tools.nsc.backend.jvm.BTypes.{InternalName, MethodInlineInfo} import scala.tools.nsc.backend.jvm.BackendReporting._ import scala.tools.nsc.backend.jvm.analysis._ import BytecodeUtils._ @@ -67,6 +67,7 @@ class CallGraph[BT <: BTypes](val btypes: BT) { } def containsCallsite(callsite: Callsite): Boolean = callsites(callsite.callsiteMethod) contains callsite.callsiteInstruction + def findCallSite(method: MethodNode, call: MethodInsnNode): Option[Callsite] = callsites.getOrElse(method, Map.empty).get(call) def removeClosureInstantiation(indy: InvokeDynamicInsnNode, methodNode: MethodNode): Option[ClosureInstantiation] = { val methodClosureInits = closureInstantiations(methodNode) @@ -356,7 +357,7 @@ class CallGraph[BT <: BTypes](val btypes: BT) { "Invocation of" + s" ${callee.map(_.calleeDeclarationClass.internalName).getOrElse("?")}.${callsiteInstruction.name + callsiteInstruction.desc}" + s"@${callsiteMethod.instructions.indexOf(callsiteInstruction)}" + - s" in ${callsiteClass.internalName}.${callsiteMethod.name}" + s" in ${callsiteClass.internalName}.${callsiteMethod.name}${callsiteMethod.desc}" } final case class ClonedCallsite(callsite: Callsite, clonedWhenInlining: Callsite) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala index 79d26b0b4eee..5ce7072c60c0 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala @@ -51,7 +51,6 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI if (inlineInfo.isEffectivelyFinal) flags |= 1 // flags |= 2 // no longer written if (inlineInfo.sam.isDefined) flags |= 4 - if 
(inlineInfo.lateInterfaces.nonEmpty) flags |= 8 result.putByte(flags) for (samNameDesc <- inlineInfo.sam) { @@ -79,9 +78,6 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI result.putByte(inlineInfo) } - result.putShort(inlineInfo.lateInterfaces.length) - for (i <- inlineInfo.lateInterfaces) result.putShort(cw.newUTF8(i)) - result } @@ -105,7 +101,6 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI val isFinal = (flags & 1) != 0 val hasSelf = (flags & 2) != 0 val hasSam = (flags & 4) != 0 - val hasLateInterfaces = (flags & 8) != 0 if (hasSelf) nextUTF8() // no longer used @@ -128,13 +123,7 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI (name + desc, MethodInlineInfo(isFinal, isInline, isNoInline)) }).toMap - val lateInterfaces = if (!hasLateInterfaces) Nil else { - val numLateInterfaces = nextShort() - (0 until numLateInterfaces).map(_ => nextUTF8()) - } - val info = InlineInfo(isFinal, sam, infos, None) - info.lateInterfaces ++= lateInterfaces InlineInfoAttribute(info) } else { val msg = UnknownScalaInlineInfoVersion(cr.getClassName, version) @@ -161,8 +150,6 @@ object InlineInfoAttribute { * [u2] name (reference) * [u2] descriptor (reference) * [u1] isFinal (<< 0), traitMethodWithStaticImplementation (<< 1), hasInlineAnnotation (<< 2), hasNoInlineAnnotation (<< 3) - * [u2]? numLateInterfaces - * [u2] lateInterface (reference) */ final val VERSION: Byte = 1 diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala index 7b4cfe2a18d2..9c5a1a9f980b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala @@ -106,6 +106,8 @@ class Inliner[BT <: BTypes](val btypes: BT) { val elided = mutable.Set.empty[InlineRequest] def nonElidedRequests(methodNode: MethodNode): Set[InlineRequest] = requestsByMethod(methodNode) diff elided + def allCallees(r: InlineRequest): Set[MethodNode] = r.post.flatMap(allCallees).toSet + r.callsite.callee.get.callee + /** * Break cycles in the inline request graph by removing callsites. * @@ -114,20 +116,20 @@ class Inliner[BT <: BTypes](val btypes: BT) { */ def breakInlineCycles: List[InlineRequest] = { // is there a path of inline requests from start to goal? - def isReachable(start: MethodNode, goal: MethodNode): Boolean = { - @tailrec def reachableImpl(check: List[MethodNode], visited: Set[MethodNode]): Boolean = check match { - case x :: xs => + def isReachable(start: Set[MethodNode], goal: MethodNode): Boolean = { + @tailrec def reachableImpl(check: Set[MethodNode], visited: Set[MethodNode]): Boolean = { + if (check.isEmpty) false + else { + val x = check.head if (x == goal) true - else if (visited(x)) reachableImpl(xs, visited) + else if (visited(x)) reachableImpl(check - x, visited) else { - val callees = nonElidedRequests(x).map(_.callsite.callee.get.callee) - reachableImpl(xs ::: callees.toList, visited + x) + val callees = nonElidedRequests(x).flatMap(allCallees) + reachableImpl(check - x ++ callees, visited + x) } - - case Nil => - false + } } - reachableImpl(List(start), Set.empty) + reachableImpl(start, Set.empty) } val result = new mutable.ListBuffer[InlineRequest]() @@ -136,7 +138,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { java.util.Arrays.sort(requests, callsiteOrdering) for (r <- requests) { // is there a chain of inlining requests that would inline the callsite method into the callee? 
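The check referred to in the comment above is being generalized to start from all transitive callees of a request, including post-inline requests. A toy model of the reachability test (plain strings instead of `MethodNode`s, purely illustrative):

```
object InlineCycleSketch {
  // callees: for each method, the callees that already-accepted requests would inline into it.
  def isReachable(start: Set[String], goal: String, callees: Map[String, Set[String]]): Boolean = {
    @annotation.tailrec
    def loop(check: Set[String], visited: Set[String]): Boolean =
      if (check.isEmpty) false
      else {
        val x = check.head
        if (x == goal) true
        else if (visited(x)) loop(check - x, visited)
        else loop(check - x ++ callees.getOrElse(x, Set.empty), visited + x)
      }
    loop(start, Set.empty)
  }

  def main(args: Array[String]): Unit = {
    val accepted = Map("a" -> Set("b"))  // already accepted: inline b into a
    // A new request to inline a into b would close a cycle, so it gets elided:
    println(isReachable(start = Set("a"), goal = "b", callees = accepted)) // true
  }
}
```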
- if (isReachable(r.callsite.callee.get.callee, r.callsite.callsiteMethod)) + if (isReachable(allCallees(r), r.callsite.callsiteMethod)) elided += r else result += r @@ -150,8 +152,8 @@ class Inliner[BT <: BTypes](val btypes: BT) { if (requests.isEmpty) Nil else { val (leaves, others) = requests.partition(r => { - val inlineRequestsForCallee = nonElidedRequests(r.callsite.callee.get.callee) - inlineRequestsForCallee.forall(visited) + val inlineRequestsForCallees = allCallees(r).flatMap(nonElidedRequests) + inlineRequestsForCallees.forall(visited) }) assert(leaves.nonEmpty, requests) leaves ::: leavesFirst(others, visited ++ leaves) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala index 009742501e9e..79e74f3eb76c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala @@ -7,14 +7,14 @@ package scala.tools.nsc package backend.jvm package opt -import scala.tools.asm.tree.MethodNode -import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.collection.JavaConverters._ +import scala.tools.asm.Opcodes +import scala.tools.asm.tree.{MethodInsnNode, MethodNode} +import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.backend.jvm.BackendReporting.OptimizerWarning class InlinerHeuristics[BT <: BTypes](val bTypes: BT) { import bTypes._ - import inliner._ import callGraph._ case class InlineRequest(callsite: Callsite, post: List[InlineRequest], reason: String) { @@ -93,7 +93,27 @@ class InlinerHeuristics[BT <: BTypes](val bTypes: BT) { val callee = callsite.callee.get def requestIfCanInline(callsite: Callsite, reason: String): Either[OptimizerWarning, InlineRequest] = inliner.earlyCanInlineCheck(callsite) match { case Some(w) => Left(w) - case None => Right(InlineRequest(callsite, Nil, reason)) + case None => + val callee = callsite.callee.get + val postInlineRequest: List[InlineRequest] = callee.calleeDeclarationClass.isInterface match { + case Right(true) => + // Treat the pair of trait interface method and static method as one for the purposes of inlining: + // if we inline invokeinterface, invoke the invokestatic, too. 
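At the source level, the pairing described in the comment above looks roughly like this (illustrative example, not taken from the test suite):

```
trait T {
  @inline def f: Int = 1  // emitted as default method T.f (a forwarder) plus static T.f$ (the body)
}
class User {
  // The call below is an INVOKEINTERFACE T.f. Inlining it only copies the forwarder, i.e.
  // an INVOKESTATIC T.f$; the post-inline request built here lets the inliner follow that
  // invokestatic as well, so the actual body ends up at the callsite.
  def g(t: T): Int = t.f + 1
}
```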
+ val calls = callee.callee.instructions.iterator().asScala.filter(BytecodeUtils.isCall).take(2).toList + calls match { + case List(x: MethodInsnNode) if x.getOpcode == Opcodes.INVOKESTATIC && x.name == (callee.callee.name + "$") => + callGraph.addIfMissing(callee.callee, callee.calleeDeclarationClass) + val maybeNodeToCallsite1 = callGraph.findCallSite(callee.callee, x) + maybeNodeToCallsite1.toList.flatMap(x => requestIfCanInline(x, reason).right.toOption) + case _ => + Nil + + } + case _ => Nil + } + + Right(InlineRequest(callsite, postInlineRequest, reason)) + } compilerSettings.YoptInlineHeuristics.value match { diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index 2dd8def53e13..804bcddb7b88 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -261,7 +261,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre def pretransform(tree: Tree): Tree = tree match { case dd: DefDef if dd.symbol.isDelambdafyTarget => if (!dd.symbol.hasFlag(STATIC) && methodReferencesThis(dd.symbol)) { - gen.mkStatic(dd, sym => sym) + gen.mkStatic(dd, dd.symbol.name, sym => sym) } else { dd.symbol.setFlag(STATIC) dd @@ -276,8 +276,10 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre case dd: DefDef if dd.symbol.isLiftedMethod && !dd.symbol.isDelambdafyTarget => // SI-9390 emit lifted methods that don't require a `this` reference as STATIC // delambdafy targets are excluded as they are made static by `transformFunction`. - if (!dd.symbol.hasFlag(STATIC) && !methodReferencesThis(dd.symbol)) + if (!dd.symbol.hasFlag(STATIC) && !methodReferencesThis(dd.symbol)) { dd.symbol.setFlag(STATIC) + dd.symbol.removeAttachment[mixer.NeedStaticImpl.type] + } super.transform(tree) case Apply(fun, outer :: rest) if shouldElideOuterArg(fun.symbol, outer) => val nullOuter = gen.mkZero(outer.tpe) diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 6df0b992ed49..d62b77dac26e 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -130,6 +130,10 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { */ def addMember(clazz: Symbol, member: Symbol): Symbol = { debuglog(s"mixing into $clazz: ${member.defString}") + // This attachment is used to instruct the backend about which methids in traits require + // a static trait impl method. We remove this from the new symbol created for the method + // mixed into the subclass. 
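As a rough summary of which members the attachment ends up marking (see `isTraitMethodRequiringStaticImpl`, added further down in Mixin.scala), an illustrative example:

```
trait Markers {
  def concrete: Int = 1        // concrete, non-private: marked NeedStaticImpl (static impl + forwarder)
  def deferred: Int            // no rhs: nothing to lift into a static method
  private def helper: Int = 2  // private: no static impl needed, per the check below
}
```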
+ member.removeAttachment[NeedStaticImpl.type] clazz.info.decls enter member setFlag MIXEDIN resetFlag JAVA_DEFAULTMETHOD } def cloneAndAddMember(mixinClass: Symbol, mixinMember: Symbol, clazz: Symbol): Symbol = @@ -344,6 +348,10 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { reporter.error(clazz.pos, "Member %s of mixin %s is missing a concrete super implementation.".format( mixinMember.alias, mixinClass)) case alias1 => + if (alias1.owner.isJavaDefined && alias1.owner.isInterface && !clazz.parentSymbols.contains(alias1.owner)) { + val suggestedParent = exitingTyper(clazz.info.baseType(alias1.owner)) + reporter.error(clazz.pos, s"Unable to implement a super accessor required by trait ${mixinClass.name} unless $suggestedParent is directly extended by $clazz.") + } superAccessor.asInstanceOf[TermSymbol] setAlias alias1 } } @@ -1001,13 +1009,20 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { val parents1 = currentOwner.info.parents map (t => TypeTree(t) setPos tree.pos) // mark fields which can be nulled afterward lazyValNullables = nullableFields(templ) withDefaultValue Set() + // Remove bodies of accessors in traits - TODO: after PR #5141 (fields refactoring), this might be a no-op val bodyEmptyAccessors = if (!sym.enclClass.isTrait) body else body mapConserve { case dd: DefDef if dd.symbol.isAccessor && !dd.symbol.isLazy => deriveDefDef(dd)(_ => EmptyTree) case tree => tree } // add all new definitions to current class or interface - treeCopy.Template(tree, parents1, self, addNewDefs(currentOwner, bodyEmptyAccessors)) + val body1 = addNewDefs(currentOwner, bodyEmptyAccessors) + body1 foreach { + case dd: DefDef if isTraitMethodRequiringStaticImpl(dd) => + dd.symbol.updateAttachment(NeedStaticImpl) + case _ => + } + treeCopy.Template(tree, parents1, self, body1) case Select(qual, name) if sym.owner.isTrait && !sym.isMethod => // refer to fields in some trait an abstract getter in the interface. @@ -1023,7 +1038,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { typedPos(tree.pos)((qual DOT setter)(rhs)) - case _ => tree } @@ -1042,4 +1056,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { finally localTyper = saved } } + + private def isTraitMethodRequiringStaticImpl(dd: DefDef): Boolean = { + val sym = dd.symbol + dd.rhs.nonEmpty && + sym.owner.isTrait && + !sym.isPrivate && // no need to put implementations of private methods into a static method + !sym.hasFlag(Flags.STATIC) + } + + case object NeedStaticImpl extends PlainAttachment } diff --git a/test/files/instrumented/InstrumentationTest.check b/test/files/instrumented/InstrumentationTest.check index 74f9c9d26885..d317fc42077d 100644 --- a/test/files/instrumented/InstrumentationTest.check +++ b/test/files/instrumented/InstrumentationTest.check @@ -6,5 +6,5 @@ Method call statistics: 1 instrumented/Foo2.someMethod()I 1 scala/DeprecatedConsole.()V 1 scala/Predef$.println(Ljava/lang/Object;)V - 1 scala/io/AnsiColor.$init$()V + 1 scala/io/AnsiColor.$init$(Lscala/io/AnsiColor;)V 1 scala/runtime/BoxesRunTime.boxToBoolean(Z)Ljava/lang/Boolean; diff --git a/test/files/neg/trait-defaults-super.check b/test/files/neg/trait-defaults-super.check new file mode 100644 index 000000000000..2b19402828b5 --- /dev/null +++ b/test/files/neg/trait-defaults-super.check @@ -0,0 +1,4 @@ +trait-defaults-super.scala:14: error: Unable to implement a super accessor required by trait T unless Iterable[String] is directly extended by class C. 
+class C extends T + ^ +one error found diff --git a/test/files/neg/trait-defaults-super.scala b/test/files/neg/trait-defaults-super.scala new file mode 100644 index 000000000000..def271e8e747 --- /dev/null +++ b/test/files/neg/trait-defaults-super.scala @@ -0,0 +1,21 @@ +trait T extends java.lang.Iterable[String] { + + override def spliterator(): java.util.Spliterator[String] = { + super[Iterable].spliterator + super.spliterator + null + } + def foo = { + super[Iterable].spliterator + super.spliterator + } + def iterator(): java.util.Iterator[String] = java.util.Collections.emptyList().iterator() +} +class C extends T +object Test { + def main(args: Array[String]): Unit = { + val t: T = new C + t.spliterator + t.foo + } +} diff --git a/test/files/pos/trait-defaults-super.scala b/test/files/pos/trait-defaults-super.scala new file mode 100644 index 000000000000..8f867ab5632d --- /dev/null +++ b/test/files/pos/trait-defaults-super.scala @@ -0,0 +1,21 @@ +trait T extends java.lang.Iterable[String] { + + override def spliterator(): java.util.Spliterator[String] = { + super[Iterable].spliterator + super.spliterator + null + } + def foo = { + super[Iterable].spliterator + super.spliterator + } + def iterator(): java.util.Iterator[String] = java.util.Collections.emptyList().iterator() +} +class C extends T with java.lang.Iterable[String] // super accessor is okay with Iterable as a direct parent +object Test { + def main(args: Array[String]): Unit = { + val t: T = new C + t.spliterator + t.foo + } +} diff --git a/test/files/run/t4891.check b/test/files/run/t4891.check index 1b1108e9eee0..a460569fd9dc 100644 --- a/test/files/run/t4891.check +++ b/test/files/run/t4891.check @@ -1,6 +1,7 @@ test.generic.T1 - (m) public default void test.generic.T1.$init$() + (m) public static void test.generic.T1.$init$(test.generic.T1) (m) public default A test.generic.T1.t1(A) + (m) public static java.lang.Object test.generic.T1.t1$(test.generic.T1,java.lang.Object) test.generic.C1 (m) public void test.generic.C1.m1() test.generic.C2 diff --git a/test/files/run/t5652.check b/test/files/run/t5652.check index 7c65ba66981d..3c039d68aa7c 100644 --- a/test/files/run/t5652.check +++ b/test/files/run/t5652.check @@ -1,6 +1,7 @@ public default int T1.f0() -public default void T1.$init$() public static int T1.T1$$g$1() +public static int T1.f0$(T1) +public static void T1.$init$(T1) public int A1.f1() public static final int A1.A1$$g$2() public int A2.f2() diff --git a/test/files/run/t7700.check b/test/files/run/t7700.check index 1d51e68877ca..7d18dbfcb4e9 100644 --- a/test/files/run/t7700.check +++ b/test/files/run/t7700.check @@ -1,3 +1,4 @@ -public default void C.$init$() +public static void C.$init$(C) public default java.lang.Object C.bar(java.lang.Object) -public abstract java.lang.Object C.foo(java.lang.Object) +public static java.lang.Object C.bar$(C,java.lang.Object) +public abstract java.lang.Object C.foo(java.lang.Object) \ No newline at end of file diff --git a/test/files/run/t7700.scala b/test/files/run/t7700.scala index 76d16b808c52..fd13666467f8 100644 --- a/test/files/run/t7700.scala +++ b/test/files/run/t7700.scala @@ -7,11 +7,13 @@ trait C[@specialized U] { def bar[A](u: U) = u } -object Test extends App { - val declared = classOf[C[_]].getDeclaredMethods.sortBy(_.getName) - println(declared.mkString("\n")) - object CInt extends C[Int] { def foo(i: Int) = i } - object CAny extends C[Any] { def foo(a: Any) = a } - assert(CInt.foo(1) == 1) - assert(CAny.foo("") == "") +object Test { + def main(args: 
Array[String]) { + val declared = classOf[C[_]].getDeclaredMethods.sortBy(_.getName) + println(declared.mkString("\n")) + object CInt extends C[Int] { def foo(i: Int) = i } + object CAny extends C[Any] { def foo(a: Any) = a } + assert(CInt.foo(1) == 1) + assert(CAny.foo("") == "") + } } diff --git a/test/files/run/t7932.check b/test/files/run/t7932.check index a2ad84cd4635..76968fd179ca 100644 --- a/test/files/run/t7932.check +++ b/test/files/run/t7932.check @@ -2,5 +2,9 @@ public Category C.category() public Category C.category1() public default Category M1.category() public default Category M1.category1() +public static Category M1.category$(M1) +public static Category M1.category1$(M1) public default Category M2.category() public default Category M2.category1() +public static Category M2.category$(M2) +public static Category M2.category1$(M2) \ No newline at end of file diff --git a/test/files/run/t7932.scala b/test/files/run/t7932.scala index e6bdbf24170c..40b0b9989b53 100644 --- a/test/files/run/t7932.scala +++ b/test/files/run/t7932.scala @@ -17,12 +17,14 @@ trait M2[F] { self: M1[F] => abstract class C extends M1[Float] with M2[Float] -object Test extends App { +object Test { def t(c: Class[_]) = { val ms = c.getMethods.filter(_.getName.startsWith("category")) println(ms.map(_.toGenericString).sorted.mkString("\n")) } - t(classOf[C]) - t(classOf[M1[_]]) - t(classOf[M2[_]]) + def main(args: Array[String]) { + t(classOf[C]) + t(classOf[M1[_]]) + t(classOf[M2[_]]) + } } diff --git a/test/files/run/trait-static-clash.scala b/test/files/run/trait-static-clash.scala new file mode 100644 index 000000000000..603cf6b6e544 --- /dev/null +++ b/test/files/run/trait-static-clash.scala @@ -0,0 +1,10 @@ +trait T { + def foo = 1 + def foo(t: T) = 2 +} +object Test extends T { + def main(args: Array[String]) { + assert(foo == 1) + assert(foo(this) == 2) + } +} diff --git a/test/junit/scala/lang/traits/BytecodeTest.scala b/test/junit/scala/lang/traits/BytecodeTest.scala index f47fc9c12724..ec8508df999e 100644 --- a/test/junit/scala/lang/traits/BytecodeTest.scala +++ b/test/junit/scala/lang/traits/BytecodeTest.scala @@ -9,6 +9,7 @@ import scala.collection.JavaConverters._ import scala.tools.asm.Opcodes import scala.tools.asm.Opcodes._ import scala.tools.asm.tree.ClassNode +import scala.tools.nsc.backend.jvm.opt.BytecodeUtils import scala.tools.partest.ASMConverters._ import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ @@ -18,8 +19,8 @@ class BytecodeTest extends BytecodeTesting { import compiler._ def checkForwarder(classes: Map[String, ClassNode], clsName: Symbol, target: String) = { - val List(f) = getMethods(classes(clsName.name), "f") - assertSameCode(f, List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, target, "f", "()I", false), Op(IRETURN))) + val f = getMethod(classes(clsName.name), "f") + assertSameCode(f, List(VarOp(ALOAD, 0), Invoke(INVOKESTATIC, target, "f$", s"(L$target;)I", true), Op(IRETURN))) } @Test @@ -88,7 +89,7 @@ class BytecodeTest extends BytecodeTesting { assertSameSummary(getMethod(c("C18"), "f"), List(BIPUSH, IRETURN)) checkForwarder(c, 'C19, "T7") assertSameCode(getMethod(c("C19"), "T7$$super$f"), List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, "C18", "f", "()I", false), Op(IRETURN))) - assertInvoke(getMethod(c("C20"), "clone"), "T8", "clone") // mixin forwarder + assertInvoke(getMethod(c("C20"), "clone"), "T8", "clone$") // mixin forwarder } @Test @@ -141,7 +142,7 @@ class BytecodeTest extends BytecodeTesting { def invocationReceivers(): Unit = { 
val List(c1, c2, t, u) = compileClasses(invocationReceiversTestCode.definitions("Object")) // mixin forwarder in C1 - assertSameCode(getMethod(c1, "clone"), List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, "T", "clone", "()Ljava/lang/Object;", false), Op(ARETURN))) + assertSameCode(getMethod(c1, "clone"), List(VarOp(ALOAD, 0), Invoke(INVOKESTATIC, "T", "clone$", "(LT;)Ljava/lang/Object;", true), Op(ARETURN))) assertInvoke(getMethod(c1, "f1"), "T", "clone") assertInvoke(getMethod(c1, "f2"), "T", "clone") assertInvoke(getMethod(c1, "f3"), "C1", "clone") diff --git a/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala index c9a958ee4f4d..841e850b491b 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala @@ -5,6 +5,7 @@ import org.junit.Test import scala.collection.JavaConverters import scala.collection.JavaConverters._ +import scala.reflect.internal.Flags import scala.tools.asm.Opcodes import scala.tools.asm.tree.ClassNode import scala.tools.testing.BytecodeTesting @@ -21,7 +22,7 @@ class DefaultMethodTest extends BytecodeTesting { /** Transforms a single tree. */ override def transform(tree: global.Tree): global.Tree = tree match { case dd @ DefDef(_, Foo, _, _, _, _) => - dd.symbol.setFlag(reflect.internal.Flags.JAVA_DEFAULTMETHOD) + dd.symbol.setFlag(Flags.JAVA_DEFAULTMETHOD).resetFlag(Flags.DEFERRED) copyDefDef(dd)(rhs = Literal(Constant(1)).setType(definitions.IntTpe)) case _ => super.transform(tree) } @@ -31,6 +32,4 @@ class DefaultMethodTest extends BytecodeTesting { assertTrue("default method should not be abstract", (foo.access & Opcodes.ACC_ABSTRACT) == 0) assertTrue("default method body emitted", foo.instructions.size() > 0) } - - } diff --git a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala index a28599cd9212..38285fbce1fb 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/DirectCompileTest.scala @@ -1,7 +1,9 @@ package scala.tools.nsc.backend.jvm +import java.nio.file.{Files, Paths} + import org.junit.Assert._ -import org.junit.Test +import org.junit.{Ignore, Test} import org.junit.runner.RunWith import org.junit.runners.JUnit4 diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala index a2513cacdc47..85df42e0695d 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerSeparateCompilationTest.scala @@ -97,7 +97,7 @@ class InlinerSeparateCompilationTest { """.stripMargin val List(a, t) = compileClassesSeparately(List(codeA, assembly), args) - assertNoInvoke(getMethod(t, "f")) - assertNoInvoke(getMethod(a, "n")) + assertNoInvoke(getMethod(t, "f$")) + assertNoInvoke(getMethod(a, "n$")) } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index 9173a1d1893e..f531ce932238 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -475,11 +475,9 @@ class InlinerTest extends BytecodeTesting { | def t2 = this.f |} """.stripMargin - val warns = Set( - "C::f()I is annotated @inline but cannot be inlined: 
the method is not final and may be overridden", - "T::f()I is annotated @inline but cannot be inlined: the method is not final and may be overridden") + val warn = "T::f()I is annotated @inline but cannot be inlined: the method is not final and may be overridden" var count = 0 - val List(c, t) = compile(code, allowMessage = i => {count += 1; warns.exists(i.msg contains _)}) + val List(c, t) = compile(code, allowMessage = i => {count += 1; i.msg contains warn}) assert(count == 2, count) assertInvoke(getMethod(c, "t1"), "T", "f") assertInvoke(getMethod(c, "t2"), "C", "f") @@ -520,7 +518,7 @@ class InlinerTest extends BytecodeTesting { val List(c, oMirror, oModule, t) = compile(code, allowMessage = i => {count += 1; i.msg contains warn}) assert(count == 1, count) - assertNoInvoke(getMethod(t, "f")) + assertNoInvoke(getMethod(t, "f$")) assertNoInvoke(getMethod(c, "t1")) assertNoInvoke(getMethod(c, "t2")) @@ -546,9 +544,9 @@ class InlinerTest extends BytecodeTesting { val List(assembly, c, t) = compile(code) - assertNoInvoke(getMethod(t, "f")) + assertNoInvoke(getMethod(t, "f$")) - assertNoInvoke(getMethod(assembly, "n")) + assertNoInvoke(getMethod(assembly, "n$")) assertNoInvoke(getMethod(c, "t1")) assertNoInvoke(getMethod(c, "t2")) @@ -624,8 +622,8 @@ class InlinerTest extends BytecodeTesting { val List(ca, cb, t1, t2a, t2b) = compile(code, allowMessage = i => {count += 1; i.msg contains warning}) assert(count == 4, count) // see comments, f is not inlined 4 times - assertNoInvoke(getMethod(t2a, "g2a")) - assertInvoke(getMethod(t2b, "g2b"), "T1", "f") + assertNoInvoke(getMethod(t2a, "g2a$")) + assertInvoke(getMethod(t2b, "g2b$"), "T1", "f") assertInvoke(getMethod(ca, "m1a"), "T1", "f") assertNoInvoke(getMethod(ca, "m2a")) // no invoke, see comment on def g2a @@ -684,8 +682,8 @@ class InlinerTest extends BytecodeTesting { |} """.stripMargin val List(c, t) = compile(code) - val t1 = getMethod(t, "t1") - val t2 = getMethod(t, "t2") + val t1 = getMethod(t, "t1$") + val t2 = getMethod(t, "t2$") val cast = TypeOp(CHECKCAST, "C") Set(t1, t2).foreach(m => assert(m.instructions.contains(cast), m.instructions)) } @@ -1574,4 +1572,19 @@ class InlinerTest extends BytecodeTesting { Label(0), LineNumber(9, Label(0)), VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "C", "fx", "()V", false), Label(4), LineNumber(10, Label(4)), Op(ICONST_1), Op(IRETURN), Label(8))) } + + @Test + def traitHO(): Unit = { + val code = + """trait T { + | def foreach(f: Int => Unit): Unit = f(1) + |} + |final class C extends T { + | def cons(x: Int): Unit = () + | def t1 = foreach(cons) + |} + """.stripMargin + val List(c, t) = compile(code) + assertNoIndy(getMethod(c, "t1")) + } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala index 4791a29bfbb7..54f4c805c1d1 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala @@ -31,6 +31,14 @@ class ScalaInlineInfoTest extends BytecodeTesting { r.toString } + def assertSameMethods(c: ClassNode, nameAndSigs: Set[String]): Unit = { + val r = new StringBuilder + val inClass = c.methods.iterator.asScala.map(m => m.name + m.desc).toSet + for (m <- inClass.diff(nameAndSigs)) r.append(s"method in classfile found, but no inline info: $m") + for (m <- nameAndSigs.diff(inClass)) r.append(s"inline info found, but no method in classfile: $m") + assert(r.isEmpty, r.toString) + } + @Test def 
traitMembersInlineInfo(): Unit = { val code = @@ -79,26 +87,32 @@ class ScalaInlineInfoTest extends BytecodeTesting { ("T$$super$toString()Ljava/lang/String;", MethodInlineInfo(true ,false,false)), ("T$_setter_$x1_$eq(I)V", MethodInlineInfo(false,false,false)), ("f1()I", MethodInlineInfo(false,false,false)), - ("f2()I", MethodInlineInfo(true, false,false)), + ("f1$(LT;)I", MethodInlineInfo(true ,false,false)), + ("f2()I", MethodInlineInfo(true ,false,false)), // no static impl method for private method f2 ("f3()I", MethodInlineInfo(false,false,false)), + ("f3$(LT;)I", MethodInlineInfo(true ,false,false)), ("f4()Ljava/lang/String;", MethodInlineInfo(false,true, false)), + ("f4$(LT;)Ljava/lang/String;", MethodInlineInfo(true ,true, false)), ("f5()I", MethodInlineInfo(true ,false,false)), - ("f6()I", MethodInlineInfo(false,false,true )), + ("f5$(LT;)I", MethodInlineInfo(true ,false,false)), + ("f6()I", MethodInlineInfo(false,false,true )), // no static impl method for abstract method f6 ("x1()I", MethodInlineInfo(false,false,false)), ("y2()I", MethodInlineInfo(false,false,false)), ("y2_$eq(I)V", MethodInlineInfo(false,false,false)), ("x3()I", MethodInlineInfo(false,false,false)), ("x3_$eq(I)V", MethodInlineInfo(false,false,false)), ("x4()I", MethodInlineInfo(false,false,false)), + ("x4$(LT;)I", MethodInlineInfo(true ,false,false)), ("x5()I", MethodInlineInfo(true, false,false)), ("L$lzycompute$1(Lscala/runtime/VolatileObjectRef;)LT$L$2$;", MethodInlineInfo(true, false,false)), ("L$1(Lscala/runtime/VolatileObjectRef;)LT$L$2$;", MethodInlineInfo(true ,false,false)), ("nest$1()I", MethodInlineInfo(true, false,false)), - ("$init$()V", MethodInlineInfo(false,false,false))), + ("$init$(LT;)V", MethodInlineInfo(true,false,false))), None // warning ) assert(infoT == expectT, mapDiff(expectT.methodInfos, infoT.methodInfos) + infoT) + assertSameMethods(t, expectT.methodInfos.keySet) val infoC = inlineInfo(c) val expectC = InlineInfo(false, None, Map( @@ -119,6 +133,7 @@ class ScalaInlineInfoTest extends BytecodeTesting { None) assert(infoC == expectC, mapDiff(expectC.methodInfos, infoC.methodInfos) + infoC) + assertSameMethods(c, expectC.methodInfos.keySet) } @Test @@ -156,7 +171,6 @@ class ScalaInlineInfoTest extends BytecodeTesting { ("F",None), ("T",Some("h(Ljava/lang/String;)I")), ("U",None))) - } @Test @@ -169,5 +183,6 @@ class ScalaInlineInfoTest extends BytecodeTesting { "O$lzycompute()LC$O$;" -> MethodInlineInfo(true,false,false), "O()LC$O$;" -> MethodInlineInfo(true,false,false)) assert(infoC.methodInfos == expected, mapDiff(infoC.methodInfos, expected)) + assertSameMethods(c, expected.keySet) } } diff --git a/test/junit/scala/tools/testing/BytecodeTesting.scala b/test/junit/scala/tools/testing/BytecodeTesting.scala index 4ddb6580df9e..c0fdb8010f84 100644 --- a/test/junit/scala/tools/testing/BytecodeTesting.scala +++ b/test/junit/scala/tools/testing/BytecodeTesting.scala @@ -12,6 +12,7 @@ import scala.tools.asm.tree.{AbstractInsnNode, ClassNode, MethodNode} import scala.tools.cmd.CommandLineParser import scala.tools.nsc.backend.jvm.AsmUtils import scala.tools.nsc.backend.jvm.AsmUtils._ +import scala.tools.nsc.backend.jvm.opt.BytecodeUtils import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.StoreReporter import scala.tools.nsc.{Global, Settings} @@ -247,11 +248,19 @@ object BytecodeTesting { def getAsmMethod(c: ClassNode, name: String): MethodNode = { val methods = getAsmMethods(c, name) + def fail() = { + val allNames = getAsmMethods(c, _ => true).map(_.name) + throw new 
AssertionFailedError(s"Could not find method named $name among ${allNames}") + } methods match { case List(m) => m - case ms => - val allNames = getAsmMethods(c, _ => true).map(_.name) - throw new AssertionFailedError(s"Could not find method named $name among ${allNames}") + case ms @ List(m1, m2) if BytecodeUtils.isInterface(c) => + val (statics, nonStatics) = ms.partition(BytecodeUtils.isStaticMethod) + (statics, nonStatics) match { + case (List(staticMethod), List(_)) => m1 // prefer the static method of the pair if methods in traits + case _ => fail() + } + case ms => fail() } } From 9901daf6dcd78fb0a5b359d1c9549114ce23aa0c Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 28 Jun 2016 09:54:30 -0700 Subject: [PATCH 0148/2793] Bootstrap skips scalacheck build step / partest dep Keeping diff minimal since this will need to be reverted once 2.12.0 is final. --- scripts/jobs/integrate/bootstrap | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index 76673b4f32c5..7716dc9eb1c1 100644 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -247,7 +247,7 @@ buildPartest() { else update scala scala-partest "$PARTEST_REF" && gfxd doc="$(docTask $PARTEST_BUILT)" - sbtBuild 'set version :="'$PARTEST_VER'"' 'set VersionKeys.scalaXmlVersion := "'$XML_VER'"' 'set VersionKeys.scalaCheckVersion := "'$SCALACHECK_VER'"' $clean "$doc" test "${buildTasks[@]}" + sbtBuild 'set version :="'$PARTEST_VER'"' 'set VersionKeys.scalaXmlVersion := "'$XML_VER'"' $clean "$doc" test "${buildTasks[@]}" PARTEST_BUILT="yes" fi } @@ -282,7 +282,7 @@ buildModules() { buildXML buildParsers buildSwing - buildScalacheck + # buildScalacheck buildPartest } @@ -424,7 +424,7 @@ deriveModuleVersions() { echo "Module versions (versioning strategy: $moduleVersioning):" echo "PARSERS = $PARSERS_VER at $PARSERS_REF" echo "PARTEST = $PARTEST_VER at $PARTEST_REF" - echo "SCALACHECK = $SCALACHECK_VER at $SCALACHECK_REF" + # echo "SCALACHECK = $SCALACHECK_VER at $SCALACHECK_REF" echo "SWING = $SWING_VER at $SWING_REF" echo "XML = $XML_VER at $XML_REF" @@ -444,7 +444,7 @@ removeExistingBuilds() { local storageApiUrl=`echo $releaseTempRepoUrl | sed 's/\(scala-release-temp\)/api\/storage\/\1/'` local scalaLangModules=`curl -s $storageApiUrl/org/scala-lang | jq -r '.children | .[] | "org/scala-lang" + .uri' | grep -v actors-migration` - for module in "org/scalacheck" $scalaLangModules; do + for module in $scalaLangModules; do local artifacts=`curl -s $storageApiUrl/$module | jq -r ".children | .[] | select(.uri | contains(\"$SCALA_VER\")) | .uri"` for artifact in $artifacts; do echo "Deleting $releaseTempRepoUrl$module$artifact" @@ -464,7 +464,7 @@ constructUpdatedModuleVersions() { updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-xml.version.number=$XML_VER") updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dpartest.version.number=$PARTEST_VER") - updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscalacheck.version.number=$SCALACHECK_VER") + # updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscalacheck.version.number=$SCALACHECK_VER") # allow overriding the jline version using a jenkins build parameter if [ ! 
-z "$JLINE_VER" ] ; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Djline.version=$JLINE_VER"); fi From d8c862b225cc7936e475419abaabb07226fea568 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 28 Jun 2016 18:01:14 -0700 Subject: [PATCH 0149/2793] Use 2.12.0-M4-9901daf as STARR (see #5152) This commit switches to using 2.12.0-M3-dc9effe as STARR, so that we can switch to the new trait encoding where each concrete trait member gets a a static member, which has the actual implementation (as well as serving as a target for for super calls using invokestatic), and a default member (forwards to the static member). Also bump partest to 1.0.17 -- the release that goes with the in-sourcing of scalacheck. Replace a few more -Yopt with -opt (for our new STARR) --- project/ScriptCommands.scala | 2 +- test/benchmarks/build.sbt | 2 +- versions.properties | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index e5ff38617eab..0bf43b18e872 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -13,7 +13,7 @@ object ScriptCommands { // Append build.timestamp to Artifactory URL to get consistent build numbers (see https://github.com/sbt/sbt/issues/2088): publishTo in Global := Some("scala-pr" at url.replaceAll("/$", "") + ";build.timestamp=" + System.currentTimeMillis), publishArtifact in (Compile, packageDoc) in ThisBuild := false, - scalacOptions in Compile in ThisBuild += "-Yopt:l:classpath", + scalacOptions in Compile in ThisBuild += "-opt:l:classpath", logLevel in ThisBuild := Level.Info, logLevel in update in ThisBuild := Level.Warn ), state) diff --git a/test/benchmarks/build.sbt b/test/benchmarks/build.sbt index 31cee701ad61..fb05fb2c99f1 100644 --- a/test/benchmarks/build.sbt +++ b/test/benchmarks/build.sbt @@ -1,6 +1,6 @@ scalaHome := Some(file("../../build/pack")) scalaVersion := "2.12.0-dev" -scalacOptions ++= Seq("-feature", "-Yopt:l:classpath") +scalacOptions ++= Seq("-feature", "-opt:l:classpath") lazy val root = (project in file(".")). enablePlugins(JmhPlugin). diff --git a/versions.properties b/versions.properties index 3b8077ab885b..bf7c7d2faaad 100644 --- a/versions.properties +++ b/versions.properties @@ -8,7 +8,7 @@ # The scala version used for bootstrapping. This has no impact on the final classfiles: # there are two stages (locker and quick), so compiler and library are always built # with themselves. Stability is ensured by building a third stage (strap). -starr.version=2.12.0-M4 +starr.version=2.12.0-M4-9901daf # These are the versions of the modules that go with this release. # These properties are used during PR validation and in dbuild builds. @@ -19,7 +19,7 @@ starr.version=2.12.0-M4 # - After 2.x.0 is released, the binary version is 2.x. # - During milestones and RCs, modules are cross-built against the full version. # So the value is the full version (e.g. 2.12.0-M2). -scala.binary.version=2.12.0-M4 +scala.binary.version=2.12.0-M4-9901daf # external modules shipped with distribution, as specified by scala-library-all's pom scala-xml.version.number=1.0.5 @@ -30,7 +30,7 @@ jline.version=2.14.1 scala-asm.version=5.0.4-scala-3 # external modules, used internally (not shipped) -partest.version.number=1.0.16 +partest.version.number=1.0.17 # We've embedded these sources in partest-extras for now. After 2.12.0 is released # we can switch to a public release. 
# scalacheck.version.number=1.11.6 From de59237938abf34c8a22f57bb99a27f1837c6333 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Wed, 29 Jun 2016 15:23:12 -0700 Subject: [PATCH 0150/2793] Bump STARR to 2.12.0-M5. --- versions.properties | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/versions.properties b/versions.properties index bf7c7d2faaad..4d24e0d598af 100644 --- a/versions.properties +++ b/versions.properties @@ -8,7 +8,7 @@ # The scala version used for bootstrapping. This has no impact on the final classfiles: # there are two stages (locker and quick), so compiler and library are always built # with themselves. Stability is ensured by building a third stage (strap). -starr.version=2.12.0-M4-9901daf +starr.version=2.12.0-M5 # These are the versions of the modules that go with this release. # These properties are used during PR validation and in dbuild builds. @@ -19,7 +19,7 @@ starr.version=2.12.0-M4-9901daf # - After 2.x.0 is released, the binary version is 2.x. # - During milestones and RCs, modules are cross-built against the full version. # So the value is the full version (e.g. 2.12.0-M2). -scala.binary.version=2.12.0-M4-9901daf +scala.binary.version=2.12.0-M5 # external modules shipped with distribution, as specified by scala-library-all's pom scala-xml.version.number=1.0.5 From e5886361006f1b315af13f6aa98cf54a2f7ebe0b Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 4 Jul 2016 16:45:01 +0200 Subject: [PATCH 0151/2793] SI-9515 closure elimination also for non-Scala-Function SAM types Also logged in as SD-162 The optimizer had conservative checks in place to perform closure elimination only for Scala Function types. We can eliminate IndyLambda instructions for any functional interface. LambdaMetaFactory only constructs lambda objects for interface types, which don't have any side-effects on construction - they don't have a constructor. --- .../tools/nsc/backend/jvm/CoreBTypes.scala | 24 ------------------- .../backend/jvm/analysis/BackendUtils.scala | 3 --- .../tools/nsc/backend/jvm/opt/CallGraph.scala | 2 +- .../tools/nsc/backend/jvm/opt/CopyProp.scala | 14 ++++------- .../backend/jvm/opt/MethodLevelOptsTest.scala | 20 ++++++++++++++++ 5 files changed, 25 insertions(+), 38 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala index 1feca56923f4..d65380aa1fc7 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala @@ -217,26 +217,6 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) { nonOverloadedConstructors(tupleClassSymbols) } - // enumeration of specialized classes is temporary, while we still use the java-defined JFunctionN. - // once we switch to ordinary FunctionN, we can use specializedSubclasses just like for tuples. 
- private def specializedJFunctionSymbols(base: String): Seq[Symbol] = { - def primitives = Seq("B", "S", "I", "J", "C", "F", "D", "Z", "V") - def ijfd = Iterator("I", "J", "F", "D") - def ijfdzv = Iterator("I", "J", "F", "D", "Z", "V") - def ijd = Iterator("I", "J", "D") - val classNames = { - primitives.map(base + "0$mc" + _ + "$sp") // Function0 - } ++ { - // return type specializations appear first in the name string (alphabetical sorting) - for (r <- ijfdzv; a <- ijfd) yield base + "1$mc" + r + a + "$sp" // Function1 - } ++ { - for (r <- ijfdzv; a <- ijd; b <- ijd) yield base + "2$mc" + r + a + b + "$sp" // Function2 - } - classNames map getRequiredClass - } - - lazy val functionRefs: Set[InternalName] = (FunctionClass.seq ++ specializedJFunctionSymbols("scala.runtime.java8.JFunction")).map(classBTypeFromSymbol(_).internalName).toSet - lazy val typeOfArrayOp: Map[Int, BType] = { import scalaPrimitives._ Map( @@ -342,8 +322,6 @@ trait CoreBTypesProxyGlobalIndependent[BTS <: BTypes] { def srRefConstructors : Map[InternalName, MethodNameAndType] def tupleClassConstructors : Map[InternalName, MethodNameAndType] - def functionRefs: Set[InternalName] - def lambdaMetaFactoryBootstrapHandle : asm.Handle def lambdaDeserializeBootstrapHandle : asm.Handle } @@ -410,8 +388,6 @@ final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: def srRefConstructors : Map[InternalName, MethodNameAndType] = _coreBTypes.srRefConstructors def tupleClassConstructors : Map[InternalName, MethodNameAndType] = _coreBTypes.tupleClassConstructors - def functionRefs: Set[InternalName] = _coreBTypes.functionRefs - def srSymbolLiteral : ClassBType = _coreBTypes.srSymbolLiteral def srStructuralCallSite : ClassBType = _coreBTypes.srStructuralCallSite def srLambdaDeserialize : ClassBType = _coreBTypes.srLambdaDeserialize diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index 539435a32604..83615abc31bb 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -131,7 +131,6 @@ class BackendUtils[BT <: BTypes](val btypes: BT) { private val anonfunAdaptedName = """.*\$anonfun\$.*\$\d+\$adapted""".r def hasAdaptedImplMethod(closureInit: ClosureInstantiation): Boolean = { - isBuiltinFunctionType(Type.getReturnType(closureInit.lambdaMetaFactoryCall.indy.desc).getInternalName) && anonfunAdaptedName.pattern.matcher(closureInit.lambdaMetaFactoryCall.implMethod.getName).matches } @@ -256,8 +255,6 @@ class BackendUtils[BT <: BTypes](val btypes: BT) { } } - def isBuiltinFunctionType(internalName: InternalName): Boolean = functionRefs(internalName) - /** * Visit the class node and collect all referenced nested classes. 
*/ diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala index d6942d9ff99b..5248183337fa 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala @@ -430,7 +430,7 @@ class CallGraph[BT <: BTypes](val btypes: BT) { def unapply(insn: AbstractInsnNode): Option[(InvokeDynamicInsnNode, Type, Handle, Type)] = insn match { case indy: InvokeDynamicInsnNode if indy.bsm == metafactoryHandle || indy.bsm == altMetafactoryHandle => indy.bsmArgs match { - case Array(samMethodType: Type, implMethod: Handle, instantiatedMethodType: Type, xs@_*) => // xs binding because IntelliJ gets confused about _@_* + case Array(samMethodType: Type, implMethod: Handle, instantiatedMethodType: Type, _@_*) => // LambdaMetaFactory performs a number of automatic adaptations when invoking the lambda // implementation method (casting, boxing, unboxing, and primitive widening, see Javadoc). // diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala index 4163d62df77f..b05669ce899d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala @@ -295,18 +295,12 @@ class CopyProp[BT <: BTypes](val btypes: BT) { } /** - * Eliminate the closure value produced by `indy`. If the SAM type is known to construct - * without side-effects (e.g. scala/FunctionN), the `indy` and its inputs - * are eliminated, otherwise a POP is inserted. + * Eliminate LMF `indy` and its inputs. */ def handleClosureInst(indy: InvokeDynamicInsnNode): Unit = { - if (isBuiltinFunctionType(Type.getReturnType(indy.desc).getInternalName)) { - toRemove += indy - callGraph.removeClosureInstantiation(indy, method) - handleInputs(indy, Type.getArgumentTypes(indy.desc).length) - } else { - toInsertAfter(indy) = getPop(1) - } + toRemove += indy + callGraph.removeClosureInstantiation(indy, method) + handleInputs(indy, Type.getArgumentTypes(indy.desc).length) } def runQueue(): Unit = while (queue.nonEmpty) { diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala index 938bc7b84686..2c697bfe5079 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOptsTest.scala @@ -750,4 +750,24 @@ class MethodLevelOptsTest extends BytecodeTesting { -1, LDC, ASTORE, -1, ALOAD, ARETURN)) } + + @Test + def elimSamLambda(): Unit = { + val code = + """class C { + | def t1(x: Int) = { + | val fun: java.util.function.IntFunction[Int] = y => y + 1 + | fun(x) + | } + | def t2(x: Int) = { + | val fun: T = i => i + 1 + | fun.f(x) + | } + |} + |trait T { def f(x: Int): Int } + """.stripMargin + val List(c, t) = compileClasses(code) + assertSameSummary(getMethod(c, "t1"), List(ILOAD, "$anonfun$t1$1", IRETURN)) + assertSameSummary(getMethod(c, "t2"), List(ILOAD, "$anonfun$t2$1", IRETURN)) + } } From 1c1abd1cbb41a1e03af215eea566bd9202e1079a Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Sun, 12 Jun 2016 15:36:54 +0100 Subject: [PATCH 0152/2793] Fixed a typo in Predef --- src/library/scala/Predef.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index 5f1a6b0bbbd5..8de9754b50c2 100644 --- 
a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -71,7 +71,7 @@ import scala.io.StdIn * * @groupname assertions Assertions * @groupprio assertions 20 - * @groupdesc assertions These methods support program verfication and runtime correctness. + * @groupdesc assertions These methods support program verification and runtime correctness. * * @groupname console-output Console Output * @groupprio console-output 30 From 27f30053c7247bacfb099dad843e5d4c6cee7816 Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Wed, 6 Jul 2016 12:42:12 +0100 Subject: [PATCH 0153/2793] Group math package functions Groups - Mathematical Constants - Minimum and Maximum - Rounding - Exponential and Logarithmic - Trigonometric - Angular Measurement Conversion - Hyperbolic - Absolute Values - Signs - Root Extraction - Polar Coordinates - Unit of Least Precision Other changes, - Dropped use of `double` for `Double` in all cases - Grouped some methods in the source - Extended notes about exception to method forwarding - Minor method documentation enhancements IEEERemainder is in the Rounding group since it is related to rounding and did not justify a new group. --- src/library/scala/math/package.scala | 253 ++++++++++++++++++++------- 1 file changed, 189 insertions(+), 64 deletions(-) diff --git a/src/library/scala/math/package.scala b/src/library/scala/math/package.scala index 0e39af2febd9..546efef114f1 100644 --- a/src/library/scala/math/package.scala +++ b/src/library/scala/math/package.scala @@ -11,28 +11,90 @@ package scala /** The package object `scala.math` contains methods for performing basic * numeric operations such as elementary exponential, logarithmic, root and * trigonometric functions. + * + * All methods forward to [[java.lang.Math]] unless otherwise noted. + * + * @see [[java.lang.Math]] + * + * @groupname math-const Mathematical Constants + * @groupprio math-const 10 + * + * @groupname minmax Minimum and Maximum + * @groupdesc minmax Find the min or max of two numbers. Note: [[scala.collection.TraversableOnce]] has + * min and max methods which determine the min or max of a collection. + * @groupprio minmax 20 + * + * @groupname rounding Rounding + * @groupprio rounding 30 + * + * @groupname explog Exponential and Logarithmic + * @groupprio explog 40 + * + * @groupname trig Trigonometric + * @groupdesc trig Arguments in radians + * @groupprio trig 50 + * + * @groupname angle-conversion Angular Measurement Conversion + * @groupprio angle-conversion 60 + * + * @groupname hyperbolic Hyperbolic + * @groupprio hyperbolic 70 + * + * @groupname abs Absolute Values + * @groupdesc abs Determine the magnitude of a value by discarding the sign. Results are >= 0. + * @groupprio abs 80 + * + * @groupname signum Signs + * @groupdesc signum Extract the sign of a value. Results are -1, 0 or 1. + * Note that these are not pure forwarders to the java versions. + * In particular, the return type of java.lang.Long.signum is Int, + * but here it is widened to Long so that each overloaded variant + * will return the same numeric type it is passed.
+ * @groupprio signum 90 + * + * @groupname root-extraction Root Extraction + * @groupprio root-extraction 100 + * + * @groupname polar-coords Polar Coordinates + * @groupprio polar-coords 110 + * + * @groupname ulp Unit of Least Precision + * @groupprio ulp 120 + * + * @groupname randomisation Pseudo Random Number Generation + * @groupprio randomisation 130 */ package object math { - /** The `double` value that is closer than any other to `e`, the base of + /** The `Double` value that is closer than any other to `e`, the base of * the natural logarithms. + * @group math-const */ @inline final val E = java.lang.Math.E - /** The `double` value that is closer than any other to `pi`, the ratio of + /** The `Double` value that is closer than any other to `pi`, the ratio of * the circumference of a circle to its diameter. + * @group math-const */ @inline final val Pi = java.lang.Math.PI - /** Returns a `double` value with a positive sign, greater than or equal + /** Returns a `Double` value with a positive sign, greater than or equal * to `0.0` and less than `1.0`. + * + * @group randomisation */ def random(): Double = java.lang.Math.random() + /** @group trig */ def sin(x: Double): Double = java.lang.Math.sin(x) + /** @group trig */ def cos(x: Double): Double = java.lang.Math.cos(x) + /** @group trig */ def tan(x: Double): Double = java.lang.Math.tan(x) + /** @group trig */ def asin(x: Double): Double = java.lang.Math.asin(x) + /** @group trig */ def acos(x: Double): Double = java.lang.Math.acos(x) + /** @group trig */ def atan(x: Double): Double = java.lang.Math.atan(x) /** Converts an angle measured in degrees to an approximately equivalent @@ -40,6 +102,7 @@ package object math { * * @param x an angle, in degrees * @return the measurement of the angle `x` in radians. + * @group angle-conversion */ def toRadians(x: Double): Double = java.lang.Math.toRadians(x) @@ -48,44 +111,10 @@ package object math { * * @param x angle, in radians * @return the measurement of the angle `x` in degrees. + * @group angle-conversion */ def toDegrees(x: Double): Double = java.lang.Math.toDegrees(x) - /** Returns Euler's number `e` raised to the power of a `double` value. - * - * @param x the exponent to raise `e` to. - * @return the value `e^a^`, where `e` is the base of the natural - * logarithms. - */ - def exp(x: Double): Double = java.lang.Math.exp(x) - - /** Returns the natural logarithm of a `double` value. - * - * @param x the number to take the natural logarithm of - * @return the value `logₑ(x)` where `e` is Eulers number - */ - def log(x: Double): Double = java.lang.Math.log(x) - - /** Returns the square root of a `double` value. - * - * @param x the number to take the square root of - * @return the value √x - */ - def sqrt(x: Double): Double = java.lang.Math.sqrt(x) - def IEEEremainder(x: Double, y: Double): Double = java.lang.Math.IEEEremainder(x, y) - - def ceil(x: Double): Double = java.lang.Math.ceil(x) - def floor(x: Double): Double = java.lang.Math.floor(x) - - /** Returns the `double` value that is closest in value to the - * argument and is equal to a mathematical integer. - * - * @param x a `double` value - * @return the closest floating-point value to a that is equal to a - * mathematical integer. - */ - def rint(x: Double): Double = java.lang.Math.rint(x) - /** Converts rectangular coordinates `(x, y)` to polar `(r, theta)`. 
* * @param x the ordinate coordinate @@ -93,19 +122,44 @@ package object math { * @return the ''theta'' component of the point `(r, theta)` in polar * coordinates that corresponds to the point `(x, y)` in * Cartesian coordinates. + * @group polar-coords */ def atan2(y: Double, x: Double): Double = java.lang.Math.atan2(y, x) - /** Returns the value of the first argument raised to the power of the - * second argument. + /** Returns the square root of the sum of the squares of both given `Double` + * values without intermediate underflow or overflow. + * + * The ''r'' component of the point `(r, theta)` in polar + * coordinates that corresponds to the point `(x, y)` in + * Cartesian coordinates. + * @group polar-coords + */ + def hypot(x: Double, y: Double): Double = java.lang.Math.hypot(x, y) + + // ----------------------------------------------------------------------- + // rounding functions + // ----------------------------------------------------------------------- + + /** @group rounding */ + def ceil(x: Double): Double = java.lang.Math.ceil(x) + /** @group rounding */ + def floor(x: Double): Double = java.lang.Math.floor(x) + + /** Returns the `Double` value that is closest in value to the + * argument and is equal to a mathematical integer. + * + * @param x a `Double` value + * @return the closest floating-point value to a that is equal to a + * mathematical integer. + * @group rounding + */ + def rint(x: Double): Double = java.lang.Math.rint(x) + + /** There is no reason to round a `Long`, but this method prevents unintended conversion to `Float` followed by rounding to `Int`. * - * @param x the base. - * @param y the exponent. - * @return the value `x^y^`. + * @note Does not forward to [[java.lang.Math]] + * @group rounding */ - def pow(x: Double, y: Double): Double = java.lang.Math.pow(x, y) - - /** There is no reason to round a `Long`, but this method prevents unintended conversion to `Float` followed by rounding to `Int`. */ @deprecated("This is an integer type; there is no reason to round it. Perhaps you meant to call this with a floating-point value?", "2.11.0") def round(x: Long): Long = x @@ -113,6 +167,7 @@ package object math { * * @param x a floating-point value to be rounded to a `Int`. * @return the value of the argument rounded to the nearest `Int` value. + * @group rounding */ def round(x: Float): Int = java.lang.Math.round(x) @@ -120,83 +175,153 @@ package object math { * * @param x a floating-point value to be rounded to a `Long`. * @return the value of the argument rounded to the nearest`long` value. 
+ * @group rounding */ def round(x: Double): Long = java.lang.Math.round(x) + /** @group abs */ def abs(x: Int): Int = java.lang.Math.abs(x) + /** @group abs */ def abs(x: Long): Long = java.lang.Math.abs(x) + /** @group abs */ def abs(x: Float): Float = java.lang.Math.abs(x) + /** @group abs */ def abs(x: Double): Double = java.lang.Math.abs(x) + /** @group minmax */ def max(x: Int, y: Int): Int = java.lang.Math.max(x, y) + /** @group minmax */ def max(x: Long, y: Long): Long = java.lang.Math.max(x, y) + /** @group minmax */ def max(x: Float, y: Float): Float = java.lang.Math.max(x, y) + /** @group minmax */ def max(x: Double, y: Double): Double = java.lang.Math.max(x, y) + /** @group minmax */ def min(x: Int, y: Int): Int = java.lang.Math.min(x, y) + /** @group minmax */ def min(x: Long, y: Long): Long = java.lang.Math.min(x, y) + /** @group minmax */ def min(x: Float, y: Float): Float = java.lang.Math.min(x, y) + /** @group minmax */ def min(x: Double, y: Double): Double = java.lang.Math.min(x, y) - /** Note that these are not pure forwarders to the java versions. - * In particular, the return type of java.lang.Long.signum is Int, - * but here it is widened to Long so that each overloaded variant - * will return the same numeric type it is passed. - */ + /** @group signum + * @note Forwards to [[java.lang.Integer]] + */ def signum(x: Int): Int = java.lang.Integer.signum(x) + /** @group signum + * @note Forwards to [[java.lang.Long]] + */ def signum(x: Long): Long = java.lang.Long.signum(x) + /** @group signum */ def signum(x: Float): Float = java.lang.Math.signum(x) + /** @group signum */ def signum(x: Double): Double = java.lang.Math.signum(x) // ----------------------------------------------------------------------- // root functions // ----------------------------------------------------------------------- - /** Returns the cube root of the given `Double` value. */ + /** Returns the square root of a `Double` value. + * + * @param x the number to take the square root of + * @return the value √x + * @group root-extraction + */ + def sqrt(x: Double): Double = java.lang.Math.sqrt(x) + + /** Returns the cube root of the given `Double` value. + * + * @param x the number to take the cube root of + * @return the value ∛x + * @group root-extraction + */ def cbrt(x: Double): Double = java.lang.Math.cbrt(x) // ----------------------------------------------------------------------- // exponential functions // ----------------------------------------------------------------------- - /** Returns `exp(x) - 1`. */ + /** Returns the value of the first argument raised to the power of the + * second argument. + * + * @param x the base. + * @param y the exponent. + * @return the value `x^y^`. + * @group explog + */ + def pow(x: Double, y: Double): Double = java.lang.Math.pow(x, y) + + /** Returns Euler's number `e` raised to the power of a `Double` value. + * + * @param x the exponent to raise `e` to. + * @return the value `e^a^`, where `e` is the base of the natural + * logarithms. + * @group explog + */ + def exp(x: Double): Double = java.lang.Math.exp(x) + + /** Returns `exp(x) - 1`. + * @group explog + */ def expm1(x: Double): Double = java.lang.Math.expm1(x) // ----------------------------------------------------------------------- // logarithmic functions // ----------------------------------------------------------------------- - /** Returns the natural logarithm of the sum of the given `Double` value and 1. */ + /** Returns the natural logarithm of a `Double` value. 
+ * + * @param x the number to take the natural logarithm of + * @return the value `logₑ(x)` where `e` is Eulers number + * @group explog + */ + def log(x: Double): Double = java.lang.Math.log(x) + + /** Returns the natural logarithm of the sum of the given `Double` value and 1. + * @group explog + */ def log1p(x: Double): Double = java.lang.Math.log1p(x) - /** Returns the base 10 logarithm of the given `Double` value. */ + /** Returns the base 10 logarithm of the given `Double` value. + * @group explog + */ def log10(x: Double): Double = java.lang.Math.log10(x) // ----------------------------------------------------------------------- // trigonometric functions // ----------------------------------------------------------------------- - /** Returns the hyperbolic sine of the given `Double` value. */ + /** Returns the hyperbolic sine of the given `Double` value. + * @group hyperbolic + */ def sinh(x: Double): Double = java.lang.Math.sinh(x) - /** Returns the hyperbolic cosine of the given `Double` value. */ + /** Returns the hyperbolic cosine of the given `Double` value. + * @group hyperbolic + */ def cosh(x: Double): Double = java.lang.Math.cosh(x) - /** Returns the hyperbolic tangent of the given `Double` value. */ + /** Returns the hyperbolic tangent of the given `Double` value. + * @group hyperbolic + */ def tanh(x: Double):Double = java.lang.Math.tanh(x) // ----------------------------------------------------------------------- // miscellaneous functions // ----------------------------------------------------------------------- - /** Returns the square root of the sum of the squares of both given `Double` - * values without intermediate underflow or overflow. + /** Returns the size of an ulp of the given `Double` value. + * @group ulp */ - def hypot(x: Double, y: Double): Double = java.lang.Math.hypot(x, y) - - /** Returns the size of an ulp of the given `Double` value. */ def ulp(x: Double): Double = java.lang.Math.ulp(x) - /** Returns the size of an ulp of the given `Float` value. */ + /** Returns the size of an ulp of the given `Float` value. + * @group ulp + */ def ulp(x: Float): Float = java.lang.Math.ulp(x) + + /** @group rounding */ + def IEEEremainder(x: Double, y: Double): Double = java.lang.Math.IEEEremainder(x, y) } From 78c3bfd2ec49a26604cc2eedb445555efaac14fe Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 6 Jul 2016 15:44:51 +0200 Subject: [PATCH 0154/2793] SI-8561 named subclasses for known Manifest / ClassTag instances This helps keeping ClassTag serialization stable under accidental changes (like changing the order of definitions, which would change the name of the anonymous classes). 
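To make the motivation concrete, here is a minimal, self-contained sketch (illustration only, not part of the patch): the names `Tag`, `Tags`, `GenericTag`, `IntTagClass` and `TagRoundTrip` are invented for the example. An anonymous class is compiled under a positional name such as `Tags$$anon$1`, which shifts when definitions are added or reordered and so breaks previously serialized data, while an explicitly named class keeps the stream class descriptor fixed; `readResolve` additionally lets well-known singletons deserialize back to the canonical instance. The actual change below applies the same idea to `ClassTag` and `ManifestFactory`.

```
import java.io._

// Stand-in for a ClassTag-like, serializable type-class instance.
trait Tag[T] extends Serializable { def runtimeClass: Class[_] }

object Tags {
  // Fragile: this anonymous class is compiled under a positional name such as
  // Tags$$anon$1, so reordering definitions changes the serialized class name.
  val anonIntTag: Tag[Int] = new Tag[Int] { def runtimeClass = classOf[Int] }

  // Stable: an explicitly named (private) class keeps the same stream class
  // descriptor no matter how the surrounding code is rearranged.
  private class GenericTag[T](val runtimeClass: Class[_]) extends Tag[T]
  val genericIntTag: Tag[Int] = new GenericTag[Int](classOf[Int])

  // For well-known singleton instances, readResolve makes deserialization
  // hand back the canonical value instead of allocating a fresh copy.
  private class IntTagClass extends Tag[Int] {
    def runtimeClass = classOf[Int]
    private def readResolve(): Any = IntTag
  }
  val IntTag: Tag[Int] = new IntTagClass
}

object TagRoundTrip {
  // Serialize and deserialize a value in memory.
  def roundTrip[A <: AnyRef](a: A): A = {
    val buf = new ByteArrayOutputStream()
    val oos = new ObjectOutputStream(buf)
    oos.writeObject(a)
    oos.close()
    val ois = new ObjectInputStream(new ByteArrayInputStream(buf.toByteArray))
    ois.readObject().asInstanceOf[A]
  }

  def main(args: Array[String]): Unit = {
    println(roundTrip(Tags.genericIntTag).runtimeClass) // int
    println(roundTrip(Tags.IntTag) eq Tags.IntTag)      // true, via readResolve
  }
}
```

Running `TagRoundTrip` would print the primitive class `int` and then `true`, the latter because `readResolve` returns the pre-existing singleton rather than a fresh copy.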
--- src/library/scala/reflect/ClassTag.scala | 4 +- src/library/scala/reflect/Manifest.scala | 113 +++++++++++++---------- test/files/run/t8549.scala | 37 +++----- 3 files changed, 84 insertions(+), 70 deletions(-) diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala index 1811d3a00fda..3a300e059316 100644 --- a/src/library/scala/reflect/ClassTag.scala +++ b/src/library/scala/reflect/ClassTag.scala @@ -134,6 +134,8 @@ object ClassTag { val Nothing : ClassTag[scala.Nothing] = Manifest.Nothing val Null : ClassTag[scala.Null] = Manifest.Null + private class GenericClassTag[T](val runtimeClass: jClass[_]) extends ClassTag[T] + def apply[T](runtimeClass1: jClass[_]): ClassTag[T] = runtimeClass1 match { case java.lang.Byte.TYPE => ClassTag.Byte.asInstanceOf[ClassTag[T]] @@ -148,7 +150,7 @@ object ClassTag { case ObjectTYPE => ClassTag.Object.asInstanceOf[ClassTag[T]] case NothingTYPE => ClassTag.Nothing.asInstanceOf[ClassTag[T]] case NullTYPE => ClassTag.Null.asInstanceOf[ClassTag[T]] - case _ => new ClassTag[T]{ def runtimeClass = runtimeClass1 } + case _ => new GenericClassTag[T](runtimeClass1) } def unapply[T](ctag: ClassTag[T]): Option[Class[_]] = Some(ctag.runtimeClass) diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala index 369676c27398..9c38864194ee 100644 --- a/src/library/scala/reflect/Manifest.scala +++ b/src/library/scala/reflect/Manifest.scala @@ -9,7 +9,7 @@ package scala package reflect -import scala.collection.mutable.{ ArrayBuilder, WrappedArray } +import scala.collection.mutable.{ArrayBuilder, WrappedArray} /** A `Manifest[T]` is an opaque descriptor for type T. Its supported use * is to give access to the erasure of the type as a `Class` instance, as @@ -21,23 +21,22 @@ import scala.collection.mutable.{ ArrayBuilder, WrappedArray } * which are not yet adequately represented in manifests. * * Example usages: -{{{ - def arr[T] = new Array[T](0) // does not compile - def arr[T](implicit m: Manifest[T]) = new Array[T](0) // compiles - def arr[T: Manifest] = new Array[T](0) // shorthand for the preceding - - // Methods manifest, classManifest, and optManifest are in [[scala.Predef]]. - def isApproxSubType[T: Manifest, U: Manifest] = manifest[T] <:< manifest[U] - isApproxSubType[List[String], List[AnyRef]] // true - isApproxSubType[List[String], List[Int]] // false - - def methods[T: ClassManifest] = classManifest[T].erasure.getMethods - def retType[T: ClassManifest](name: String) = - methods[T] find (_.getName == name) map (_.getGenericReturnType) - - retType[Map[_, _]]("values") // Some(scala.collection.Iterable) -}}} + * {{{ + * def arr[T] = new Array[T](0) // does not compile + * def arr[T](implicit m: Manifest[T]) = new Array[T](0) // compiles + * def arr[T: Manifest] = new Array[T](0) // shorthand for the preceding * + * // Methods manifest, classManifest, and optManifest are in [[scala.Predef]]. 
+ * def isApproxSubType[T: Manifest, U: Manifest] = manifest[T] <:< manifest[U] + * isApproxSubType[List[String], List[AnyRef]] // true + * isApproxSubType[List[String], List[Int]] // false + * + * def methods[T: ClassManifest] = classManifest[T].erasure.getMethods + * def retType[T: ClassManifest](name: String) = + * methods[T] find (_.getName == name) map (_.getGenericReturnType) + * + * retType[Map[_, _]]("values") // Some(scala.collection.Iterable) + * }}} */ @scala.annotation.implicitNotFound(msg = "No Manifest available for ${T}.") // TODO undeprecated until Scala reflection becomes non-experimental @@ -88,71 +87,79 @@ object ManifestFactory { def valueManifests: List[AnyValManifest[_]] = List(Byte, Short, Char, Int, Long, Float, Double, Boolean, Unit) - val Byte: AnyValManifest[Byte] = new AnyValManifest[scala.Byte]("Byte") { + private class ByteManifest extends AnyValManifest[scala.Byte]("Byte") { def runtimeClass = java.lang.Byte.TYPE override def newArray(len: Int): Array[Byte] = new Array[Byte](len) override def newWrappedArray(len: Int): WrappedArray[Byte] = new WrappedArray.ofByte(new Array[Byte](len)) override def newArrayBuilder(): ArrayBuilder[Byte] = new ArrayBuilder.ofByte() private def readResolve(): Any = Manifest.Byte } + val Byte: AnyValManifest[Byte] = new ByteManifest - val Short: AnyValManifest[Short] = new AnyValManifest[scala.Short]("Short") { + private class ShortManifest extends AnyValManifest[scala.Short]("Short") { def runtimeClass = java.lang.Short.TYPE override def newArray(len: Int): Array[Short] = new Array[Short](len) override def newWrappedArray(len: Int): WrappedArray[Short] = new WrappedArray.ofShort(new Array[Short](len)) override def newArrayBuilder(): ArrayBuilder[Short] = new ArrayBuilder.ofShort() private def readResolve(): Any = Manifest.Short } + val Short: AnyValManifest[Short] = new ShortManifest - val Char: AnyValManifest[Char] = new AnyValManifest[scala.Char]("Char") { + private class CharManifest extends AnyValManifest[scala.Char]("Char") { def runtimeClass = java.lang.Character.TYPE override def newArray(len: Int): Array[Char] = new Array[Char](len) override def newWrappedArray(len: Int): WrappedArray[Char] = new WrappedArray.ofChar(new Array[Char](len)) override def newArrayBuilder(): ArrayBuilder[Char] = new ArrayBuilder.ofChar() private def readResolve(): Any = Manifest.Char } + val Char: AnyValManifest[Char] = new CharManifest - val Int: AnyValManifest[Int] = new AnyValManifest[scala.Int]("Int") { + private class IntManifest extends AnyValManifest[scala.Int]("Int") { def runtimeClass = java.lang.Integer.TYPE override def newArray(len: Int): Array[Int] = new Array[Int](len) override def newWrappedArray(len: Int): WrappedArray[Int] = new WrappedArray.ofInt(new Array[Int](len)) override def newArrayBuilder(): ArrayBuilder[Int] = new ArrayBuilder.ofInt() private def readResolve(): Any = Manifest.Int } + val Int: AnyValManifest[Int] = new IntManifest - val Long: AnyValManifest[Long] = new AnyValManifest[scala.Long]("Long") { + private class LongManifest extends AnyValManifest[scala.Long]("Long") { def runtimeClass = java.lang.Long.TYPE override def newArray(len: Int): Array[Long] = new Array[Long](len) override def newWrappedArray(len: Int): WrappedArray[Long] = new WrappedArray.ofLong(new Array[Long](len)) override def newArrayBuilder(): ArrayBuilder[Long] = new ArrayBuilder.ofLong() private def readResolve(): Any = Manifest.Long } + val Long: AnyValManifest[Long] = new LongManifest - val Float: AnyValManifest[Float] = new 
AnyValManifest[scala.Float]("Float") { + private class FloatManifest extends AnyValManifest[scala.Float]("Float") { def runtimeClass = java.lang.Float.TYPE override def newArray(len: Int): Array[Float] = new Array[Float](len) override def newWrappedArray(len: Int): WrappedArray[Float] = new WrappedArray.ofFloat(new Array[Float](len)) override def newArrayBuilder(): ArrayBuilder[Float] = new ArrayBuilder.ofFloat() private def readResolve(): Any = Manifest.Float } + val Float: AnyValManifest[Float] = new FloatManifest - val Double: AnyValManifest[Double] = new AnyValManifest[scala.Double]("Double") { + private class DoubleManifest extends AnyValManifest[scala.Double]("Double") { def runtimeClass = java.lang.Double.TYPE override def newArray(len: Int): Array[Double] = new Array[Double](len) override def newWrappedArray(len: Int): WrappedArray[Double] = new WrappedArray.ofDouble(new Array[Double](len)) override def newArrayBuilder(): ArrayBuilder[Double] = new ArrayBuilder.ofDouble() private def readResolve(): Any = Manifest.Double } + val Double: AnyValManifest[Double] = new DoubleManifest - val Boolean: AnyValManifest[Boolean] = new AnyValManifest[scala.Boolean]("Boolean") { + private class BooleanManifest extends AnyValManifest[scala.Boolean]("Boolean") { def runtimeClass = java.lang.Boolean.TYPE override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len) override def newWrappedArray(len: Int): WrappedArray[Boolean] = new WrappedArray.ofBoolean(new Array[Boolean](len)) override def newArrayBuilder(): ArrayBuilder[Boolean] = new ArrayBuilder.ofBoolean() private def readResolve(): Any = Manifest.Boolean } + val Boolean: AnyValManifest[Boolean] = new BooleanManifest - val Unit: AnyValManifest[Unit] = new AnyValManifest[scala.Unit]("Unit") { + private class UnitManifest extends AnyValManifest[scala.Unit]("Unit") { def runtimeClass = java.lang.Void.TYPE override def newArray(len: Int): Array[Unit] = new Array[Unit](len) override def newWrappedArray(len: Int): WrappedArray[Unit] = new WrappedArray.ofUnit(new Array[Unit](len)) @@ -162,43 +169,49 @@ object ManifestFactory { else super.arrayClass(tp) private def readResolve(): Any = Manifest.Unit } + val Unit: AnyValManifest[Unit] = new UnitManifest private val ObjectTYPE = classOf[java.lang.Object] private val NothingTYPE = classOf[scala.runtime.Nothing$] private val NullTYPE = classOf[scala.runtime.Null$] - val Any: Manifest[scala.Any] = new PhantomManifest[scala.Any](ObjectTYPE, "Any") { + private class AnyManifest extends PhantomManifest[scala.Any](ObjectTYPE, "Any") { override def newArray(len: Int) = new Array[scala.Any](len) override def <:<(that: ClassManifest[_]): Boolean = (that eq this) private def readResolve(): Any = Manifest.Any } + val Any: Manifest[scala.Any] = new AnyManifest - val Object: Manifest[java.lang.Object] = new PhantomManifest[java.lang.Object](ObjectTYPE, "Object") { + private class ObjectManifest extends PhantomManifest[java.lang.Object](ObjectTYPE, "Object") { override def newArray(len: Int) = new Array[java.lang.Object](len) override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) private def readResolve(): Any = Manifest.Object } + val Object: Manifest[java.lang.Object] = new ObjectManifest val AnyRef: Manifest[scala.AnyRef] = Object.asInstanceOf[Manifest[scala.AnyRef]] - val AnyVal: Manifest[scala.AnyVal] = new PhantomManifest[scala.AnyVal](ObjectTYPE, "AnyVal") { + private class AnyValPhantomManifest extends PhantomManifest[scala.AnyVal](ObjectTYPE, "AnyVal") { override def 
newArray(len: Int) = new Array[scala.AnyVal](len) override def <:<(that: ClassManifest[_]): Boolean = (that eq this) || (that eq Any) private def readResolve(): Any = Manifest.AnyVal } + val AnyVal: Manifest[scala.AnyVal] = new AnyValPhantomManifest - val Null: Manifest[scala.Null] = new PhantomManifest[scala.Null](NullTYPE, "Null") { + private class NullManifest extends PhantomManifest[scala.Null](NullTYPE, "Null") { override def newArray(len: Int) = new Array[scala.Null](len) override def <:<(that: ClassManifest[_]): Boolean = (that ne null) && (that ne Nothing) && !(that <:< AnyVal) private def readResolve(): Any = Manifest.Null } + val Null: Manifest[scala.Null] = new NullManifest - val Nothing: Manifest[scala.Nothing] = new PhantomManifest[scala.Nothing](NothingTYPE, "Nothing") { + private class NothingManifest extends PhantomManifest[scala.Nothing](NothingTYPE, "Nothing") { override def newArray(len: Int) = new Array[scala.Nothing](len) override def <:<(that: ClassManifest[_]): Boolean = (that ne null) private def readResolve(): Any = Manifest.Nothing } + val Nothing: Manifest[scala.Nothing] = new NothingManifest private class SingletonTypeManifest[T <: AnyRef](value: AnyRef) extends Manifest[T] { lazy val runtimeClass = value.getClass @@ -251,31 +264,37 @@ object ManifestFactory { def arrayType[T](arg: Manifest[_]): Manifest[Array[T]] = arg.asInstanceOf[Manifest[T]].arrayManifest + private class AbstractTypeManifest[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Seq[Manifest[_]]) extends Manifest[T] { + def runtimeClass = upperBound + override val typeArguments = args.toList + override def toString = prefix.toString+"#"+name+argString + } + /** Manifest for the abstract type `prefix # name`. `upperBound` is not * strictly necessary as it could be obtained by reflection. It was * added so that erasure can be calculated without reflection. */ def abstractType[T](prefix: Manifest[_], name: String, upperBound: Predef.Class[_], args: Manifest[_]*): Manifest[T] = - new Manifest[T] { - def runtimeClass = upperBound - override val typeArguments = args.toList - override def toString = prefix.toString+"#"+name+argString - } + new AbstractTypeManifest[T](prefix, name, upperBound, args) + + private class WildcardManifest[T](lowerBound: Manifest[_], upperBound: Manifest[_]) extends Manifest[T] { + def runtimeClass = upperBound.runtimeClass + override def toString = + "_" + + (if (lowerBound eq Nothing) "" else " >: "+lowerBound) + + (if (upperBound eq Nothing) "" else " <: "+upperBound) + } /** Manifest for the unknown type `_ >: L <: U` in an existential. */ def wildcardType[T](lowerBound: Manifest[_], upperBound: Manifest[_]): Manifest[T] = - new Manifest[T] { - def runtimeClass = upperBound.runtimeClass - override def toString = - "_" + - (if (lowerBound eq Nothing) "" else " >: "+lowerBound) + - (if (upperBound eq Nothing) "" else " <: "+upperBound) - } + new WildcardManifest[T](lowerBound, upperBound) + + private class IntersectionTypeManifest[T](parents: Seq[Manifest[_]]) extends Manifest[T] { + def runtimeClass = parents.head.runtimeClass + override def toString = parents.mkString(" with ") + } /** Manifest for the intersection type `parents_0 with ... with parents_n`. 
*/ def intersectionType[T](parents: Manifest[_]*): Manifest[T] = - new Manifest[T] { - def runtimeClass = parents.head.runtimeClass - override def toString = parents.mkString(" with ") - } + new IntersectionTypeManifest[T](parents) } diff --git a/test/files/run/t8549.scala b/test/files/run/t8549.scala index 1ce8933efb13..f8d6819e33b0 100644 --- a/test/files/run/t8549.scala +++ b/test/files/run/t8549.scala @@ -79,7 +79,7 @@ object Test extends App { } } - // Generated on 20160515-00:17:51 with Scala version 2.12.0-SNAPSHOT) + // Generated on 20160706-15:44:41 with Scala version 2.12.0-20160629-163201-6612ba0) overwrite.foreach(updateComment) check(Some(1))("rO0ABXNyAApzY2FsYS5Tb21lESLyaV6hi3QCAAFMAAF4dAASTGphdmEvbGFuZy9PYmplY3Q7eHIADHNjYWxhLk9wdGlvbv5pN/3bDmZ0AgAAeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAQ==") @@ -95,13 +95,10 @@ object Test extends App { import collection.{ mutable, immutable } class C - // TODO IMPLCLASS not stable across trait encoding change (removal of impl classes) - // java.io.InvalidClassException: scala.reflect.ClassTag$$anon$1; local class incompatible: stream classdesc serialVersionUID = -4937928798201944954, local class serialVersionUID = 4714485091895415501 - // Switch to using named, rather than anoymous classes, in the class tag implementation, or maybe use a `readResolve` / `writeReplace` approach. - // check(reflect.classTag[C])("rO0ABXNyAB5zY2FsYS5yZWZsZWN0LkNsYXNzVGFnJCRhbm9uJDG7ePPrmQBkhgIAAUwAD3J1bnRpbWVDbGFzczEkMXQAEUxqYXZhL2xhbmcvQ2xhc3M7eHB2cgAGVGVzdCRDAAAAAAAAAAAAAAB4cA==") - // check(reflect.classTag[Int])("rO0ABXNyACVzY2FsYS5yZWZsZWN0Lk1hbmlmZXN0RmFjdG9yeSQkYW5vbiQ5zfmiSVNjtVICAAB4cgAcc2NhbGEucmVmbGVjdC5BbnlWYWxNYW5pZmVzdAAAAAAAAAABAgABTAAIdG9TdHJpbmd0ABJMamF2YS9sYW5nL1N0cmluZzt4cHQAA0ludA==") - // check(reflect.classTag[String])("rO0ABXNyAB5zY2FsYS5yZWZsZWN0LkNsYXNzVGFnJCRhbm9uJDG7ePPrmQBkhgIAAUwAD3J1bnRpbWVDbGFzczEkMXQAEUxqYXZhL2xhbmcvQ2xhc3M7eHB2cgAQamF2YS5sYW5nLlN0cmluZ6DwpDh6O7NCAgAAeHA=") - // check(reflect.classTag[Object])("rO0ABXNyACVzY2FsYS5yZWZsZWN0Lk1hbmlmZXN0RmFjdG9yeSQkYW5vbiQymPrtq/Ci1gsCAAB4cgAtc2NhbGEucmVmbGVjdC5NYW5pZmVzdEZhY3RvcnkkUGhhbnRvbU1hbmlmZXN0rzigP7KRh/kCAAFMAAh0b1N0cmluZ3QAEkxqYXZhL2xhbmcvU3RyaW5nO3hyAC9zY2FsYS5yZWZsZWN0Lk1hbmlmZXN0RmFjdG9yeSRDbGFzc1R5cGVNYW5pZmVzdFq6NWvfTgYFAgADTAAGcHJlZml4dAAOTHNjYWxhL09wdGlvbjtMAAxydW50aW1lQ2xhc3N0ABFMamF2YS9sYW5nL0NsYXNzO0wADXR5cGVBcmd1bWVudHN0ACFMc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvTGlzdDt4cHNyAAtzY2FsYS5Ob25lJEZQJPZTypSsAgAAeHIADHNjYWxhLk9wdGlvbv5pN/3bDmZ0AgAAeHB2cgAQamF2YS5sYW5nLk9iamVjdAAAAAAAAAAAAAAAeHBzcgAyc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuTGlzdCRTZXJpYWxpemF0aW9uUHJveHkAAAAAAAAAAQMAAHhwc3IALHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3RTZXJpYWxpemVFbmQkilxjW/dTC20CAAB4cHh0AAZPYmplY3Q=") + check(reflect.classTag[C])("rO0ABXNyACZzY2FsYS5yZWZsZWN0LkNsYXNzVGFnJEdlbmVyaWNDbGFzc1RhZy5VPJBpc7h/AgABTAAMcnVudGltZUNsYXNzdAARTGphdmEvbGFuZy9DbGFzczt4cHZyAAZUZXN0JEMAAAAAAAAAAAAAAHhw") + check(reflect.classTag[Int])("rO0ABXNyAClzY2FsYS5yZWZsZWN0Lk1hbmlmZXN0RmFjdG9yeSRJbnRNYW5pZmVzdFbjh2PQL01qAgAAeHIAHHNjYWxhLnJlZmxlY3QuQW55VmFsTWFuaWZlc3QAAAAAAAAAAQIAAUwACHRvU3RyaW5ndAASTGphdmEvbGFuZy9TdHJpbmc7eHB0AANJbnQ=") + check(reflect.classTag[String])("rO0ABXNyACZzY2FsYS5yZWZsZWN0LkNsYXNzVGFnJEdlbmVyaWNDbGFzc1RhZy5VPJBpc7h/AgABTAAMcnVudGltZUNsYXNzdAARTGphdmEvbGFuZy9DbGFzczt4cHZyABBqYXZhLmxhbmcuU3RyaW5noPCkOHo7s0ICAAB4cA==") + 
check(reflect.classTag[Object])("rO0ABXNyACxzY2FsYS5yZWZsZWN0Lk1hbmlmZXN0RmFjdG9yeSRPYmplY3RNYW5pZmVzdIWY9dplxtUqAgAAeHIALXNjYWxhLnJlZmxlY3QuTWFuaWZlc3RGYWN0b3J5JFBoYW50b21NYW5pZmVzdK84oD+ykYf5AgABTAAIdG9TdHJpbmd0ABJMamF2YS9sYW5nL1N0cmluZzt4cgAvc2NhbGEucmVmbGVjdC5NYW5pZmVzdEZhY3RvcnkkQ2xhc3NUeXBlTWFuaWZlc3TQb2e0Lu/6HQIAA0wABnByZWZpeHQADkxzY2FsYS9PcHRpb247TAAMcnVudGltZUNsYXNzdAARTGphdmEvbGFuZy9DbGFzcztMAA10eXBlQXJndW1lbnRzdAAhTHNjYWxhL2NvbGxlY3Rpb24vaW1tdXRhYmxlL0xpc3Q7eHBzcgALc2NhbGEuTm9uZSRGUCT2U8qUrAIAAHhyAAxzY2FsYS5PcHRpb27+aTf92w5mdAIAAHhwdnIAEGphdmEubGFuZy5PYmplY3QAAAAAAAAAAAAAAHhwc3IAMnNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3QkU2VyaWFsaXphdGlvblByb3h5AAAAAAAAAAEDAAB4cHNyACxzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0U2VyaWFsaXplRW5kJIpcY1v3UwttAgAAeHB4dAAGT2JqZWN0") // TODO SI-8576 unstable under -Xcheckinit // check(Enum)( "rO0ABXNyAApUZXN0JEVudW0ketCIyQ8C23MCAAJMAAJWMXQAGUxzY2FsYS9FbnVtZXJhdGlvbiRWYWx1ZTtMAAJWMnQAF0xzY2FsYS9FbnVtZXJhdGlvbiRWYWw7eHIAEXNjYWxhLkVudW1lcmF0aW9udaDN3ZgOWY4CAAhJAAZuZXh0SWRJABtzY2FsYSRFbnVtZXJhdGlvbiQkYm90dG9tSWRJABhzY2FsYSRFbnVtZXJhdGlvbiQkdG9wSWRMABRWYWx1ZU9yZGVyaW5nJG1vZHVsZXQAIkxzY2FsYS9FbnVtZXJhdGlvbiRWYWx1ZU9yZGVyaW5nJDtMAA9WYWx1ZVNldCRtb2R1bGV0AB1Mc2NhbGEvRW51bWVyYXRpb24kVmFsdWVTZXQkO0wACG5leHROYW1ldAAbTHNjYWxhL2NvbGxlY3Rpb24vSXRlcmF0b3I7TAAXc2NhbGEkRW51bWVyYXRpb24kJG5tYXB0AB5Mc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL01hcDtMABdzY2FsYSRFbnVtZXJhdGlvbiQkdm1hcHEAfgAHeHAAAAArAAAAAAAAACtwcHBzcgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkhhc2hNYXAAAAAAAAAAAQMAAHhwdw0AAALuAAAAAAAAAAQAeHNxAH4ACXcNAAAC7gAAAAEAAAAEAHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAAqc3IAFXNjYWxhLkVudW1lcmF0aW9uJFZhbM9pZ6/J/O1PAgACSQAYc2NhbGEkRW51bWVyYXRpb24kVmFsJCRpTAAEbmFtZXQAEkxqYXZhL2xhbmcvU3RyaW5nO3hyABdzY2FsYS5FbnVtZXJhdGlvbiRWYWx1ZWJpfC/tIR1RAgACTAAGJG91dGVydAATTHNjYWxhL0VudW1lcmF0aW9uO0wAHHNjYWxhJEVudW1lcmF0aW9uJCRvdXRlckVudW1xAH4AEnhwcQB+AAhxAH4ACAAAACpweHNyABFUZXN0JEVudW0kJGFub24kMVlIjlmE1sXaAgAAeHEAfgARcQB+AAhxAH4ACHEAfgAT") @@ -119,8 +116,7 @@ object Test extends App { // TODO SI-8576 unstable under -Xcheckinit check(collection.convert.Wrappers)( 
"rO0ABXNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcA==") - // TODO IMPLCLASS java.io.InvalidClassException: scala.collection.immutable.Set$EmptySet$; local class incompatible: stream classdesc serialVersionUID = -1118802231467657162, local class serialVersionUID = -5214304379191661165 - // check(new 
collection.convert.Wrappers.SetWrapper(immutable.Set()))("rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkU2V0V3JhcHBlcgAAAAAAAAABAgACTAAGJG91dGVydAAjTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycztMADhzY2FsYSRjb2xsZWN0aW9uJGNvbnZlcnQkV3JhcHBlcnMkU2V0V3JhcHBlciQkdW5kZXJseWluZ3QAFkxzY2FsYS9jb2xsZWN0aW9uL1NldDt4cHNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcHNyAChzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkRW1wdHlTZXQk8Hk3TFN0uDYCAAB4cA==") + check(new 
collection.convert.Wrappers.SetWrapper(immutable.Set()))("rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkU2V0V3JhcHBlcgAAAAAAAAABAgACTAAGJG91dGVydAAjTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycztMADhzY2FsYSRjb2xsZWN0aW9uJGNvbnZlcnQkV3JhcHBlcnMkU2V0V3JhcHBlciQkdW5kZXJseWluZ3QAFkxzY2FsYS9jb2xsZWN0aW9uL1NldDt4cHNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcHNyAChzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkRW1wdHlTZXQkzP+rBndbbiMCAAB4cA==") check(new collection.convert.Wrappers.SetWrapper(immutable.Set(1, 2, 
3)))("rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkU2V0V3JhcHBlcgAAAAAAAAABAgACTAAGJG91dGVydAAjTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycztMADhzY2FsYSRjb2xsZWN0aW9uJGNvbnZlcnQkV3JhcHBlcnMkU2V0V3JhcHBlciQkdW5kZXJseWluZ3QAFkxzY2FsYS9jb2xsZWN0aW9uL1NldDt4cHNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcHNyACNzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkU2V0M84syT0560SgAgADTAAFZWxlbTF0ABJMamF2YS9sYW5nL09iamVjdDtMAAVlbGVtMnEAfgAZTAAFZWxlbTNxAH4AGXhwc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+ABsAAAACc3EAfgAbAAAAAw==") check(new 
collection.convert.Wrappers.SetWrapper(mutable.Set()))("rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkU2V0V3JhcHBlcgAAAAAAAAABAgACTAAGJG91dGVydAAjTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycztMADhzY2FsYSRjb2xsZWN0aW9uJGNvbnZlcnQkV3JhcHBlcnMkU2V0V3JhcHBlciQkdW5kZXJseWluZ3QAFkxzY2FsYS9jb2xsZWN0aW9uL1NldDt4cHNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcHNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaFNldAAAAAAAAAABAwAAeHB3DQAAAcIAAAAAAAAABQB4") check(new collection.convert.Wrappers.SetWrapper(mutable.Set(1, 2, 
3)))("rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkU2V0V3JhcHBlcgAAAAAAAAABAgACTAAGJG91dGVydAAjTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycztMADhzY2FsYSRjb2xsZWN0aW9uJGNvbnZlcnQkV3JhcHBlcnMkU2V0V3JhcHBlciQkdW5kZXJseWluZ3QAFkxzY2FsYS9jb2xsZWN0aW9uL1NldDt4cHNyACJzY2FsYS5jb2xsZWN0aW9uLmNvbnZlcnQuV3JhcHBlcnMkrrSziizavIECABJMABhEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADZMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJERpY3Rpb25hcnlXcmFwcGVyJDtMABZJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA0THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRJdGVyYWJsZVdyYXBwZXIkO0wAFkl0ZXJhdG9yV3JhcHBlciRtb2R1bGV0ADRMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEl0ZXJhdG9yV3JhcHBlciQ7TAAZSkNvbGxlY3Rpb25XcmFwcGVyJG1vZHVsZXQAN0xzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkNvbGxlY3Rpb25XcmFwcGVyJDtMABxKQ29uY3VycmVudE1hcFdyYXBwZXIkbW9kdWxldAA6THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKQ29uY3VycmVudE1hcFdyYXBwZXIkO0wAGUpEaWN0aW9uYXJ5V3JhcHBlciRtb2R1bGV0ADdMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpEaWN0aW9uYXJ5V3JhcHBlciQ7TAAaSkVudW1lcmF0aW9uV3JhcHBlciRtb2R1bGV0ADhMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpFbnVtZXJhdGlvbldyYXBwZXIkO0wAF0pJdGVyYWJsZVdyYXBwZXIkbW9kdWxldAA1THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKSXRlcmFibGVXcmFwcGVyJDtMABdKSXRlcmF0b3JXcmFwcGVyJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkl0ZXJhdG9yV3JhcHBlciQ7TAATSkxpc3RXcmFwcGVyJG1vZHVsZXQAMUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSkxpc3RXcmFwcGVyJDtMABJKTWFwV3JhcHBlciRtb2R1bGV0ADBMc2NhbGEvY29sbGVjdGlvbi9jb252ZXJ0L1dyYXBwZXJzJEpNYXBXcmFwcGVyJDtMABlKUHJvcGVydGllc1dyYXBwZXIkbW9kdWxldAA3THNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRKUHJvcGVydGllc1dyYXBwZXIkO0wAEkpTZXRXcmFwcGVyJG1vZHVsZXQAMExzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkSlNldFdyYXBwZXIkO0wAG011dGFibGVCdWZmZXJXcmFwcGVyJG1vZHVsZXQAOUxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZUJ1ZmZlcldyYXBwZXIkO0wAGE11dGFibGVNYXBXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZU1hcFdyYXBwZXIkO0wAGE11dGFibGVTZXFXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNlcVdyYXBwZXIkO0wAGE11dGFibGVTZXRXcmFwcGVyJG1vZHVsZXQANkxzY2FsYS9jb2xsZWN0aW9uL2NvbnZlcnQvV3JhcHBlcnMkTXV0YWJsZVNldFdyYXBwZXIkO0wAEVNlcVdyYXBwZXIkbW9kdWxldAAvTHNjYWxhL2NvbGxlY3Rpb24vY29udmVydC9XcmFwcGVycyRTZXFXcmFwcGVyJDt4cHBwcHBwcHBwcHBwcHBwcHBwcHNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaFNldAAAAAAAAAABAwAAeHB3DQAAAcIAAAADAAAABQBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AGgAAAAJzcQB+ABoAAAADeA==") @@ -155,8 +151,7 @@ object Test extends App { // TODO SI-8576 throws scala.UnitializedFieldError under -Xcheckinit // check(new immutable.Range(0, 1, 1))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5SYW5nZWm7o1SrFTINAgAHSQADZW5kWgAHaXNFbXB0eUkAC2xhc3RFbGVtZW50SQAQbnVtUmFuZ2VFbGVtZW50c0kABXN0YXJ0SQAEc3RlcEkAD3Rlcm1pbmFsRWxlbWVudHhwAAAAAQAAAAAAAAAAAQAAAAAAAAABAAAAAQ==") - // TODO IMPLCLASS java.io.InvalidClassException: scala.collection.immutable.Set$EmptySet$; local class incompatible: stream classdesc serialVersionUID = -1118802231467657162, local class serialVersionUID = -5214304379191661165 - // check(immutable.Set())( "rO0ABXNyAChzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkRW1wdHlTZXQk8Hk3TFN0uDYCAAB4cA==") + check(immutable.Set())( "rO0ABXNyAChzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkRW1wdHlTZXQkzP+rBndbbiMCAAB4cA==") check(immutable.Set(1))( 
"rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkU2V0MREd3c4yqtWTAgABTAAFZWxlbTF0ABJMamF2YS9sYW5nL09iamVjdDt4cHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAAB") check(immutable.Set(1, 2))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkU2V0MqaV02sZQzV0AgACTAAFZWxlbTF0ABJMamF2YS9sYW5nL09iamVjdDtMAAVlbGVtMnEAfgABeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAwAAAAI=") check(immutable.Set(1, 2, 3))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TZXQkU2V0M84syT0560SgAgADTAAFZWxlbTF0ABJMamF2YS9sYW5nL09iamVjdDtMAAVlbGVtMnEAfgABTAAFZWxlbTNxAH4AAXhwc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+AAMAAAACc3EAfgADAAAAAw==") @@ -168,8 +163,7 @@ object Test extends App { // TODO SI-8576 Uninitialized field: IndexedSeqLike.scala: 56 // check(immutable.Stream(1, 2, 3))( "rO0ABXNyACZzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TdHJlYW0kQ29uc/ekjBXM3TlFAgADTAACaGR0ABJMamF2YS9sYW5nL09iamVjdDtMAAV0bEdlbnQAEUxzY2FsYS9GdW5jdGlvbjA7TAAFdGxWYWx0ACNMc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvU3RyZWFtO3hyACFzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5TdHJlYW0552RDntM42gIAAHhwc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcgAtc2NhbGEuY29sbGVjdGlvbi5JdGVyYXRvciQkYW5vbmZ1biR0b1N0cmVhbSQxRWR4We0SX0UCAAFMAAYkb3V0ZXJ0ABtMc2NhbGEvY29sbGVjdGlvbi9JdGVyYXRvcjt4cHNyAChzY2FsYS5jb2xsZWN0aW9uLkluZGV4ZWRTZXFMaWtlJEVsZW1lbnRzGF+1cBwmcx0CAANJAANlbmRJAAVpbmRleEwABiRvdXRlcnQAIUxzY2FsYS9jb2xsZWN0aW9uL0luZGV4ZWRTZXFMaWtlO3hwAAAAAwAAAAFzcgArc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLldyYXBwZWRBcnJheSRvZkludMmRLBcI15VjAgABWwAFYXJyYXl0AAJbSXhwdXIAAltJTbpgJnbqsqUCAAB4cAAAAAMAAAABAAAAAgAAAANw") - // TODO IMPLCLASS java.io.InvalidClassException: scala.math.Ordering$Int$; local class incompatible: stream classdesc serialVersionUID = 828746404302808924, local class serialVersionUID = -4070467079371527467 - // check(immutable.TreeSet[Int]())( "rO0ABXNyACJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5UcmVlU2V0sRdVIDjbWAsCAAJMAAhvcmRlcmluZ3QAFUxzY2FsYS9tYXRoL09yZGVyaW5nO0wABHRyZWV0AC5Mc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQkC4BMdr1Z51wCAAB4cHA=") + check(immutable.TreeSet[Int]())( "rO0ABXNyACJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5UcmVlU2V0sRdVIDjbWAsCAAJMAAhvcmRlcmluZ3QAFUxzY2FsYS9tYXRoL09yZGVyaW5nO0wABHRyZWV0AC5Mc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQk2YGfBpdmfrACAAB4cHA=") // TODO SI-8576 unstable under -Xcheckinit // check(immutable.TreeSet(1, 2, 3))( "rO0ABXNyACJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5UcmVlU2V0sRdVIDjbWAsCAAJMAAhvcmRlcmluZ3QAFUxzY2FsYS9tYXRoL09yZGVyaW5nO0wABHRyZWV0AC5Mc2NhbGEvY29sbGVjdGlvbi9pbW11dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQkC4BMdr1Z51wCAAB4cHNyADFzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5SZWRCbGFja1RyZWUkQmxhY2tUcmVlzRxnCKenVAECAAB4cgAsc2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWVrqCSyHJbsMgIABUkABWNvdW50TAADa2V5dAASTGphdmEvbGFuZy9PYmplY3Q7TAAEbGVmdHEAfgACTAAFcmlnaHRxAH4AAkwABXZhbHVlcQB+AAh4cAAAAANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAnNxAH4ABgAAAAFzcQB+AAoAAAABcHBzcgAXc2NhbGEucnVudGltZS5Cb3hlZFVuaXR0pn1HHezLmgIAAHhwc3EAfgAGAAAAAXNxAH4ACgAAAANwcHEAfgAQcQB+ABA=") @@ -185,13 +179,12 @@ object Test extends App { check(mutable.HashMap())( 
"rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaE1hcAAAAAAAAAABAwAAeHB3DQAAAu4AAAAAAAAABAB4") check(mutable.HashMap(1 -> 1))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaE1hcAAAAAAAAAABAwAAeHB3DQAAAu4AAAABAAAABABzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXEAfgAEeA==") check(mutable.HashSet(1, 2, 3))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaFNldAAAAAAAAAABAwAAeHB3DQAAAcIAAAADAAAABQBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJzcQB+AAIAAAADeA==") - // TODO IMPLCLASS java.io.InvalidClassException: scala.math.Ordering$Int$; local class incompatible: stream classdesc serialVersionUID = 828746404302808924, local class serialVersionUID = -4070467079371527467 - // check(mutable.TreeMap[Int, Int]())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZU1hcNx8qC229ZvwAgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZU1hcCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQkC4BMdr1Z51wCAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAABw") - // check(mutable.TreeMap(1 -> 1, 3 -> 6))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZU1hcNx8qC229ZvwAgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZU1hcCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQkC4BMdr1Z51wCAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAAJzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSROb2RlGxHsFtValgACAAZaAANyZWRMAANrZXl0ABJMamF2YS9sYW5nL09iamVjdDtMAARsZWZ0cQB+AAdMAAZwYXJlbnRxAH4AB0wABXJpZ2h0cQB+AAdMAAV2YWx1ZXEAfgAKeHAAc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFwcHNxAH4ACQFzcQB+AAwAAAADcHEAfgALcHNxAH4ADAAAAAZxAH4ADg==") - // check(mutable.TreeMap(1 -> 1, 3 -> 6).range(1, 2))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZU1hcCRUcmVlTWFwVmlldx7MCZxLhVQ8AgADTAAGJG91dGVydAAiTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9UcmVlTWFwO0wABGZyb210AA5Mc2NhbGEvT3B0aW9uO0wABXVudGlscQB+AAJ4cgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlRyZWVNYXDcfKgttvWb8AIAAkwACG9yZGVyaW5ndAAVTHNjYWxhL21hdGgvT3JkZXJpbmc7TAAmc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJFRyZWVNYXAkJHRyZWV0ACxMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1JlZEJsYWNrVHJlZSRUcmVlO3hwc3IAGHNjYWxhLm1hdGguT3JkZXJpbmckSW50JAuATHa9WedcAgAAeHBzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSRUcmVlAEynNPA1phUCAAJJAARzaXplTAAEcm9vdHQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJE5vZGU7eHAAAAACc3IAKnNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5SZWRCbGFja1RyZWUkTm9kZRsR7BbVWpYAAgAGWgADcmVkTAADa2V5dAASTGphdmEvbGFuZy9PYmplY3Q7TAAEbGVmdHEAfgAKTAAGcGFyZW50cQB+AApMAAVyaWdodHEAfgAKTAAFdmFsdWVxAH4ADXhwAHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABcHBzcQB+AAwBc3EAfgAPAAAAA3BxAH4ADnBzcQB+AA8AAAAGcQB+ABFzcQB+AANxAH4ACHEAfgALc3IACnNjYWxhLlNvbWURIvJpXqGLdAIAAUwAAXhxAH4ADXhyAAxzY2FsYS5PcHRpb27+aTf92w5mdAIAAHhwcQB+ABFzcQB+ABZzcQB+AA8AAAAC") - // check(mutable.TreeSet[Int]())( 
"rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldM10nxFQDpt4AgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZVNldCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQkC4BMdr1Z51wCAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAABw") - // check(mutable.TreeSet(1, 3))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldM10nxFQDpt4AgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZVNldCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQkC4BMdr1Z51wCAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAAJzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSROb2RlGxHsFtValgACAAZaAANyZWRMAANrZXl0ABJMamF2YS9sYW5nL09iamVjdDtMAARsZWZ0cQB+AAdMAAZwYXJlbnRxAH4AB0wABXJpZ2h0cQB+AAdMAAV2YWx1ZXEAfgAKeHAAc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFwcHNxAH4ACQFzcQB+AAwAAAADcHEAfgALcHBw") - // check(mutable.TreeSet(1, 3).range(1, 2))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldCRUcmVlU2V0Vmlld2JdAzqy0DpGAgADTAAGJG91dGVydAAiTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9UcmVlU2V0O0wABGZyb210AA5Mc2NhbGEvT3B0aW9uO0wABXVudGlscQB+AAJ4cgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlRyZWVTZXTNdJ8RUA6beAIAAkwACG9yZGVyaW5ndAAVTHNjYWxhL21hdGgvT3JkZXJpbmc7TAAmc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJFRyZWVTZXQkJHRyZWV0ACxMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1JlZEJsYWNrVHJlZSRUcmVlO3hwc3IAGHNjYWxhLm1hdGguT3JkZXJpbmckSW50JAuATHa9WedcAgAAeHBzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSRUcmVlAEynNPA1phUCAAJJAARzaXplTAAEcm9vdHQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJE5vZGU7eHAAAAACc3IAKnNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5SZWRCbGFja1RyZWUkTm9kZRsR7BbVWpYAAgAGWgADcmVkTAADa2V5dAASTGphdmEvbGFuZy9PYmplY3Q7TAAEbGVmdHEAfgAKTAAGcGFyZW50cQB+AApMAAVyaWdodHEAfgAKTAAFdmFsdWVxAH4ADXhwAHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABcHBzcQB+AAwBc3EAfgAPAAAAA3BxAH4ADnBwcHNxAH4AA3EAfgAIcQB+AAtzcgAKc2NhbGEuU29tZREi8mleoYt0AgABTAABeHEAfgANeHIADHNjYWxhLk9wdGlvbv5pN/3bDmZ0AgAAeHBxAH4AEXNxAH4AFXNxAH4ADwAAAAI=") + check(mutable.TreeMap[Int, Int]())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZU1hcNx8qC229ZvwAgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZU1hcCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQk2YGfBpdmfrACAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAABw") + check(mutable.TreeMap(1 -> 1, 3 -> 6))( 
"rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZU1hcNx8qC229ZvwAgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZU1hcCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQk2YGfBpdmfrACAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAAJzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSROb2RlGxHsFtValgACAAZaAANyZWRMAANrZXl0ABJMamF2YS9sYW5nL09iamVjdDtMAARsZWZ0cQB+AAdMAAZwYXJlbnRxAH4AB0wABXJpZ2h0cQB+AAdMAAV2YWx1ZXEAfgAKeHAAc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFwcHNxAH4ACQFzcQB+AAwAAAADcHEAfgALcHNxAH4ADAAAAAZxAH4ADg==") + check(mutable.TreeMap(1 -> 1, 3 -> 6).range(1, 2))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZU1hcCRUcmVlTWFwVmlldx7MCZxLhVQ8AgADTAAGJG91dGVydAAiTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9UcmVlTWFwO0wABGZyb210AA5Mc2NhbGEvT3B0aW9uO0wABXVudGlscQB+AAJ4cgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlRyZWVNYXDcfKgttvWb8AIAAkwACG9yZGVyaW5ndAAVTHNjYWxhL21hdGgvT3JkZXJpbmc7TAAmc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJFRyZWVNYXAkJHRyZWV0ACxMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1JlZEJsYWNrVHJlZSRUcmVlO3hwc3IAGHNjYWxhLm1hdGguT3JkZXJpbmckSW50JNmBnwaXZn6wAgAAeHBzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSRUcmVlAEynNPA1phUCAAJJAARzaXplTAAEcm9vdHQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJE5vZGU7eHAAAAACc3IAKnNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5SZWRCbGFja1RyZWUkTm9kZRsR7BbVWpYAAgAGWgADcmVkTAADa2V5dAASTGphdmEvbGFuZy9PYmplY3Q7TAAEbGVmdHEAfgAKTAAGcGFyZW50cQB+AApMAAVyaWdodHEAfgAKTAAFdmFsdWVxAH4ADXhwAHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABcHBzcQB+AAwBc3EAfgAPAAAAA3BxAH4ADnBzcQB+AA8AAAAGcQB+ABFzcQB+AANxAH4ACHEAfgALc3IACnNjYWxhLlNvbWURIvJpXqGLdAIAAUwAAXhxAH4ADXhyAAxzY2FsYS5PcHRpb27+aTf92w5mdAIAAHhwcQB+ABFzcQB+ABZzcQB+AA8AAAAC") + check(mutable.TreeSet[Int]())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldM10nxFQDpt4AgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZVNldCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQk2YGfBpdmfrACAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAABw") + check(mutable.TreeSet(1, 3))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldM10nxFQDpt4AgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZVNldCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQk2YGfBpdmfrACAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAAJzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSROb2RlGxHsFtValgACAAZaAANyZWRMAANrZXl0ABJMamF2YS9sYW5nL09iamVjdDtMAARsZWZ0cQB+AAdMAAZwYXJlbnRxAH4AB0wABXJpZ2h0cQB+AAdMAAV2YWx1ZXEAfgAKeHAAc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFwcHNxAH4ACQFzcQB+AAwAAAADcHEAfgALcHBw") + check(mutable.TreeSet(1, 3).range(1, 2))( 
"rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldCRUcmVlU2V0Vmlld2JdAzqy0DpGAgADTAAGJG91dGVydAAiTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9UcmVlU2V0O0wABGZyb210AA5Mc2NhbGEvT3B0aW9uO0wABXVudGlscQB+AAJ4cgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlRyZWVTZXTNdJ8RUA6beAIAAkwACG9yZGVyaW5ndAAVTHNjYWxhL21hdGgvT3JkZXJpbmc7TAAmc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJFRyZWVTZXQkJHRyZWV0ACxMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1JlZEJsYWNrVHJlZSRUcmVlO3hwc3IAGHNjYWxhLm1hdGguT3JkZXJpbmckSW50JNmBnwaXZn6wAgAAeHBzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSRUcmVlAEynNPA1phUCAAJJAARzaXplTAAEcm9vdHQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJE5vZGU7eHAAAAACc3IAKnNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5SZWRCbGFja1RyZWUkTm9kZRsR7BbVWpYAAgAGWgADcmVkTAADa2V5dAASTGphdmEvbGFuZy9PYmplY3Q7TAAEbGVmdHEAfgAKTAAGcGFyZW50cQB+AApMAAVyaWdodHEAfgAKTAAFdmFsdWVxAH4ADXhwAHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABcHBzcQB+AAwBc3EAfgAPAAAAA3BxAH4ADnBwcHNxAH4AA3EAfgAIcQB+AAtzcgAKc2NhbGEuU29tZREi8mleoYt0AgABTAABeHEAfgANeHIADHNjYWxhLk9wdGlvbv5pN/3bDmZ0AgAAeHBxAH4AEXNxAH4AFXNxAH4ADwAAAAI=") // TODO SI-8576 Uninitialized field under -Xcheckinit // check(new mutable.History())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGlzdG9yeUhuXxDIFJrsAgACSQAKbWF4SGlzdG9yeUwAA2xvZ3QAIExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUXVldWU7eHAAAAPoc3IAHnNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5RdWV1ZbjMURVfOuHHAgAAeHIAJHNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5NdXRhYmxlTGlzdFJpnjJ+gFbAAgADSQADbGVuTAAGZmlyc3QwdAAlTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9MaW5rZWRMaXN0O0wABWxhc3QwcQB+AAV4cAAAAABzcgAjc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkxpbmtlZExpc3Sak+nGCZHaUQIAAkwABGVsZW10ABJMamF2YS9sYW5nL09iamVjdDtMAARuZXh0dAAeTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9TZXE7eHBwcQB+AApxAH4ACg==") check(mutable.LinkedHashMap(1 -> 2))( "rO0ABXNyACZzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlua2VkSGFzaE1hcAAAAAAAAAABAwAAeHB3DQAAAu4AAAABAAAABABzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJ4") @@ -201,7 +194,7 @@ object Test extends App { // TODO SI-8576 unstable under -Xcheckinit // check(mutable.ListBuffer(1, 2, 3))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlzdEJ1ZmZlci9y9I7QyWzGAwAEWgAIZXhwb3J0ZWRJAANsZW5MAAVsYXN0MHQAKUxzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS8kY29sb24kY29sb247TAAqc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJExpc3RCdWZmZXIkJHN0YXJ0dAAhTHNjYWxhL2NvbGxlY3Rpb24vaW1tdXRhYmxlL0xpc3Q7eHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABAAAAAJzcQB+AAQAAAADc3IALHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3RTZXJpYWxpemVFbmQkilxjW/dTC20CAAB4cHcFAAAAAAN4") check(new mutable.StringBuilder(new java.lang.StringBuilder("123")))( "rO0ABXNyACZzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuU3RyaW5nQnVpbGRlcomvqgGv1tTxAgABTAAKdW5kZXJseWluZ3QAGUxqYXZhL2xhbmcvU3RyaW5nQnVpbGRlcjt4cHNyABdqYXZhLmxhbmcuU3RyaW5nQnVpbGRlcjzV+xRaTGrLAwAAeHB3BAAAAAN1cgACW0OwJmaw4l2ErAIAAHhwAAAAEwAxADIAMwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAeA==") - check(mutable.UnrolledBuffer[Int]())( "rO0ABXNyACdzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVW5yb2xsZWRCdWZmZXIAAAAAAAAAAQMAAUwAA3RhZ3QAGExzY2FsYS9yZWZsZWN0L0NsYXNzVGFnO3hwc3IAJXNjYWxhLnJlZmxlY3QuTWFuaWZlc3RGYWN0b3J5JCRhbm9uJDnN+aJJU2O1UgIAAHhyABxzY2FsYS5yZWZsZWN0LkFueVZhbE1hbmlmZXN0AAAAAAAAAAECAAFMAAh0b1N0cmluZ3QAEkxqYXZhL2xhbmcvU3RyaW5nO3hwdAADSW50dwQAAAAAeA==") + check(mutable.UnrolledBuffer[Int]())( 
"rO0ABXNyACdzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVW5yb2xsZWRCdWZmZXIAAAAAAAAAAQMAAUwAA3RhZ3QAGExzY2FsYS9yZWZsZWN0L0NsYXNzVGFnO3hwc3IAKXNjYWxhLnJlZmxlY3QuTWFuaWZlc3RGYWN0b3J5JEludE1hbmlmZXN0VuOHY9AvTWoCAAB4cgAcc2NhbGEucmVmbGVjdC5BbnlWYWxNYW5pZmVzdAAAAAAAAAABAgABTAAIdG9TdHJpbmd0ABJMamF2YS9sYW5nL1N0cmluZzt4cHQAA0ludHcEAAAAAHg=") import collection.parallel check(parallel.immutable.ParHashMap(1 -> 2))( "rO0ABXNyAC5zY2FsYS5jb2xsZWN0aW9uLnBhcmFsbGVsLmltbXV0YWJsZS5QYXJIYXNoTWFwAAAAAAAAAAECAANMAA9TY2FuTGVhZiRtb2R1bGV0ADVMc2NhbGEvY29sbGVjdGlvbi9wYXJhbGxlbC9QYXJJdGVyYWJsZUxpa2UkU2NhbkxlYWYkO0wAD1NjYW5Ob2RlJG1vZHVsZXQANUxzY2FsYS9jb2xsZWN0aW9uL3BhcmFsbGVsL1Bhckl0ZXJhYmxlTGlrZSRTY2FuTm9kZSQ7TAAEdHJpZXQAJExzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS9IYXNoTWFwO3hwcHBzcgA1c2NhbGEuY29sbGVjdGlvbi5pbW11dGFibGUuSGFzaE1hcCRTZXJpYWxpemF0aW9uUHJveHkAAAAAAAAAAgMAAHhwdwQAAAABc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFzcQB+AAcAAAACeA==") From bd9654d4a22bcccbd98d0f33699ece25f2c0904a Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Thu, 7 Jul 2016 14:10:40 +0200 Subject: [PATCH 0155/2793] [squash] Fix bounds in contains --- src/library/scala/util/Either.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala index 6da39692c51b..5c61d83a1a07 100644 --- a/src/library/scala/util/Either.scala +++ b/src/library/scala/util/Either.scala @@ -214,7 +214,7 @@ sealed abstract class Either[+A, +B] extends Product with Serializable { * @param elem the element to test. * @return `true` if the option has an element that is equal (as determined by `==`) to `elem`, `false` otherwise. */ - final def contains[AA >: A](elem: AA): Boolean = this match { + final def contains[BB >: B](elem: BB): Boolean = this match { case Right(b) => b == elem case Left(_) => false } From 7e933d5b5a4c1c8795b74e67e2148c6fc4ca19a6 Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Wed, 6 Jul 2016 19:42:57 +0200 Subject: [PATCH 0156/2793] SI-6947 Better type parameter names for Map classes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Type parameter names are currently assigned pretty much alphabetically without any meaning. This change renames all key parameters in Map classes from `A` to `K` and all value parameters from `B` to `V` to make them more meaningful. Derived names are renamed accordingly (e.g. `V1` instead of `B1` for an upper bound on `V`, `W` instead of `C` for a new value type). As a side-effect this solves the documentation problem in SI-6947. 
Due to using `B` both as a type parameter for `foldLeft[B]` in `GenTraversableOnce[A]` and in `Map[A, B]` which extends `GenTraversableOnce[(A, B)]`, the signature of `Map.foldLeft` was rendered in scaladoc as def foldLeft[B](z: B)(op: (B, (A, B)) ⇒ B): B Now you get an unambiguous version: def foldLeft[B](z: B)(op: (B, (K, V)) ⇒ B): B --- src/library/scala/collection/GenMap.scala | 14 +- src/library/scala/collection/GenMapLike.scala | 36 ++--- src/library/scala/collection/Map.scala | 24 +-- src/library/scala/collection/MapLike.scala | 142 +++++++++--------- .../scala/collection/immutable/Map.scala | 116 +++++++------- .../scala/collection/immutable/MapLike.scala | 55 ++++--- .../scala/collection/mutable/Map.scala | 40 ++--- .../scala/collection/mutable/MapLike.scala | 62 ++++---- test/files/run/xMigration.check | 6 +- 9 files changed, 247 insertions(+), 248 deletions(-) diff --git a/src/library/scala/collection/GenMap.scala b/src/library/scala/collection/GenMap.scala index d17a2de179e0..6bc507ae9319 100644 --- a/src/library/scala/collection/GenMap.scala +++ b/src/library/scala/collection/GenMap.scala @@ -18,18 +18,18 @@ import generic._ * @author Aleksandar Prokopec * @since 2.9 */ -trait GenMap[A, +B] -extends GenMapLike[A, B, GenMap[A, B]] - with GenIterable[(A, B)] +trait GenMap[K, +V] +extends GenMapLike[K, V, GenMap[K, V]] + with GenIterable[(K, V)] { - def seq: Map[A, B] + def seq: Map[K, V] - def updated [B1 >: B](key: A, value: B1): GenMap[A, B1] + def updated [V1 >: V](key: K, value: V1): GenMap[K, V1] } object GenMap extends GenMapFactory[GenMap] { - def empty[A, B]: immutable.Map[A, B] = immutable.Map.empty + def empty[K, V]: immutable.Map[K, V] = immutable.Map.empty /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), GenMap[A, B]] = new MapCanBuildFrom[A, B] + implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), GenMap[K, V]] = new MapCanBuildFrom[K, V] } diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala index 2b39fa2289b8..f6c2d071b510 100644 --- a/src/library/scala/collection/GenMapLike.scala +++ b/src/library/scala/collection/GenMapLike.scala @@ -22,13 +22,13 @@ package collection * A map is a collection of bindings from keys to values, where there are * no duplicate keys. */ -trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals with Parallelizable[(A, B), parallel.ParMap[A, B]] { - def default(key: A): B - def get(key: A): Option[B] - def apply(key: A): B - def seq: Map[A, B] - def +[B1 >: B](kv: (A, B1)): GenMap[A, B1] - def - (key: A): Repr +trait GenMapLike[K, +V, +Repr] extends GenIterableLike[(K, V), Repr] with Equals with Parallelizable[(K, V), parallel.ParMap[K, V]] { + def default(key: K): V + def get(key: K): Option[V] + def apply(key: K): V + def seq: Map[K, V] + def +[V1 >: V](kv: (K, V1)): GenMap[K, V1] + def - (key: K): Repr // This hash code must be symmetric in the contents but ought not // collide trivially. @@ -41,17 +41,17 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals * @tparam B1 the result type of the default computation. * @return the value associated with `key` if it exists, * otherwise the result of the `default` computation. 
- * @usecase def getOrElse(key: A, default: => B): B + * @usecase def getOrElse(key: K, default: => V): V * @inheritdoc */ - def getOrElse[B1 >: B](key: A, default: => B1): B1 + def getOrElse[V1 >: V](key: K, default: => V1): V1 /** Tests whether this map contains a binding for a key. * * @param key the key * @return `true` if there is a binding for `key` in this map, `false` otherwise. */ - def contains(key: A): Boolean + def contains(key: K): Boolean /** Tests whether this map contains a binding for a key. This method, * which implements an abstract method of trait `PartialFunction`, @@ -60,47 +60,47 @@ trait GenMapLike[A, +B, +Repr] extends GenIterableLike[(A, B), Repr] with Equals * @param key the key * @return `true` if there is a binding for `key` in this map, `false` otherwise. */ - def isDefinedAt(key: A): Boolean + def isDefinedAt(key: K): Boolean - def keySet: GenSet[A] + def keySet: GenSet[K] /** Collects all keys of this map in an iterable collection. * * @return the keys of this map as an iterable. */ - def keys: GenIterable[A] + def keys: GenIterable[K] /** Collects all values of this map in an iterable collection. * * @return the values of this map as an iterable. */ - def values: GenIterable[B] + def values: GenIterable[V] /** Creates an iterator for all keys. * * @return an iterator over all keys. */ - def keysIterator: Iterator[A] + def keysIterator: Iterator[K] /** Creates an iterator for all values in this map. * * @return an iterator over all values that are associated with some key in this map. */ - def valuesIterator: Iterator[B] + def valuesIterator: Iterator[V] /** Filters this map by retaining only keys satisfying a predicate. * @param p the predicate used to test keys * @return an immutable map consisting only of those key value pairs of this map where the key satisfies * the predicate `p`. The resulting map wraps the original map without copying any elements. */ - def filterKeys(p: A => Boolean): GenMap[A, B] + def filterKeys(p: K => Boolean): GenMap[K, V] /** Transforms this map by applying a function to every retrieved value. * @param f the function used to transform values of this map. * @return a map view which maps every key of this map * to `f(this(key))`. The resulting map wraps the original map without copying any elements. */ - def mapValues[C](f: B => C): GenMap[A, C] + def mapValues[W](f: V => W): GenMap[K, W] /** Compares two maps structurally; i.e., checks if all mappings * contained in this map are also contained in the other map, diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala index 1e40fd8c245f..c9a943f1f724 100644 --- a/src/library/scala/collection/Map.scala +++ b/src/library/scala/collection/Map.scala @@ -12,7 +12,7 @@ package collection import generic._ /** - * A map from keys of type `A` to values of type `B`. + * A map from keys of type `K` to values of type `V`. * * $mapNote * @@ -22,15 +22,15 @@ import generic._ * '''Note:''' If your additions and mutations return the same kind of map as the map * you are defining, you should inherit from `MapLike` as well. * - * @tparam A the type of the keys in this map. - * @tparam B the type of the values associated with keys. + * @tparam K the type of the keys in this map. + * @tparam V the type of the values associated with keys. 
* * @since 1.0 */ -trait Map[A, +B] extends Iterable[(A, B)] with GenMap[A, B] with MapLike[A, B, Map[A, B]] { - def empty: Map[A, B] = Map.empty +trait Map[K, +V] extends Iterable[(K, V)] with GenMap[K, V] with MapLike[K, V, Map[K, V]] { + def empty: Map[K, V] = Map.empty - override def seq: Map[A, B] = this + override def seq: Map[K, V] = this } /** $factoryInfo @@ -38,22 +38,22 @@ trait Map[A, +B] extends Iterable[(A, B)] with GenMap[A, B] with MapLike[A, B, M * @define coll map */ object Map extends MapFactory[Map] { - def empty[A, B]: immutable.Map[A, B] = immutable.Map.empty + def empty[K, V]: immutable.Map[K, V] = immutable.Map.empty /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), Map[A, B]] = new MapCanBuildFrom[A, B] + implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Map[K, V]] = new MapCanBuildFrom[K, V] /** An abstract shell used by { mutable, immutable }.Map but not by collection.Map * because of variance issues. */ - abstract class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends AbstractMap[A, B] with Map[A, B] with Serializable { + abstract class WithDefault[K, +V](underlying: Map[K, V], d: K => V) extends AbstractMap[K, V] with Map[K, V] with Serializable { override def size = underlying.size - def get(key: A) = underlying.get(key) // removed in 2.9: orElse Some(default(key)) + def get(key: K) = underlying.get(key) // removed in 2.9: orElse Some(default(key)) def iterator = underlying.iterator - override def default(key: A): B = d(key) + override def default(key: K): V = d(key) } } /** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */ -abstract class AbstractMap[A, +B] extends AbstractIterable[(A, B)] with Map[A, B] +abstract class AbstractMap[K, +V] extends AbstractIterable[(K, V)] with Map[K, V] diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala index d4d85c43ec66..a087cb0f4542 100644 --- a/src/library/scala/collection/MapLike.scala +++ b/src/library/scala/collection/MapLike.scala @@ -28,10 +28,10 @@ import parallel.ParMap * To implement a concrete map, you need to provide implementations of the * following methods: * {{{ - * def get(key: A): Option[B] - * def iterator: Iterator[(A, B)] - * def + [B1 >: B](kv: (A, B1)): This - * def -(key: A): This + * def get(key: K): Option[V] + * def iterator: Iterator[(K, V)] + * def + [V1 >: V](kv: (K, V1)): This + * def -(key: K): This * }}} * If you wish that methods like `take`, `drop`, `filter` also return the same kind of map * you should also override: @@ -42,8 +42,8 @@ import parallel.ParMap * `size` for efficiency. * * @define mapTags - * @tparam A the type of the keys. - * @tparam B the type of associated values. + * @tparam K the type of the keys. + * @tparam V the type of associated values. * @tparam This the type of the map itself. 
* * @author Martin Odersky @@ -54,12 +54,12 @@ import parallel.ParMap * @define willNotTerminateInf * @define mayNotTerminateInf */ -trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]] - extends PartialFunction[A, B] - with IterableLike[(A, B), This] - with GenMapLike[A, B, This] - with Subtractable[A, This] - with Parallelizable[(A, B), ParMap[A, B]] +trait MapLike[K, +V, +This <: MapLike[K, V, This] with Map[K, V]] + extends PartialFunction[K, V] + with IterableLike[(K, V), This] + with GenMapLike[K, V, This] + with Subtractable[K, This] + with Parallelizable[(K, V), ParMap[K, V]] { self => @@ -71,7 +71,7 @@ self => /** A common implementation of `newBuilder` for all maps in terms of `empty`. * Overridden for mutable maps in `mutable.MapLike`. */ - override protected[this] def newBuilder: Builder[(A, B), This] = new MapBuilder[A, B, This](empty) + override protected[this] def newBuilder: Builder[(K, V), This] = new MapBuilder[K, V, This](empty) /** Optionally returns the value associated with a key. * @@ -79,32 +79,32 @@ self => * @return an option value containing the value associated with `key` in this map, * or `None` if none exists. */ - def get(key: A): Option[B] + def get(key: K): Option[V] /** Creates a new iterator over all key/value pairs of this map * * @return the new iterator */ - def iterator: Iterator[(A, B)] + def iterator: Iterator[(K, V)] /** Adds a key/value pair to this map, returning a new map. * @param kv the key/value pair - * @tparam B1 the type of the value in the key/value pair. + * @tparam V1 the type of the value in the key/value pair. * @return a new map with the new binding added to this map * - * @usecase def + (kv: (A, B)): Map[A, B] + * @usecase def + (kv: (K, V)): Map[K, V] * @inheritdoc */ - def + [B1 >: B] (kv: (A, B1)): Map[A, B1] + def + [V1 >: V] (kv: (K, V1)): Map[K, V1] /** Removes a key from this map, returning a new map. * @param key the key to be removed * @return a new map without a binding for `key` * - * @usecase def - (key: A): Map[A, B] + * @usecase def - (key: K): Map[K, V] * @inheritdoc */ - def - (key: A): This + def - (key: K): This /** Tests whether the map is empty. * @@ -116,14 +116,14 @@ self => * @param key the key. * @param default a computation that yields a default value in case no binding for `key` is * found in the map. - * @tparam B1 the result type of the default computation. + * @tparam V1 the result type of the default computation. * @return the value associated with `key` if it exists, * otherwise the result of the `default` computation. * - * @usecase def getOrElse(key: A, default: => B): B + * @usecase def getOrElse(key: K, default: => V): V * @inheritdoc */ - def getOrElse[B1 >: B](key: A, default: => B1): B1 = get(key) match { + def getOrElse[V1 >: V](key: K, default: => V1): V1 = get(key) match { case Some(v) => v case None => default } @@ -137,7 +137,7 @@ self => * @return the value associated with the given key, or the result of the * map's `default` method, if none exists. */ - def apply(key: A): B = get(key) match { + def apply(key: K): V = get(key) match { case None => default(key) case Some(value) => value } @@ -147,7 +147,7 @@ self => * @param key the key * @return `true` if there is a binding for `key` in this map, `false` otherwise. */ - def contains(key: A): Boolean = get(key).isDefined + def contains(key: K): Boolean = get(key).isDefined /** Tests whether this map contains a binding for a key. 
This method, * which implements an abstract method of trait `PartialFunction`, @@ -156,33 +156,33 @@ self => * @param key the key * @return `true` if there is a binding for `key` in this map, `false` otherwise. */ - def isDefinedAt(key: A) = contains(key) + def isDefinedAt(key: K) = contains(key) override /*PartialFunction*/ - def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = + def applyOrElse[K1 <: K, V1 >: V](x: K1, default: K1 => V1): V1 = getOrElse(x, default(x)) /** Collects all keys of this map in a set. * @return a set containing all keys of this map. */ - def keySet: Set[A] = new DefaultKeySet + def keySet: Set[K] = new DefaultKeySet /** The implementation class of the set returned by `keySet`. */ - protected class DefaultKeySet extends AbstractSet[A] with Set[A] with Serializable { - def contains(key : A) = self.contains(key) + protected class DefaultKeySet extends AbstractSet[K] with Set[K] with Serializable { + def contains(key : K) = self.contains(key) def iterator = keysIterator - def + (elem: A): Set[A] = (Set[A]() ++ this + elem).asInstanceOf[Set[A]] // !!! concrete overrides abstract problem - def - (elem: A): Set[A] = (Set[A]() ++ this - elem).asInstanceOf[Set[A]] // !!! concrete overrides abstract problem + def + (elem: K): Set[K] = (Set[K]() ++ this + elem).asInstanceOf[Set[K]] // !!! concrete overrides abstract problem + def - (elem: K): Set[K] = (Set[K]() ++ this - elem).asInstanceOf[Set[K]] // !!! concrete overrides abstract problem override def size = self.size - override def foreach[U](f: A => U) = self.keysIterator foreach f + override def foreach[U](f: K => U) = self.keysIterator foreach f } /** Creates an iterator for all keys. * * @return an iterator over all keys. */ - def keysIterator: Iterator[A] = new AbstractIterator[A] { + def keysIterator: Iterator[K] = new AbstractIterator[K] { val iter = self.iterator def hasNext = iter.hasNext def next() = iter.next()._1 @@ -192,29 +192,29 @@ self => * * @return the keys of this map as an iterable. */ - @migration("`keys` returns `Iterable[A]` rather than `Iterator[A]`.", "2.8.0") - def keys: Iterable[A] = keySet + @migration("`keys` returns `Iterable[K]` rather than `Iterator[K]`.", "2.8.0") + def keys: Iterable[K] = keySet /** Collects all values of this map in an iterable collection. * * @return the values of this map as an iterable. */ - @migration("`values` returns `Iterable[B]` rather than `Iterator[B]`.", "2.8.0") - def values: Iterable[B] = new DefaultValuesIterable + @migration("`values` returns `Iterable[V]` rather than `Iterator[V]`.", "2.8.0") + def values: Iterable[V] = new DefaultValuesIterable /** The implementation class of the iterable returned by `values`. */ - protected class DefaultValuesIterable extends AbstractIterable[B] with Iterable[B] with Serializable { + protected class DefaultValuesIterable extends AbstractIterable[V] with Iterable[V] with Serializable { def iterator = valuesIterator override def size = self.size - override def foreach[U](f: B => U) = self.valuesIterator foreach f + override def foreach[U](f: V => U) = self.valuesIterator foreach f } /** Creates an iterator for all values in this map. * * @return an iterator over all values that are associated with some key in this map. 
*/ - def valuesIterator: Iterator[B] = new AbstractIterator[B] { + def valuesIterator: Iterator[V] = new AbstractIterator[V] { val iter = self.iterator def hasNext = iter.hasNext def next() = iter.next()._2 @@ -228,33 +228,33 @@ self => * @param key the given key value for which a binding is missing. * @throws NoSuchElementException */ - def default(key: A): B = + def default(key: K): V = throw new NoSuchElementException("key not found: " + key) - protected class FilteredKeys(p: A => Boolean) extends AbstractMap[A, B] with DefaultMap[A, B] { - override def foreach[U](f: ((A, B)) => U): Unit = for (kv <- self) if (p(kv._1)) f(kv) + protected class FilteredKeys(p: K => Boolean) extends AbstractMap[K, V] with DefaultMap[K, V] { + override def foreach[U](f: ((K, V)) => U): Unit = for (kv <- self) if (p(kv._1)) f(kv) def iterator = self.iterator.filter(kv => p(kv._1)) - override def contains(key: A) = p(key) && self.contains(key) - def get(key: A) = if (!p(key)) None else self.get(key) + override def contains(key: K) = p(key) && self.contains(key) + def get(key: K) = if (!p(key)) None else self.get(key) } /** Filters this map by retaining only keys satisfying a predicate. * - * '''Note''': the predicate must accept any key of type `A`, not just those already + * '''Note''': the predicate must accept any key of type `K`, not just those already * present in the map, as the predicate is tested before the underlying map is queried. * * @param p the predicate used to test keys * @return an immutable map consisting only of those key value pairs of this map where the key satisfies * the predicate `p`. The resulting map wraps the original map without copying any elements. */ - def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p) + def filterKeys(p: K => Boolean): Map[K, V] = new FilteredKeys(p) - protected class MappedValues[C](f: B => C) extends AbstractMap[A, C] with DefaultMap[A, C] { - override def foreach[U](g: ((A, C)) => U): Unit = for ((k, v) <- self) g((k, f(v))) + protected class MappedValues[W](f: V => W) extends AbstractMap[K, W] with DefaultMap[K, W] { + override def foreach[U](g: ((K, W)) => U): Unit = for ((k, v) <- self) g((k, f(v))) def iterator = for ((k, v) <- self.iterator) yield (k, f(v)) override def size = self.size - override def contains(key: A) = self.contains(key) - def get(key: A) = self.get(key).map(f) + override def contains(key: K) = self.contains(key) + def get(key: K) = self.get(key).map(f) } /** Transforms this map by applying a function to every retrieved value. @@ -262,22 +262,22 @@ self => * @return a map view which maps every key of this map * to `f(this(key))`. The resulting map wraps the original map without copying any elements. */ - def mapValues[C](f: B => C): Map[A, C] = new MappedValues(f) + def mapValues[W](f: V => W): Map[K, W] = new MappedValues(f) // The following 5 operations (updated, two times +, two times ++) should really be - // generic, returning This[B]. We need better covariance support to express that though. + // generic, returning This[V]. We need better covariance support to express that though. // So right now we do the brute force approach of code duplication. /** Creates a new map obtained by updating this map with a given key/value pair. * @param key the key * @param value the value - * @tparam B1 the type of the added value + * @tparam V1 the type of the added value * @return A new map with the new key/value mapping added to this map. 
* - * @usecase def updated(key: A, value: B): Map[A, B] + * @usecase def updated(key: K, value: V): Map[K, V] * @inheritdoc */ - def updated [B1 >: B](key: A, value: B1): Map[A, B1] = this + ((key, value)) + def updated [V1 >: V](key: K, value: V1): Map[K, V1] = this + ((key, value)) /** Adds key/value pairs to this map, returning a new map. * @@ -287,27 +287,27 @@ self => * @param kv1 the first key/value pair * @param kv2 the second key/value pair * @param kvs the remaining key/value pairs - * @tparam B1 the type of the added values + * @tparam V1 the type of the added values * @return a new map with the given bindings added to this map * - * @usecase def + (kvs: (A, B)*): Map[A, B] + * @usecase def + (kvs: (K, V)*): Map[K, V] * @inheritdoc * @param kvs the key/value pairs */ - def + [B1 >: B] (kv1: (A, B1), kv2: (A, B1), kvs: (A, B1) *): Map[A, B1] = + def + [V1 >: V] (kv1: (K, V1), kv2: (K, V1), kvs: (K, V1) *): Map[K, V1] = this + kv1 + kv2 ++ kvs /** Adds all key/value pairs in a traversable collection to this map, returning a new map. * * @param xs the collection containing the added key/value pairs - * @tparam B1 the type of the added values + * @tparam V1 the type of the added values * @return a new map with the given bindings added to this map * - * @usecase def ++ (xs: Traversable[(A, B)]): Map[A, B] + * @usecase def ++ (xs: Traversable[(K, V)]): Map[K, V] * @inheritdoc */ - def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] = - ((repr: Map[A, B1]) /: xs.seq) (_ + _) + def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): Map[K, V1] = + ((repr: Map[K, V1]) /: xs.seq) (_ + _) /** Returns a new map obtained by removing all key/value pairs for which the predicate * `p` returns `true`. @@ -320,31 +320,31 @@ self => * @param p A predicate over key-value pairs * @return A new map containing elements not satisfying the predicate. */ - override def filterNot(p: ((A, B)) => Boolean): This = { + override def filterNot(p: ((K, V)) => Boolean): This = { var res: This = repr for (kv <- this) if (p(kv)) res = (res - kv._1).asInstanceOf[This] // !!! concrete overrides abstract problem res } - override def toSeq: Seq[(A, B)] = { - if (isEmpty) Vector.empty[(A, B)] + override def toSeq: Seq[(K, V)] = { + if (isEmpty) Vector.empty[(K, V)] else { // Default appropriate for immutable collections; mutable collections override this - val vb = Vector.newBuilder[(A, B)] + val vb = Vector.newBuilder[(K, V)] foreach(vb += _) vb.result } } - override def toBuffer[C >: (A, B)]: mutable.Buffer[C] = { - val result = new mutable.ArrayBuffer[C](size) + override def toBuffer[E >: (K, V)]: mutable.Buffer[E] = { + val result = new mutable.ArrayBuffer[E](size) // Faster to let the map iterate itself than to defer through copyToBuffer foreach(result += _) result } - protected[this] override def parCombiner = ParMap.newCombiner[A, B] + protected[this] override def parCombiner = ParMap.newCombiner[K, V] /** Appends all bindings of this map to a string builder using start, end, and separator strings. 
* The written text begins with the string `start` and ends with the string diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala index 6f135cd35f4a..cbdf7b39f5a1 100644 --- a/src/library/scala/collection/immutable/Map.scala +++ b/src/library/scala/collection/immutable/Map.scala @@ -18,30 +18,30 @@ import generic._ * functionality for the abstract methods in `Map`: * * {{{ - * def get(key: A): Option[B] - * def iterator: Iterator[(A, B)] - * def + [B1 >: B](kv: (A, B1)): Map[A, B1] - * def -(key: A): Map[A, B] + * def get(key: K): Option[V] + * def iterator: Iterator[(K, V)] + * def + [V1 >: V](kv: (K, V1)): Map[K, V1] + * def -(key: K): Map[K, V] * }}} * * @since 1 */ -trait Map[A, +B] extends Iterable[(A, B)] -// with GenMap[A, B] - with scala.collection.Map[A, B] - with MapLike[A, B, Map[A, B]] { self => +trait Map[K, +V] extends Iterable[(K, V)] +// with GenMap[K, V] + with scala.collection.Map[K, V] + with MapLike[K, V, Map[K, V]] { self => - override def empty: Map[A, B] = Map.empty + override def empty: Map[K, V] = Map.empty /** Returns this $coll as an immutable map. * * A new map will not be built; lazy collections will stay lazy. */ @deprecatedOverriding("Immutable maps should do nothing on toMap except return themselves cast as a map.", "2.11.0") - override def toMap[T, U](implicit ev: (A, B) <:< (T, U)): immutable.Map[T, U] = + override def toMap[T, U](implicit ev: (K, V) <:< (T, U)): immutable.Map[T, U] = self.asInstanceOf[immutable.Map[T, U]] - override def seq: Map[A, B] = this + override def seq: Map[K, V] = this /** The same map with a given default function. * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefault`. @@ -51,7 +51,7 @@ trait Map[A, +B] extends Iterable[(A, B)] * @param d the function mapping keys to values, used for non-present keys * @return a wrapper of the map with a default value */ - def withDefault[B1 >: B](d: A => B1): immutable.Map[A, B1] = new Map.WithDefault[A, B1](this, d) + def withDefault[V1 >: V](d: K => V1): immutable.Map[K, V1] = new Map.WithDefault[K, V1](this, d) /** The same map with a given default value. * Note: `get`, `contains`, `iterator`, `keys`, etc are not affected by `withDefaultValue`. @@ -61,15 +61,15 @@ trait Map[A, +B] extends Iterable[(A, B)] * @param d default value used for non-present keys * @return a wrapper of the map with a default value */ - def withDefaultValue[B1 >: B](d: B1): immutable.Map[A, B1] = new Map.WithDefault[A, B1](this, x => d) + def withDefaultValue[V1 >: V](d: V1): immutable.Map[K, V1] = new Map.WithDefault[K, V1](this, x => d) /** Add a key/value pair to this map. 
* @param key the key * @param value the value * @return A new map with the new binding added to this map */ - override def updated [B1 >: B](key: A, value: B1): Map[A, B1] - def + [B1 >: B](kv: (A, B1)): Map[A, B1] + override def updated [V1 >: V](key: K, value: V1): Map[K, V1] + def + [V1 >: V](kv: (K, V1)): Map[K, V1] } /** $factoryInfo @@ -79,17 +79,17 @@ trait Map[A, +B] extends Iterable[(A, B)] object Map extends ImmutableMapFactory[Map] { /** $mapCanBuildFromInfo */ - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), Map[A, B]] = new MapCanBuildFrom[A, B] + implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Map[K, V]] = new MapCanBuildFrom[K, V] - def empty[A, B]: Map[A, B] = EmptyMap.asInstanceOf[Map[A, B]] + def empty[K, V]: Map[K, V] = EmptyMap.asInstanceOf[Map[K, V]] - class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends scala.collection.Map.WithDefault[A, B](underlying, d) with Map[A, B] { + class WithDefault[K, +V](underlying: Map[K, V], d: K => V) extends scala.collection.Map.WithDefault[K, V](underlying, d) with Map[K, V] { override def empty = new WithDefault(underlying.empty, d) - override def updated[B1 >: B](key: A, value: B1): WithDefault[A, B1] = new WithDefault[A, B1](underlying.updated[B1](key, value), d) - override def + [B1 >: B](kv: (A, B1)): WithDefault[A, B1] = updated(kv._1, kv._2) - override def - (key: A): WithDefault[A, B] = new WithDefault(underlying - key, d) - override def withDefault[B1 >: B](d: A => B1): immutable.Map[A, B1] = new WithDefault[A, B1](underlying, d) - override def withDefaultValue[B1 >: B](d: B1): immutable.Map[A, B1] = new WithDefault[A, B1](underlying, x => d) + override def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = new WithDefault[K, V1](underlying.updated[V1](key, value), d) + override def + [V1 >: V](kv: (K, V1)): WithDefault[K, V1] = updated(kv._1, kv._2) + override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) + override def withDefault[V1 >: V](d: K => V1): immutable.Map[K, V1] = new WithDefault[K, V1](underlying, d) + override def withDefaultValue[V1 >: V](d: V1): immutable.Map[K, V1] = new WithDefault[K, V1](underlying, x => d) } private object EmptyMap extends AbstractMap[Any, Nothing] with Map[Any, Nothing] with Serializable { @@ -98,119 +98,119 @@ object Map extends ImmutableMapFactory[Map] { override def contains(key: Any) = false def get(key: Any): Option[Nothing] = None def iterator: Iterator[(Any, Nothing)] = Iterator.empty - override def updated [B1] (key: Any, value: B1): Map[Any, B1] = new Map1(key, value) - def + [B1](kv: (Any, B1)): Map[Any, B1] = updated(kv._1, kv._2) + override def updated [V1] (key: Any, value: V1): Map[Any, V1] = new Map1(key, value) + def + [V1](kv: (Any, V1)): Map[Any, V1] = updated(kv._1, kv._2) def - (key: Any): Map[Any, Nothing] = this } - class Map1[A, +B](key1: A, value1: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { + class Map1[K, +V](key1: K, value1: V) extends AbstractMap[K, V] with Map[K, V] with Serializable { override def size = 1 - override def apply(key: A) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) - override def contains(key: A) = key == key1 - def get(key: A): Option[B] = + override def apply(key: K) = if (key == key1) value1 else throw new NoSuchElementException("key not found: " + key) + override def contains(key: K) = key == key1 + def get(key: K): Option[V] = if (key == key1) Some(value1) else None def iterator = Iterator((key1, value1)) 
- override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] = + override def updated [V1 >: V] (key: K, value: V1): Map[K, V1] = if (key == key1) new Map1(key1, value) else new Map2(key1, value1, key, value) - def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) - def - (key: A): Map[A, B] = + def + [V1 >: V](kv: (K, V1)): Map[K, V1] = updated(kv._1, kv._2) + def - (key: K): Map[K, V] = if (key == key1) Map.empty else this - override def foreach[U](f: ((A, B)) => U): Unit = { + override def foreach[U](f: ((K, V)) => U): Unit = { f((key1, value1)) } } - class Map2[A, +B](key1: A, value1: B, key2: A, value2: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { + class Map2[K, +V](key1: K, value1: V, key2: K, value2: V) extends AbstractMap[K, V] with Map[K, V] with Serializable { override def size = 2 - override def apply(key: A) = + override def apply(key: K) = if (key == key1) value1 else if (key == key2) value2 else throw new NoSuchElementException("key not found: " + key) - override def contains(key: A) = (key == key1) || (key == key2) - def get(key: A): Option[B] = + override def contains(key: K) = (key == key1) || (key == key2) + def get(key: K): Option[V] = if (key == key1) Some(value1) else if (key == key2) Some(value2) else None def iterator = Iterator((key1, value1), (key2, value2)) - override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] = + override def updated [V1 >: V] (key: K, value: V1): Map[K, V1] = if (key == key1) new Map2(key1, value, key2, value2) else if (key == key2) new Map2(key1, value1, key2, value) else new Map3(key1, value1, key2, value2, key, value) - def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) - def - (key: A): Map[A, B] = + def + [V1 >: V](kv: (K, V1)): Map[K, V1] = updated(kv._1, kv._2) + def - (key: K): Map[K, V] = if (key == key1) new Map1(key2, value2) else if (key == key2) new Map1(key1, value1) else this - override def foreach[U](f: ((A, B)) => U): Unit = { + override def foreach[U](f: ((K, V)) => U): Unit = { f((key1, value1)); f((key2, value2)) } } - class Map3[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { + class Map3[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V) extends AbstractMap[K, V] with Map[K, V] with Serializable { override def size = 3 - override def apply(key: A) = + override def apply(key: K) = if (key == key1) value1 else if (key == key2) value2 else if (key == key3) value3 else throw new NoSuchElementException("key not found: " + key) - override def contains(key: A) = (key == key1) || (key == key2) || (key == key3) - def get(key: A): Option[B] = + override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) + def get(key: K): Option[V] = if (key == key1) Some(value1) else if (key == key2) Some(value2) else if (key == key3) Some(value3) else None def iterator = Iterator((key1, value1), (key2, value2), (key3, value3)) - override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] = + override def updated [V1 >: V] (key: K, value: V1): Map[K, V1] = if (key == key1) new Map3(key1, value, key2, value2, key3, value3) else if (key == key2) new Map3(key1, value1, key2, value, key3, value3) else if (key == key3) new Map3(key1, value1, key2, value2, key3, value) else new Map4(key1, value1, key2, value2, key3, value3, key, value) - def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) - def - (key: A): Map[A, B] = + def + [V1 >: V](kv: (K, V1)): Map[K, V1] = 
updated(kv._1, kv._2) + def - (key: K): Map[K, V] = if (key == key1) new Map2(key2, value2, key3, value3) else if (key == key2) new Map2(key1, value1, key3, value3) else if (key == key3) new Map2(key1, value1, key2, value2) else this - override def foreach[U](f: ((A, B)) => U): Unit = { + override def foreach[U](f: ((K, V)) => U): Unit = { f((key1, value1)); f((key2, value2)); f((key3, value3)) } } - class Map4[A, +B](key1: A, value1: B, key2: A, value2: B, key3: A, value3: B, key4: A, value4: B) extends AbstractMap[A, B] with Map[A, B] with Serializable { + class Map4[K, +V](key1: K, value1: V, key2: K, value2: V, key3: K, value3: V, key4: K, value4: V) extends AbstractMap[K, V] with Map[K, V] with Serializable { override def size = 4 - override def apply(key: A) = + override def apply(key: K) = if (key == key1) value1 else if (key == key2) value2 else if (key == key3) value3 else if (key == key4) value4 else throw new NoSuchElementException("key not found: " + key) - override def contains(key: A) = (key == key1) || (key == key2) || (key == key3) || (key == key4) - def get(key: A): Option[B] = + override def contains(key: K) = (key == key1) || (key == key2) || (key == key3) || (key == key4) + def get(key: K): Option[V] = if (key == key1) Some(value1) else if (key == key2) Some(value2) else if (key == key3) Some(value3) else if (key == key4) Some(value4) else None def iterator = Iterator((key1, value1), (key2, value2), (key3, value3), (key4, value4)) - override def updated [B1 >: B] (key: A, value: B1): Map[A, B1] = + override def updated [V1 >: V] (key: K, value: V1): Map[K, V1] = if (key == key1) new Map4(key1, value, key2, value2, key3, value3, key4, value4) else if (key == key2) new Map4(key1, value1, key2, value, key3, value3, key4, value4) else if (key == key3) new Map4(key1, value1, key2, value2, key3, value, key4, value4) else if (key == key4) new Map4(key1, value1, key2, value2, key3, value3, key4, value) else new HashMap + ((key1, value1), (key2, value2), (key3, value3), (key4, value4), (key, value)) - def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) - def - (key: A): Map[A, B] = + def + [V1 >: V](kv: (K, V1)): Map[K, V1] = updated(kv._1, kv._2) + def - (key: K): Map[K, V] = if (key == key1) new Map3(key2, value2, key3, value3, key4, value4) else if (key == key2) new Map3(key1, value1, key3, value3, key4, value4) else if (key == key3) new Map3(key1, value1, key2, value2, key4, value4) else if (key == key4) new Map3(key1, value1, key2, value2, key3, value3) else this - override def foreach[U](f: ((A, B)) => U): Unit = { + override def foreach[U](f: ((K, V)) => U): Unit = { f((key1, value1)); f((key2, value2)); f((key3, value3)); f((key4, value4)) } } } /** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. */ -abstract class AbstractMap[A, +B] extends scala.collection.AbstractMap[A, B] with Map[A, B] +abstract class AbstractMap[K, +V] extends scala.collection.AbstractMap[K, V] with Map[K, V] diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala index bd5b9c9faf49..5867383b522e 100644 --- a/src/library/scala/collection/immutable/MapLike.scala +++ b/src/library/scala/collection/immutable/MapLike.scala @@ -14,16 +14,16 @@ import generic._ import parallel.immutable.ParMap /** - * A generic template for immutable maps from keys of type `A` - * to values of type `B`. + * A generic template for immutable maps from keys of type `K` + * to values of type `V`. 
* To implement a concrete map, you need to provide implementations of the * following methods (where `This` is the type of the actual map implementation): * * {{{ - * def get(key: A): Option[B] - * def iterator: Iterator[(A, B)] - * def + [B1 >: B](kv: (A, B)): Map[A, B1] - * def - (key: A): This + * def get(key: K): Option[V] + * def iterator: Iterator[(K, V)] + * def + [V1 >: V](kv: (K, V)): Map[K, V1] + * def - (key: K): This * }}} * * If you wish that transformer methods like `take`, `drop`, `filter` return the @@ -36,8 +36,8 @@ import parallel.immutable.ParMap * It is also good idea to override methods `foreach` and * `size` for efficiency. * - * @tparam A the type of the keys contained in this collection. - * @tparam B the type of the values associated with the keys. + * @tparam K the type of the keys contained in this collection. + * @tparam V the type of the values associated with the keys. * @tparam This The type of the actual map implementation. * * @author Martin Odersky @@ -46,26 +46,26 @@ import parallel.immutable.ParMap * @define Coll immutable.Map * @define coll immutable map */ -trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]] - extends scala.collection.MapLike[A, B, This] - with Parallelizable[(A, B), ParMap[A, B]] +trait MapLike[K, +V, +This <: MapLike[K, V, This] with Map[K, V]] + extends scala.collection.MapLike[K, V, This] + with Parallelizable[(K, V), ParMap[K, V]] { self => - protected[this] override def parCombiner = ParMap.newCombiner[A, B] + protected[this] override def parCombiner = ParMap.newCombiner[K, V] /** A new immutable map containing updating this map with a given key/value mapping. * @param key the key * @param value the value * @return A new map with the new key/value mapping */ - override def updated [B1 >: B](key: A, value: B1): immutable.Map[A, B1] = this + ((key, value)) + override def updated [V1 >: V](key: K, value: V1): immutable.Map[K, V1] = this + ((key, value)) /** Add a key/value pair to this map, returning a new map. * @param kv the key/value pair. * @return A new map with the new binding added to this map. */ - def + [B1 >: B] (kv: (A, B1)): immutable.Map[A, B1] + def + [V1 >: V] (kv: (K, V1)): immutable.Map[K, V1] /** Adds two or more elements to this collection and returns * a new collection. @@ -75,7 +75,7 @@ self => * @param elems the remaining elements to add. * @return A new map with the new bindings added to this map. */ - override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): immutable.Map[A, B1] = + override def + [V1 >: V] (elem1: (K, V1), elem2: (K, V1), elems: (K, V1) *): immutable.Map[K, V1] = this + elem1 + elem2 ++ elems /** Adds a number of elements provided by a traversable object @@ -84,40 +84,40 @@ self => * @param xs the traversable object consisting of key-value pairs. * @return a new immutable map with the bindings of this map and those from `xs`. */ - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): immutable.Map[A, B1] = - ((repr: immutable.Map[A, B1]) /: xs.seq) (_ + _) + override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): immutable.Map[K, V1] = + ((repr: immutable.Map[K, V1]) /: xs.seq) (_ + _) /** Filters this map by retaining only keys satisfying a predicate. * @param p the predicate used to test keys * @return an immutable map consisting only of those key value pairs of this map where the key satisfies * the predicate `p`. The resulting map wraps the original map without copying any elements. 
*/ - override def filterKeys(p: A => Boolean): Map[A, B] = new FilteredKeys(p) with DefaultMap[A, B] + override def filterKeys(p: K => Boolean): Map[K, V] = new FilteredKeys(p) with DefaultMap[K, V] /** Transforms this map by applying a function to every retrieved value. * @param f the function used to transform values of this map. * @return a map view which maps every key of this map * to `f(this(key))`. The resulting map wraps the original map without copying any elements. */ - override def mapValues[C](f: B => C): Map[A, C] = new MappedValues(f) with DefaultMap[A, C] + override def mapValues[W](f: V => W): Map[K, W] = new MappedValues(f) with DefaultMap[K, W] /** Collects all keys of this map in a set. * @return a set containing all keys of this map. */ - override def keySet: immutable.Set[A] = new ImmutableDefaultKeySet + override def keySet: immutable.Set[K] = new ImmutableDefaultKeySet - protected class ImmutableDefaultKeySet extends super.DefaultKeySet with immutable.Set[A] { - override def + (elem: A): immutable.Set[A] = + protected class ImmutableDefaultKeySet extends super.DefaultKeySet with immutable.Set[K] { + override def + (elem: K): immutable.Set[K] = if (this(elem)) this - else immutable.Set[A]() ++ this + elem - override def - (elem: A): immutable.Set[A] = - if (this(elem)) immutable.Set[A]() ++ this - elem + else immutable.Set[K]() ++ this + elem + override def - (elem: K): immutable.Set[K] = + if (this(elem)) immutable.Set[K]() ++ this - elem else this // ImmutableDefaultKeySet is only protected, so we won't warn on override. // Someone could override in a way that makes widening not okay // (e.g. by overriding +, though the version in this class is fine) - override def toSet[B >: A]: Set[B] = this.asInstanceOf[Set[B]] + override def toSet[B >: K]: Set[B] = this.asInstanceOf[Set[B]] } /** This function transforms all the values of mappings contained @@ -126,10 +126,9 @@ self => * @param f A function over keys and values * @return the updated map */ - def transform[C, That](f: (A, B) => C)(implicit bf: CanBuildFrom[This, (A, C), That]): That = { + def transform[W, That](f: (K, V) => W)(implicit bf: CanBuildFrom[This, (K, W), That]): That = { val b = bf(repr) for ((key, value) <- this) b += ((key, f(key, value))) b.result() } } - diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala index 2ac3cb65b52e..460a8b8f77f8 100644 --- a/src/library/scala/collection/mutable/Map.scala +++ b/src/library/scala/collection/mutable/Map.scala @@ -20,15 +20,15 @@ import generic._ * @since 1.0 * @author Matthias Zenger */ -trait Map[A, B] - extends Iterable[(A, B)] -// with GenMap[A, B] - with scala.collection.Map[A, B] - with MapLike[A, B, Map[A, B]] { +trait Map[K, V] + extends Iterable[(K, V)] +// with GenMap[K, V] + with scala.collection.Map[K, V] + with MapLike[K, V, Map[K, V]] { - override def empty: Map[A, B] = Map.empty + override def empty: Map[K, V] = Map.empty - override def seq: Map[A, B] = this + override def seq: Map[K, V] = this /** The same map with a given default function. * @@ -37,7 +37,7 @@ trait Map[A, B] * @param d the function mapping keys to values, used for non-present keys * @return a wrapper of the map with a default value */ - def withDefault(d: A => B): mutable.Map[A, B] = new Map.WithDefault[A, B](this, d) + def withDefault(d: K => V): mutable.Map[K, V] = new Map.WithDefault[K, V](this, d) /** The same map with a given default value. 
* @@ -46,7 +46,7 @@ trait Map[A, B] * @param d default value used for non-present keys * @return a wrapper of the map with a default value */ - def withDefaultValue(d: B): mutable.Map[A, B] = new Map.WithDefault[A, B](this, x => d) + def withDefaultValue(d: V): mutable.Map[K, V] = new Map.WithDefault[K, V](this, x => d) } /** $factoryInfo @@ -56,25 +56,25 @@ trait Map[A, B] */ object Map extends MutableMapFactory[Map] { /** $canBuildFromInfo */ - implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), Map[A, B]] = new MapCanBuildFrom[A, B] + implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Map[K, V]] = new MapCanBuildFrom[K, V] - def empty[A, B]: Map[A, B] = new HashMap[A, B] + def empty[K, V]: Map[K, V] = new HashMap[K, V] - class WithDefault[A, B](underlying: Map[A, B], d: A => B) extends scala.collection.Map.WithDefault(underlying, d) with Map[A, B] { - override def += (kv: (A, B)) = {underlying += kv; this} - def -= (key: A) = {underlying -= key; this} + class WithDefault[K, V](underlying: Map[K, V], d: K => V) extends scala.collection.Map.WithDefault(underlying, d) with Map[K, V] { + override def += (kv: (K, V)) = {underlying += kv; this} + def -= (key: K) = {underlying -= key; this} override def empty = new WithDefault(underlying.empty, d) - override def updated[B1 >: B](key: A, value: B1): WithDefault[A, B1] = new WithDefault[A, B1](underlying.updated[B1](key, value), d) - override def + [B1 >: B](kv: (A, B1)): WithDefault[A, B1] = updated(kv._1, kv._2) - override def - (key: A): WithDefault[A, B] = new WithDefault(underlying - key, d) + override def updated[V1 >: V](key: K, value: V1): WithDefault[K, V1] = new WithDefault[K, V1](underlying.updated[V1](key, value), d) + override def + [V1 >: V](kv: (K, V1)): WithDefault[K, V1] = updated(kv._1, kv._2) + override def - (key: K): WithDefault[K, V] = new WithDefault(underlying - key, d) /** If these methods aren't overridden to thread through the underlying map, * successive calls to withDefault* have no effect. */ - override def withDefault(d: A => B): mutable.Map[A, B] = new WithDefault[A, B](underlying, d) - override def withDefaultValue(d: B): mutable.Map[A, B] = new WithDefault[A, B](underlying, x => d) + override def withDefault(d: K => V): mutable.Map[K, V] = new WithDefault[K, V](underlying, d) + override def withDefaultValue(d: V): mutable.Map[K, V] = new WithDefault[K, V](underlying, x => d) } } /** Explicit instantiation of the `Map` trait to reduce class file size in subclasses. 
*/ -abstract class AbstractMap[A, B] extends scala.collection.AbstractMap[A, B] with Map[A, B] +abstract class AbstractMap[K, V] extends scala.collection.AbstractMap[K, V] with Map[K, V] diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala index 949e5e3536bc..238b6d1be1f4 100644 --- a/src/library/scala/collection/mutable/MapLike.scala +++ b/src/library/scala/collection/mutable/MapLike.scala @@ -31,10 +31,10 @@ import scala.collection.parallel.mutable.ParMap * To implement a concrete mutable map, you need to provide * implementations of the following methods: * {{{ - * def get(key: A): Option[B] - * def iterator: Iterator[(A, B)] - * def += (kv: (A, B)): This - * def -= (key: A): This + * def get(key: K): Option[V] + * def iterator: Iterator[(K, V)] + * def += (kv: (K, V)): This + * def -= (key: K): This * }}} * If you wish that methods like `take`, `drop`, `filter` also return the same kind of map * you should also override: @@ -44,13 +44,13 @@ import scala.collection.parallel.mutable.ParMap * It is also good idea to override methods `foreach` and * `size` for efficiency. */ -trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] - extends scala.collection.MapLike[A, B, This] - with Builder[(A, B), This] - with Growable[(A, B)] - with Shrinkable[A] +trait MapLike[K, V, +This <: MapLike[K, V, This] with Map[K, V]] + extends scala.collection.MapLike[K, V, This] + with Builder[(K, V), This] + with Growable[(K, V)] + with Shrinkable[K] with Cloneable[This] - with Parallelizable[(A, B), ParMap[A, B]] + with Parallelizable[(K, V), ParMap[K, V]] { self => /** A common implementation of `newBuilder` for all mutable maps @@ -58,17 +58,17 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * * Overrides `MapLike` implementation for better efficiency. */ - override protected[this] def newBuilder: Builder[(A, B), This] = empty + override protected[this] def newBuilder: Builder[(K, V), This] = empty - protected[this] override def parCombiner = ParMap.newCombiner[A, B] + protected[this] override def parCombiner = ParMap.newCombiner[K, V] /** Converts this $coll to a sequence. * * ```Note```: assumes a fast `size` method. Subclasses should override if this is not true. */ - override def toSeq: collection.Seq[(A, B)] = { + override def toSeq: collection.Seq[(K, V)] = { // ArrayBuffer for efficiency, preallocated to the right size. - val result = new ArrayBuffer[(A, B)](size) + val result = new ArrayBuffer[(K, V)](size) foreach(result += _) result } @@ -84,7 +84,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * before the `put` operation was executed, or `None` if `key` * was not defined in the map before. */ - def put(key: A, value: B): Option[B] = { + def put(key: K, value: V): Option[V] = { val r = get(key) update(key, value) r @@ -97,7 +97,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * @param key The key to update * @param value The new value */ - def update(key: A, value: B) { this += ((key, value)) } + def update(key: K, value: V) { this += ((key, value)) } /** Adds a new key/value pair to this map. * If the map already contains a @@ -105,7 +105,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * @param kv the key/value pair. * @return the map itself */ - def += (kv: (A, B)): this.type + def += (kv: (K, V)): this.type /** Creates a new map consisting of all key/value pairs of the current map * plus a new pair of a given key and value. 
@@ -115,7 +115,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * @return A fresh immutable map with the binding from `key` to * `value` added to this map. */ - override def updated[B1 >: B](key: A, value: B1): Map[A, B1] = this + ((key, value)) + override def updated[V1 >: V](key: K, value: V1): Map[K, V1] = this + ((key, value)) /** Creates a new map containing a new key/value mapping and all the key/value mappings * of this map. @@ -126,7 +126,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * @return a new map containing mappings of this map and the mapping `kv`. */ @migration("`+` creates a new map. Use `+=` to add an element to this map and return that map itself.", "2.8.0") - def + [B1 >: B] (kv: (A, B1)): Map[A, B1] = clone().asInstanceOf[Map[A, B1]] += kv + def + [V1 >: V] (kv: (K, V1)): Map[K, V1] = clone().asInstanceOf[Map[K, V1]] += kv /** Creates a new map containing two or more key/value mappings and all the key/value * mappings of this map. @@ -139,8 +139,8 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * @return a new map containing mappings of this map and two or more specified mappings. */ @migration("`+` creates a new map. Use `+=` to add an element to this map and return that map itself.", "2.8.0") - override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): Map[A, B1] = - clone().asInstanceOf[Map[A, B1]] += elem1 += elem2 ++= elems + override def + [V1 >: V] (elem1: (K, V1), elem2: (K, V1), elems: (K, V1) *): Map[K, V1] = + clone().asInstanceOf[Map[K, V1]] += elem1 += elem2 ++= elems /** Creates a new map containing the key/value mappings provided by the specified traversable object * and all the key/value mappings of this map. @@ -151,8 +151,8 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * @return a new map containing mappings of this map and those provided by `xs`. */ @migration("`++` creates a new map. Use `++=` to add an element to this map and return that map itself.", "2.8.0") - override def ++[B1 >: B](xs: GenTraversableOnce[(A, B1)]): Map[A, B1] = - clone().asInstanceOf[Map[A, B1]] ++= xs.seq + override def ++[V1 >: V](xs: GenTraversableOnce[(K, V1)]): Map[K, V1] = + clone().asInstanceOf[Map[K, V1]] ++= xs.seq /** Removes a key from this map, returning the value associated previously * with that key as an option. @@ -160,7 +160,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * @return an option value containing the value associated previously with `key`, * or `None` if `key` was not defined in the map before. */ - def remove(key: A): Option[B] = { + def remove(key: K): Option[V] = { val r = get(key) this -= key r @@ -170,7 +170,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * @param key the key to be removed * @return the map itself. */ - def -= (key: A): this.type + def -= (key: K): this.type /** Creates a new map with all the key/value mappings of this map except the key/value mapping * with the specified key. @@ -179,7 +179,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * @return a new map with all the mappings of this map except that with a key `key`. */ @migration("`-` creates a new map. Use `-=` to remove an element from this map and return that map itself.", "2.8.0") - override def -(key: A): This = clone() -= key + override def -(key: K): This = clone() -= key /** Removes all bindings from the map. After this operation has completed, * the map will be empty. 
@@ -200,7 +200,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * @return the value associated with key (either previously or as a result * of executing the method). */ - def getOrElseUpdate(key: A, op: => B): B = + def getOrElseUpdate(key: K, op: => V): V = get(key) match { case Some(v) => v case None => val d = op; this(key) = d; d @@ -213,7 +213,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * @param f the transformation to apply * @return the map itself. */ - def transform(f: (A, B) => B): this.type = { + def transform(f: (K, V) => V): this.type = { this.iterator foreach { case (key, value) => update(key, f(key, value)) } @@ -225,7 +225,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * * @param p The test predicate */ - def retain(p: (A, B) => Boolean): this.type = { + def retain(p: (K, V) => Boolean): this.type = { for ((k, v) <- this.toList) // SI-7269 toList avoids ConcurrentModificationException if (!p(k, v)) this -= k @@ -249,7 +249,7 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * with a key equal to `elem1`, `elem2` or any of `elems`. */ @migration("`-` creates a new map. Use `-=` to remove an element from this map and return that map itself.", "2.8.0") - override def -(elem1: A, elem2: A, elems: A*): This = + override def -(elem1: K, elem2: K, elems: K*): This = clone() -= elem1 -= elem2 --= elems /** Creates a new map with all the key/value mappings of this map except mappings with keys @@ -260,5 +260,5 @@ trait MapLike[A, B, +This <: MapLike[A, B, This] with Map[A, B]] * with a key equal to a key from `xs`. */ @migration("`--` creates a new map. Use `--=` to remove an element from this map and return that map itself.", "2.8.0") - override def --(xs: GenTraversableOnce[A]): This = clone() --= xs.seq + override def --(xs: GenTraversableOnce[K]): This = clone() --= xs.seq } diff --git a/test/files/run/xMigration.check b/test/files/run/xMigration.check index cd860bf39490..1104dbea835f 100644 --- a/test/files/run/xMigration.check +++ b/test/files/run/xMigration.check @@ -11,7 +11,7 @@ scala> :setting -Xmigration:any scala> Map(1 -> "eis").values // warn :12: warning: method values in trait MapLike has changed semantics in version 2.8.0: -`values` returns `Iterable[B]` rather than `Iterator[B]`. +`values` returns `Iterable[V]` rather than `Iterator[V]`. Map(1 -> "eis").values // warn ^ res2: Iterable[String] = MapLike(eis) @@ -25,7 +25,7 @@ scala> :setting -Xmigration:2.7 scala> Map(1 -> "eis").values // warn :12: warning: method values in trait MapLike has changed semantics in version 2.8.0: -`values` returns `Iterable[B]` rather than `Iterator[B]`. +`values` returns `Iterable[V]` rather than `Iterator[V]`. Map(1 -> "eis").values // warn ^ res4: Iterable[String] = MapLike(eis) @@ -39,7 +39,7 @@ scala> :setting -Xmigration // same as :any scala> Map(1 -> "eis").values // warn :12: warning: method values in trait MapLike has changed semantics in version 2.8.0: -`values` returns `Iterable[B]` rather than `Iterator[B]`. +`values` returns `Iterable[V]` rather than `Iterator[V]`. Map(1 -> "eis").values // warn ^ res6: Iterable[String] = MapLike(eis) From c2ca66af4214dcd2e17be038741fe63a47bb2725 Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Tue, 5 Jul 2016 16:08:25 +0200 Subject: [PATCH 0157/2793] SI-6881 Detect reference equality when comparing streams `==` already covers this case. We override `equals` in `Stream` to do the same when `equals` is called directly. 
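For illustration (this snippet is not part of the patch; it mirrors the new StreamTest.test_reference_equality added below), comparing an infinite stream against itself should terminate instead of forcing elements:

```
val s = Stream.from(0)   // infinite stream
assert(s == s)           // `==` already short-circuits on reference equality
assert(s equals s)       // with this change, a direct `equals` call does too
```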
This takes care of identical streams. To support short-circuiting equality checks on elements prepended to identical streams we also override `sameElements` in `Cons` to treat the case where both sides are `Cons` separately. Tests in StreamTest.test_reference_equality. --- .../scala/collection/immutable/Stream.scala | 23 +++++++++++++++++++ .../collection/immutable/StreamTest.scala | 10 ++++++++ 2 files changed, 33 insertions(+) diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index d135bb29a8fc..db19df315f50 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -1029,6 +1029,8 @@ sealed abstract class Stream[+A] extends AbstractSeq[A] */ override def stringPrefix = "Stream" + override def equals(that: Any): Boolean = + if (this eq that.asInstanceOf[AnyRef]) true else super.equals(that) } /** A specialized, extra-lazy implementation of a stream iterator, so it can @@ -1171,6 +1173,27 @@ object Stream extends SeqFactory[Stream] { tlVal } + + override /*LinearSeqOptimized*/ + def sameElements[B >: A](that: GenIterable[B]): Boolean = { + @tailrec def consEq(a: Cons[_], b: Cons[_]): Boolean = { + if (a.head != b.head) false + else { + a.tail match { + case at: Cons[_] => + b.tail match { + case bt: Cons[_] => (at eq bt) || consEq(at, bt) + case _ => false + } + case _ => b.tail.isEmpty + } + } + } + that match { + case that: Cons[_] => consEq(this, that) + case _ => super.sameElements(that) + } + } } /** An infinite stream that repeatedly applies a given function to a start value. diff --git a/test/junit/scala/collection/immutable/StreamTest.scala b/test/junit/scala/collection/immutable/StreamTest.scala index fad4e502eba1..7046525f374c 100644 --- a/test/junit/scala/collection/immutable/StreamTest.scala +++ b/test/junit/scala/collection/immutable/StreamTest.scala @@ -107,4 +107,14 @@ class StreamTest { def withFilter_map_properly_lazy_in_tail: Unit = { assertStreamOpLazyInTail(_.withFilter(_ % 2 == 0).map(identity), List(1, 2)) } + + @Test // SI-6881 + def test_reference_equality: Unit = { + // Make sure we're tested with reference equality + val s = Stream.from(0) + assert(s == s, "Referentially identical streams should be equal (==)") + assert(s equals s, "Referentially identical streams should be equal (equals)") + assert((0 #:: 1 #:: s) == (0 #:: 1 #:: s), "Cons of referentially identical streams should be equal (==)") + assert((0 #:: 1 #:: s) equals (0 #:: 1 #:: s), "Cons of referentially identical streams should be equal (equals)") + } } From cd9240ed524e69f01fcd872b9791754102c6530b Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Tue, 5 Jul 2016 12:37:04 +0200 Subject: [PATCH 0158/2793] SI-7301 Make tuple classes final This includes undoing the special case for `-Xfuture` introduced in https://github.com/scala/scala/pull/2299 and updating tests to take the new errors into account. 
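For illustration, user code along the following lines (the class name is hypothetical) now fails to compile instead of merely triggering a deprecation warning, matching the updated t7294b.check below:

```
// Inheriting from a tuple class is rejected once the classes are final.
class Pair(a: Int, b: Int) extends Tuple2[Int, Int](a, b)
// error: illegal inheritance from final class Tuple2
```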
--- src/build/genprod.scala | 3 +-- src/compiler/scala/tools/nsc/typechecker/Checkable.scala | 4 +--- src/library/scala/Tuple1.scala | 3 +-- src/library/scala/Tuple10.scala | 3 +-- src/library/scala/Tuple11.scala | 3 +-- src/library/scala/Tuple12.scala | 3 +-- src/library/scala/Tuple13.scala | 3 +-- src/library/scala/Tuple14.scala | 3 +-- src/library/scala/Tuple15.scala | 3 +-- src/library/scala/Tuple16.scala | 3 +-- src/library/scala/Tuple17.scala | 3 +-- src/library/scala/Tuple18.scala | 3 +-- src/library/scala/Tuple19.scala | 3 +-- src/library/scala/Tuple2.scala | 3 +-- src/library/scala/Tuple20.scala | 3 +-- src/library/scala/Tuple21.scala | 3 +-- src/library/scala/Tuple22.scala | 3 +-- src/library/scala/Tuple3.scala | 3 +-- src/library/scala/Tuple4.scala | 3 +-- src/library/scala/Tuple5.scala | 3 +-- src/library/scala/Tuple6.scala | 3 +-- src/library/scala/Tuple7.scala | 3 +-- src/library/scala/Tuple8.scala | 3 +-- src/library/scala/Tuple9.scala | 3 +-- test/files/neg/t7294.check | 6 +++++- test/files/neg/t7294.flags | 1 - test/files/neg/t7294b.check | 4 +--- test/files/neg/t7294b.flags | 1 - test/files/pos/t7294.scala | 6 ------ test/files/pos/tcpoly_bounds1.scala | 6 ++++-- test/files/run/t3888.check | 1 - test/files/run/t3888.scala | 5 +++-- 32 files changed, 37 insertions(+), 66 deletions(-) delete mode 100644 test/files/neg/t7294.flags delete mode 100644 test/files/neg/t7294b.flags delete mode 100644 test/files/pos/t7294.scala diff --git a/src/build/genprod.scala b/src/build/genprod.scala index f85a151ae534..fa48b020cc96 100644 --- a/src/build/genprod.scala +++ b/src/build/genprod.scala @@ -315,8 +315,7 @@ class Tuple(val i: Int) extends Group("Tuple") with Arity { * @constructor Create a new tuple with {i} elements.{idiomatic} {params} */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class {className}{covariantArgs}({fields}) +final case class {className}{covariantArgs}({fields}) extends {Product.className(i)}{invariantArgs} {{ override def toString() = "(" + {mkToString} + ")" diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index 2b6a4c763a5c..215ee1c42bc6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -241,9 +241,7 @@ trait Checkable { private def isSealedOrFinal(sym: Symbol) = sym.isSealed || sym.isFinal private def isEffectivelyFinal(sym: Symbol): Boolean = ( // initialization important - sym.initialize.isEffectivelyFinalOrNotOverridden || ( - settings.future && isTupleSymbol(sym) // SI-7294 step into the future and treat TupleN as final. - ) + sym.initialize.isEffectivelyFinalOrNotOverridden ) def isNeverSubClass(sym1: Symbol, sym2: Symbol) = areIrreconcilableAsParents(sym1, sym2) diff --git a/src/library/scala/Tuple1.scala b/src/library/scala/Tuple1.scala index 5898b63e213d..5544a5f63d9c 100644 --- a/src/library/scala/Tuple1.scala +++ b/src/library/scala/Tuple1.scala @@ -15,8 +15,7 @@ package scala * @constructor Create a new tuple with 1 elements. 
* @param _1 Element 1 of this Tuple1 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple1[@specialized(Int, Long, Double) +T1](_1: T1) +final case class Tuple1[@specialized(Int, Long, Double) +T1](_1: T1) extends Product1[T1] { override def toString() = "(" + _1 + ")" diff --git a/src/library/scala/Tuple10.scala b/src/library/scala/Tuple10.scala index 2b0239561d2c..c57acb7c6e7f 100644 --- a/src/library/scala/Tuple10.scala +++ b/src/library/scala/Tuple10.scala @@ -24,8 +24,7 @@ package scala * @param _9 Element 9 of this Tuple10 * @param _10 Element 10 of this Tuple10 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10) +final case class Tuple10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10) extends Product10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + ")" diff --git a/src/library/scala/Tuple11.scala b/src/library/scala/Tuple11.scala index 0d5294d5471b..06360e6679ba 100644 --- a/src/library/scala/Tuple11.scala +++ b/src/library/scala/Tuple11.scala @@ -25,8 +25,7 @@ package scala * @param _10 Element 10 of this Tuple11 * @param _11 Element 11 of this Tuple11 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11) +final case class Tuple11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11) extends Product11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + ")" diff --git a/src/library/scala/Tuple12.scala b/src/library/scala/Tuple12.scala index d36c8275c1d0..e223de104d95 100644 --- a/src/library/scala/Tuple12.scala +++ b/src/library/scala/Tuple12.scala @@ -26,8 +26,7 @@ package scala * @param _11 Element 11 of this Tuple12 * @param _12 Element 12 of this Tuple12 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12) +final case class Tuple12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12) extends Product12[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + diff --git a/src/library/scala/Tuple13.scala b/src/library/scala/Tuple13.scala index edc37456fe19..56e12b96b676 100644 --- a/src/library/scala/Tuple13.scala +++ b/src/library/scala/Tuple13.scala @@ -27,8 +27,7 @@ package scala * @param _12 Element 12 of this Tuple13 * @param _13 Element 13 of this Tuple13 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple13[+T1, +T2, +T3, +T4, 
+T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13) +final case class Tuple13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13) extends Product13[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + diff --git a/src/library/scala/Tuple14.scala b/src/library/scala/Tuple14.scala index 9896e736c9d4..53dd4d79bbf3 100644 --- a/src/library/scala/Tuple14.scala +++ b/src/library/scala/Tuple14.scala @@ -28,8 +28,7 @@ package scala * @param _13 Element 13 of this Tuple14 * @param _14 Element 14 of this Tuple14 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14) +final case class Tuple14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14) extends Product14[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + diff --git a/src/library/scala/Tuple15.scala b/src/library/scala/Tuple15.scala index 45cd4f751f8f..0aca96d00da0 100644 --- a/src/library/scala/Tuple15.scala +++ b/src/library/scala/Tuple15.scala @@ -29,8 +29,7 @@ package scala * @param _14 Element 14 of this Tuple15 * @param _15 Element 15 of this Tuple15 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15) +final case class Tuple15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15) extends Product15[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + diff --git a/src/library/scala/Tuple16.scala b/src/library/scala/Tuple16.scala index 2e370a5b31b8..d4c0c318070f 100644 --- a/src/library/scala/Tuple16.scala +++ b/src/library/scala/Tuple16.scala @@ -30,8 +30,7 @@ package scala * @param _15 Element 15 of this Tuple16 * @param _16 Element 16 of this Tuple16 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16) +final case class Tuple16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16) extends Product16[T1, T2, T3, T4, T5, T6, T7, 
T8, T9, T10, T11, T12, T13, T14, T15, T16] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + diff --git a/src/library/scala/Tuple17.scala b/src/library/scala/Tuple17.scala index 2242a15fda38..47df6cb59f07 100644 --- a/src/library/scala/Tuple17.scala +++ b/src/library/scala/Tuple17.scala @@ -31,8 +31,7 @@ package scala * @param _16 Element 16 of this Tuple17 * @param _17 Element 17 of this Tuple17 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17) +final case class Tuple17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17) extends Product17[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + diff --git a/src/library/scala/Tuple18.scala b/src/library/scala/Tuple18.scala index 68f245c6cefa..464b08fb2840 100644 --- a/src/library/scala/Tuple18.scala +++ b/src/library/scala/Tuple18.scala @@ -32,8 +32,7 @@ package scala * @param _17 Element 17 of this Tuple18 * @param _18 Element 18 of this Tuple18 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18) +final case class Tuple18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18) extends Product18[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + diff --git a/src/library/scala/Tuple19.scala b/src/library/scala/Tuple19.scala index a8a49549fbc7..d64b3920b441 100644 --- a/src/library/scala/Tuple19.scala +++ b/src/library/scala/Tuple19.scala @@ -33,8 +33,7 @@ package scala * @param _18 Element 18 of this Tuple19 * @param _19 Element 19 of this Tuple19 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19) +final case class Tuple19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19) extends Product19[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, 
T14, T15, T16, T17, T18, T19] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala index 9ea1469c5c3d..5faa4e982150 100644 --- a/src/library/scala/Tuple2.scala +++ b/src/library/scala/Tuple2.scala @@ -16,8 +16,7 @@ package scala * @param _1 Element 1 of this Tuple2 * @param _2 Element 2 of this Tuple2 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple2[@specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T1, @specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T2](_1: T1, _2: T2) +final case class Tuple2[@specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T1, @specialized(Int, Long, Double, Char, Boolean/*, AnyRef*/) +T2](_1: T1, _2: T2) extends Product2[T1, T2] { override def toString() = "(" + _1 + "," + _2 + ")" diff --git a/src/library/scala/Tuple20.scala b/src/library/scala/Tuple20.scala index 0118d382ab6f..a96c41baa551 100644 --- a/src/library/scala/Tuple20.scala +++ b/src/library/scala/Tuple20.scala @@ -34,8 +34,7 @@ package scala * @param _19 Element 19 of this Tuple20 * @param _20 Element 20 of this Tuple20 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20) +final case class Tuple20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20) extends Product20[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + diff --git a/src/library/scala/Tuple21.scala b/src/library/scala/Tuple21.scala index ceae94af41b0..6f240d1fba7f 100644 --- a/src/library/scala/Tuple21.scala +++ b/src/library/scala/Tuple21.scala @@ -35,8 +35,7 @@ package scala * @param _20 Element 20 of this Tuple21 * @param _21 Element 21 of this Tuple21 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21) +final case class Tuple21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21) extends Product21[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + diff --git 
a/src/library/scala/Tuple22.scala b/src/library/scala/Tuple22.scala index ecd567a710e0..681b709f6578 100644 --- a/src/library/scala/Tuple22.scala +++ b/src/library/scala/Tuple22.scala @@ -36,8 +36,7 @@ package scala * @param _21 Element 21 of this Tuple22 * @param _22 Element 22 of this Tuple22 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21, +T22](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22) +final case class Tuple22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, +T13, +T14, +T15, +T16, +T17, +T18, +T19, +T20, +T21, +T22](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9, _10: T10, _11: T11, _12: T12, _13: T13, _14: T14, _15: T15, _16: T16, _17: T17, _18: T18, _19: T19, _20: T20, _21: T21, _22: T22) extends Product22[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16, T17, T18, T19, T20, T21, T22] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala index 6e71d3ae8c23..86f8f7e1a4b3 100644 --- a/src/library/scala/Tuple3.scala +++ b/src/library/scala/Tuple3.scala @@ -17,8 +17,7 @@ package scala * @param _2 Element 2 of this Tuple3 * @param _3 Element 3 of this Tuple3 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3) +final case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3) extends Product3[T1, T2, T3] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + ")" diff --git a/src/library/scala/Tuple4.scala b/src/library/scala/Tuple4.scala index 4c84cfc674cb..f3266c262c41 100644 --- a/src/library/scala/Tuple4.scala +++ b/src/library/scala/Tuple4.scala @@ -18,8 +18,7 @@ package scala * @param _3 Element 3 of this Tuple4 * @param _4 Element 4 of this Tuple4 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple4[+T1, +T2, +T3, +T4](_1: T1, _2: T2, _3: T3, _4: T4) +final case class Tuple4[+T1, +T2, +T3, +T4](_1: T1, _2: T2, _3: T3, _4: T4) extends Product4[T1, T2, T3, T4] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + ")" diff --git a/src/library/scala/Tuple5.scala b/src/library/scala/Tuple5.scala index fe8e853f1212..e6ac0a62452c 100644 --- a/src/library/scala/Tuple5.scala +++ b/src/library/scala/Tuple5.scala @@ -19,8 +19,7 @@ package scala * @param _4 Element 4 of this Tuple5 * @param _5 Element 5 of this Tuple5 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple5[+T1, +T2, +T3, +T4, +T5](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5) +final case class Tuple5[+T1, +T2, +T3, +T4, +T5](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5) extends Product5[T1, T2, T3, T4, T5] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + ")" diff --git a/src/library/scala/Tuple6.scala b/src/library/scala/Tuple6.scala index 6bf1c73d4be9..cf69b9c10a6a 100644 --- a/src/library/scala/Tuple6.scala +++ b/src/library/scala/Tuple6.scala @@ -20,8 +20,7 @@ package scala * @param _5 Element 5 of this Tuple6 * @param _6 Element 6 
of this Tuple6 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple6[+T1, +T2, +T3, +T4, +T5, +T6](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6) +final case class Tuple6[+T1, +T2, +T3, +T4, +T5, +T6](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6) extends Product6[T1, T2, T3, T4, T5, T6] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + ")" diff --git a/src/library/scala/Tuple7.scala b/src/library/scala/Tuple7.scala index ea42709cb7a3..4f0200fe238f 100644 --- a/src/library/scala/Tuple7.scala +++ b/src/library/scala/Tuple7.scala @@ -21,8 +21,7 @@ package scala * @param _6 Element 6 of this Tuple7 * @param _7 Element 7 of this Tuple7 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple7[+T1, +T2, +T3, +T4, +T5, +T6, +T7](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7) +final case class Tuple7[+T1, +T2, +T3, +T4, +T5, +T6, +T7](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7) extends Product7[T1, T2, T3, T4, T5, T6, T7] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + ")" diff --git a/src/library/scala/Tuple8.scala b/src/library/scala/Tuple8.scala index c24f9454e0ba..ebd9f7025264 100644 --- a/src/library/scala/Tuple8.scala +++ b/src/library/scala/Tuple8.scala @@ -22,8 +22,7 @@ package scala * @param _7 Element 7 of this Tuple8 * @param _8 Element 8 of this Tuple8 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8) +final case class Tuple8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8) extends Product8[T1, T2, T3, T4, T5, T6, T7, T8] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + ")" diff --git a/src/library/scala/Tuple9.scala b/src/library/scala/Tuple9.scala index ed02b30df2bd..854fe97b4401 100644 --- a/src/library/scala/Tuple9.scala +++ b/src/library/scala/Tuple9.scala @@ -23,8 +23,7 @@ package scala * @param _8 Element 8 of this Tuple9 * @param _9 Element 9 of this Tuple9 */ -@deprecatedInheritance("Tuples will be made final in a future version.", "2.11.0") -case class Tuple9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9) +final case class Tuple9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9](_1: T1, _2: T2, _3: T3, _4: T4, _5: T5, _6: T6, _7: T7, _8: T8, _9: T9) extends Product9[T1, T2, T3, T4, T5, T6, T7, T8, T9] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + ")" diff --git a/test/files/neg/t7294.check b/test/files/neg/t7294.check index f15289c1c01a..a308f2457d4b 100644 --- a/test/files/neg/t7294.check +++ b/test/files/neg/t7294.check @@ -1,6 +1,10 @@ t7294.scala:4: warning: fruitless type test: a value of type (Int, Int) cannot also be a Seq[A] (1, 2) match { case Seq() => 0; case _ => 1 } ^ -error: No warnings can be incurred under -Xfatal-warnings. 
+t7294.scala:4: error: pattern type is incompatible with expected type; + found : Seq[A] + required: (Int, Int) + (1, 2) match { case Seq() => 0; case _ => 1 } + ^ one warning found one error found diff --git a/test/files/neg/t7294.flags b/test/files/neg/t7294.flags deleted file mode 100644 index 3f3381a45bde..000000000000 --- a/test/files/neg/t7294.flags +++ /dev/null @@ -1 +0,0 @@ --Xfuture -Xfatal-warnings diff --git a/test/files/neg/t7294b.check b/test/files/neg/t7294b.check index 707266f0cccf..3390cb72787f 100644 --- a/test/files/neg/t7294b.check +++ b/test/files/neg/t7294b.check @@ -1,6 +1,4 @@ -t7294b.scala:1: warning: inheritance from class Tuple2 in package scala is deprecated (since 2.11.0): Tuples will be made final in a future version. +t7294b.scala:1: error: illegal inheritance from final class Tuple2 class C extends Tuple2[Int, Int](0, 0) ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found one error found diff --git a/test/files/neg/t7294b.flags b/test/files/neg/t7294b.flags deleted file mode 100644 index d1b831ea87cd..000000000000 --- a/test/files/neg/t7294b.flags +++ /dev/null @@ -1 +0,0 @@ --deprecation -Xfatal-warnings \ No newline at end of file diff --git a/test/files/pos/t7294.scala b/test/files/pos/t7294.scala deleted file mode 100644 index ccac2b14005f..000000000000 --- a/test/files/pos/t7294.scala +++ /dev/null @@ -1,6 +0,0 @@ -object Test { - // no fruitless warning as Tuple2 isn't (yet) final. - // The corresponding `neg` test will treat it as final - // for the purposes of these tests under -Xfuture. - (1, 2) match { case Seq() => 0; case _ => 1 } -} diff --git a/test/files/pos/tcpoly_bounds1.scala b/test/files/pos/tcpoly_bounds1.scala index 63263cb15290..4f52f55cb61e 100644 --- a/test/files/pos/tcpoly_bounds1.scala +++ b/test/files/pos/tcpoly_bounds1.scala @@ -1,7 +1,9 @@ -class Foo[t[x]<: Tuple2[Int, x]] +case class T2[+T1, +T2](_1: T1, _2: T2) extends Product2[T1, T2] + +class Foo[t[x]<: T2[Int, x]] // -class MyPair[z](a: Int, b: z) extends Tuple2[Int, z](a,b) +class MyPair[z](a: Int, b: z) extends T2[Int, z](a,b) object foo extends Foo[MyPair] diff --git a/test/files/run/t3888.check b/test/files/run/t3888.check index 6fda32d713ea..e69de29bb2d1 100644 --- a/test/files/run/t3888.check +++ b/test/files/run/t3888.check @@ -1 +0,0 @@ -warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details diff --git a/test/files/run/t3888.scala b/test/files/run/t3888.scala index 8701b42ff0d4..b1932ffb20bc 100644 --- a/test/files/run/t3888.scala +++ b/test/files/run/t3888.scala @@ -1,3 +1,4 @@ +case class Tuple2[+T1, +T2](_1: T1, _2: T2) extends Product2[T1, T2] // in a match, which notion of equals prevails? // extending Tuple doesn't seem to be at issue here. @@ -7,13 +8,13 @@ object Test { private[this] val T2 = T1 def m1 = - (1, 2) match { + Tuple2(1, 2) match { case T1 => true case _ => false } def m2 = - (1, 2) match { + Tuple2(1, 2) match { case T2 => true case _ => false } From f805cf526abb4343990ed31db6c63436d6fe4fde Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 6 Jul 2016 16:00:06 -0700 Subject: [PATCH 0159/2793] SI-9847 Nuance pure expr statement warning Clarify the current warning, which means that an expression split over multiple lines may not be parsed as naively expected. When typing a block, attempt minor nuance. For instance, a single expression is not in need of parens. Try to avoid duplicate warnings for expressions that were adapted away from result position. 
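A sketch of the pitfall the reworded warning points at (adapted from the new t9847.scala test below; the method name and result type are only illustrative):

```
def h: Int = {
  1
  + 1   // parsed as a separate statement (unary plus), not as a continuation of the previous line
}
// h returns 1, not 2; the lone `1` is a pure expression in statement position and draws the warning
```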
--- .../scala/tools/nsc/typechecker/Typers.scala | 41 +++++++++++++---- test/files/neg/macro-invalidret.check | 2 +- test/files/neg/scopes.check | 2 +- test/files/neg/stmt-expr-discard.check | 4 +- test/files/neg/t1181.check | 6 +-- test/files/neg/t9847.check | 45 +++++++++++++++++++ test/files/neg/t9847.flags | 1 + test/files/neg/t9847.scala | 23 ++++++++++ test/files/neg/unit-returns-value.check | 6 +-- test/files/run/contrib674.check | 5 ++- test/files/run/contrib674.scala | 2 +- test/files/run/delay-bad.check | 4 +- test/files/run/delay-good.check | 4 +- test/files/run/exceptions-2.check | 2 +- test/files/run/lazy-locals.check | 4 +- test/files/run/macro-duplicate.check | 2 +- test/files/run/misc.check | 16 +++---- test/files/run/names-defaults.check | 5 ++- test/files/run/patmatnew.check | 12 +++-- test/files/run/reify_lazyunit.check | 2 +- test/files/run/repl-bare-expr.check | 12 ++--- .../files/run/repl-no-imports-no-predef.check | 24 +++++----- test/files/run/repl-parens.check | 12 ++--- test/files/run/t3488.check | 10 ++++- test/files/run/t4047.check | 8 ++-- test/files/run/t4680.check | 4 +- test/files/run/t5380.check | 4 +- test/files/run/t7047.check | 2 +- test/files/run/t7747-repl.check | 24 +++++----- test/files/run/t8196.check | 2 +- test/files/run/try-2.check | 2 +- test/files/run/try.check | 2 +- test/files/specialized/tb3651.check | 2 +- test/files/specialized/tc3651.check | 2 +- test/files/specialized/td3651.check | 4 +- 35 files changed, 207 insertions(+), 95 deletions(-) create mode 100644 test/files/neg/t9847.check create mode 100644 test/files/neg/t9847.flags create mode 100644 test/files/neg/t9847.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 9fa3564b2bab..6b0bd98f8fc2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2413,13 +2413,36 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } val stats1 = if (isPastTyper) block.stats else - block.stats.flatMap(stat => stat match { + block.stats.flatMap { case vd@ValDef(_, _, _, _) if vd.symbol.isLazy => namer.addDerivedTrees(Typer.this, vd) - case _ => stat::Nil - }) - val stats2 = typedStats(stats1, context.owner) + case stat => stat::Nil + } + val stats2 = typedStats(stats1, context.owner, warnPure = false) val expr1 = typed(block.expr, mode &~ (FUNmode | QUALmode), pt) + + // sanity check block for unintended expr placement + if (!isPastTyper) { + val (count, result0, adapted) = + expr1 match { + case Block(expr :: Nil, Literal(Constant(()))) => (1, expr, true) + case Literal(Constant(())) => (0, EmptyTree, false) + case _ => (1, EmptyTree, false) + } + def checkPure(t: Tree, supple: Boolean): Unit = + if (treeInfo.isPureExprForWarningPurposes(t)) { + val msg = "a pure expression does nothing in statement position" + val parens = if (stats2.length + count > 1) "multiline expressions might require enclosing parentheses" else "" + val discard = if (adapted) "; a value can be silently discarded when Unit is expected" else "" + val text = + if (supple) s"${parens}${discard}" + else if (!parens.isEmpty) s"${msg}; ${parens}" else msg + context.warning(t.pos, text) + } + stats2.foreach(checkPure(_, supple = false)) + if (result0.nonEmpty) checkPure(result0, supple = true) + } + treeCopy.Block(block, stats2, expr1) .setType(if (treeInfo.isExprSafeToInline(block)) expr1.tpe else expr1.tpe.deconst) } finally { @@ -2994,7 
+3017,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ => log("unhandled import: "+imp+" in "+unit); imp } - def typedStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = { + def typedStats(stats: List[Tree], exprOwner: Symbol, warnPure: Boolean = true): List[Tree] = { val inBlock = exprOwner == context.owner def includesTargetPos(tree: Tree) = tree.pos.isRange && context.unit.exists && (tree.pos includes context.unit.targetPos) @@ -3025,9 +3048,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper ConstructorsOrderError(stat) } } - if (!isPastTyper && treeInfo.isPureExprForWarningPurposes(result)) context.warning(stat.pos, - "a pure expression does nothing in statement position; you may be omitting necessary parentheses" - ) + if (warnPure && !isPastTyper && treeInfo.isPureExprForWarningPurposes(result)) { + val msg = "a pure expression does nothing in statement position" + val clause = if (stats.lengthCompare(1) > 0) "; multiline expressions may require enclosing parentheses" else "" + context.warning(stat.pos, s"${msg}${clause}") + } result } diff --git a/test/files/neg/macro-invalidret.check b/test/files/neg/macro-invalidret.check index ebdc8ec7da92..a4d4fc6f34d9 100644 --- a/test/files/neg/macro-invalidret.check +++ b/test/files/neg/macro-invalidret.check @@ -27,7 +27,7 @@ java.lang.NullPointerException Macros_Test_2.scala:15: error: macro implementation is missing foo4 ^ -Macros_Test_2.scala:17: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +Macros_Test_2.scala:17: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses foo6 ^ two warnings found diff --git a/test/files/neg/scopes.check b/test/files/neg/scopes.check index f8e8c3758a36..2db96781857e 100644 --- a/test/files/neg/scopes.check +++ b/test/files/neg/scopes.check @@ -7,7 +7,7 @@ scopes.scala:5: error: x is already defined as value x scopes.scala:8: error: y is already defined as value y val y: Float = .0f ^ -scopes.scala:6: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +scopes.scala:6: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses { ^ scopes.scala:11: error: x is already defined as value x diff --git a/test/files/neg/stmt-expr-discard.check b/test/files/neg/stmt-expr-discard.check index 1207e6da5080..4a807653654b 100644 --- a/test/files/neg/stmt-expr-discard.check +++ b/test/files/neg/stmt-expr-discard.check @@ -1,7 +1,7 @@ -stmt-expr-discard.scala:3: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +stmt-expr-discard.scala:3: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + 2 ^ -stmt-expr-discard.scala:4: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +stmt-expr-discard.scala:4: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses - 4 ^ error: No warnings can be incurred under -Xfatal-warnings. 
diff --git a/test/files/neg/t1181.check b/test/files/neg/t1181.check index 13b73d5381c7..a9c102853d92 100644 --- a/test/files/neg/t1181.check +++ b/test/files/neg/t1181.check @@ -1,10 +1,10 @@ -t1181.scala:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses - case (Nil, Nil) => map - ^ t1181.scala:9: error: type mismatch; found : scala.collection.immutable.Map[Symbol,Symbol] required: Symbol _ => buildMap(map.updated(keyList.head, valueList.head), keyList.tail, valueList.tail) ^ +t1181.scala:8: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses + case (Nil, Nil) => map + ^ one warning found one error found diff --git a/test/files/neg/t9847.check b/test/files/neg/t9847.check new file mode 100644 index 000000000000..e55109b3efda --- /dev/null +++ b/test/files/neg/t9847.check @@ -0,0 +1,45 @@ +t9847.scala:4: warning: discarded non-Unit value + def f(): Unit = 42 + ^ +t9847.scala:4: warning: a pure expression does nothing in statement position + def f(): Unit = 42 + ^ +t9847.scala:5: warning: discarded non-Unit value + def g = (42: Unit) + ^ +t9847.scala:5: warning: a pure expression does nothing in statement position + def g = (42: Unit) + ^ +t9847.scala:7: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses + 1 + ^ +t9847.scala:12: warning: discarded non-Unit value + + 1 + ^ +t9847.scala:12: warning: a pure expression does nothing in statement position + + 1 + ^ +t9847.scala:11: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses + 1 + ^ +t9847.scala:12: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected + + 1 + ^ +t9847.scala:16: warning: discarded non-Unit value + x + 1 + ^ +t9847.scala:19: warning: discarded non-Unit value + def j(): Unit = x + 1 + ^ +t9847.scala:21: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + class C { 42 } + ^ +t9847.scala:22: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + class D { 42 ; 17 } + ^ +t9847.scala:22: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses + class D { 42 ; 17 } + ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+14 warnings found +one error found diff --git a/test/files/neg/t9847.flags b/test/files/neg/t9847.flags new file mode 100644 index 000000000000..065e3ca61e2c --- /dev/null +++ b/test/files/neg/t9847.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Ywarn-value-discard diff --git a/test/files/neg/t9847.scala b/test/files/neg/t9847.scala new file mode 100644 index 000000000000..51c16d815f3b --- /dev/null +++ b/test/files/neg/t9847.scala @@ -0,0 +1,23 @@ + +trait T { + + def f(): Unit = 42 + def g = (42: Unit) + def h = { + 1 + + 1 + } + def hh(): Unit = { + 1 + + 1 + } + def i(): Unit = { + val x = 1 + x + 1 + } + def x = 42 + def j(): Unit = x + 1 + + class C { 42 } + class D { 42 ; 17 } +} diff --git a/test/files/neg/unit-returns-value.check b/test/files/neg/unit-returns-value.check index f30a506ebe50..7ebfbfde292e 100644 --- a/test/files/neg/unit-returns-value.check +++ b/test/files/neg/unit-returns-value.check @@ -1,13 +1,13 @@ -unit-returns-value.scala:4: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +unit-returns-value.scala:4: warning: a pure expression does nothing in statement position if (b) return 5 ^ unit-returns-value.scala:4: warning: enclosing method f has result type Unit: return value discarded if (b) return 5 ^ -unit-returns-value.scala:22: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +unit-returns-value.scala:22: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses i1 // warn ^ -unit-returns-value.scala:23: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +unit-returns-value.scala:23: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses i2 // warn ^ error: No warnings can be incurred under -Xfatal-warnings. 
diff --git a/test/files/run/contrib674.check b/test/files/run/contrib674.check index 78325c18105a..98c72f34dd49 100644 --- a/test/files/run/contrib674.check +++ b/test/files/run/contrib674.check @@ -1,3 +1,6 @@ -contrib674.scala:15: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +contrib674.scala:15: warning: a pure expression does nothing in statement position + 1 + ^ +contrib674.scala:15: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected 1 ^ diff --git a/test/files/run/contrib674.scala b/test/files/run/contrib674.scala index 45c9871fc4f7..bb9dad3686a9 100644 --- a/test/files/run/contrib674.scala +++ b/test/files/run/contrib674.scala @@ -1,7 +1,7 @@ // causes VerifyError with scala-2.5.1 object Test extends App { - def bad() { + def bad(): Unit = { try { 1 } catch { diff --git a/test/files/run/delay-bad.check b/test/files/run/delay-bad.check index fcd05c827f06..bf41c79a3a6c 100644 --- a/test/files/run/delay-bad.check +++ b/test/files/run/delay-bad.check @@ -1,7 +1,7 @@ -delay-bad.scala:53: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +delay-bad.scala:53: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses f(new C { 5 }) ^ -delay-bad.scala:73: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +delay-bad.scala:73: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses f(new { val x = 5 } with E() { 5 }) ^ warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details diff --git a/test/files/run/delay-good.check b/test/files/run/delay-good.check index b4f6b04af7cb..ed35b9225ffb 100644 --- a/test/files/run/delay-good.check +++ b/test/files/run/delay-good.check @@ -1,7 +1,7 @@ -delay-good.scala:53: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +delay-good.scala:53: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses f(new C { 5 }) ^ -delay-good.scala:73: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +delay-good.scala:73: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses f(new { val x = 5 } with E() { 5 }) ^ diff --git a/test/files/run/exceptions-2.check b/test/files/run/exceptions-2.check index 4f8244800a7c..5cf5e71f41d7 100644 --- a/test/files/run/exceptions-2.check +++ b/test/files/run/exceptions-2.check @@ -1,4 +1,4 @@ -exceptions-2.scala:267: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +exceptions-2.scala:267: warning: a pure expression does nothing in statement position try { 1 } catch { case e: java.io.IOException => () } ^ nested1: diff --git a/test/files/run/lazy-locals.check b/test/files/run/lazy-locals.check index 9e88a55d1802..4565326beaad 100644 --- a/test/files/run/lazy-locals.check +++ b/test/files/run/lazy-locals.check @@ -1,7 +1,7 @@ -lazy-locals.scala:153: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +lazy-locals.scala:153: warning: a pure expression does nothing in statement position; multiline 
expressions may require enclosing parentheses { ^ -lazy-locals.scala:159: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +lazy-locals.scala:159: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses { ^ forced lazy val q diff --git a/test/files/run/macro-duplicate.check b/test/files/run/macro-duplicate.check index 58781b719a0a..7006b1661162 100644 --- a/test/files/run/macro-duplicate.check +++ b/test/files/run/macro-duplicate.check @@ -1,3 +1,3 @@ -Test_2.scala:5: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +Test_2.scala:5: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses Macros.foo ^ diff --git a/test/files/run/misc.check b/test/files/run/misc.check index 56116f81048b..075dfeff2f66 100644 --- a/test/files/run/misc.check +++ b/test/files/run/misc.check @@ -1,25 +1,25 @@ -misc.scala:46: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +misc.scala:46: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses 42; ^ -misc.scala:47: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +misc.scala:47: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses 42l; ^ -misc.scala:48: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +misc.scala:48: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses 23.5f; ^ -misc.scala:49: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +misc.scala:49: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses 23.5; ^ -misc.scala:50: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +misc.scala:50: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses "Hello"; ^ -misc.scala:51: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +misc.scala:51: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses 32 + 45; ^ -misc.scala:62: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +misc.scala:62: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses x; ^ -misc.scala:74: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +misc.scala:74: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses 1 < 2; ^ ### Hello diff --git a/test/files/run/names-defaults.check b/test/files/run/names-defaults.check index c358dc5849de..722d28dd11ad 100644 --- a/test/files/run/names-defaults.check +++ b/test/files/run/names-defaults.check @@ -1,4 +1,7 @@ -names-defaults.scala:269: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses 
+names-defaults.scala:269: warning: a pure expression does nothing in statement position + spawn(b = { val ttt = 1; ttt }, a = 0) + ^ +names-defaults.scala:269: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected spawn(b = { val ttt = 1; ttt }, a = 0) ^ warning: there were four deprecation warnings; re-run with -deprecation for details diff --git a/test/files/run/patmatnew.check b/test/files/run/patmatnew.check index 56b8ac2f4f0e..117bc28c2d19 100644 --- a/test/files/run/patmatnew.check +++ b/test/files/run/patmatnew.check @@ -1,10 +1,16 @@ -patmatnew.scala:351: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +patmatnew.scala:351: warning: a pure expression does nothing in statement position case 1 => "OK" ^ -patmatnew.scala:352: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +patmatnew.scala:352: warning: a pure expression does nothing in statement position case 2 => assert(false); "KO" ^ -patmatnew.scala:353: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +patmatnew.scala:352: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected + case 2 => assert(false); "KO" + ^ +patmatnew.scala:353: warning: a pure expression does nothing in statement position + case 3 => assert(false); "KO" + ^ +patmatnew.scala:353: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected case 3 => assert(false); "KO" ^ patmatnew.scala:670: warning: This catches all Throwables. If this is really intended, use `case e : Throwable` to clear this warning. 
diff --git a/test/files/run/reify_lazyunit.check b/test/files/run/reify_lazyunit.check index 579ecfe8aa2f..e6acf5d17b05 100644 --- a/test/files/run/reify_lazyunit.check +++ b/test/files/run/reify_lazyunit.check @@ -1,4 +1,4 @@ -reify_lazyunit.scala:6: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +reify_lazyunit.scala:6: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses lazy val x = { 0; println("12")} ^ 12 diff --git a/test/files/run/repl-bare-expr.check b/test/files/run/repl-bare-expr.check index e0a1f4ecd6f7..bdf8842bb0b1 100644 --- a/test/files/run/repl-bare-expr.check +++ b/test/files/run/repl-bare-expr.check @@ -1,12 +1,12 @@ scala> 2 ; 3 -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 2 ;; ^ res0: Int = 3 scala> { 2 ; 3 } -:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:12: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses { 2 ; 3 } ^ res1: Int = 3 @@ -15,16 +15,16 @@ scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Mooo 1 + 2 + 3 } ; bippy+88+11 -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ defined object Cow diff --git a/test/files/run/repl-no-imports-no-predef.check b/test/files/run/repl-no-imports-no-predef.check index c2c8d21c0a2c..7c4ee82c78bf 100644 --- a/test/files/run/repl-no-imports-no-predef.check +++ b/test/files/run/repl-no-imports-no-predef.check @@ -76,13 +76,13 @@ y: Int = 13 scala> scala> 2 ; 3 -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 2 ;; ^ res14: Int = 3 scala> { 2 ; 3 } -:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:12: warning: a pure expression does 
nothing in statement position; multiline expressions might require enclosing parentheses { 2 ; 3 } ^ res15: Int = 3 @@ -92,16 +92,16 @@ bippy = { 1 + 2 + 3 } ; bippy+88+11 -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def ^ -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def ^ -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def ^ -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def ^ defined object Cow @@ -143,10 +143,10 @@ scala> ( (2 + 2 ) ) res24: Int = 4 scala> 5 ; ( (2 + 2 ) ) ; ((5)) -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; ( (2 + 2 ) ) ;; ^ -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; ( (2 + 2 ) ) ;; ^ res25: Int = 5 @@ -163,16 +163,16 @@ res28: String = 4423 scala> scala> 55 ; ((2 + 2)) ; (1, 2, 3) -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 55 ; ((2 + 2)) ;; ^ -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 55 ; ((2 + 2)) ;; ^ res29: (Int, Int, Int) = (1,2,3) scala> 55 ; (x: scala.Int) => x + 1 ; () => ((5)) -:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:12: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 55 ; (x: scala.Int) => x + 1 ;; ^ res30: () => Int = @@ -183,7 +183,7 @@ scala> () => 5 res31: () => Int = scala> 55 ; () => 5 -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 55 ;; ^ res32: () => Int = diff --git a/test/files/run/repl-parens.check b/test/files/run/repl-parens.check index 6516f4ea907e..477d4d462f2b 100644 
--- a/test/files/run/repl-parens.check +++ b/test/files/run/repl-parens.check @@ -18,10 +18,10 @@ scala> ( (2 + 2 ) ) res5: Int = 4 scala> 5 ; ( (2 + 2 ) ) ; ((5)) -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; ( (2 + 2 ) ) ;; ^ -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; ( (2 + 2 ) ) ;; ^ res6: Int = 5 @@ -38,16 +38,16 @@ res9: String = 4423 scala> scala> 55 ; ((2 + 2)) ; (1, 2, 3) -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 55 ; ((2 + 2)) ;; ^ -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 55 ; ((2 + 2)) ;; ^ res10: (Int, Int, Int) = (1,2,3) scala> 55 ; (x: Int) => x + 1 ; () => ((5)) -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 55 ; (x: Int) => x + 1 ;; ^ res11: () => Int = @@ -58,7 +58,7 @@ scala> () => 5 res12: () => Int = scala> 55 ; () => 5 -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 55 ;; ^ res13: () => Int = diff --git a/test/files/run/t3488.check b/test/files/run/t3488.check index 314dfc78385f..75b2c3b07f1a 100644 --- a/test/files/run/t3488.check +++ b/test/files/run/t3488.check @@ -1,7 +1,13 @@ -t3488.scala:4: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +t3488.scala:4: warning: a pure expression does nothing in statement position println(foo { val List(_*)=List(0); 1 } ()) ^ -t3488.scala:5: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +t3488.scala:4: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected + println(foo { val List(_*)=List(0); 1 } ()) + ^ +t3488.scala:5: warning: a pure expression does nothing in statement position + println(foo { val List(_*)=List(0); 1 } (1)) + ^ +t3488.scala:5: warning: multiline expressions might require enclosing parentheses; a value can be silently discarded when Unit is expected println(foo { val List(_*)=List(0); 1 } (1)) ^ 0 diff --git a/test/files/run/t4047.check b/test/files/run/t4047.check index 3c41e6e2443d..c31f2f0858a8 100644 --- a/test/files/run/t4047.check +++ b/test/files/run/t4047.check @@ -1,13 +1,13 @@ -t4047.scala:23: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +t4047.scala:23: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses a.foo ^ -t4047.scala:24: warning: a pure expression 
does nothing in statement position; you may be omitting necessary parentheses +t4047.scala:24: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses a.foo ^ -t4047.scala:26: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +t4047.scala:26: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses b.foo ^ -t4047.scala:27: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +t4047.scala:27: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses b.foo ^ Unit: called A.foo diff --git a/test/files/run/t4680.check b/test/files/run/t4680.check index 21c5f9e56791..749ce4c62734 100644 --- a/test/files/run/t4680.check +++ b/test/files/run/t4680.check @@ -1,7 +1,7 @@ -t4680.scala:51: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +t4680.scala:51: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses new C { 5 } ^ -t4680.scala:69: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +t4680.scala:69: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses new { val x = 5 } with E() { 5 } ^ warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details diff --git a/test/files/run/t5380.check b/test/files/run/t5380.check index 731a798301cb..19471ac2d241 100644 --- a/test/files/run/t5380.check +++ b/test/files/run/t5380.check @@ -1,7 +1,7 @@ -t5380.scala:3: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +t5380.scala:3: warning: a pure expression does nothing in statement position val f = () => return try { 1 } catch { case _: Throwable => 0 } ^ -t5380.scala:3: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +t5380.scala:3: warning: a pure expression does nothing in statement position val f = () => return try { 1 } catch { case _: Throwable => 0 } ^ t5380.scala:3: warning: enclosing method main has result type Unit: return value discarded diff --git a/test/files/run/t7047.check b/test/files/run/t7047.check index 32bd58109493..129ce3eeca22 100644 --- a/test/files/run/t7047.check +++ b/test/files/run/t7047.check @@ -1,3 +1,3 @@ -Test_2.scala:2: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +Test_2.scala:2: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses Macros.foo ^ diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check index c5e92e9d796d..621a70205e9f 100644 --- a/test/files/run/t7747-repl.check +++ b/test/files/run/t7747-repl.check @@ -15,13 +15,13 @@ scala> val z = x * y z: Int = 156 scala> 2 ; 3 -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 2 ;; ^ res0: Int = 3 scala> { 2 ; 3 } -:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:12: 
warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses { 2 ; 3 } ^ res1: Int = 3 @@ -30,16 +30,16 @@ scala> 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Mooo 1 + 2 + 3 } ; bippy+88+11 -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; 10 ; case object Cow ; 20 ; class Moo { override def toString = "Moooooo" } ; 30 ; def bippy = { ^ defined object Cow @@ -81,10 +81,10 @@ scala> ( (2 + 2 ) ) res10: Int = 4 scala> 5 ; ( (2 + 2 ) ) ; ((5)) -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; ( (2 + 2 ) ) ;; ^ -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 5 ; ( (2 + 2 ) ) ;; ^ res11: Int = 5 @@ -101,16 +101,16 @@ res14: String = 4423 scala> scala> 55 ; ((2 + 2)) ; (1, 2, 3) -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 55 ; ((2 + 2)) ;; ^ -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 55 ; ((2 + 2)) ;; ^ res15: (Int, Int, Int) = (1,2,3) scala> 55 ; (x: Int) => x + 1 ; () => ((5)) -:13: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:13: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 55 ; (x: Int) => x + 1 ;; ^ res16: () => Int = @@ -121,7 +121,7 @@ scala> () => 5 res17: () => Int = scala> 55 ; () => 5 -:11: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +:11: warning: a pure expression does nothing in statement position; multiline expressions may require enclosing parentheses 55 ;; ^ res18: () => 
Int = diff --git a/test/files/run/t8196.check b/test/files/run/t8196.check index d11dc27e68d9..8a07ebb6d71d 100644 --- a/test/files/run/t8196.check +++ b/test/files/run/t8196.check @@ -1,4 +1,4 @@ -t8196.scala:26: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +t8196.scala:26: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses form2.g1 // comment this line in order to make the test pass ^ warning: there were two feature warnings; re-run with -feature for details diff --git a/test/files/run/try-2.check b/test/files/run/try-2.check index 987d3462df79..7fd45414da18 100644 --- a/test/files/run/try-2.check +++ b/test/files/run/try-2.check @@ -1,4 +1,4 @@ -try-2.scala:41: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +try-2.scala:41: warning: a pure expression does nothing in statement position 10; ^ exception happened diff --git a/test/files/run/try.check b/test/files/run/try.check index f742ccb0dfab..d9521c236233 100644 --- a/test/files/run/try.check +++ b/test/files/run/try.check @@ -1,4 +1,4 @@ -try.scala:65: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +try.scala:65: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses 1+1; ^ 1 + 1 = 2 diff --git a/test/files/specialized/tb3651.check b/test/files/specialized/tb3651.check index 8a3f686ef59e..8e104f13ff25 100644 --- a/test/files/specialized/tb3651.check +++ b/test/files/specialized/tb3651.check @@ -1,4 +1,4 @@ -tb3651.scala:8: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +tb3651.scala:8: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses lk.a ^ 0 diff --git a/test/files/specialized/tc3651.check b/test/files/specialized/tc3651.check index e2dbadf22c7c..1e56d196fd1d 100644 --- a/test/files/specialized/tc3651.check +++ b/test/files/specialized/tc3651.check @@ -1,4 +1,4 @@ -tc3651.scala:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +tc3651.scala:12: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses lk.a ^ 0 diff --git a/test/files/specialized/td3651.check b/test/files/specialized/td3651.check index 1a709fd0a7b8..697443ffe95f 100644 --- a/test/files/specialized/td3651.check +++ b/test/files/specialized/td3651.check @@ -1,7 +1,7 @@ -td3651.scala:12: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +td3651.scala:12: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses b.a ^ -td3651.scala:16: warning: a pure expression does nothing in statement position; you may be omitting necessary parentheses +td3651.scala:16: warning: a pure expression does nothing in statement position; multiline expressions might require enclosing parentheses der.a ^ 0 From 7d94d553e960322472e274669bad1c47723c2d23 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 8 Jul 2016 13:32:36 -0700 Subject: [PATCH 0160/2793] SI-8829 Defaultly scala -feature -deprecation Turn on `-feature -deprecation` in REPL if neither option is selected. 
``` $ ./build/pack/bin/scala Welcome to Scala 2.12.0-20160707-105953-4e564ef (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_92). Type in expressions for evaluation. Or try :help. scala> @deprecated("","") def f = 42 f: Int scala> f toDouble :13: warning: postfix operator toDouble should be enabled by making the implicit value scala.language.postfixOps visible. This can be achieved by adding the import clause 'import scala.language.postfixOps' or by setting the compiler option -language:postfixOps. See the Scaladoc for value scala.language.postfixOps for a discussion why the feature should be explicitly enabled. f toDouble ^ :13: warning: method f is deprecated: f toDouble ^ res1: Double = 42.0 scala> :quit $ scala Welcome to Scala 2.11.8 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_92). Type in expressions for evaluation. Or try :help. scala> @deprecated("","") def f = 42 f: Int scala> f toDouble warning: there was one deprecation warning; re-run with -deprecation for details warning: there was one feature warning; re-run with -feature for details res1: Double = 42.0 ``` --- src/repl/scala/tools/nsc/MainGenericRunner.scala | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/repl/scala/tools/nsc/MainGenericRunner.scala b/src/repl/scala/tools/nsc/MainGenericRunner.scala index 747b6842930c..a09e797e0724 100644 --- a/src/repl/scala/tools/nsc/MainGenericRunner.scala +++ b/src/repl/scala/tools/nsc/MainGenericRunner.scala @@ -71,6 +71,11 @@ class MainGenericRunner { Right(false) case _ => // We start the repl when no arguments are given. + // If user is agnostic about both -feature and -deprecation, turn them on. + if (settings.deprecation.isDefault && settings.feature.isDefault) { + settings.deprecation.value = true + settings.feature.value = true + } Right(new interpreter.ILoop process settings) } From 7548d2265361ec605b67aaae50ef6504937da68c Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 8 Jul 2016 18:45:11 -0700 Subject: [PATCH 0161/2793] SI-8829 Let reporter customize retry message "Re-run with -deprecation" is not always appropriate. REPL gets to customize the message. The API includes the setting and its name, because reflect Settings do not have names. (!) 
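To make the intent concrete, here is a minimal self-contained sketch of the shape of this hook (invented class names, not the patched compiler sources); the real change passes the reflect `Setting` plus its name through the reporter, whereas the sketch uses a plain flag and string:
```scala
// Hedged sketch only: a summary-emitting reporter exposes a hook for the
// "how do I see the details?" advice, and a REPL-flavoured reporter
// overrides it to suggest its own commands instead of a compiler flag.
abstract class SummaryReporter {
  // Batch-compiler default: suggest re-running with the flag.
  def rerunWithDetails(alreadyEnabled: Boolean, name: String): String =
    if (!alreadyEnabled) s"; re-run with $name for details"
    else s"; re-run enabling $name for details, or try -help"

  // Builds a one-line warning summary; arguments: count, kind, flag state, flag name.
  def summarize(count: Int, what: String, alreadyEnabled: Boolean, name: String): String = {
    val verb = if (count == 1) "was" else "were"
    s"there $verb $count $what warning(s)${rerunWithDetails(alreadyEnabled, name)}"
  }
}

// The REPL phrases the advice in terms of :setting / :replay.
class ReplSummaryReporter extends SummaryReporter {
  override def rerunWithDetails(alreadyEnabled: Boolean, name: String): String =
    s"; for details, enable `:setting $name' or `:replay $name'"
}

object SummaryReporterDemo extends App {
  println(new ReplSummaryReporter().summarize(1, "deprecation", false, "-deprecation"))
}
```
The point of the override is that a REPL user cannot literally "re-run with -deprecation", so the advice is phrased in terms of `:setting` and `:replay` instead.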
--- src/compiler/scala/tools/nsc/Reporting.scala | 10 +++++----- src/reflect/scala/reflect/internal/Reporting.scala | 9 +++++++++ .../scala/tools/nsc/interpreter/ReplReporter.scala | 3 +++ test/files/jvm/interpreter.check | 4 ++-- test/files/run/constrained-types.check | 8 ++++---- test/files/run/iterator-from.scala | 2 +- test/files/run/reflection-magicsymbols-repl.check | 2 +- test/files/run/repl-no-imports-no-predef-power.check | 4 ++-- test/files/run/repl-power.check | 4 ++-- test/files/run/synchronized.scala | 2 +- test/files/run/t4172.check | 2 +- test/files/run/t4594-repl-settings.scala | 2 +- test/files/run/t4710.check | 2 +- test/files/run/t6329_repl.check | 8 ++++---- test/files/run/t6329_repl_bug.check | 2 +- test/files/run/t7319.check | 6 +++--- 16 files changed, 41 insertions(+), 29 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala index 01c583bea31b..5635e678ded5 100644 --- a/src/compiler/scala/tools/nsc/Reporting.scala +++ b/src/compiler/scala/tools/nsc/Reporting.scala @@ -43,19 +43,19 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w else sinceAndAmount += ((since, 1)) } val deprecationSummary = sinceAndAmount.size > 1 - sinceAndAmount.foreach { case (since, amount) => - val numWarnings = amount + sinceAndAmount.foreach { case (since, numWarnings) => val warningsSince = if (since.nonEmpty) s" (since $since)" else "" val warningVerb = if (numWarnings == 1) "was" else "were" val warningCount = countElementsAsString(numWarnings, s"$what warning") - val rerun = if (deprecationSummary) "" else s"; re-run with ${setting.name} for details" - reporter.warning(NoPosition, s"there $warningVerb $warningCount$warningsSince$rerun") + val rerun = if (deprecationSummary) "" else reporter.rerunWithDetails(setting, setting.name) + reporter.warning(NoPosition, s"there ${warningVerb} ${warningCount}${warningsSince}${rerun}") } if (deprecationSummary) { val numWarnings = warnings.size val warningVerb = if (numWarnings == 1) "was" else "were" val warningCount = countElementsAsString(numWarnings, s"$what warning") - reporter.warning(NoPosition, s"there $warningVerb $warningCount in total; re-run with ${setting.name} for details") + val rerun = reporter.rerunWithDetails(setting, setting.name) + reporter.warning(NoPosition, s"there ${warningVerb} ${warningCount} in total${rerun}") } } } diff --git a/src/reflect/scala/reflect/internal/Reporting.scala b/src/reflect/scala/reflect/internal/Reporting.scala index 27fda9a7d453..c1f0140479b6 100644 --- a/src/reflect/scala/reflect/internal/Reporting.scala +++ b/src/reflect/scala/reflect/internal/Reporting.scala @@ -7,6 +7,8 @@ package scala package reflect package internal +import settings.MutableSettings + /** Provides delegates to the reporter doing the actual work. * All forwarding methods should be marked final, * but some subclasses out of our reach still override them. @@ -105,6 +107,13 @@ abstract class Reporter { /** Finish reporting: print summaries, release resources. */ def finish(): Unit = () + + /** After reporting, offer advice on getting more details. 
*/ + def rerunWithDetails(setting: MutableSettings#Setting, name: String): String = + setting.value match { + case b: Boolean if !b => s"; re-run with ${name} for details" + case _ => s"; re-run enabling ${name} for details, or try -help" + } } // TODO: move into superclass once partest cuts tie on Severity diff --git a/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala index e6f5a4089ed8..3a0b69f41ec3 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala @@ -68,4 +68,7 @@ class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.i else super.displayPrompt() } + override def rerunWithDetails(setting: reflect.internal.settings.MutableSettings#Setting, name: String) = + s"; for details, enable `:setting $name' or `:replay $name'" + } diff --git a/test/files/jvm/interpreter.check b/test/files/jvm/interpreter.check index 9a2162a906d9..72d8d39fd023 100644 --- a/test/files/jvm/interpreter.check +++ b/test/files/jvm/interpreter.check @@ -93,7 +93,7 @@ scala> case class Bar(n: Int) defined class Bar scala> implicit def foo2bar(foo: Foo) = Bar(foo.n) -warning: there was one feature warning; re-run with -feature for details +warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature' foo2bar: (foo: Foo)Bar scala> val bar: Bar = Foo(3) @@ -267,7 +267,7 @@ scala> xs map (x => x) res6: Array[_] = Array(1, 2) scala> xs map (x => (x, x)) -warning: there was one feature warning; re-run with -feature for details +warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature' res7: Array[(_$1, _$1)] forSome { type _$1 } = Array((1,1), (2,2)) scala> diff --git a/test/files/run/constrained-types.check b/test/files/run/constrained-types.check index 5444cf208890..4acd9d16ae2e 100644 --- a/test/files/run/constrained-types.check +++ b/test/files/run/constrained-types.check @@ -69,11 +69,11 @@ scala> var four = "four" four: String = four scala> val four2 = m(four) // should have an existential bound -warning: there was one feature warning; re-run with -feature for details +warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature' four2: String @Annot(x) forSome { val x: String } = four scala> val four3 = four2 // should have the same type as four2 -warning: there was one feature warning; re-run with -feature for details +warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature' four3: String @Annot(x) forSome { val x: String } = four scala> val stuff = m("stuff") // should not crash @@ -96,7 +96,7 @@ scala> def m = { val y : String @Annot(x) = x y } // x should not escape the local scope with a narrow type -warning: there was one feature warning; re-run with -feature for details +warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature' m: String @Annot(x) forSome { val x: String } scala> @@ -110,7 +110,7 @@ scala> def n(y: String) = { } m("stuff".stripMargin) } // x should be existentially bound -warning: there was one feature warning; re-run with -feature for details +warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature' n: (y: String)String @Annot(x) forSome { val x: String } scala> diff --git a/test/files/run/iterator-from.scala b/test/files/run/iterator-from.scala index 
e7ba1aeb28df..01006ffc214b 100644 --- a/test/files/run/iterator-from.scala +++ b/test/files/run/iterator-from.scala @@ -1,5 +1,5 @@ /* This file tests iteratorFrom, keysIteratorFrom, and valueIteratorFrom on various sorted sets and maps - * filter: inliner warnings; re-run with + * filter: inliner warnings */ import scala.util.{Random => R} diff --git a/test/files/run/reflection-magicsymbols-repl.check b/test/files/run/reflection-magicsymbols-repl.check index dd26c0834977..a33f41012e0f 100644 --- a/test/files/run/reflection-magicsymbols-repl.check +++ b/test/files/run/reflection-magicsymbols-repl.check @@ -19,7 +19,7 @@ scala> def test(n: Int): Unit = { val x = sig.asInstanceOf[MethodType].params.head println(x.info) } -warning: there was one feature warning; re-run with -feature for details +warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature' test: (n: Int)Unit scala> for (i <- 1 to 8) test(i) diff --git a/test/files/run/repl-no-imports-no-predef-power.check b/test/files/run/repl-no-imports-no-predef-power.check index a76db3dbc2cb..08f614eb60b7 100644 --- a/test/files/run/repl-no-imports-no-predef-power.check +++ b/test/files/run/repl-no-imports-no-predef-power.check @@ -7,11 +7,11 @@ Try :help or completions for vals._ and power._ scala> // guarding against "error: reference to global is ambiguous" scala> global.emptyValDef // "it is imported twice in the same scope by ..." -warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details +warning: there was one deprecation warning (since 2.11.0); for details, enable `:setting -deprecation' or `:replay -deprecation' res0: $r.global.noSelfType.type = private val _ = _ scala> val tp = ArrayClass[scala.util.Random] // magic with tags -warning: there was one feature warning; re-run with -feature for details +warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature' tp: $r.global.Type = Array[scala.util.Random] scala> tp.memberType(Array_apply) // evidence diff --git a/test/files/run/repl-power.check b/test/files/run/repl-power.check index a76db3dbc2cb..08f614eb60b7 100644 --- a/test/files/run/repl-power.check +++ b/test/files/run/repl-power.check @@ -7,11 +7,11 @@ Try :help or completions for vals._ and power._ scala> // guarding against "error: reference to global is ambiguous" scala> global.emptyValDef // "it is imported twice in the same scope by ..." 
-warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details +warning: there was one deprecation warning (since 2.11.0); for details, enable `:setting -deprecation' or `:replay -deprecation' res0: $r.global.noSelfType.type = private val _ = _ scala> val tp = ArrayClass[scala.util.Random] // magic with tags -warning: there was one feature warning; re-run with -feature for details +warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature' tp: $r.global.Type = Array[scala.util.Random] scala> tp.memberType(Array_apply) // evidence diff --git a/test/files/run/synchronized.scala b/test/files/run/synchronized.scala index 6be0d64dd84e..d777b85b2c6b 100644 --- a/test/files/run/synchronized.scala +++ b/test/files/run/synchronized.scala @@ -1,5 +1,5 @@ /* - * filter: inliner warnings; re-run with + * filter: inliner warnings; */ import java.lang.Thread.holdsLock import scala.collection.mutable.StringBuilder diff --git a/test/files/run/t4172.check b/test/files/run/t4172.check index 3141647dba9a..99e420678c45 100644 --- a/test/files/run/t4172.check +++ b/test/files/run/t4172.check @@ -1,6 +1,6 @@ scala> val c = { class C { override def toString = "C" }; ((new C, new C { def f = 2 })) } -warning: there was one feature warning; re-run with -feature for details +warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature' c: (C, C{def f: Int}) forSome { type C <: AnyRef } = (C,C) scala> :quit diff --git a/test/files/run/t4594-repl-settings.scala b/test/files/run/t4594-repl-settings.scala index 524ec2884341..587bb2312b27 100644 --- a/test/files/run/t4594-repl-settings.scala +++ b/test/files/run/t4594-repl-settings.scala @@ -9,7 +9,7 @@ object Test extends SessionTest { |depp: String | |scala> def a = depp - |warning: there was one deprecation warning (since Time began.); re-run with -deprecation for details + |warning: there was one deprecation warning (since Time began.); for details, enable `:setting -deprecation' or `:replay -deprecation' |a: String | |scala> :settings -deprecation diff --git a/test/files/run/t4710.check b/test/files/run/t4710.check index 5f90c68ed165..4a5d11f185c7 100644 --- a/test/files/run/t4710.check +++ b/test/files/run/t4710.check @@ -1,6 +1,6 @@ scala> def method : String = { implicit def f(s: Symbol) = "" ; 'symbol } -warning: there was one feature warning; re-run with -feature for details +warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature' method: String scala> :quit diff --git a/test/files/run/t6329_repl.check b/test/files/run/t6329_repl.check index 86cd984e117a..22882a3597d2 100644 --- a/test/files/run/t6329_repl.check +++ b/test/files/run/t6329_repl.check @@ -3,28 +3,28 @@ scala> import scala.reflect.classTag import scala.reflect.classTag scala> classManifest[scala.List[_]] -warning: there was one deprecation warning (since 2.10.0); re-run with -deprecation for details +warning: there was one deprecation warning (since 2.10.0); for details, enable `:setting -deprecation' or `:replay -deprecation' res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[] scala> classTag[scala.List[_]] res1: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List scala> classManifest[scala.collection.immutable.List[_]] -warning: there was one deprecation warning (since 2.10.0); re-run with -deprecation for details +warning: there was one deprecation warning (since 2.10.0); for details, enable 
`:setting -deprecation' or `:replay -deprecation' res2: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[] scala> classTag[scala.collection.immutable.List[_]] res3: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List scala> classManifest[Predef.Set[_]] -warning: there was one deprecation warning (since 2.10.0); re-run with -deprecation for details +warning: there was one deprecation warning (since 2.10.0); for details, enable `:setting -deprecation' or `:replay -deprecation' res4: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set[] scala> classTag[Predef.Set[_]] res5: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set scala> classManifest[scala.collection.immutable.Set[_]] -warning: there was one deprecation warning (since 2.10.0); re-run with -deprecation for details +warning: there was one deprecation warning (since 2.10.0); for details, enable `:setting -deprecation' or `:replay -deprecation' res6: scala.reflect.ClassTag[scala.collection.immutable.Set[_]] = scala.collection.immutable.Set[] scala> classTag[scala.collection.immutable.Set[_]] diff --git a/test/files/run/t6329_repl_bug.check b/test/files/run/t6329_repl_bug.check index 6476fa71fc07..11decae9bdbd 100644 --- a/test/files/run/t6329_repl_bug.check +++ b/test/files/run/t6329_repl_bug.check @@ -6,7 +6,7 @@ scala> import scala.reflect.runtime._ import scala.reflect.runtime._ scala> classManifest[List[_]] -warning: there was one deprecation warning (since 2.10.0); re-run with -deprecation for details +warning: there was one deprecation warning (since 2.10.0); for details, enable `:setting -deprecation' or `:replay -deprecation' res0: scala.reflect.ClassTag[List[_]] = scala.collection.immutable.List[] scala> scala.reflect.classTag[List[_]] diff --git a/test/files/run/t7319.check b/test/files/run/t7319.check index 31923e71193c..1dcb84c804b7 100644 --- a/test/files/run/t7319.check +++ b/test/files/run/t7319.check @@ -3,15 +3,15 @@ scala> class M[A] defined class M scala> implicit def ma0[A](a: A): M[A] = null -warning: there was one feature warning; re-run with -feature for details +warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature' ma0: [A](a: A)M[A] scala> implicit def ma1[A](a: A): M[A] = null -warning: there was one feature warning; re-run with -feature for details +warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature' ma1: [A](a: A)M[A] scala> def convert[F[X <: F[X]]](builder: F[_ <: F[_]]) = 0 -warning: there was one feature warning; re-run with -feature for details +warning: there was one feature warning; for details, enable `:setting -feature' or `:replay -feature' convert: [F[X <: F[X]]](builder: F[_ <: F[_]])Int scala> convert(Some[Int](0)) From 3285c47d00ffeb2f9920556486bd3e460f9f5608 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Pociecha?= Date: Tue, 12 Jul 2016 16:48:02 +0200 Subject: [PATCH 0162/2793] Remove redundant 'the' in IntelliJ's README.md --- src/intellij/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/intellij/README.md b/src/intellij/README.md index 41fef0418362..650d91e5d104 100644 --- a/src/intellij/README.md +++ b/src/intellij/README.md @@ -60,7 +60,7 @@ breakpoints within the Scala compiler. 
## Running the Compiler and REPL You can create run/debug configurations to run the compiler and REPL directly within -IntelliJ, which might accelerate development and debugging of the the compiler. +IntelliJ, which might accelerate development and debugging of the compiler. To debug the Scala codebase you can also use "Remote" debug configuration and pass the corresponding arguments to the jvm running the compiler / program. From d33f2993782c259831e10beacc8274424b3a6250 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 8 Jul 2016 10:11:07 +0200 Subject: [PATCH 0163/2793] SI-9849 set privateWithin on default getters A default getter gets the same access flag (private / protected) as the method whose default it implements. However, we forgot to set the privateWithin flag, which defines the scope in a qualified private / protected modifier. For a private[p], the default getter was therefore public, which is less restricted (a private[p] method has privateWithin set to p, but the private flag is not set). For a protected[p], the default getter was protected, which is more restricted. --- .../scala/tools/nsc/typechecker/Namers.scala | 2 +- test/files/neg/t9849.check | 7 +++++++ test/files/neg/t9849.scala | 16 ++++++++++++++++ 3 files changed, 24 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/t9849.check create mode 100644 test/files/neg/t9849.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 9c1ba7ced1d7..ad9377f8b4bd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1346,7 +1346,7 @@ trait Namers extends MethodSynthesis { val defRhs = rvparam.rhs val defaultTree = atPos(vparam.pos.focus) { - DefDef(Modifiers(paramFlagsToDefaultGetter(meth.flags)) | oflag, name, defTparams, defVparamss, defTpt, defRhs) + DefDef(Modifiers(paramFlagsToDefaultGetter(meth.flags), ddef.mods.privateWithin) | oflag, name, defTparams, defVparamss, defTpt, defRhs) } if (!isConstr) methOwner.resetFlag(INTERFACE) // there's a concrete member now diff --git a/test/files/neg/t9849.check b/test/files/neg/t9849.check new file mode 100644 index 000000000000..7b4715084648 --- /dev/null +++ b/test/files/neg/t9849.check @@ -0,0 +1,7 @@ +t9849.scala:14: error: method h in object O cannot be accessed in object p.O + O.h() + ^ +t9849.scala:15: error: method h$default$1 in object O cannot be accessed in object p.O + O.h$default$1 + ^ +two errors found diff --git a/test/files/neg/t9849.scala b/test/files/neg/t9849.scala new file mode 100644 index 000000000000..bcd18b6916d9 --- /dev/null +++ b/test/files/neg/t9849.scala @@ -0,0 +1,16 @@ +package p + +object O { + protected[p] def f(x: Int = 1) = x + private[p] def g(x: Int = 1) = x + private def h(x: Int = 1) = x +} + +object Test { + O.f() + O.f$default$1 + O.g() + O.g$default$1 + O.h() + O.h$default$1 +} From 72076e59257da72f962d4101d87ff5507da28e4f Mon Sep 17 00:00:00 2001 From: Pavel Petlinsky Date: Fri, 8 Jul 2016 15:37:21 +0300 Subject: [PATCH 0164/2793] SI-9750 scala.util.Properties.isJavaAtLeast works with JDK9 The utility method compares javaSpecVersion, which has the form "1.8" previously and "9" going forward. The method accepts "1.n" for n < 9. More correctly, the string argument should be a single number. Supports JEP-223.
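A rough standalone sketch of the accepted forms (the `JavaSpecCheck` object below is invented for illustration and is not the library API):
```scala
// Hedged sketch of the rule described above: legacy "1.n" spec strings are
// accepted only for n < 9; otherwise the argument must be a plain version
// number, per JEP-223.
object JavaSpecCheck {
  private def versionOf(s: String): Int = s.indexOf('.') match {
    case 1 if s.charAt(0) == '1' =>
      val v = versionOf(s.substring(2))
      if (v < 9) v else -1              // "1.9" and above are rejected
    case -1 => s.toInt                  // plain number, e.g. "8" or "9"
    case _  => -1                       // anything else is not a version
  }

  def isJavaAtLeast(javaSpecVersion: String, version: String): Boolean = {
    val v = versionOf(version)
    if (v < 0) throw new NumberFormatException(s"Not a version: $version")
    versionOf(javaSpecVersion) >= v
  }
}

object JavaSpecCheckDemo extends App {
  assert(JavaSpecCheck.isJavaAtLeast("9", "1.8"))   // legacy form still works
  assert(!JavaSpecCheck.isJavaAtLeast("1.7", "9"))  // a 1.7 runtime is not Java 9
  // JavaSpecCheck.isJavaAtLeast("9", "1.9")        // throws NumberFormatException
}
```
So "1.8" and "8" both denote Java 8, "9" denotes Java 9, and mixed forms such as "1.9" or "9.1" are rejected with a `NumberFormatException`, matching the tests in the diff below.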
--- src/library/scala/util/Properties.scala | 26 ++++++---- test/junit/scala/util/SpecVersionTest.scala | 56 ++++++++++++++------- 2 files changed, 54 insertions(+), 28 deletions(-) diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala index a176748cd680..6995f452fa68 100644 --- a/src/library/scala/util/Properties.scala +++ b/src/library/scala/util/Properties.scala @@ -168,27 +168,31 @@ private[scala] trait PropertiesTrait { /** Compares the given specification version to the specification version of the platform. * - * @param version a specification version of the form "major.minor" + * @param version a specification version number (legacy forms acceptable) * @return `true` iff the specification version of the current runtime * is equal to or higher than the version denoted by the given string. * @throws NumberFormatException if the given string is not a version string * * @example {{{ - * // In this example, the runtime's Java specification is assumed to be at version 1.7. + * // In this example, the runtime's Java specification is assumed to be at version 8. * isJavaAtLeast("1.6") // true - * isJavaAtLeast("1.7") // true - * isJavaAtLeast("1.8") // false + * isJavaAtLeast("1.8") // true + * isJavaAtLeast("8") // true + * isJavaAtLeast("9") // false + * isJavaAtLeast("1.9") // throws * }}} */ def isJavaAtLeast(version: String): Boolean = { - def parts(x: String) = { - val i = x.indexOf('.') - if (i < 0) throw new NumberFormatException("Not a version: " + x) - (x.substring(0, i), x.substring(i+1, x.length)) + def versionOf(s: String): Int = s.indexOf('.') match { + case 1 if s.charAt(0) == '1' => + val v = versionOf(s.substring(2)) + if (v < 9) v else -1 + case -1 => s.toInt + case _ => -1 } - val (v, _v) = parts(version) - val (s, _s) = parts(javaSpecVersion) - s.toInt >= v.toInt && _s.toInt >= _v.toInt + val v = versionOf(version) + if (v < 0) throw new NumberFormatException(s"Not a version: $version") + versionOf(javaSpecVersion) >= v } // provide a main method so version info can be obtained by running this diff --git a/test/junit/scala/util/SpecVersionTest.scala b/test/junit/scala/util/SpecVersionTest.scala index e3e7a978f27d..2b69f288fa84 100644 --- a/test/junit/scala/util/SpecVersionTest.scala +++ b/test/junit/scala/util/SpecVersionTest.scala @@ -6,13 +6,16 @@ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 +import scala.tools.testing.AssertUtil._ + /** The java version property uses the spec version - * and must work for all "major.minor" and fail otherwise. + * and must work for legacy "major.minor" and plain version_number, + * and fail otherwise. */ @RunWith(classOf[JUnit4]) class SpecVersionTest { - val sut = new PropertiesTrait { - override def javaSpecVersion = "1.7" + class TestProperties(versionAt: String) extends PropertiesTrait { + override def javaSpecVersion = versionAt override protected def pickJarBasedOn: Class[_] = ??? 
override protected def propCategory: String = "test" @@ -20,38 +23,57 @@ class SpecVersionTest { // override because of vals like releaseVersion override lazy val scalaProps = new java.util.Properties } + val sut7 = new TestProperties("1.7") + val sut9 = new TestProperties("9") + + @Test + def comparesJDK9Correctly(): Unit = { + assert(sut9 isJavaAtLeast "1") + assert(sut9 isJavaAtLeast "1.5") + assert(sut9 isJavaAtLeast "5") + assert(sut9 isJavaAtLeast "1.8") + assert(sut9 isJavaAtLeast "8") + assert(sut9 isJavaAtLeast "9") + } // SI-7265 @Test def comparesCorrectly(): Unit = { - assert(sut isJavaAtLeast "1.5") - assert(sut isJavaAtLeast "1.6") - assert(sut isJavaAtLeast "1.7") - assert(!(sut isJavaAtLeast "1.8")) - assert(!(sut isJavaAtLeast "1.71")) + assert(sut7 isJavaAtLeast "1") + assert(sut7 isJavaAtLeast "1.5") + assert(sut7 isJavaAtLeast "5") + assert(sut7 isJavaAtLeast "1.6") + assert(sut7 isJavaAtLeast "1.7") + assertFalse(sut7 isJavaAtLeast "1.8") + assertFalse(sut7 isJavaAtLeast "9") + assertFalse(sut7 isJavaAtLeast "10") } - @Test(expected = classOf[NumberFormatException]) - def badVersion(): Unit = { - sut isJavaAtLeast "1.a" + + @Test def variousBadVersionStrings(): Unit = { + assertThrows[NumberFormatException] { sut7 isJavaAtLeast "1.9" } + assertThrows[NumberFormatException] { sut9 isJavaAtLeast "1.9" } + assertThrows[NumberFormatException] { sut7 isJavaAtLeast "9.1" } + assertThrows[NumberFormatException] { sut9 isJavaAtLeast "9.1" } } + @Test(expected = classOf[NumberFormatException]) - def missingVersion(): Unit = { - sut isJavaAtLeast "1" + def badVersion(): Unit = { + sut7 isJavaAtLeast "1.a" } @Test(expected = classOf[NumberFormatException]) def noVersion(): Unit = { - sut isJavaAtLeast "" + sut7 isJavaAtLeast "" } @Test(expected = classOf[NumberFormatException]) def dotOnly(): Unit = { - sut isJavaAtLeast "." + sut7 isJavaAtLeast "." } @Test(expected = classOf[NumberFormatException]) def leadingDot(): Unit = { - sut isJavaAtLeast ".5" + sut7 isJavaAtLeast ".5" } @Test(expected = classOf[NumberFormatException]) def notASpec(): Unit = { - sut isJavaAtLeast "1.7.1" + sut7 isJavaAtLeast "1.7.1" } } From d386e802d53ab616a8a6005c89a9be30ab5526d8 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 12 Jul 2016 19:45:32 +1000 Subject: [PATCH 0165/2793] SI-9855 Fix regression in extractor pattern translation In faa5ae6, I changed the pattern matchers code generator to use stable references (`Ident`-s with the singleton type, rather than the widened type) to the synthetic vals used to store intermediate results ("binders"). In the case where the scrutinee matched the unapply parameter type of some extractor pattern, but the pattern subsequently failed, this led to an regression. It turns out that this was due to the way that the type of the binder was mutated to upcast to the exact type of a subsequent pattern in `ensureConformsTo`: https://github.com/scala/scala/blob/953559988/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala#L165-L174 This was added in 32c57329a as a workaround for the problem caused in t6664.scala, when the binder type was `KList with KCons`, and the code generator wasn't able to find the case field accessors for `KCons` in the decls. The change to use stable references meant that this mutation was now observed in another part of the tree, as opposed to the 2.11.8 situation, where we had used the original, sharper type of the binder eagerly to assign to the `Ident` that referred to it. 
This led to a tree: Assign(Ident(x3), Ident(x1).setType(x1.tpe) Now that we instead refer generate: Assign(Ident(x3), Ident(x1).setType(stableTypeFor(x1)) and we don't typecheck this until after the mutation of `x1.symbol.info`, we can get a type error. This commit removes this mutation of the binder type altogether, and instead uses `aligner.wholeType`, which is based on the result type of the `Apply(TypeTree(MethodType(params, resultType))` that encodes a typechecked constructor pattern. In `t6624.scala`, this is `KCons`, the case class that has the extractors as its decls. --- .../nsc/transform/patmat/MatchTranslation.scala | 17 +++-------------- test/files/pos/t9855.scala | 10 ++++++++++ test/files/pos/t9855b.scala | 16 ++++++++++++++++ test/files/run/t6288.check | 6 +++--- 4 files changed, 32 insertions(+), 17 deletions(-) create mode 100644 test/files/pos/t9855.scala create mode 100644 test/files/pos/t9855b.scala diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index e12b8548a8f4..5750f8f7e769 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -125,7 +125,7 @@ trait MatchTranslation { // TODO: paramType may contain unbound type params (run/t2800, run/t3530) val makers = ( // Statically conforms to paramType - if (this ensureConformsTo paramType) treeMaker(binder, false, pos) :: Nil + if (tpe <:< paramType) treeMaker(binder, false, pos) :: Nil else typeTest :: extraction :: Nil ) step(makers: _*)(extractor.subBoundTrees: _*) @@ -162,16 +162,6 @@ trait MatchTranslation { setVarInfo(binder, paramType) true } - // If <:< but not =:=, no type test needed, but the tree maker relies on the binder having - // exactly paramType (and not just some type compatible with it.) SI-6624 shows this is necessary - // because apparently patBinder may have an unfortunate type (.decls don't have the case field - // accessors) TODO: get to the bottom of this -- I assume it happens when type checking - // infers a weird type for an unapply call. By going back to the parameterType for the - // extractor call we get a saner type, so let's just do that for now. 
- def ensureConformsTo(paramType: Type): Boolean = ( - (tpe =:= paramType) - || (tpe <:< paramType) && setInfo(paramType) - ) private def concreteType = tpe.bounds.hi private def unbound = unbind(tree) @@ -396,7 +386,6 @@ trait MatchTranslation { /** Create the TreeMaker that embodies this extractor call * - * `binder` has been casted to `paramType` if necessary * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder */ @@ -502,7 +491,7 @@ trait MatchTranslation { * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder */ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = { - val paramAccessors = binder.constrParamAccessors + val paramAccessors = aligner.wholeType.typeSymbol.constrParamAccessors val numParams = paramAccessors.length def paramAccessorAt(subPatIndex: Int) = paramAccessors(math.min(subPatIndex, numParams - 1)) // binders corresponding to mutable fields should be stored (SI-5158, SI-6070) @@ -531,7 +520,7 @@ trait MatchTranslation { // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component override protected def tupleSel(binder: Symbol)(i: Int): Tree = { - val accessors = binder.caseFieldAccessors + val accessors = aligner.wholeType.typeSymbol.caseFieldAccessors if (accessors isDefinedAt (i-1)) gen.mkAttributedStableRef(binder) DOT accessors(i-1) else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN } diff --git a/test/files/pos/t9855.scala b/test/files/pos/t9855.scala new file mode 100644 index 000000000000..b6ac3e2432cd --- /dev/null +++ b/test/files/pos/t9855.scala @@ -0,0 +1,10 @@ +class C { + def xx(verb: String, a: Array[Int]) { + val reYYYY = """(\d\d\d\d)""".r + verb match { + case "time" if a.isEmpty => + case "time" => + case reYYYY(y) => + } + } +} diff --git a/test/files/pos/t9855b.scala b/test/files/pos/t9855b.scala new file mode 100644 index 000000000000..30c58be3dcd1 --- /dev/null +++ b/test/files/pos/t9855b.scala @@ -0,0 +1,16 @@ +object Test { + var FALSE = false + def main(args: Array[String]): Unit = { + val SomeB = new B + new B() match { + case SomeB if FALSE => + case SomeB => + case Ext(_) => + } + } +} +object Ext { + def unapply(s: A) = Some(()) +} +class A +class B extends A diff --git a/test/files/run/t6288.check b/test/files/run/t6288.check index 67877fd56d6b..7933f516a8a4 100644 --- a/test/files/run/t6288.check +++ b/test/files/run/t6288.check @@ -7,7 +7,7 @@ }; [21]def unapply([29]z: [32]): [21]Option[Int] = [56][52][52]scala.Some.apply[[52]Int]([58]-1); [64]{ - [64]case val x1: [64]Any = [64]""; + [64]case val x1: [64]String = [64]""; [64]case5()[84]{ [84] val o7: [84]Option[Int] = [84][84]Case3.unapply([84]x1); [84]if ([84]o7.isEmpty.unary_!) 
@@ -30,7 +30,7 @@ }; [127]def unapplySeq([138]z: [141]): [127]Option[List[Int]] = [167]scala.None; [175]{ - [175]case val x1: [175]Any = [175]""; + [175]case val x1: [175]String = [175]""; [175]case5()[195]{ [195] val o7: [195]Option[List[Int]] = [195][195]Case4.unapplySeq([195]x1); [195]if ([195][195]o7.isEmpty.unary_!.&&([195][195][195][195]o7.get.!=([195]null).&&([195][195][195][195]o7.get.lengthCompare([195]1).==([195]0)))) @@ -53,7 +53,7 @@ }; [238]def unapply([246]z: [249]): [238]Boolean = [265]true; [273]{ - [273]case val x1: [273]Any = [273]""; + [273]case val x1: [273]String = [273]""; [273]case5()[293]{ [293] val o7: [293]Option[List[Int]] = [293][293]Case4.unapplySeq([293]x1); [293]if ([293][293]o7.isEmpty.unary_!.&&([293][293][293][293]o7.get.!=([293]null).&&([293][293][293][293]o7.get.lengthCompare([293]0).==([293]0)))) From f806073d32a476d156f1b3ec24c17f35ed65b4c3 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 14 Jul 2016 11:50:22 +1000 Subject: [PATCH 0166/2793] SD-183 Make refinement classes ineligible as SAMs Only non-refinement class types need apply, which is the same restriction that we levy on parent types of a class. ``` scala> class C; class D extends C; type CD = C with D; class E extends CD :11: error: class type required but C with D found class C; class D extends C; type CD = C with D; class E extends CD ^ scala> class C; class D extends C; type DC = D with C; class E extends DC :11: error: class type required but D with C found class C; class D extends C; type DC = D with C; class E extends DC ^ ``` Prior to this change: ``` scala> trait T { def t(a: Any): Any }; trait U; abstract class C extends T defined trait T defined trait U defined class C ```` For indy-based lambdas: ``` scala> val tu: T with U = x => x tu: T with U = $$Lambda$1812/317644782@3c3c4a71 scala> tu: U java.lang.ClassCastException: $$Lambda$1812/317644782 cannot be cast to U ... 30 elided ``` For anon class based lambdas: ``` scala> ((x => x): C with U) :14: error: class type required but C with U found ((x => x): C with U) ^ scala> implicit def anyToCWithU(a: Any): C with U = new C with U { def t(a: Any) = a } warning: there was one feature warning; re-run with -feature for details anyToCWithU: (a: Any)C with U scala> (((x: Any) => x): C with U) // SAM chosen but fails to typecheck the expansion uncurry :17: error: class type required but C with U found (((x: Any) => x): C with U) // SAM chosen but fails to typecheck the expansion uncurry ^ ``` Fixes https://github.com/scala/scala-dev/issues/183 While it is tempting to special case refinement classes with no decls by flattening their parents into the parents of the lambda. But there are some subtle issues at play with lineriazation order, as Martin pointed out when I brought this up before: http://www.scala-lang.org/old/node/6817.html --- .../scala/reflect/internal/Definitions.scala | 4 +-- test/files/neg/sammy_restrictions.check | 33 ++++++++++++------- test/files/neg/sammy_restrictions.scala | 7 +++- 3 files changed, 30 insertions(+), 14 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index fe6d88e7c749..0342daf11390 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -837,9 +837,9 @@ trait Definitions extends api.StandardDefinitions { * The class defining the method is a supertype of `tp` that * has a public no-arg primary constructor. 
*/ - def samOf(tp: Type): Symbol = if (!doSam) NoSymbol else { + def samOf(tp: Type): Symbol = if (!doSam) NoSymbol else if (!isNonRefinementClassType(unwrapToClass(tp))) NoSymbol else { // look at erased type because we (only) care about what ends up in bytecode - // (e.g., an alias type or intersection type is fine as long as the intersection dominator compiles to an interface) + // (e.g., an alias type is fine as long as is compiles to a single-abstract-method) val tpSym: Symbol = erasure.javaErasure(tp).typeSymbol if (tpSym.exists && tpSym.isClass diff --git a/test/files/neg/sammy_restrictions.check b/test/files/neg/sammy_restrictions.check index 09579cbe212d..0225c61ac1c3 100644 --- a/test/files/neg/sammy_restrictions.check +++ b/test/files/neg/sammy_restrictions.check @@ -1,51 +1,62 @@ -sammy_restrictions.scala:35: error: type mismatch; +sammy_restrictions.scala:38: error: type mismatch; found : () => Int required: NoAbstract (() => 0) : NoAbstract ^ -sammy_restrictions.scala:36: error: type mismatch; +sammy_restrictions.scala:39: error: type mismatch; found : Int => Int required: TwoAbstract ((x: Int) => 0): TwoAbstract ^ -sammy_restrictions.scala:37: error: type mismatch; +sammy_restrictions.scala:40: error: type mismatch; found : Int => Int required: NoEmptyConstructor ((x: Int) => 0): NoEmptyConstructor ^ -sammy_restrictions.scala:38: error: type mismatch; +sammy_restrictions.scala:41: error: type mismatch; found : Int => Int required: MultipleConstructorLists ((x: Int) => 0): MultipleConstructorLists ^ -sammy_restrictions.scala:39: error: type mismatch; +sammy_restrictions.scala:42: error: type mismatch; found : Int => Int required: OneEmptySecondaryConstructor ((x: Int) => 0): OneEmptySecondaryConstructor // derived class must have an empty *primary* to call. 
^ -sammy_restrictions.scala:40: error: type mismatch; +sammy_restrictions.scala:43: error: type mismatch; found : Int => Int required: MultipleMethodLists ((x: Int) => 0): MultipleMethodLists ^ -sammy_restrictions.scala:41: error: type mismatch; +sammy_restrictions.scala:44: error: type mismatch; found : Int => Int required: ImplicitConstructorParam ((x: Int) => 0): ImplicitConstructorParam ^ -sammy_restrictions.scala:42: error: type mismatch; +sammy_restrictions.scala:45: error: type mismatch; found : Int => Int required: ImplicitMethodParam ((x: Int) => 0): ImplicitMethodParam ^ -sammy_restrictions.scala:43: error: type mismatch; +sammy_restrictions.scala:46: error: type mismatch; found : Int => Int required: PolyMethod ((x: Int) => 0): PolyMethod ^ -sammy_restrictions.scala:44: error: type mismatch; +sammy_restrictions.scala:47: error: type mismatch; found : Int => Int required: SelfTp ((x: Int) => 0): SelfTp ^ -10 errors found +sammy_restrictions.scala:48: error: type mismatch; + found : Int => Int + required: T1 with U1 + ((x: Int) => 0): T1 with U1 + ^ +sammy_restrictions.scala:49: error: type mismatch; + found : Int => Int + required: Test.NonClassTypeRefinement + (which expands to) DerivedOneAbstract with OneAbstract + ((x: Int) => 0): NonClassTypeRefinement + ^ +12 errors found diff --git a/test/files/neg/sammy_restrictions.scala b/test/files/neg/sammy_restrictions.scala index ff2c16b67913..dee4f1f24739 100644 --- a/test/files/neg/sammy_restrictions.scala +++ b/test/files/neg/sammy_restrictions.scala @@ -27,9 +27,12 @@ abstract class DerivedOneAbstract extends OneAbstract abstract class SelfTp { self: NoAbstract => def ap(a: Int): Any } abstract class SelfVar { self => def ap(a: Int): Any } +trait T1 { def t(a: Int): Int }; trait U1 + object Test { implicit val s: String = "" - type NonClassType = DerivedOneAbstract with OneAbstract + type NonClassTypeRefinement = DerivedOneAbstract with OneAbstract + type NonClassType = DerivedOneAbstract // errors: (() => 0) : NoAbstract @@ -42,6 +45,8 @@ object Test { ((x: Int) => 0): ImplicitMethodParam ((x: Int) => 0): PolyMethod ((x: Int) => 0): SelfTp + ((x: Int) => 0): T1 with U1 + ((x: Int) => 0): NonClassTypeRefinement // allowed: ((x: Int) => 0): OneEmptyConstructor From 841fb2559b47259f9e5dc92a390fb4c7760ac218 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 14 Jul 2016 18:39:24 +0100 Subject: [PATCH 0167/2793] Deprecate and rename Left#a/Right#b to Left#value/Right#value --- src/library/scala/util/Either.scala | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala index 5c61d83a1a07..2f1e5d5c33a7 100644 --- a/src/library/scala/util/Either.scala +++ b/src/library/scala/util/Either.scala @@ -345,9 +345,11 @@ sealed abstract class Either[+A, +B] extends Product with Serializable { * @author Tony Morris, Workingmouse * @version 1.0, 11/10/2008 */ -final case class Left[+A, +B](a: A) extends Either[A, B] { +final case class Left[+A, +B](@deprecatedName('a, "2.12.0") value: A) extends Either[A, B] { def isLeft = true def isRight = false + + @deprecated("Use .value instead.", "2.12.0") def a: A = value } /** @@ -356,9 +358,11 @@ final case class Left[+A, +B](a: A) extends Either[A, B] { * @author Tony Morris, Workingmouse * @version 1.0, 11/10/2008 */ -final case class Right[+A, +B](b: B) extends Either[A, B] { +final case class Right[+A, +B](@deprecatedName('b, "2.12.0") value: B) extends Either[A, B] { def isLeft = false def isRight = true + 
+ @deprecated("Use .value instead.", "2.12.0") def b: B = value } object Either { From 26758054f80081a2fec32ed1c7d3fc92efeacb31 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Wed, 13 Jul 2016 15:35:31 +0100 Subject: [PATCH 0168/2793] Deprecated and rename Some#x to Some#value --- src/library/scala/Option.scala | 6 ++++-- test/files/neg/t4851.check | 4 ++-- test/files/run/t8549.scala | 8 ++++---- 3 files changed, 10 insertions(+), 8 deletions(-) diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala index 7282feebb692..39c583e63bc8 100644 --- a/src/library/scala/Option.scala +++ b/src/library/scala/Option.scala @@ -330,9 +330,11 @@ sealed abstract class Option[+A] extends Product with Serializable { * @version 1.0, 16/07/2003 */ @SerialVersionUID(1234815782226070388L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 -final case class Some[+A](x: A) extends Option[A] { +final case class Some[+A](@deprecatedName('x, "2.12.0") value: A) extends Option[A] { def isEmpty = false - def get = x + def get = value + + @deprecated("Use .value instead.", "2.12.0") def x: A = value } diff --git a/test/files/neg/t4851.check b/test/files/neg/t4851.check index ac0854f8105c..721923e0ba3f 100644 --- a/test/files/neg/t4851.check +++ b/test/files/neg/t4851.check @@ -17,13 +17,13 @@ S.scala:4: warning: Adapting argument list by creating a 5-tuple: this may not b val x3 = new J(1, 2, 3, 4, 5) ^ S.scala:6: warning: Adapting argument list by creating a 3-tuple: this may not be what you want. - signature: Some.apply[A](x: A): Some[A] + signature: Some.apply[A](value: A): Some[A] given arguments: 1, 2, 3 after adaptation: Some((1, 2, 3): (Int, Int, Int)) val y1 = Some(1, 2, 3) ^ S.scala:7: warning: Adapting argument list by creating a 3-tuple: this may not be what you want. 
- signature: Some(x: A): Some[A] + signature: Some(value: A): Some[A] given arguments: 1, 2, 3 after adaptation: new Some((1, 2, 3): (Int, Int, Int)) val y2 = new Some(1, 2, 3) diff --git a/test/files/run/t8549.scala b/test/files/run/t8549.scala index f8d6819e33b0..da7a731459fd 100644 --- a/test/files/run/t8549.scala +++ b/test/files/run/t8549.scala @@ -79,10 +79,10 @@ object Test extends App { } } - // Generated on 20160706-15:44:41 with Scala version 2.12.0-20160629-163201-6612ba0) + // Generated on 20160715-08:27:53 with Scala version 2.12.0-20160715-012500-f5a80bd) overwrite.foreach(updateComment) - check(Some(1))("rO0ABXNyAApzY2FsYS5Tb21lESLyaV6hi3QCAAFMAAF4dAASTGphdmEvbGFuZy9PYmplY3Q7eHIADHNjYWxhLk9wdGlvbv5pN/3bDmZ0AgAAeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAQ==") + check(Some(1))("rO0ABXNyAApzY2FsYS5Tb21lESLyaV6hi3QCAAFMAAV2YWx1ZXQAEkxqYXZhL2xhbmcvT2JqZWN0O3hyAAxzY2FsYS5PcHRpb27+aTf92w5mdAIAAHhwc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAE=") check(None)("rO0ABXNyAAtzY2FsYS5Ob25lJEZQJPZTypSsAgAAeHIADHNjYWxhLk9wdGlvbv5pN/3bDmZ0AgAAeHA=") check(List(1, 2, 3))( "rO0ABXNyADJzY2FsYS5jb2xsZWN0aW9uLmltbXV0YWJsZS5MaXN0JFNlcmlhbGl6YXRpb25Qcm94eQAAAAAAAAABAwAAeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJzcQB+AAIAAAADc3IALHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3RTZXJpYWxpemVFbmQkilxjW/dTC20CAAB4cHg=") @@ -181,10 +181,10 @@ object Test extends App { check(mutable.HashSet(1, 2, 3))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaFNldAAAAAAAAAABAwAAeHB3DQAAAcIAAAADAAAABQBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJzcQB+AAIAAAADeA==") check(mutable.TreeMap[Int, Int]())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZU1hcNx8qC229ZvwAgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZU1hcCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQk2YGfBpdmfrACAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAABw") check(mutable.TreeMap(1 -> 1, 3 -> 6))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZU1hcNx8qC229ZvwAgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZU1hcCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQk2YGfBpdmfrACAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAAJzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSROb2RlGxHsFtValgACAAZaAANyZWRMAANrZXl0ABJMamF2YS9sYW5nL09iamVjdDtMAARsZWZ0cQB+AAdMAAZwYXJlbnRxAH4AB0wABXJpZ2h0cQB+AAdMAAV2YWx1ZXEAfgAKeHAAc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFwcHNxAH4ACQFzcQB+AAwAAAADcHEAfgALcHNxAH4ADAAAAAZxAH4ADg==") - check(mutable.TreeMap(1 -> 1, 3 -> 6).range(1, 2))( 
"rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZU1hcCRUcmVlTWFwVmlldx7MCZxLhVQ8AgADTAAGJG91dGVydAAiTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9UcmVlTWFwO0wABGZyb210AA5Mc2NhbGEvT3B0aW9uO0wABXVudGlscQB+AAJ4cgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlRyZWVNYXDcfKgttvWb8AIAAkwACG9yZGVyaW5ndAAVTHNjYWxhL21hdGgvT3JkZXJpbmc7TAAmc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJFRyZWVNYXAkJHRyZWV0ACxMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1JlZEJsYWNrVHJlZSRUcmVlO3hwc3IAGHNjYWxhLm1hdGguT3JkZXJpbmckSW50JNmBnwaXZn6wAgAAeHBzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSRUcmVlAEynNPA1phUCAAJJAARzaXplTAAEcm9vdHQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJE5vZGU7eHAAAAACc3IAKnNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5SZWRCbGFja1RyZWUkTm9kZRsR7BbVWpYAAgAGWgADcmVkTAADa2V5dAASTGphdmEvbGFuZy9PYmplY3Q7TAAEbGVmdHEAfgAKTAAGcGFyZW50cQB+AApMAAVyaWdodHEAfgAKTAAFdmFsdWVxAH4ADXhwAHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABcHBzcQB+AAwBc3EAfgAPAAAAA3BxAH4ADnBzcQB+AA8AAAAGcQB+ABFzcQB+AANxAH4ACHEAfgALc3IACnNjYWxhLlNvbWURIvJpXqGLdAIAAUwAAXhxAH4ADXhyAAxzY2FsYS5PcHRpb27+aTf92w5mdAIAAHhwcQB+ABFzcQB+ABZzcQB+AA8AAAAC") + check(mutable.TreeMap(1 -> 1, 3 -> 6).range(1, 2))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZU1hcCRUcmVlTWFwVmlldx7MCZxLhVQ8AgADTAAGJG91dGVydAAiTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9UcmVlTWFwO0wABGZyb210AA5Mc2NhbGEvT3B0aW9uO0wABXVudGlscQB+AAJ4cgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlRyZWVNYXDcfKgttvWb8AIAAkwACG9yZGVyaW5ndAAVTHNjYWxhL21hdGgvT3JkZXJpbmc7TAAmc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJFRyZWVNYXAkJHRyZWV0ACxMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1JlZEJsYWNrVHJlZSRUcmVlO3hwc3IAGHNjYWxhLm1hdGguT3JkZXJpbmckSW50JNmBnwaXZn6wAgAAeHBzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSRUcmVlAEynNPA1phUCAAJJAARzaXplTAAEcm9vdHQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJE5vZGU7eHAAAAACc3IAKnNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5SZWRCbGFja1RyZWUkTm9kZRsR7BbVWpYAAgAGWgADcmVkTAADa2V5dAASTGphdmEvbGFuZy9PYmplY3Q7TAAEbGVmdHEAfgAKTAAGcGFyZW50cQB+AApMAAVyaWdodHEAfgAKTAAFdmFsdWVxAH4ADXhwAHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABcHBzcQB+AAwBc3EAfgAPAAAAA3BxAH4ADnBzcQB+AA8AAAAGcQB+ABFzcQB+AANxAH4ACHEAfgALc3IACnNjYWxhLlNvbWURIvJpXqGLdAIAAUwABXZhbHVlcQB+AA14cgAMc2NhbGEuT3B0aW9u/mk3/dsOZnQCAAB4cHEAfgARc3EAfgAWc3EAfgAPAAAAAg==") check(mutable.TreeSet[Int]())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldM10nxFQDpt4AgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZVNldCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQk2YGfBpdmfrACAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAABw") check(mutable.TreeSet(1, 3))( 
"rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldM10nxFQDpt4AgACTAAIb3JkZXJpbmd0ABVMc2NhbGEvbWF0aC9PcmRlcmluZztMACZzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkVHJlZVNldCQkdHJlZXQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJFRyZWU7eHBzcgAYc2NhbGEubWF0aC5PcmRlcmluZyRJbnQk2YGfBpdmfrACAAB4cHNyACpzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuUmVkQmxhY2tUcmVlJFRyZWUATKc08DWmFQIAAkkABHNpemVMAARyb290dAAsTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9SZWRCbGFja1RyZWUkTm9kZTt4cAAAAAJzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSROb2RlGxHsFtValgACAAZaAANyZWRMAANrZXl0ABJMamF2YS9sYW5nL09iamVjdDtMAARsZWZ0cQB+AAdMAAZwYXJlbnRxAH4AB0wABXJpZ2h0cQB+AAdMAAV2YWx1ZXEAfgAKeHAAc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAFwcHNxAH4ACQFzcQB+AAwAAAADcHEAfgALcHBw") - check(mutable.TreeSet(1, 3).range(1, 2))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldCRUcmVlU2V0Vmlld2JdAzqy0DpGAgADTAAGJG91dGVydAAiTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9UcmVlU2V0O0wABGZyb210AA5Mc2NhbGEvT3B0aW9uO0wABXVudGlscQB+AAJ4cgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlRyZWVTZXTNdJ8RUA6beAIAAkwACG9yZGVyaW5ndAAVTHNjYWxhL21hdGgvT3JkZXJpbmc7TAAmc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJFRyZWVTZXQkJHRyZWV0ACxMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1JlZEJsYWNrVHJlZSRUcmVlO3hwc3IAGHNjYWxhLm1hdGguT3JkZXJpbmckSW50JNmBnwaXZn6wAgAAeHBzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSRUcmVlAEynNPA1phUCAAJJAARzaXplTAAEcm9vdHQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJE5vZGU7eHAAAAACc3IAKnNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5SZWRCbGFja1RyZWUkTm9kZRsR7BbVWpYAAgAGWgADcmVkTAADa2V5dAASTGphdmEvbGFuZy9PYmplY3Q7TAAEbGVmdHEAfgAKTAAGcGFyZW50cQB+AApMAAVyaWdodHEAfgAKTAAFdmFsdWVxAH4ADXhwAHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABcHBzcQB+AAwBc3EAfgAPAAAAA3BxAH4ADnBwcHNxAH4AA3EAfgAIcQB+AAtzcgAKc2NhbGEuU29tZREi8mleoYt0AgABTAABeHEAfgANeHIADHNjYWxhLk9wdGlvbv5pN/3bDmZ0AgAAeHBxAH4AEXNxAH4AFXNxAH4ADwAAAAI=") + check(mutable.TreeSet(1, 3).range(1, 2))( "rO0ABXNyACxzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuVHJlZVNldCRUcmVlU2V0Vmlld2JdAzqy0DpGAgADTAAGJG91dGVydAAiTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9UcmVlU2V0O0wABGZyb210AA5Mc2NhbGEvT3B0aW9uO0wABXVudGlscQB+AAJ4cgAgc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlRyZWVTZXTNdJ8RUA6beAIAAkwACG9yZGVyaW5ndAAVTHNjYWxhL21hdGgvT3JkZXJpbmc7TAAmc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJFRyZWVTZXQkJHRyZWV0ACxMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1JlZEJsYWNrVHJlZSRUcmVlO3hwc3IAGHNjYWxhLm1hdGguT3JkZXJpbmckSW50JNmBnwaXZn6wAgAAeHBzcgAqc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLlJlZEJsYWNrVHJlZSRUcmVlAEynNPA1phUCAAJJAARzaXplTAAEcm9vdHQALExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUmVkQmxhY2tUcmVlJE5vZGU7eHAAAAACc3IAKnNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5SZWRCbGFja1RyZWUkTm9kZRsR7BbVWpYAAgAGWgADcmVkTAADa2V5dAASTGphdmEvbGFuZy9PYmplY3Q7TAAEbGVmdHEAfgAKTAAGcGFyZW50cQB+AApMAAVyaWdodHEAfgAKTAAFdmFsdWVxAH4ADXhwAHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABcHBzcQB+AAwBc3EAfgAPAAAAA3BxAH4ADnBwcHNxAH4AA3EAfgAIcQB+AAtzcgAKc2NhbGEuU29tZREi8mleoYt0AgABTAAFdmFsdWVxAH4ADXhyAAxzY2FsYS5PcHRpb27+aTf92w5mdAIAAHhwcQB+ABFzcQB+ABVzcQB+AA8AAAAC") // TODO SI-8576 Uninitialized field under -Xcheckinit // check(new mutable.History())( 
"rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGlzdG9yeUhuXxDIFJrsAgACSQAKbWF4SGlzdG9yeUwAA2xvZ3QAIExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUXVldWU7eHAAAAPoc3IAHnNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5RdWV1ZbjMURVfOuHHAgAAeHIAJHNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5NdXRhYmxlTGlzdFJpnjJ+gFbAAgADSQADbGVuTAAGZmlyc3QwdAAlTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9MaW5rZWRMaXN0O0wABWxhc3QwcQB+AAV4cAAAAABzcgAjc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkxpbmtlZExpc3Sak+nGCZHaUQIAAkwABGVsZW10ABJMamF2YS9sYW5nL09iamVjdDtMAARuZXh0dAAeTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9TZXE7eHBwcQB+AApxAH4ACg==") check(mutable.LinkedHashMap(1 -> 2))( "rO0ABXNyACZzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlua2VkSGFzaE1hcAAAAAAAAAABAwAAeHB3DQAAAu4AAAABAAAABABzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJ4") From 543d719dce062e6ea99c21c7320def711af1cf9e Mon Sep 17 00:00:00 2001 From: Jakob Odersky Date: Mon, 25 Apr 2016 15:00:09 -0700 Subject: [PATCH 0169/2793] Retain javadoc comments in scaladoc * Hook into java parser to generate doc comments * Generate empty trees for java implementation bodies --- src/compiler/scala/tools/nsc/Global.scala | 4 +- .../tools/nsc/ast/parser/SyntaxAnalyzer.scala | 2 +- .../scala/tools/nsc/javac/JavaParsers.scala | 15 ++- .../scala/tools/nsc/javac/JavaScanners.scala | 38 +++--- .../scala/tools/nsc/typechecker/Typers.scala | 7 +- .../tools/nsc/doc/ScaladocAnalyzer.scala | 119 +++++++++++------- .../scala/tools/nsc/doc/ScaladocGlobal.scala | 3 + .../tools/partest/ScaladocModelTest.scala | 2 +- test/files/run/t5699.check | 4 +- test/scaladoc/resources/SI-4826.java | 20 +++ test/scaladoc/run/SI-4826.check | 1 + test/scaladoc/run/SI-4826.scala | 30 +++++ 12 files changed, 171 insertions(+), 74 deletions(-) create mode 100644 test/scaladoc/resources/SI-4826.java create mode 100644 test/scaladoc/run/SI-4826.check create mode 100644 test/scaladoc/run/SI-4826.scala diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 9d6693c00fd8..d4c2896c5c65 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -411,7 +411,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) override val initial = true } - import syntaxAnalyzer.{ UnitScanner, UnitParser } + import syntaxAnalyzer.{ UnitScanner, UnitParser, JavaUnitParser } // !!! I think we're overdue for all these phase objects being lazy vals. // There's no way for a Global subclass to provide a custom typer @@ -1042,6 +1042,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) def newUnitParser(code: String, filename: String = ""): UnitParser = newUnitParser(newCompilationUnit(code, filename)) + def newJavaUnitParser(unit: CompilationUnit): JavaUnitParser = new JavaUnitParser(unit) + /** A Run is a single execution of the compiler on a set of units. 
*/ class Run extends RunContextApi with RunReporting with RunParsing { diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala index df2073785b84..e0667b5a3ebb 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala @@ -82,7 +82,7 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse } private def initialUnitBody(unit: CompilationUnit): Tree = { - if (unit.isJava) new JavaUnitParser(unit).parse() + if (unit.isJava) newJavaUnitParser(unit).parse() else if (currentRun.parsing.incompleteHandled) newUnitParser(unit).parse() else newUnitParser(unit).smartParse() } diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index fd9c99a3b966..01ca8033accd 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -111,7 +111,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { def arrayOf(tpt: Tree) = AppliedTypeTree(scalaDot(tpnme.Array), List(tpt)) - def blankExpr = Ident(nme.WILDCARD) + def blankExpr = EmptyTree def makePackaging(pkg: RefTree, stats: List[Tree]): PackageDef = atPos(pkg.pos) { PackageDef(pkg, stats) } @@ -135,6 +135,11 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { DefDef(Modifiers(Flags.JAVA), nme.CONSTRUCTOR, List(), List(vparams), TypeTree(), blankExpr) } + /** A hook for joining the comment associated with a definition. + * Overridden by scaladoc. + */ + def joinComment(trees: => List[Tree]): List[Tree] = trees + // ------------- general parsing --------------------------- /** skip parent or brace enclosed sequence of things */ @@ -581,7 +586,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { case CLASS | ENUM | INTERFACE | AT => typeDecl(if (definesInterface(parentToken)) mods | Flags.STATIC else mods) case _ => - termDecl(mods, parentToken) + joinComment(termDecl(mods, parentToken)) } def makeCompanionObject(cdef: ClassDef, statics: List[Tree]): Tree = @@ -833,10 +838,10 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { } def typeDecl(mods: Modifiers): List[Tree] = in.token match { - case ENUM => enumDecl(mods) - case INTERFACE => interfaceDecl(mods) + case ENUM => joinComment(enumDecl(mods)) + case INTERFACE => joinComment(interfaceDecl(mods)) case AT => annotationDecl(mods) - case CLASS => classDecl(mods) + case CLASS => joinComment(classDecl(mods)) case _ => in.nextToken(); syntaxError("illegal start of type declaration", skipIt = true); List(errorTypeTree) } diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala index c74a6938c6dd..e11ac9404162 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala @@ -577,21 +577,29 @@ trait JavaScanners extends ast.parser.ScannersCommon { } } - protected def skipComment(): Boolean = { - @tailrec def skipLineComment(): Unit = in.ch match { - case CR | LF | SU => - case _ => in.next; skipLineComment() - } - @tailrec def skipJavaComment(): Unit = in.ch match { - case SU => incompleteInputError("unclosed comment") - case '*' => in.next; if (in.ch == '/') in.next else skipJavaComment() - case _ => in.next; skipJavaComment() - } - in.ch match { - case '/' => in.next ; 
skipLineComment() ; true - case '*' => in.next ; skipJavaComment() ; true - case _ => false - } + protected def putCommentChar(): Unit = in.next() + + protected def skipBlockComment(isDoc: Boolean): Unit = in.ch match { + case SU => incompleteInputError("unclosed comment") + case '*' => putCommentChar() ; if (in.ch == '/') putCommentChar() else skipBlockComment(isDoc) + case _ => putCommentChar() ; skipBlockComment(isDoc) + } + + protected def skipLineComment(): Unit = in.ch match { + case CR | LF | SU => + case _ => putCommentChar() ; skipLineComment() + } + + protected def skipComment(): Boolean = in.ch match { + case '/' => putCommentChar() ; skipLineComment() ; true + case '*' => + putCommentChar() + in.ch match { + case '*' => skipBlockComment(isDoc = true) + case _ => skipBlockComment(isDoc = false) + } + true + case _ => false } // Identifiers --------------------------------------------------------------- diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 9fa3564b2bab..ba104fb7a6b1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2247,9 +2247,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe) } - if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass)) { - // At this point in AnyVal there is no supercall, which will blow up - // in computeParamAliases; there's nothing to be computed for Anyval anyway. + if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass) && !meth.isJava) { + // There are no supercalls for AnyVal or constructors from Java sources, which + // would blow up in computeParamAliases; there's nothing to be computed for them + // anyway. 
if (meth.isPrimaryConstructor) computeParamAliases(meth.owner, vparamss1, rhs1) else diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala index 8ea8c4deff82..2152ce234a78 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala @@ -101,52 +101,6 @@ trait ScaladocAnalyzer extends Analyzer { abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends SyntaxAnalyzer { import global._ - class ScaladocJavaUnitParser(unit: CompilationUnit) extends { - override val in = new ScaladocJavaUnitScanner(unit) - } with JavaUnitParser(unit) { } - - class ScaladocJavaUnitScanner(unit: CompilationUnit) extends JavaUnitScanner(unit) { - /** buffer for the documentation comment - */ - var docBuffer: StringBuilder = null - - /** add the given character to the documentation buffer - */ - protected def putDocChar(c: Char) { - if (docBuffer ne null) docBuffer.append(c) - } - - override protected def skipComment(): Boolean = { - if (in.ch == '/') { - do { - in.next - } while ((in.ch != CR) && (in.ch != LF) && (in.ch != SU)) - true - } else if (in.ch == '*') { - docBuffer = null - in.next - val scaladoc = ("/**", "*/") - if (in.ch == '*') - docBuffer = new StringBuilder(scaladoc._1) - do { - do { - if (in.ch != '*' && in.ch != SU) { - in.next; putDocChar(in.ch) - } - } while (in.ch != '*' && in.ch != SU) - while (in.ch == '*') { - in.next; putDocChar(in.ch) - } - } while (in.ch != '/' && in.ch != SU) - if (in.ch == '/') in.next - else incompleteInputError("unclosed comment") - true - } else { - false - } - } - } - class ScaladocUnitScanner(unit0: CompilationUnit, patches0: List[BracePatch]) extends UnitScanner(unit0, patches0) { private var docBuffer: StringBuilder = null // buffer for comments (non-null while scanning) @@ -259,4 +213,77 @@ abstract class ScaladocSyntaxAnalyzer[G <: Global](val global: G) extends Syntax else trees } } + + class ScaladocJavaUnitScanner(unit: CompilationUnit) extends JavaUnitScanner(unit) { + + private val docBuffer: StringBuilder = new StringBuilder + private var inDocComment = false + private var docStart: Int = 0 + private var lastDoc: DocComment = null + + // get last doc comment + def flushDoc(): DocComment = try lastDoc finally lastDoc = null + + override protected def putCommentChar(): Unit = { + if (inDocComment) docBuffer append in.ch + in.next + } + + override protected def skipBlockComment(isDoc: Boolean): Unit = { + // condition is true when comment is entered the first time, + // i.e. 
immediately after "/*" and when current character is "*" + if (!inDocComment && isDoc) { + docBuffer append "/*" + docStart = currentPos.start + inDocComment = true + } + super.skipBlockComment(isDoc) + } + + override protected def skipComment(): Boolean = { + val skipped = super.skipComment() + if (skipped && inDocComment) { + val raw = docBuffer.toString + val position = Position.range(unit.source, docStart, docStart, in.cpos) + lastDoc = DocComment(raw, position) + signalParsedDocComment(raw, position) + docBuffer.setLength(0) // clear buffer + inDocComment = false + true + } else { + skipped + } + } + + } + + class ScaladocJavaUnitParser(unit: CompilationUnit) extends { + override val in = new ScaladocJavaUnitScanner(unit) + } with JavaUnitParser(unit) { + + override def joinComment(trees: => List[Tree]): List[Tree] = { + val doc = in.flushDoc() + + if ((doc ne null) && doc.raw.length > 0) { + log(s"joinComment(doc=$doc)") + val joined = trees map { t => + DocDef(doc, t) setPos { + if (t.pos.isDefined) { + val pos = doc.pos.withEnd(t.pos.end) + pos.makeTransparent + } else { + t.pos + } + } + } + joined.find(_.pos.isOpaqueRange) foreach { main => + val mains = List(main) + joined foreach { t => if (t ne main) ensureNonOverlapping(t, mains) } + } + joined + } else { + trees + } + } + } } diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala index 10bfe6f94ba8..625d074df5b0 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala @@ -13,6 +13,7 @@ trait ScaladocGlobalTrait extends Global { override val useOffsetPositions = false override def newUnitParser(unit: CompilationUnit) = new syntaxAnalyzer.ScaladocUnitParser(unit, Nil) + override def newJavaUnitParser(unit: CompilationUnit) = new syntaxAnalyzer.ScaladocJavaUnitParser(unit) override lazy val syntaxAnalyzer = new ScaladocSyntaxAnalyzer[outer.type](outer) { val runsAfter = List[String]() @@ -40,6 +41,8 @@ class ScaladocGlobal(settings: doc.Settings, reporter: Reporter) extends Global( phasesSet += analyzer.typerFactory } override def forScaladoc = true + override def createJavadoc = true + override lazy val analyzer = new { val global: ScaladocGlobal.this.type = ScaladocGlobal.this } with ScaladocAnalyzer diff --git a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala index 1ad3b3ff2bf9..44c1146a1457 100644 --- a/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala +++ b/src/scaladoc/scala/tools/partest/ScaladocModelTest.scala @@ -81,7 +81,7 @@ abstract class ScaladocModelTest extends DirectTest { private[this] var settings: doc.Settings = null // create a new scaladoc compiler - private[this] def newDocFactory: DocFactory = { + def newDocFactory: DocFactory = { settings = new doc.Settings(_ => ()) settings.scaladocQuietRun = true // yaay, no more "model contains X documentable templates"! 
val args = extraSettings + " " + scaladocSettings diff --git a/test/files/run/t5699.check b/test/files/run/t5699.check index df19644ae618..8d19ecd321c5 100644 --- a/test/files/run/t5699.check +++ b/test/files/run/t5699.check @@ -1,10 +1,10 @@ [[syntax trees at end of parser]] // annodef.java package { object MyAnnotation extends { - def () = _ + def () }; class MyAnnotation extends scala.annotation.Annotation with _root_.java.lang.annotation.Annotation with scala.annotation.ClassfileAnnotation { - def () = _; + def (); def value(): String } } diff --git a/test/scaladoc/resources/SI-4826.java b/test/scaladoc/resources/SI-4826.java new file mode 100644 index 000000000000..f735ce633515 --- /dev/null +++ b/test/scaladoc/resources/SI-4826.java @@ -0,0 +1,20 @@ +package test.scaladoc; + +/** + * Testing java comments. The presence of a :marker: + * tag is verified by tests. + */ +public class JavaComments { + + /** + * Compute the answer to the ultimate question of life, the + * universe, and everything. :marker: + * @param factor scaling factor to the answer + * @return the answer to everything (42) scaled by factor + */ + public int answer(int factor) { + return 42 * factor; + } + +} + diff --git a/test/scaladoc/run/SI-4826.check b/test/scaladoc/run/SI-4826.check new file mode 100644 index 000000000000..619c56180bb9 --- /dev/null +++ b/test/scaladoc/run/SI-4826.check @@ -0,0 +1 @@ +Done. diff --git a/test/scaladoc/run/SI-4826.scala b/test/scaladoc/run/SI-4826.scala new file mode 100644 index 000000000000..50e446800287 --- /dev/null +++ b/test/scaladoc/run/SI-4826.scala @@ -0,0 +1,30 @@ +import scala.tools.nsc.doc.Universe +import scala.tools.nsc.doc.model._ +import scala.tools.partest.ScaladocModelTest + +object Test extends ScaladocModelTest { + + override def resourceFile = "SI-4826.java" + + // overridden to pass explicit files to newDocFactory.makeUniverse (rather than code strings) + // since the .java file extension is required + override def model: Option[Universe] = { + val path = resourcePath + "/" + resourceFile + newDocFactory.makeUniverse(Left(List(path))) + } + + // no need for special settings + def scaladocSettings = "" + + def testModel(rootPackage: Package) = { + import access._ + val Tag = ":marker:" + + val base = rootPackage._package("test")._package("scaladoc") + val clazz = base._class("JavaComments") + val method = clazz._method("answer") + + assert(extractCommentText(clazz.comment.get).contains(Tag)) + assert(extractCommentText(method.comment.get).contains(Tag)) + } +} From a0590aa9ba45e83c8c8d496b8ab132966b1a7a95 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 15 Jul 2016 12:17:52 +0200 Subject: [PATCH 0170/2793] SD-182 compiler option -Xgen-mixin-forwarders Introduce a compiler option -Xgen-mixin-forwarders to always generate mixin forwarder methods. --- .../tools/nsc/settings/ScalaSettings.scala | 1 + .../scala/tools/nsc/transform/Mixin.scala | 78 ++++++++++--------- .../scala/lang/traits/BytecodeTest.scala | 29 +++++++ 3 files changed, 73 insertions(+), 35 deletions(-) diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 4d236b226d78..dae8539c66ba 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -133,6 +133,7 @@ trait ScalaSettings extends AbsScalaSettings val XnoPatmatAnalysis = BooleanSetting ("-Xno-patmat-analysis", "Don't perform exhaustivity/unreachability analysis. 
Also, ignore @switch annotation.") val XfullLubs = BooleanSetting ("-Xfull-lubs", "Retains pre 2.10 behavior of less aggressive truncation of least upper bounds.") + val XgenMixinForwarders = BooleanSetting("-Xgen-mixin-forwarders", "Generate forwarder methods in classes inhering concrete methods from traits.") // XML parsing options object XxmlSettings extends MultiChoiceEnumeration { diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index d62b77dac26e..b787f64846b0 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -246,43 +246,51 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { case NoSymbol => val isMemberOfClazz = clazz.info.findMember(member.name, 0, 0L, stableOnly = false).alternatives.contains(member) if (isMemberOfClazz) { - // `member` is a concrete method defined in `mixinClass`, which is a base class of - // `clazz`, and the method is not overridden in `clazz`. A forwarder is needed if: - // - // - A non-trait base class of `clazz` defines a matching method. Example: - // class C {def f: Int}; trait T extends C {def f = 1}; class D extends T - // Even if C.f is abstract, the forwarder in D is needed, otherwise the JVM would - // resolve `D.f` to `C.f`, see jvms-6.5.invokevirtual. - // - // - There exists another concrete, matching method in a parent interface `p` of - // `clazz`, and the `mixinClass` does not itself extend `p`. In this case the - // forwarder is needed to disambiguate. Example: - // trait T1 {def f = 1}; trait T2 extends T1 {override def f = 2}; class C extends T2 - // In C we don't need a forwarder for f because T2 extends T1, so the JVM resolves - // C.f to T2.f non-ambiguously. See jvms-5.4.3.3, "maximally-specific method". - // trait U1 {def f = 1}; trait U2 {self:U1 => override def f = 2}; class D extends U2 - // In D the forwarder is needed, the interfaces U1 and U2 are unrelated at the JVM - // level. - - @tailrec - def existsCompetingMethod(baseClasses: List[Symbol]): Boolean = baseClasses match { - case baseClass :: rest => - if (baseClass ne mixinClass) { - val m = member.overriddenSymbol(baseClass) - val isCompeting = m.exists && { - !m.owner.isTraitOrInterface || - (!m.isDeferred && !mixinClass.isNonBottomSubClass(m.owner)) - } - isCompeting || existsCompetingMethod(rest) - } else existsCompetingMethod(rest) - - case _ => false + def genForwarder(): Unit = { + cloneAndAddMixinMember(mixinClass, member).asInstanceOf[TermSymbol] setAlias member } - if (existsCompetingMethod(clazz.baseClasses)) - cloneAndAddMixinMember(mixinClass, member).asInstanceOf[TermSymbol] setAlias member - else if (!settings.nowarnDefaultJunitMethods && JUnitTestClass.exists && member.hasAnnotation(JUnitTestClass)) - warning(member.pos, "JUnit tests in traits that are compiled as default methods are not executed by JUnit 4. JUnit 5 will fix this issue.") + if (settings.XgenMixinForwarders) genForwarder() + else { + + // `member` is a concrete method defined in `mixinClass`, which is a base class of + // `clazz`, and the method is not overridden in `clazz`. A forwarder is needed if: + // + // - A non-trait base class of `clazz` defines a matching method. Example: + // class C {def f: Int}; trait T extends C {def f = 1}; class D extends T + // Even if C.f is abstract, the forwarder in D is needed, otherwise the JVM would + // resolve `D.f` to `C.f`, see jvms-6.5.invokevirtual. 
+ // + // - There exists another concrete, matching method in a parent interface `p` of + // `clazz`, and the `mixinClass` does not itself extend `p`. In this case the + // forwarder is needed to disambiguate. Example: + // trait T1 {def f = 1}; trait T2 extends T1 {override def f = 2}; class C extends T2 + // In C we don't need a forwarder for f because T2 extends T1, so the JVM resolves + // C.f to T2.f non-ambiguously. See jvms-5.4.3.3, "maximally-specific method". + // trait U1 {def f = 1}; trait U2 {self:U1 => override def f = 2}; class D extends U2 + // In D the forwarder is needed, the interfaces U1 and U2 are unrelated at the JVM + // level. + + @tailrec + def existsCompetingMethod(baseClasses: List[Symbol]): Boolean = baseClasses match { + case baseClass :: rest => + if (baseClass ne mixinClass) { + val m = member.overriddenSymbol(baseClass) + val isCompeting = m.exists && { + !m.owner.isTraitOrInterface || + (!m.isDeferred && !mixinClass.isNonBottomSubClass(m.owner)) + } + isCompeting || existsCompetingMethod(rest) + } else existsCompetingMethod(rest) + + case _ => false + } + + if (existsCompetingMethod(clazz.baseClasses)) + genForwarder() + else if (!settings.nowarnDefaultJunitMethods && JUnitTestClass.exists && member.hasAnnotation(JUnitTestClass)) + warning(member.pos, "JUnit tests in traits that are compiled as default methods are not executed by JUnit 4. JUnit 5 will fix this issue.") + } } case _ => diff --git a/test/junit/scala/lang/traits/BytecodeTest.scala b/test/junit/scala/lang/traits/BytecodeTest.scala index ec8508df999e..e6c74b86abfd 100644 --- a/test/junit/scala/lang/traits/BytecodeTest.scala +++ b/test/junit/scala/lang/traits/BytecodeTest.scala @@ -230,6 +230,35 @@ class BytecodeTest extends BytecodeTesting { List(ALOAD, ILOAD, PUTFIELD, ALOAD, ACONST_NULL, "", RETURN)) } + @Test + def mixinForwarders(): Unit = { + val code = + """trait T { def f = 1 } + |class C extends T + """.stripMargin + val List(c1, _) = compileClasses(code) + val List(c2, _) = newCompiler(extraArgs = "-Xgen-mixin-forwarders").compileClasses(code) + assert(getMethods(c1, "f").isEmpty) + assertSameCode(getMethod(c2, "f"), + List(VarOp(ALOAD, 0), Invoke(INVOKESTATIC, "T", "f$", "(LT;)I", true), Op(IRETURN))) + } + + @Test + def sd143(): Unit = { + // this tests the status quo, which is wrong. + val code = + """class A { def m = 1 } + |class B extends A { override def m = 2 } + |trait T extends A + |class C extends B with T { + | override def m = super[T].m // should invoke A.m + |} + """.stripMargin + val List(_, _, c, _) = compileClasses(code) + // even though the bytecode refers to A.m, invokespecial will resolve to B.m + assertSameCode(getMethod(c, "m"), + List(VarOp(ALOAD, 0), Invoke(INVOKESPECIAL, "A", "m", "()I", false), Op(IRETURN))) + } } object invocationReceiversTestCode { From 36e2ae2fbd3d3637f2be6453e67778b706e1baa9 Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Mon, 11 Jul 2016 15:55:19 +0200 Subject: [PATCH 0171/2793] Switch the bootstrap build over to sbt MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit All of the individual ant builds that occured during `bootstrap` are replaced by equivalent sbt builds. - Allow extra dashes in version suffix when using SPLIT - Clean up ScriptCommands - Building an extra `locker` for stability testing with ant was not necessary but sbt also drops `strap`, so we need to build again with `quick` to get the equivalent of `strap`. 
The script for checking stability is invoked directly from the bootstrap script, not from sbt. - `STARR` and `locker` build output is still logged to `logs/builds`, the main build runs log directly to the main console with colored output. - Allow `—show-log` option on partest command line in sbt - Normalize inferred LUB in `run/t7747-repl.scala` - Add `normalize` feature from `ReplTest` to `InteractiveTest` - Normalize inferred LUBs in `presentation/callcc-interpreter` --- project/PartestUtil.scala | 2 +- project/ScriptCommands.scala | 124 +++++++++++++++--- project/VersionUtil.scala | 2 +- scripts/jobs/integrate/bootstrap | 72 +++++----- scripts/jobs/validate/test | 11 +- .../interactive/tests/InteractiveTest.scala | 10 +- .../callcc-interpreter/Runner.scala | 5 +- test/files/run/t7747-repl.scala | 8 +- 8 files changed, 164 insertions(+), 70 deletions(-) diff --git a/project/PartestUtil.scala b/project/PartestUtil.scala index 8793e9c0a55d..99b978515caf 100644 --- a/project/PartestUtil.scala +++ b/project/PartestUtil.scala @@ -29,7 +29,7 @@ object PartestUtil { val knownUnaryOptions = List( "--pos", "--neg", "--run", "--jvm", "--res", "--ant", "--scalap", "--specialized", "--scalacheck", "--instrumented", "--presentation", "--failed", "--update-check", - "--show-diff", "--verbose", "--terse", "--debug", "--version", "--self-test", "--help") + "--show-diff", "--show-log", "--verbose", "--terse", "--debug", "--version", "--self-test", "--help") val srcPathOption = "--srcpath" val grepOption = "--grep" diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index 0bf43b18e872..accbadbbf66c 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -4,30 +4,116 @@ import BuildSettings.autoImport._ /** Custom commands for use by the Jenkins scripts. This keeps the surface area and call syntax small. */ object ScriptCommands { - def all = Seq(setupPublishCore, setupValidateTest) + def all = Seq( + setupPublishCore, + setupValidateTest, + setupBootstrapStarr, setupBootstrapLocker, setupBootstrapQuick, setupBootstrapPublish, + testAll + ) /** Set up the environment for `validate/publish-core`. The argument is the Artifactory snapshot repository URL. */ - def setupPublishCore = Command.single("setupPublishCore") { case (state, url) => - Project.extract(state).append(Seq( - baseVersionSuffix in Global := "SHA-SNAPSHOT", - // Append build.timestamp to Artifactory URL to get consistent build numbers (see https://github.com/sbt/sbt/issues/2088): - publishTo in Global := Some("scala-pr" at url.replaceAll("/$", "") + ";build.timestamp=" + System.currentTimeMillis), - publishArtifact in (Compile, packageDoc) in ThisBuild := false, - scalacOptions in Compile in ThisBuild += "-opt:l:classpath", - logLevel in ThisBuild := Level.Info, - logLevel in update in ThisBuild := Level.Warn - ), state) - } + def setupPublishCore = setup("setupPublishCore") { case Seq(url) => + Seq( + baseVersionSuffix in Global := "SHA-SNAPSHOT" + ) ++ publishTarget(url) ++ noDocs ++ enableOptimizer + } /** Set up the environment for `validate/test`. The argument is the Artifactory snapshot repository URL. 
*/ - def setupValidateTest = Command.single("setupValidateTest") { case (state, url) => + def setupValidateTest = setup("setupValidateTest") { case Seq(url) => //TODO When ant is gone, pass starr version as an argument to this command instead of using version.properties - Project.extract(state).append(Seq( + Seq( + resolvers in Global += "scala-pr" at url, + testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) + ) ++ enableOptimizer + } + + /** Set up the environment for building STARR in `validate/bootstrap`. The arguments are: + * - Repository URL for publishing + * - Version number to publish */ + def setupBootstrapStarr = setup("setupBootstrapStarr") { case Seq(url, ver) => + Seq( + baseVersion in Global := ver, + baseVersionSuffix in Global := "SPLIT" + ) ++ publishTarget(url) ++ noDocs ++ enableOptimizer + } + + /** Set up the environment for building locker in `validate/bootstrap`. The arguments are: + * - Repository URL for publishing locker and resolving STARR + * - Version number to publish */ + def setupBootstrapLocker = setup("setupBootstrapLocker") { case Seq(url, ver) => + Seq( + baseVersion in Global := ver, + baseVersionSuffix in Global := "SPLIT", + resolvers in Global += "scala-pr" at url + ) ++ publishTarget(url) ++ noDocs ++ enableOptimizer + } + + /** Set up the environment for building quick in `validate/bootstrap`. The arguments are: + * - Repository URL for publishing + * - Version number to publish */ + def setupBootstrapQuick = setup("setupBootstrapQuick") { case Seq(url, ver) => + Seq( + baseVersion in Global := ver, + baseVersionSuffix in Global := "SPLIT", + resolvers in Global += "scala-pr" at url, + testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) + ) ++ publishTarget(url) ++ enableOptimizer + } + + /** Set up the environment for publishing in `validate/bootstrap`. The arguments are: + * - Temporary bootstrap repository URL for resolving modules + * - Version number to publish + * All artifacts are published to Sonatype. GPG signing has to be configured from the + * shell script after `setupBootstrapPublish` because we don't pull the GPG plugin in + * by default, so we cannot reference its keys statically. 
*/ + def setupBootstrapPublish = setup("setupBootstrapPublish") { case Seq(url, ver) => + // Define a copy of the setting key here in case the plugin is not part of the build + val pgpPassphrase = SettingKey[Option[Array[Char]]]("pgp-passphrase", "The passphrase associated with the secret used to sign artifacts.", KeyRanks.BSetting) + Seq( + baseVersion in Global := ver, + baseVersionSuffix in Global := "SPLIT", resolvers in Global += "scala-pr" at url, - scalacOptions in Compile in ThisBuild += "-opt:l:classpath", - testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")), - logLevel in ThisBuild := Level.Info, - logLevel in update in ThisBuild := Level.Warn - ), state) + publishTo in Global := Some("sonatype-releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2"), + credentials in Global += Credentials(Path.userHome / ".credentials-sonatype"), + pgpPassphrase in Global := Some(Array.empty) + ) ++ enableOptimizer + } + + private[this] def setup(name: String)(f: Seq[String] => Seq[Setting[_]]) = + Command.args(name, name) { case (state, seq) => Project.extract(state).append(f(seq) ++ resetLogLevels, state) } + + private[this] val resetLogLevels = Seq( + logLevel in ThisBuild := Level.Info, + logLevel in update in ThisBuild := Level.Warn + ) + + private[this] val enableOptimizer = Seq( + scalacOptions in Compile in ThisBuild += "-opt:l:classpath" + ) + + private[this] val noDocs = Seq( + publishArtifact in (Compile, packageDoc) in ThisBuild := false + ) + + private[this] def publishTarget(url: String) = Seq( + // Append build.timestamp to Artifactory URL to get consistent build numbers (see https://github.com/sbt/sbt/issues/2088): + publishTo in Global := Some("scala-pr-publish" at url.replaceAll("/$", "") + ";build.timestamp=" + System.currentTimeMillis) + ) + + def testAll = Command.command("testAll") { state => + val cmds = Seq( + "test", + "partest run pos neg jvm", + "partest res scalap specialized scalacheck", + "partest instrumented presentation", + "partest --srcpath scaladoc", + "osgiTestFelix/test", + "osgiTestEclipse/test", + "library/mima", + "reflect/mima", + "doc" + ) + state.log.info(cmds.mkString("Running all tests: \"", "\", \"", "\"")) + cmds ::: state } } diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala index 4705bbb6ce36..148fdfbc2343 100644 --- a/project/VersionUtil.scala +++ b/project/VersionUtil.scala @@ -61,7 +61,7 @@ object VersionUtil { val (base, suffix) = { val (b, s) = (baseVersion.value, baseVersionSuffix.value) if(s == "SPLIT") { - val split = """([\w+\.]+)(-[\w+\.]+)??""".r + val split = """([\w+\.]+)(-[\w+\.-]+)??""".r val split(b2, sOrNull) = b (b2, Option(sOrNull).map(_.drop(1)).getOrElse("")) } else (b, s) diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index 7716dc9eb1c1..bdc50323a8c6 100644 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -92,7 +92,7 @@ publishLockerPrivateTask=${publishLockerPrivateTask-$publishPrivateTask} # set t forceRebuild=${forceRebuild-no} -antBuildTask="${antBuildTask-nightly}" # TESTING leave empty to avoid the sanity check (don't set it to "init" because ant will croak) +sbtBuildTask="testAll" # TESTING leave empty to avoid the sanity check clean="clean" # TESTING leave empty to speed up testing baseDir=${WORKSPACE-`pwd`} @@ -174,11 +174,11 @@ function st_stagingRepoClose() { # the old version (on jenkins, and I don't want to upgrade for risk 
of breaking other builds) honors -sbt-dir # the new version of sbt-extras ignores sbt-dir, so we pass it in as -Dsbt.global.base # need to set sbt-dir to one that has the gpg.sbt plugin config -sbtArgs="-no-colors -ivy $baseDir/ivy2 -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13" +sbtArgs="-ivy $baseDir/ivy2 -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13" sbtBuild() { - echo "### sbtBuild: "$SBT_CMD $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" - $SBT_CMD $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" >> $baseDir/logs/builds 2>&1 + echo "### sbtBuild: "$SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" + $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" >> $baseDir/logs/builds 2>&1 } sbtResolve() { @@ -186,8 +186,8 @@ sbtResolve() { touch build.sbt # Can be set to `full` if a module requires cross-versioning against the full Scala version, like the continuations plugin used to. cross=${4-binary} - echo "### sbtResolve: $SBT_CMD $sbtArgs " "${scalaVersionTasks[@]}" "\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" - $SBT_CMD $sbtArgs "${scalaVersionTasks[@]}" \ + echo "### sbtResolve: $SBT_CMD -no-colors $sbtArgs " "${scalaVersionTasks[@]}" "\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" + $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" \ "set libraryDependencies := Seq(\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross)" \ 'show update' >> $baseDir/logs/resolution 2>&1 } @@ -490,13 +490,7 @@ bootstrap() { git clone --reference $WORKSPACE/.git $WORKSPACE/.git $STARR_DIR cd $STARR_DIR git co $STARR_REF - ant -Dmaven.version.number=$STARR_VER\ - -Dremote.snapshot.repository=NOPE\ - -Dremote.release.repository=$releaseTempRepoUrl\ - -Drepository.credentials.id=$releaseTempRepoCred\ - -Ddocs.skip=1\ - -Dlocker.skip=1\ - $publishStarrPrivateTask >> $baseDir/logs/builds 2>&1 + $SBT_CMD -no-colors $sbtArgs --warn "setupBootstrapStarr $releaseTempRepoUrl $STARR_VER" $clean publish >> $baseDir/logs/builds 2>&1 ) fi @@ -510,14 +504,7 @@ bootstrap() { # publish more than just core: partest needs scalap # in sabbus lingo, the resulting Scala build will be used as starr to build the released Scala compiler if [ ! -z "$STARR_VER" ]; then SET_STARR=-Dstarr.version=$STARR_VER; fi - ant -Dmaven.version.number=$SCALA_VER\ - -Dremote.snapshot.repository=NOPE\ - $SET_STARR\ - -Dremote.release.repository=$releaseTempRepoUrl\ - -Drepository.credentials.id=$releaseTempRepoCred\ - -Ddocs.skip=1\ - -Dlocker.skip=1\ - $publishLockerPrivateTask >> $baseDir/logs/builds 2>&1 + $SBT_CMD -no-colors $sbtArgs $SET_STARR --warn "setupBootstrapLocker $releaseTempRepoUrl $SCALA_VER" $clean publish >> $baseDir/logs/builds 2>&1 echo "### Building modules using locker" @@ -534,7 +521,7 @@ bootstrap() { echo "### Bootstrapping Scala using locker" # # TODO: close all open staging repos so that we can be reasonably sure the only open one we see after publishing below is ours - # # the ant call will create a new one + # # the sbt call will create a new one # # Rebuild Scala with these modules so that all binary versions are consistent. # Update versions.properties to new modules. 
@@ -542,23 +529,35 @@ bootstrap() { # don't skip locker (-Dlocker.skip=1), or stability will fail # overwrite "locker" version of scala at private-repo with bootstrapped version cd $baseDir - rm -rf build/ # must leave everything else in $baseDir for downstream jobs + rm -rf build/ - ant -Dstarr.version=$SCALA_VER\ - -Dextra.repo.url=$releaseTempRepoUrl\ - -Dmaven.version.suffix=$SCALA_VER_SUFFIX\ + $SBT_CMD $sbtArgs \ + --warn \ + -Dstarr.version=$SCALA_VER \ ${updatedModuleVersions[@]} \ - -Dupdate.versions=1\ - -Dscaladoc.git.commit=$SCALADOC_SOURCE_LINKS_VER\ - -Dremote.snapshot.repository=NOPE\ - -Dremote.release.repository=$releaseTempRepoUrl\ - -Drepository.credentials.id=$releaseTempRepoCred\ - -Dscalac.args.optimise=-opt:l:classpath\ - $antBuildTask $publishPrivateTask + "setupBootstrapQuick $releaseTempRepoUrl $SCALA_VER" \ + $clean \ + $sbtBuildTask \ + dist/mkQuick \ + publish | grep -v "was too long to be displayed in the webview, and will be left out" # clear ivy cache (and to be sure, local as well), so the next round of sbt builds sees the fresh scala rm -rf $baseDir/ivy2 + # Run stability tests using the just built version as "quick" and a new version as "strap" + mv build/quick quick1 + rm -rf build/ + $SBT_CMD $sbtArgs \ + --warn \ + -Dstarr.version=$SCALA_VER \ + ${updatedModuleVersions[@]} \ + "setupBootstrapQuick $releaseTempRepoUrl $SCALA_VER" \ + $clean \ + dist/mkQuick + mv build/quick build/strap + mv quick1 build/quick + tools/stability-test.sh + # TODO: create PR with following commit (note that release will have been tagged already) # git commit versions.properties -m"Bump versions.properties for $SCALA_VER." } @@ -571,7 +570,12 @@ publishSonatype() { # stage to sonatype, along with all modules -Dmaven.version.suffix/-Dbuild.release not necessary, # since we're just publishing an existing build echo "### Publishing core to sonatype" - ant -Dmaven.version.number=$SCALA_VER $publishSonatypeTaskCore + $SBT_CMD $sbtArgs \ + --warn \ + -Dstarr.version=$SCALA_VER \ + ${updatedModuleVersions[@]} \ + "setupBootstrapPublish $releaseTempRepoUrl $SCALA_VER" \ + publishSigned | grep -v "was too long to be displayed in the webview, and will be left out" echo "### Publishing modules to sonatype" # build/test/publish scala core modules to sonatype (this will start a new staging repo) diff --git a/scripts/jobs/validate/test b/scripts/jobs/validate/test index 3cd8af56081b..dd36c5db18ce 100755 --- a/scripts/jobs/validate/test +++ b/scripts/jobs/validate/test @@ -20,16 +20,7 @@ case $prDryRun in --warn \ "setupValidateTest $prRepoUrl" \ $testExtraArgs \ - "test" \ - "partest run pos neg jvm" \ - "partest res scalap specialized scalacheck" \ - "partest instrumented presentation" \ - "partest --srcpath scaladoc" \ - osgiTestFelix/test \ - osgiTestEclipse/test \ - library/mima \ - reflect/mima \ - doc + testAll ;; diff --git a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala index 2cb4f5fd4a39..00096dd359d8 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala @@ -78,10 +78,16 @@ abstract class InteractiveTest } protected def execute(): Unit = { - loadSources() - runDefaultTests() + util.stringFromStream { ostream => + Console.withOut(ostream) { + loadSources() + runDefaultTests() + } + }.lines.map(normalize).foreach(println) } + protected def normalize(s: String) = s + /** Load 
all sources before executing the test. */ protected def loadSources() { // ask the presentation compiler to track all sources. We do diff --git a/test/files/presentation/callcc-interpreter/Runner.scala b/test/files/presentation/callcc-interpreter/Runner.scala index 1ef3cf902531..a5698be5c27e 100644 --- a/test/files/presentation/callcc-interpreter/Runner.scala +++ b/test/files/presentation/callcc-interpreter/Runner.scala @@ -1,3 +1,6 @@ import scala.tools.nsc.interactive.tests._ -object Test extends InteractiveTest \ No newline at end of file +object Test extends InteractiveTest { + // Normalize ordering of LUB + override def normalize(s: String) = s.replace("Serializable with Product", "Product with Serializable") +} diff --git a/test/files/run/t7747-repl.scala b/test/files/run/t7747-repl.scala index c6a7e419aa14..0094d3ba98e1 100644 --- a/test/files/run/t7747-repl.scala +++ b/test/files/run/t7747-repl.scala @@ -8,8 +8,12 @@ object Test extends ReplTest { s } - // replace indylambda function names by - override def normalize(s: String) = """\$Lambda.*""".r.replaceAllIn(s, "") + override def normalize(s: String) = { + // replace indylambda function names by + val s2 = """\$Lambda.*""".r.replaceAllIn(s, "") + // Normalize ordering of LUB + s2.replace("Serializable with Product", "Product with Serializable") + } def code = """ |var x = 10 From cc36165cf298594208a94bd9f631bc4b0aada645 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Fri, 15 Jul 2016 19:49:07 +0200 Subject: [PATCH 0172/2793] Improve Scaladoc for Either: - remove text on projections - add for comprehensions --- src/library/scala/util/Either.scala | 72 +++++++++++++++++++---------- 1 file changed, 48 insertions(+), 24 deletions(-) diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala index 5c61d83a1a07..24f6d241518b 100644 --- a/src/library/scala/util/Either.scala +++ b/src/library/scala/util/Either.scala @@ -12,7 +12,7 @@ package scala package util /** Represents a value of one of two possible types (a disjoint union.) - * Instances of Either are either an instance of [[scala.util.Left]] or [[scala.util.Right]]. + * An instance of Either is either an instance of [[scala.util.Left]] or [[scala.util.Right]]. * * A common use of Either is as an alternative to [[scala.Option]] for dealing * with possible missing values. In this usage, [[scala.None]] is replaced @@ -47,32 +47,48 @@ package util * Left(23).map(_ * 2) // Left(23) * }}} * - * A ''projection'' can be used to selectively operate on a value of type Either, - * depending on whether it is of type Left or Right. For example, to transform an - * Either using a function, in the case where it's a Left, one can first apply - * the `left` projection and invoke `map` on that projected Either. If a `right` - * projection is applied to that Left, the original Left is returned, unmodified. 
- * + * As Either defines the methods `map` and `flatMap`, it can also be used in for comprehensions: * {{{ - * val l: Either[String, Int] = Left("flower") - * val r: Either[String, Int] = Right(12) - * l.left.map(_.size): Either[Int, Int] // Left(6) - * r.left.map(_.size): Either[Int, Int] // Right(12) - * l.right.map(_.toDouble): Either[String, Double] // Left("flower") - * r.right.map(_.toDouble): Either[String, Double] // Right(12.0) - * }}} + * val right1: Right[Double, Int] = Right(1) + * val right2 = Right(2) + * val right3 = Right(3) + * val left23: Left[Double, Int] = Left(23.0) + * val left42 = Left(42.0) * - * Like with other types which define a `map` method, the same can be achieved - * using a for-comprehension: - * {{{ - * for (s <- l.left) yield s.size // Left(6) - * }}} + * for ( + * a <- right1; + * b <- right2; + * c <- right3 + * ) yield a + b + c // Right(6) + * + * for ( + * a <- right1; + * b <- right2; + * c <- left23 + * ) yield a + b + c // Left(23.0) * - * To support multiple projections as generators in for-comprehensions, the Either - * type also defines a `flatMap` method. + * for ( + * a <- right1; + * b <- left23; + * c <- right2 + * ) yield a + b + c // Left(23.0) + * + * // It is advisable to provide the type of the “missing” value (especially the right value for `Left`) + * // as otherwise that type might be inferred as `Nothing` without context: + * for ( + * a <- left23; + * b <- right1; + * c <- left42 // type at this position: Either[Double, Nothing] + * ) yield a + b + c + * // ^ + * // error: ambiguous reference to overloaded definition, + * // both method + in class Int of type (x: Char)Int + * // and method + in class Int of type (x: Byte)Int + * // match argument types (Nothing) + * }}} * * @author Tony Morris, Workingmouse - * @version 1.0, 11/10/2008 + * @version 2.0, 2016-07-15 * @since 2.7 */ sealed abstract class Either[+A, +B] extends Product with Serializable { @@ -112,8 +128,16 @@ sealed abstract class Either[+A, +B] extends Product with Serializable { * If this is a `Left`, then return the left value in `Right` or vice versa. * * @example {{{ - * val l: Either[String, Int] = Left("left") - * val r: Either[Int, String] = l.swap // Result: Right("left") + * val left: Either[String, Int] = Left("left") + * val right: Either[Int, String] = left.swap // Result: Right("left") + * }}} + * @example {{{ + * val right = Right(2) + * val left = Left(3) + * for ( + * r1 <- right; + * r2 <- left.swap + * ) yield r1 * r2 // Right(6) + * }}} */ def swap: Either[B, A] = this match { From 11688eb95b88f02c89c5974c3ce22290b57a5374 Mon Sep 17 00:00:00 2001 From: Christopher Davenport Date: Fri, 15 Jul 2016 16:41:39 -0400 Subject: [PATCH 0173/2793] SI-9691 BufferedIterator should expose a headOption This exposes a new API to the BufferedIterator trait. It will return the next element of an iterator as an Option. The return will be Some(value) if there is a next value, and None if there is no next element.
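As a rough illustration of the intended usage (a sketch, not part of the patch; the sample collection values are made up):

```
import scala.collection.BufferedIterator

val it: BufferedIterator[Int] = List(1, 2, 3).iterator.buffered
it.headOption                                 // Some(1), does not advance the iterator
it.next()                                     // 1, the same element is still returned by next
List.empty[Int].iterator.buffered.headOption  // None, where calling head would throw
```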
--- .../scala/collection/BufferedIterator.scala | 6 ++++ .../junit/scala/collection/IteratorTest.scala | 28 +++++++++++++++++++ 2 files changed, 34 insertions(+) diff --git a/src/library/scala/collection/BufferedIterator.scala b/src/library/scala/collection/BufferedIterator.scala index e6e97d584cb3..1424ef2fd049 100644 --- a/src/library/scala/collection/BufferedIterator.scala +++ b/src/library/scala/collection/BufferedIterator.scala @@ -24,5 +24,11 @@ trait BufferedIterator[+A] extends Iterator[A] { */ def head: A + /** Returns an option of the next element of an iterator without advancing beyond it. + * @return the next element of this iterator if it has a next element + * `None` if it does not + */ + def headOption : Option[A] = if (hasNext) Some(head) else None + override def buffered: this.type = this } diff --git a/test/junit/scala/collection/IteratorTest.scala b/test/junit/scala/collection/IteratorTest.scala index 4df29e36c090..09061a3b29c0 100644 --- a/test/junit/scala/collection/IteratorTest.scala +++ b/test/junit/scala/collection/IteratorTest.scala @@ -214,4 +214,32 @@ class IteratorTest { assertSameElements(exp, res) assertEquals(8, counter) // was 14 } + // SI-9691 + @Test def bufferedHeadOptionReturnsValueWithHeadOrNone(): Unit = { + // Checks BufferedIterator returns Some(value) when there is a value + val validHeadOption = List(1,2,3).iterator.buffered.headOption + assertEquals(Some(1), validHeadOption) + // Checks BufferedIterator returns None when there is no value + val invalidHeadOption = List(1,2,3).iterator.drop(10).buffered.headOption + assertEquals(None: Option[Int], invalidHeadOption) + // Checks BufferedIterator returns Some(value) in the last position with a value + val validHeadOptionAtTail = List(1,2,3).iterator.drop(2).buffered.headOption + assertEquals(Some(3), validHeadOptionAtTail) + // Checks BufferedIterator returns None at the first position without a value + val invalidHeadOptionOnePastTail = List(1,2,3).iterator.drop(3).buffered.headOption + assertEquals(None, invalidHeadOptionOnePastTail) + // Checks BufferedIterator returns Some(null) if the next value is null. + val nullHandingList = List(null, "yellow").iterator.buffered.headOption + assertEquals(Some(null), nullHandingList) + // Checks that BufferedIterator is idempotent. That the head is not + // changed by its invocation, nor the headOption by the next call to head. + val it = List(1,2,3).iterator.buffered + val v1 = it.head + val v2 = it.headOption + val v3 = it.head + val v4 = it.headOption + assertEquals(v1, v3) + assertEquals(v2, v4) + assertEquals(Some(v1), v2) + } } From 9ac26c4626f906f4b561ec1fe9c308a0cf905608 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 15 Jul 2016 19:52:57 -0700 Subject: [PATCH 0174/2793] SI-9750 Tweak tests for what is a number Leaves the error string as is, but adds test to show how it looks. Java calls it a version number. `Not a version: 1.9`. Don't strip `1.` prefix recursively. (That was Snytt's fault.) 
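For illustration, a hedged sketch of the behavior the new test pins down (the first result depends on the running JVM's `java.specification.version`; the comments show the intent, not captured output):

```
import scala.util.Properties

Properties.isJavaAtLeast("1.8")    // fine: "1.8" is a valid spec version string
Properties.isJavaAtLeast("1.9")    // throws NumberFormatException("Not a version: 1.9")
Properties.isJavaAtLeast("1.1.8")  // throws: the "1." prefix is only stripped once, not recursively
```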
--- src/library/scala/util/Properties.scala | 2 +- test/junit/scala/util/SpecVersionTest.scala | 25 ++++----------------- 2 files changed, 5 insertions(+), 22 deletions(-) diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala index 6995f452fa68..fb28132dfe5a 100644 --- a/src/library/scala/util/Properties.scala +++ b/src/library/scala/util/Properties.scala @@ -185,7 +185,7 @@ private[scala] trait PropertiesTrait { def isJavaAtLeast(version: String): Boolean = { def versionOf(s: String): Int = s.indexOf('.') match { case 1 if s.charAt(0) == '1' => - val v = versionOf(s.substring(2)) + val v = s.substring(2).toInt if (v < 9) v else -1 case -1 => s.toInt case _ => -1 diff --git a/test/junit/scala/util/SpecVersionTest.scala b/test/junit/scala/util/SpecVersionTest.scala index 2b69f288fa84..4c16ff08fbe0 100644 --- a/test/junit/scala/util/SpecVersionTest.scala +++ b/test/junit/scala/util/SpecVersionTest.scala @@ -50,30 +50,13 @@ class SpecVersionTest { } @Test def variousBadVersionStrings(): Unit = { - assertThrows[NumberFormatException] { sut7 isJavaAtLeast "1.9" } + assertThrows[NumberFormatException](sut7.isJavaAtLeast("1.9"), _ == "Not a version: 1.9") assertThrows[NumberFormatException] { sut9 isJavaAtLeast "1.9" } assertThrows[NumberFormatException] { sut7 isJavaAtLeast "9.1" } assertThrows[NumberFormatException] { sut9 isJavaAtLeast "9.1" } - } - @Test(expected = classOf[NumberFormatException]) - def badVersion(): Unit = { - sut7 isJavaAtLeast "1.a" - } - @Test(expected = classOf[NumberFormatException]) - def noVersion(): Unit = { - sut7 isJavaAtLeast "" - } - @Test(expected = classOf[NumberFormatException]) - def dotOnly(): Unit = { - sut7 isJavaAtLeast "." - } - @Test(expected = classOf[NumberFormatException]) - def leadingDot(): Unit = { - sut7 isJavaAtLeast ".5" - } - @Test(expected = classOf[NumberFormatException]) - def notASpec(): Unit = { - sut7 isJavaAtLeast "1.7.1" + val badvs = List("1.1.8", "1.", "1.a", "", ".", ".5", "1.7.1") + + for (v <- badvs) assertThrows[NumberFormatException](sut7.isJavaAtLeast(v)) } } From 656162bb48fbbd703790a2c94d4563e40ddfdfc2 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 15 Jul 2016 21:47:53 -0700 Subject: [PATCH 0175/2793] SI-9750 isJavaAtLeast(Int) A good opportunity to simplify the API. Versions are strings, but a spec version is just a number. 
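A small sketch of how the new overload reads at a call site (illustrative only; the string form is the pre-existing API, shown for comparison):

```
import scala.util.Properties

// A spec version is just a number, so callers can pass an Int directly:
if (Properties.isJavaAtLeast(8))
  println("running on Java 8 or newer")

// Equivalent to the existing String-based overload:
Properties.isJavaAtLeast("8")
```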
--- src/library/scala/util/Properties.scala | 2 ++ test/junit/scala/util/SpecVersionTest.scala | 9 +++++++-- 2 files changed, 9 insertions(+), 2 deletions(-) diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala index fb28132dfe5a..1bdf50bac201 100644 --- a/src/library/scala/util/Properties.scala +++ b/src/library/scala/util/Properties.scala @@ -195,6 +195,8 @@ private[scala] trait PropertiesTrait { versionOf(javaSpecVersion) >= v } + def isJavaAtLeast(version: Int): Boolean = isJavaAtLeast(version.toString) + // provide a main method so version info can be obtained by running this def main(args: Array[String]) { val writer = new PrintWriter(Console.err, true) diff --git a/test/junit/scala/util/SpecVersionTest.scala b/test/junit/scala/util/SpecVersionTest.scala index 4c16ff08fbe0..9232c4721b40 100644 --- a/test/junit/scala/util/SpecVersionTest.scala +++ b/test/junit/scala/util/SpecVersionTest.scala @@ -23,33 +23,38 @@ class SpecVersionTest { // override because of vals like releaseVersion override lazy val scalaProps = new java.util.Properties } - val sut7 = new TestProperties("1.7") - val sut9 = new TestProperties("9") @Test def comparesJDK9Correctly(): Unit = { + val sut9 = new TestProperties("9") assert(sut9 isJavaAtLeast "1") assert(sut9 isJavaAtLeast "1.5") assert(sut9 isJavaAtLeast "5") assert(sut9 isJavaAtLeast "1.8") assert(sut9 isJavaAtLeast "8") assert(sut9 isJavaAtLeast "9") + assert(sut9.isJavaAtLeast(9)) } // SI-7265 @Test def comparesCorrectly(): Unit = { + val sut7 = new TestProperties("1.7") assert(sut7 isJavaAtLeast "1") assert(sut7 isJavaAtLeast "1.5") assert(sut7 isJavaAtLeast "5") assert(sut7 isJavaAtLeast "1.6") assert(sut7 isJavaAtLeast "1.7") + assert(sut7.isJavaAtLeast(7)) + assertFalse(sut7.isJavaAtLeast(9)) assertFalse(sut7 isJavaAtLeast "1.8") assertFalse(sut7 isJavaAtLeast "9") assertFalse(sut7 isJavaAtLeast "10") } @Test def variousBadVersionStrings(): Unit = { + val sut7 = new TestProperties("1.7") + val sut9 = new TestProperties("9") assertThrows[NumberFormatException](sut7.isJavaAtLeast("1.9"), _ == "Not a version: 1.9") assertThrows[NumberFormatException] { sut9 isJavaAtLeast "1.9" } assertThrows[NumberFormatException] { sut7 isJavaAtLeast "9.1" } From 905b52669973463070112643f9470ddac3c08795 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 5 Jul 2016 11:37:40 -0700 Subject: [PATCH 0176/2793] SI-9827 MatchIterator advances itself To avoid caveats about calling `next` (or `hasNext`) before using `MatchData` methods on `MatchIterator`, just do it internally as necessary. Note `MatchIterator` behavior in the docs. Added tests showing what people cried about. --- src/library/scala/util/matching/Regex.scala | 142 ++++++++++++------ .../junit/scala/util/matching/RegexTest.scala | 70 ++++++++- 2 files changed, 161 insertions(+), 51 deletions(-) diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index c4a3f1effa4e..ea9f02f85b4b 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -11,21 +11,14 @@ * with the main goal of pulling out information from those matches, or replacing * them with something else. * - * There are four classes and three objects, with most of them being members of - * Regex companion object. [[scala.util.matching.Regex]] is the class users instantiate - * to do regular expression matching. + * [[scala.util.matching.Regex]] is the class users instantiate to do regular expression matching. 
* - * The remaining classes and objects in the package are used in the following way: - * - * * The companion object to [[scala.util.matching.Regex]] just contains the other members. + * The companion object to [[scala.util.matching.Regex]] contains supporting members: * * [[scala.util.matching.Regex.Match]] makes more information about a match available. - * * [[scala.util.matching.Regex.MatchIterator]] is used to iterate over multiple matches. + * * [[scala.util.matching.Regex.MatchIterator]] is used to iterate over matched strings. * * [[scala.util.matching.Regex.MatchData]] is just a base trait for the above classes. * * [[scala.util.matching.Regex.Groups]] extracts group from a [[scala.util.matching.Regex.Match]] * without recomputing the match. - * * [[scala.util.matching.Regex.Match]] converts a [[scala.util.matching.Regex.Match]] - * into a [[java.lang.String]]. - * */ package scala.util.matching @@ -35,6 +28,7 @@ import java.util.regex.{ Pattern, Matcher } /** A regular expression is used to determine whether a string matches a pattern * and, if it does, to extract or transform the parts that match. * + * === Usage === * This class delegates to the [[java.util.regex]] package of the Java Platform. * See the documentation for [[java.util.regex.Pattern]] for details about * the regular expression syntax for pattern strings. @@ -53,6 +47,7 @@ import java.util.regex.{ Pattern, Matcher } * Since escapes are not processed in multi-line string literals, using triple quotes * avoids having to escape the backslash character, so that `"\\d"` can be written `"""\d"""`. * + * === Extraction === * To extract the capturing groups when a `Regex` is matched, use it as * an extractor in a pattern match: * @@ -92,48 +87,68 @@ import java.util.regex.{ Pattern, Matcher } * } * }}} * + * === Find Matches === * To find or replace matches of the pattern, use the various find and replace methods. - * There is a flavor of each method that produces matched strings and - * another that produces `Match` objects. + * For each method, there is a version for working with matched strings and + * another for working with `Match` objects. * * For example, pattern matching with an unanchored `Regex`, as in the previous example, - * is the same as using `findFirstMatchIn`, except that the findFirst methods return an `Option`, - * or `None` for no match: + * can also be accomplished using `findFirstMatchIn`. The `findFirst` methods return an `Option` + * which is non-empty if a match is found, or `None` for no match: * * {{{ * val dates = "Important dates in history: 2004-01-20, 1958-09-05, 2010-10-06, 2011-07-15" - * val firstDate = date findFirstIn dates getOrElse "No date found." 
- * val firstYear = for (m <- date findFirstMatchIn dates) yield m group 1 + * val firstDate = date.findFirstIn(dates).getOrElse("No date found.") + * val firstYear = for (m <- date.findFirstMatchIn(dates)) yield m.group(1) * }}} * * To find all matches: * * {{{ - * val allYears = for (m <- date findAllMatchIn dates) yield m group 1 + * val allYears = for (m <- date.findAllMatchIn(dates)) yield m.group(1) * }}} * - * But `findAllIn` returns a special iterator of strings that can be queried for the `MatchData` - * of the last match: + * To iterate over the matched strings, use `findAllIn`, which returns a special iterator + * that can be queried for the `MatchData` of the last match: * * {{{ - * val mi = date findAllIn dates - * val oldies = mi filter (_ => (mi group 1).toInt < 1960) map (s => s"$s: An oldie but goodie.") + * val mi = date.findAllIn(dates) + * while (mi.hasNext) { + * val d = mi.next + * if (mi.group(1).toInt < 1960) println(s"$d: An oldie but goodie.") * }}} * * Note that `findAllIn` finds matches that don't overlap. (See [[findAllIn]] for more examples.) * * {{{ * val num = """(\d+)""".r - * val all = (num findAllIn "123").toList // List("123"), not List("123", "23", "3") + * val all = num.findAllIn("123").toList // List("123"), not List("123", "23", "3") + * }}} + * + * Also, the "current match" of a `MatchIterator` may be advanced by either `hasNext` or `next`. + * By comparison, the `Iterator[Match]` returned by `findAllMatchIn` or `findAllIn.matchData` + * produces `Match` objects that remain valid after the iterator is advanced. + * + * {{{ + * val ns = num.findAllIn("1 2 3") + * ns.start // 0 + * ns.hasNext // true + * ns.start // 2 + * val ms = num.findAllMatchIn("1 2 3") + * val m = ms.next() + * m.start // 0 + * ms.hasNext // true + * m.start // still 0 * }}} * + * === Replace Text === * Text replacement can be performed unconditionally or as a function of the current match: * * {{{ - * val redacted = date replaceAllIn (dates, "XXXX-XX-XX") - * val yearsOnly = date replaceAllIn (dates, m => m group 1) - * val months = (0 to 11) map { i => val c = Calendar.getInstance; c.set(2014, i, 1); f"$c%tb" } - * val reformatted = date replaceAllIn (dates, _ match { case date(y,m,d) => f"${months(m.toInt - 1)} $d, $y" }) + * val redacted = date.replaceAllIn(dates, "XXXX-XX-XX") + * val yearsOnly = date.replaceAllIn(dates, m => m.group(1)) + * val months = (0 to 11).map { i => val c = Calendar.getInstance; c.set(2014, i, 1); f"$c%tb" } + * val reformatted = date.replaceAllIn(dates, _ match { case date(y,m,d) => f"${months(m.toInt - 1)} $d, $y" }) * }}} * * Pattern matching the `Match` against the `Regex` that created it does not reapply the `Regex`. @@ -142,7 +157,7 @@ import java.util.regex.{ Pattern, Matcher } * * {{{ * val docSpree = """2011(?:-\d{2}){2}""".r - * val docView = date replaceAllIn (dates, _ match { + * val docView = date.replaceAllIn(dates, _ match { * case docSpree() => "Historic doc spree!" 
* case _ => "Something else happened" * }) @@ -338,8 +353,8 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * {{{ * val hat = "hat[^a]+".r * val hathaway = "hathatthattthatttt" - * val hats = (hat findAllIn hathaway).toList // List(hath, hattth) - * val pos = (hat findAllMatchIn hathaway map (_.start)).toList // List(0, 7) + * val hats = hat.findAllIn(hathaway).toList // List(hath, hattth) + * val pos = hat.findAllMatchIn(hathaway).map(_.start).toList // List(0, 7) * }}} * * To return overlapping matches, it is possible to formulate a regular expression @@ -347,13 +362,13 @@ class Regex private[matching](val pattern: Pattern, groupNames: String*) extends * * {{{ * val madhatter = "(h)(?=(at[^a]+))".r - * val madhats = (madhatter findAllMatchIn hathaway map { + * val madhats = madhatter.findAllMatchIn(hathaway).map { * case madhatter(x,y) => s"$x$y" - * }).toList // List(hath, hatth, hattth, hatttt) + * }.toList // List(hath, hatth, hattth, hatttt) * }}} * - * Attempting to retrieve match information before performing the first match - * or after exhausting the iterator results in [[java.lang.IllegalStateException]]. + * Attempting to retrieve match information after exhausting the iterator + * results in [[java.lang.IllegalStateException]]. * See [[scala.util.matching.Regex.MatchIterator]] for details. * * @param source The text to match against. @@ -743,11 +758,13 @@ object Regex { /** A class to step through a sequence of regex matches. * - * All methods inherited from [[scala.util.matching.Regex.MatchData]] will throw - * a [[java.lang.IllegalStateException]] until the matcher is initialized. The - * matcher can be initialized by calling `hasNext` or `next()` or causing these - * methods to be called, such as by invoking `toString` or iterating through - * the iterator's elements. + * This is an iterator that returns the matched strings. + * + * Queries about match data pertain to the current state of the underlying + * matcher, which is advanced by calling `hasNext` or `next`. + * + * When matches are exhausted, queries about match data will throw + * [[java.lang.IllegalStateException]]. * * @see [[java.util.regex.Matcher]] */ @@ -755,37 +772,62 @@ object Regex { extends AbstractIterator[String] with Iterator[String] with MatchData { self => protected[Regex] val matcher = regex.pattern.matcher(source) - private var nextSeen = false - /** Is there another match? */ + // 0 = not yet matched, 1 = matched, 2 = advanced to match, 3 = no more matches + private[this] var nextSeen = 0 + + /** Return true if `next` will find a match. + * As a side effect, advance the underlying matcher if necessary; + * queries about the current match data pertain to the underlying matcher. + */ def hasNext: Boolean = { - if (!nextSeen) nextSeen = matcher.find() - nextSeen + nextSeen match { + case 0 => nextSeen = if (matcher.find()) 1 else 3 + case 1 => () + case 2 => nextSeen = 0 ; hasNext + case 3 => () + } + nextSeen == 1 // otherwise, 3 } - /** The next matched substring of `source`. */ + /** The next matched substring of `source`. + * As a side effect, advance the underlying matcher if necessary. + */ def next(): String = { - if (!hasNext) throw new NoSuchElementException - nextSeen = false + nextSeen match { + case 0 => if (!hasNext) throw new NoSuchElementException ; next() + case 1 => nextSeen = 2 + case 2 => nextSeen = 0 ; next() + case 3 => throw new NoSuchElementException + } matcher.group } + /** Report emptiness. 
*/ override def toString = super[AbstractIterator].toString + // ensure we're at a match + private[this] def ensure(): Unit = nextSeen match { + case 0 => if (!hasNext) throw new IllegalStateException + case 1 => () + case 2 => () + case 3 => throw new IllegalStateException + } + /** The index of the first matched character. */ - def start: Int = matcher.start + def start: Int = { ensure() ; matcher.start } /** The index of the first matched character in group `i`. */ - def start(i: Int): Int = matcher.start(i) + def start(i: Int): Int = { ensure() ; matcher.start(i) } /** The index of the last matched character. */ - def end: Int = matcher.end + def end: Int = { ensure() ; matcher.end } /** The index following the last matched character in group `i`. */ - def end(i: Int): Int = matcher.end(i) + def end(i: Int): Int = { ensure() ; matcher.end(i) } /** The number of subgroups. */ - def groupCount = matcher.groupCount + def groupCount = { ensure() ; matcher.groupCount } /** Convert to an iterator that yields MatchData elements instead of Strings. */ def matchData: Iterator[Match] = new AbstractIterator[Match] { diff --git a/test/junit/scala/util/matching/RegexTest.scala b/test/junit/scala/util/matching/RegexTest.scala index 06d0445e1ce6..d80e05e512df 100644 --- a/test/junit/scala/util/matching/RegexTest.scala +++ b/test/junit/scala/util/matching/RegexTest.scala @@ -85,8 +85,9 @@ class RegexTest { assertFalse(ms.hasNext) } - //type NoGroup = NoSuchElementException type NoGroup = IllegalArgumentException + type NoMatch = NoSuchElementException + type NoData = IllegalStateException @Test def `SI-9666: throw on bad name`(): Unit = { assertThrows[NoGroup] { @@ -108,4 +109,71 @@ class RegexTest { ms group "Bee" } } + + @Test def `SI-9827 MatchIterator ergonomics`(): Unit = { + val r = "(ab)(cd)".r + val s = "xxxabcdyyyabcdzzz" + assertEquals(3, r.findAllIn(s).start) + assertEquals(5, r.findAllIn(s).start(2)) + locally { + val mi = r.findAllIn(s) + assertTrue(mi.hasNext) + assertEquals(3, mi.start) + assertEquals("abcd", mi.next()) + assertEquals(3, mi.start) + assertTrue(mi.hasNext) + assertEquals(10, mi.start) + } + locally { + val mi = r.findAllIn(s) + assertEquals("abcd", mi.next()) + assertEquals(3, mi.start) + assertEquals("abcd", mi.next()) + assertEquals(10, mi.start) + assertThrows[NoMatch] { mi.next() } + assertThrows[NoData] { mi.start } + } + locally { + val mi = r.findAllIn("") + assertThrows[NoData] { mi.start } + assertThrows[NoMatch] { mi.next() } + } + locally { + val mi = r.findAllMatchIn(s) + val x = mi.next() + assertEquals("abcd", x.matched) + assertEquals(3, x.start) + val y = mi.next() + assertEquals("abcd", y.matched) + assertEquals(10, y.start) + assertThrows[NoMatch] { mi.next() } + assertEquals(3, x.start) + assertEquals(10, y.start) + } + locally { + val regex = "(foo)-(.*)".r + val s = "foo-abc-def" + val result = regex.findAllIn(s) + //result.toString // comment this line to make it not work + val r = (result.group(1), result.group(2)) + assertEquals(("foo", "abc-def"), r) + } + locally { + val t = "this is a test" + val rx = " ".r + val m = rx.findAllIn(t) + assertEquals(5, rx.findAllIn(t).end) + } + locally { + val data = "aaaaabbbbbbccccccc" + val p = "^(.+)(.+)(.+)$".r + val parts = p.findAllIn(data) + val aes = parts.group(1) + val bes = parts.group(2) + val ces = parts.group(3) + assertEquals("ccccccc", ces) + assertEquals("bbbbbb", bes) + assertEquals("aaaaa", aes) + } + } } From 2c2fd4f4b63e1d9a6ee1243637ad8bcb0deb11d6 Mon Sep 17 00:00:00 2001 From: Lukas Rytz 
Date: Tue, 19 Jul 2016 14:45:15 +0200 Subject: [PATCH 0177/2793] SD-20 Include static methods in the InlineInfo in mixed compilation In mixed compilation, the InlineInfo for a Java-defined class is created using the class symbol (vs in separate compilation, where the info is created by looking at the classfile and its methods). The Scala compiler puts static Java methods into the companion symbol, and we forgot to include them in the list of methods in the InlineInfo. --- .../scala/tools/nsc/backend/jvm/BTypes.scala | 3 +- .../nsc/backend/jvm/BTypesFromSymbols.scala | 9 ++++- .../nsc/backend/jvm/BackendReporting.scala | 4 +- .../nsc/backend/jvm/opt/InlineInfoTest.scala | 20 +++++++++- .../backend/jvm/opt/InlineWarningTest.scala | 37 ++++++++++++++++++- .../nsc/backend/jvm/opt/InlinerTest.scala | 4 +- 6 files changed, 67 insertions(+), 10 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index 7b2686e7a9be..f6b640bea428 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -1145,8 +1145,7 @@ object BTypes { final case class InlineInfo(isEffectivelyFinal: Boolean, sam: Option[String], methodInfos: Map[String, MethodInlineInfo], - warning: Option[ClassInlineInfoWarning]) { - } + warning: Option[ClassInlineInfoWarning]) val EmptyInlineInfo = InlineInfo(false, None, Map.empty, None) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 1a4590e7d175..e0f0f269cbfa 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -557,9 +557,16 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { var warning = Option.empty[ClassSymbolInfoFailureSI9111] + def keepMember(sym: Symbol) = sym.isMethod && !scalaPrimitives.isPrimitive(sym) + val classMethods = classSym.info.decls.iterator.filter(keepMember) + val methods = if (!classSym.isJavaDefined) classMethods else { + val staticMethods = classSym.companionModule.info.decls.iterator.filter(m => !m.isConstructor && keepMember(m)) + staticMethods ++ classMethods + } + // Primitive methods cannot be inlined, so there's no point in building a MethodInlineInfo. Also, some // primitive methods (e.g., `isInstanceOf`) have non-erased types, which confuses [[typeToBType]]. - val methodInlineInfos = classSym.info.decls.iterator.filter(m => m.isMethod && !scalaPrimitives.isPrimitive(m)).flatMap({ + val methodInlineInfos = methods.flatMap({ case methodSym => if (completeSilentlyAndCheckErroneous(methodSym)) { // Happens due to SI-9111. Just don't provide any MethodInlineInfo for that method, we don't need fail the compiler. diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala index 7b640ac54f53..72a371cabc71 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala @@ -96,8 +96,8 @@ object BackendReporting { val missingClassWarning = missingClass match { case None => "" case Some(c) => - if (c.definedInJavaSource) s"\nNote that the parent class ${c.internalName} is defined in a Java source (mixed compilation), no bytecode is available." - else s"\nNote that the parent class ${c.internalName} could not be found on the classpath."
+ if (c.definedInJavaSource) s"\nNote that class ${c.internalName} is defined in a Java source (mixed compilation), no bytecode is available." + else s"\nNote that class ${c.internalName} could not be found on the classpath." } s"The method $name$descriptor could not be found in the class $ownerInternalName or any of its parents." + missingClassWarning diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala index a691d634718f..6f098e1432a0 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala @@ -2,18 +2,20 @@ package scala.tools.nsc package backend.jvm package opt +import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 import scala.collection.JavaConverters._ import scala.collection.generic.Clearable +import scala.tools.nsc.backend.jvm.BTypes.MethodInlineInfo import scala.tools.nsc.backend.jvm.BackendReporting._ import scala.tools.testing.BytecodeTesting @RunWith(classOf[JUnit4]) class InlineInfoTest extends BytecodeTesting { - import compiler.global + import compiler._ import global.genBCode.bTypes override def compilerArgs = "-opt:l:classpath" @@ -59,4 +61,20 @@ class InlineInfoTest extends BytecodeTesting { assert(fromSyms == fromAttrs) } + + @Test // scala-dev#20 + def javaStaticMethodsInlineInfoInMixedCompilation(): Unit = { + val jCode = + """public class A { + | public static final int bar() { return 100; } + | public final int baz() { return 100; } + |} + """.stripMargin + compileClasses("class C { new A }", javaCode = List((jCode, "A.java"))) + val info = global.genBCode.bTypes.classBTypeFromInternalName("A").info.get.inlineInfo + assertEquals(info.methodInfos, Map( + "bar()I" -> MethodInlineInfo(true,false,false), + "()V" -> MethodInlineInfo(false,false,false), + "baz()I" -> MethodInlineInfo(true,false,false))) + } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala index 5254d7e1f29f..5bd2ce68f1be 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineWarningTest.scala @@ -75,12 +75,12 @@ class InlineWarningTest extends BytecodeTesting { val warns = List( """failed to determine if bar should be inlined: |The method bar()I could not be found in the class A or any of its parents. - |Note that the parent class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin, + |Note that class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin, """B::flop()I is annotated @inline but could not be inlined: |Failed to check if B::flop()I can be safely inlined to B without causing an IllegalAccessError. Checking instruction INVOKESTATIC A.bar ()I failed: |The method bar()I could not be found in the class A or any of its parents. 
- |Note that the parent class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin) + |Note that class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin) var c = 0 val List(b) = compileToBytes(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; warns.tail.exists(i.msg contains _)}) @@ -168,4 +168,37 @@ class InlineWarningTest extends BytecodeTesting { compileToBytes(code, allowMessage = i => { c += 1; i.msg contains warn }) assert(c == 1, c) } + + @Test // scala-dev#20 + def mixedCompilationSpuriousWarning(): Unit = { + val jCode = + """public class A { + | public static final int bar() { return 100; } + | public final int baz() { return 100; } + |} + """.stripMargin + + val sCode = + """class C { + | @inline final def foo = A.bar() + | @inline final def fii(a: A) = a.baz() + | def t = foo + fii(new A) + |} + """.stripMargin + + val warns = List( + """C::foo()I is annotated @inline but could not be inlined: + |Failed to check if C::foo()I can be safely inlined to C without causing an IllegalAccessError. Checking instruction INVOKESTATIC A.bar ()I failed: + |The method bar()I could not be found in the class A or any of its parents. + |Note that class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin, + + """C::fii(LA;)I is annotated @inline but could not be inlined: + |Failed to check if C::fii(LA;)I can be safely inlined to C without causing an IllegalAccessError. Checking instruction INVOKEVIRTUAL A.baz ()I failed: + |The method baz()I could not be found in the class A or any of its parents. + |Note that class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin + ) + var c = 0 + compileClasses(sCode, javaCode = List((jCode, "A.java")), allowMessage = i => { c += 1; warns.exists(i.msg.contains)}) + assert(c == 2) + } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index f531ce932238..0f292517ef0f 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -416,7 +416,7 @@ class InlinerTest extends BytecodeTesting { """B::flop()I is annotated @inline but could not be inlined: |Failed to check if B::flop()I can be safely inlined to B without causing an IllegalAccessError. Checking instruction INVOKESTATIC A.bar ()I failed: |The method bar()I could not be found in the class A or any of its parents. - |Note that the parent class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin + |Note that class A is defined in a Java source (mixed compilation), no bytecode is available.""".stripMargin var c = 0 val List(b) = compile(scalaCode, List((javaCode, "A.java")), allowMessage = i => {c += 1; i.msg contains warn}) @@ -819,7 +819,7 @@ class InlinerTest extends BytecodeTesting { val warn = """failed to determine if should be inlined: |The method ()V could not be found in the class A$Inner or any of its parents. 
- |Note that the parent class A$Inner could not be found on the classpath.""".stripMargin + |Note that class A$Inner could not be found on the classpath.""".stripMargin var c = 0 From 2b1e4efa0a8f30bda7fde8492e8ecfdcbf4801cb Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 19 Jul 2016 12:43:27 +0200 Subject: [PATCH 0178/2793] SD-48 limit the length of inlined local variable names When inlining local variables, the names are prefixed with the callee method name. In long chains of inlining, these names can grow indefinitely. This commit introduces a limit. --- .../scala/tools/nsc/backend/jvm/BTypes.scala | 3 ++ .../nsc/backend/jvm/opt/BytecodeUtils.scala | 36 +++++++++++++----- .../tools/nsc/backend/jvm/opt/Inliner.scala | 2 +- .../nsc/backend/jvm/opt/InlinerTest.scala | 37 +++++++++++++++++++ 4 files changed, 68 insertions(+), 10 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index 7b2686e7a9be..e04e73304f80 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -1164,4 +1164,7 @@ object BTypes { // no static way (without symbol table instance) to get to nme.ScalaATTR / ScalaSignatureATTR val ScalaAttributeName = "Scala" val ScalaSigAttributeName = "ScalaSig" + + // when inlining, local variable names of the callee are prefixed with the name of the callee method + val InlinedLocalVariablePrefixMaxLenght = 128 } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala index e21c46dbe99e..bfd92cac5cd7 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -324,15 +324,33 @@ object BytecodeUtils { * Clone the local variable descriptors of `methodNode` and map their `start` and `end` labels * according to the `labelMap`.
*/ - def cloneLocalVariableNodes(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode], prefix: String, shift: Int): List[LocalVariableNode] = { - methodNode.localVariables.iterator().asScala.map(localVariable => new LocalVariableNode( - prefix + localVariable.name, - localVariable.desc, - localVariable.signature, - labelMap(localVariable.start), - labelMap(localVariable.end), - localVariable.index + shift - )).toList + def cloneLocalVariableNodes(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode], calleeMethodName: String, shift: Int): List[LocalVariableNode] = { + methodNode.localVariables.iterator().asScala.map(localVariable => { + val name = + if (calleeMethodName.length + localVariable.name.length < BTypes.InlinedLocalVariablePrefixMaxLenght) { + calleeMethodName + "_" + localVariable.name + } else { + val parts = localVariable.name.split("_").toVector + val (methNames, varName) = (calleeMethodName +: parts.init, parts.last) + // keep at least 5 characters per method name + val maxNumMethNames = BTypes.InlinedLocalVariablePrefixMaxLenght / 5 + val usedMethNames = + if (methNames.length < maxNumMethNames) methNames + else { + val half = maxNumMethNames / 2 + methNames.take(half) ++ methNames.takeRight(half) + } + val charsPerMethod = BTypes.InlinedLocalVariablePrefixMaxLenght / usedMethNames.length + usedMethNames.foldLeft("")((res, methName) => res + methName.take(charsPerMethod) + "_") + varName + } + new LocalVariableNode( + name, + localVariable.desc, + localVariable.signature, + labelMap(localVariable.start), + labelMap(localVariable.end), + localVariable.index + shift) + }).toList } /** diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala index 9c5a1a9f980b..50dd65c56c3f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala @@ -382,7 +382,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { callsiteMethod.instructions.insert(callsiteInstruction, clonedInstructions) callsiteMethod.instructions.remove(callsiteInstruction) - callsiteMethod.localVariables.addAll(cloneLocalVariableNodes(callee, labelsMap, callee.name + "_", localVarShift).asJava) + callsiteMethod.localVariables.addAll(cloneLocalVariableNodes(callee, labelsMap, callee.name, localVarShift).asJava) // prepend the handlers of the callee. the order of handlers matters: when an exception is thrown // at some instruction, the first handler guarding that instruction and having a matching exception // type is executed. 
prepending the callee's handlers makes sure to test those handlers first if diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index f531ce932238..f88b95eae49b 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -1587,4 +1587,41 @@ class InlinerTest extends BytecodeTesting { val List(c, t) = compile(code) assertNoIndy(getMethod(c, "t1")) } + + @Test + def limitInlinedLocalVariableNames(): Unit = { + val code = + """class C { + | def f(x: Int): Int = x + | @inline final def methodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(param: Int) = + | f(param) + | @inline final def anotherMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(param: Int) = + | methodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(f(param)) + | @inline final def oneMoreMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(param: Int) = + | anotherMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(f(param)) + | @inline final def yetAnotherMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(param: Int) = + | oneMoreMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(f(param)) + | @inline final def oneLastMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(param: Int) = + | yetAnotherMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(f(param)) + | def t(p: Int) = + | oneLastMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(f(p)) + + | oneLastMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence(f(p)) + |} + """.stripMargin + + val List(c) = compile(code) + assertEquals(getAsmMethod(c, "t").localVariables.asScala.toList.map(l => (l.name, l.index)).sortBy(_._2),List( + ("this",0), + ("p",1), + ("oneLastMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence_param",2), + ("oneLastMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchS_yetAnotherMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFren_param",3), + ("oneLastMethodWithVeryVeryLongNameAlmostLik_yetAnotherMethodWithVeryVeryLongNameAlmost_oneMoreMethodWithVeryVeryLongNameAlmostLik_param",4), + ("oneLastMethodWithVeryVeryLongNam_yetAnotherMethodWithVeryVeryLong_oneMoreMethodWithVeryVeryLongNam_anotherMethodWithVeryVeryLongNam_param",5), + ("oneLastMethodWithVeryVery_yetAnotherMethodWithVeryV_oneMoreMethodWithVeryVery_anotherMethodWithVeryVery_methodWithVeryVeryLongNam_param",6), + ("oneLastMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchSentence_param",7), + ("oneLastMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFrenchS_yetAnotherMethodWithVeryVeryLongNameAlmostLikeAGermanWordOrAFren_param",8), + ("oneLastMethodWithVeryVeryLongNameAlmostLik_yetAnotherMethodWithVeryVeryLongNameAlmost_oneMoreMethodWithVeryVeryLongNameAlmostLik_param",9), + ("oneLastMethodWithVeryVeryLongNam_yetAnotherMethodWithVeryVeryLong_oneMoreMethodWithVeryVeryLongNam_anotherMethodWithVeryVeryLongNam_param",10), + ("oneLastMethodWithVeryVery_yetAnotherMethodWithVeryV_oneMoreMethodWithVeryVery_anotherMethodWithVeryVery_methodWithVeryVeryLongNam_param",11))) + } } From f510aa55cfd537788720f9bb89409f0373d8c471 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 6 Jul 2016 21:00:43 +0200 Subject: [PATCH 0179/2793] Deprecate scala.remote --- src/library/scala/remote.scala | 1 + test/files/jvm/annotations.check | 3 +++ test/files/jvm/annotations.flags | 1 + 3 files 
changed, 5 insertions(+) create mode 100644 test/files/jvm/annotations.flags diff --git a/src/library/scala/remote.scala b/src/library/scala/remote.scala index 4b16651af95f..7265a1519454 100644 --- a/src/library/scala/remote.scala +++ b/src/library/scala/remote.scala @@ -24,4 +24,5 @@ package scala * } * }}} */ +@deprecated("extend java.rmi.Remote instead and add @throws[java.rmi.RemoteException] to public methods", "2.12.0") class remote extends scala.annotation.StaticAnnotation {} diff --git a/test/files/jvm/annotations.check b/test/files/jvm/annotations.check index a8dc5ecdd16f..43f85ca199cb 100644 --- a/test/files/jvm/annotations.check +++ b/test/files/jvm/annotations.check @@ -1,3 +1,6 @@ +annotations.scala:7: warning: class remote in package scala is deprecated (since 2.12.0): extend java.rmi.Remote instead and add @throws[java.rmi.RemoteException] to public methods + def foo: Unit = () + ^ class java.rmi.RemoteException class java.io.IOException @java.lang.Deprecated() diff --git a/test/files/jvm/annotations.flags b/test/files/jvm/annotations.flags new file mode 100644 index 000000000000..c36e713ab84b --- /dev/null +++ b/test/files/jvm/annotations.flags @@ -0,0 +1 @@ +-deprecation \ No newline at end of file From 4c33a55c9596eea109ac583dc6b5896fec64c0c1 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 6 Jul 2016 21:32:30 +0200 Subject: [PATCH 0180/2793] Minor cleanups in GenBCode --- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 35 +++++++++---------- 1 file changed, 16 insertions(+), 19 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index df3c2cb3d5ef..d779490ba84e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -883,25 +883,22 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { * * must-single-thread */ - private def addForwarder(isRemoteClass: Boolean, jclass: asm.ClassVisitor, module: Symbol, m: Symbol): Unit = { - def staticForwarderGenericSignature(sym: Symbol, moduleClass: Symbol): String = { - if (sym.isDeferred) null // only add generic signature if method concrete; bug #1745 - else { - // SI-3452 Static forwarder generation uses the same erased signature as the method if forwards to. - // By rights, it should use the signature as-seen-from the module class, and add suitable - // primitive and value-class boxing/unboxing. - // But for now, just like we did in mixin, we just avoid writing a wrong generic signature - // (one that doesn't erase to the actual signature). See run/t3452b for a test case. - val memberTpe = enteringErasure(moduleClass.thisType.memberInfo(sym)) - val erasedMemberType = erasure.erasure(sym)(memberTpe) - if (erasedMemberType =:= sym.info) - getGenericSignature(sym, moduleClass, memberTpe) - else null - } + private def addForwarder(isRemoteClass: Boolean, jclass: asm.ClassVisitor, moduleClass: Symbol, m: Symbol): Unit = { + def staticForwarderGenericSignature: String = { + // SI-3452 Static forwarder generation uses the same erased signature as the method if forwards to. + // By rights, it should use the signature as-seen-from the module class, and add suitable + // primitive and value-class boxing/unboxing. + // But for now, just like we did in mixin, we just avoid writing a wrong generic signature + // (one that doesn't erase to the actual signature). See run/t3452b for a test case. 
+ val memberTpe = enteringErasure(moduleClass.thisType.memberInfo(m)) + val erasedMemberType = erasure.erasure(m)(memberTpe) + if (erasedMemberType =:= m.info) + getGenericSignature(m, moduleClass, memberTpe) + else null } - val moduleName = internalName(module) - val methodInfo = module.thisType.memberInfo(m) + val moduleName = internalName(moduleClass) + val methodInfo = moduleClass.thisType.memberInfo(m) val paramJavaTypes: List[BType] = methodInfo.paramTypes map typeToBType // val paramNames = 0 until paramJavaTypes.length map ("x_" + _) @@ -916,7 +913,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { ) // TODO needed? for(ann <- m.annotations) { ann.symbol.initialize } - val jgensig = staticForwarderGenericSignature(m, module) + val jgensig = staticForwarderGenericSignature addRemoteExceptionAnnot(isRemoteClass, hasPublicBitSet(flags), m) val (throws, others) = m.annotations partition (_.symbol == definitions.ThrowsClass) val thrownExceptions: List[String] = getExceptions(throws) @@ -937,7 +934,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { mirrorMethod.visitCode() - mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, strMODULE_INSTANCE_FIELD, classBTypeFromSymbol(module).descriptor) + mirrorMethod.visitFieldInsn(asm.Opcodes.GETSTATIC, moduleName, strMODULE_INSTANCE_FIELD, classBTypeFromSymbol(moduleClass).descriptor) var index = 0 for(jparamType <- paramJavaTypes) { From e619b033350a3378d650db4c3e5b1bfc83b73d81 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 20 Jul 2016 17:36:54 +0200 Subject: [PATCH 0181/2793] Upgrade asm to 5.1 The constructor of scala.tools.asm.Handle now takes an additional boolean parameter to denote whether the owner is an interface. --- .../nsc/backend/jvm/BCodeBodyBuilder.scala | 8 +++-- .../nsc/backend/jvm/BTypesFromSymbols.scala | 3 +- .../tools/nsc/backend/jvm/CoreBTypes.scala | 34 +++++++++++++++---- .../tools/nsc/backend/jvm/opt/CallGraph.scala | 16 +-------- .../scala/tools/partest/ASMConverters.scala | 6 ++-- .../scala/reflect/internal/StdNames.scala | 1 + test/files/run/classfile-format-51.scala | 2 +- test/files/run/noInlineUnknownIndy/Test.scala | 7 +++- versions.properties | 2 +- 9 files changed, 47 insertions(+), 32 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index 55fe47bde61f..acedf8301665 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -1334,11 +1334,13 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { val isStaticMethod = lambdaTarget.hasFlag(Flags.STATIC) def asmType(sym: Symbol) = classBTypeFromSymbol(sym).toASMType + val isInterface = lambdaTarget.owner.isTrait val implMethodHandle = - new asm.Handle(if (lambdaTarget.hasFlag(Flags.STATIC)) asm.Opcodes.H_INVOKESTATIC else if (lambdaTarget.owner.isTrait) asm.Opcodes.H_INVOKEINTERFACE else asm.Opcodes.H_INVOKEVIRTUAL, + new asm.Handle(if (lambdaTarget.hasFlag(Flags.STATIC)) asm.Opcodes.H_INVOKESTATIC else if (isInterface) asm.Opcodes.H_INVOKEINTERFACE else asm.Opcodes.H_INVOKEVIRTUAL, classBTypeFromSymbol(lambdaTarget.owner).internalName, lambdaTarget.name.toString, - methodBTypeFromSymbol(lambdaTarget).descriptor) + methodBTypeFromSymbol(lambdaTarget).descriptor, + /* itf = */ isInterface) val receiver = if (isStaticMethod) Nil else lambdaTarget.owner :: Nil val (capturedParams, lambdaParams) = 
lambdaTarget.paramss.head.splitAt(lambdaTarget.paramss.head.length - arity) // Requires https://github.com/scala/scala-java8-compat on the runtime classpath @@ -1351,7 +1353,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { val flags = java.lang.invoke.LambdaMetafactory.FLAG_SERIALIZABLE | java.lang.invoke.LambdaMetafactory.FLAG_MARKERS val ScalaSerializable = classBTypeFromSymbol(definitions.SerializableClass).toASMType - bc.jmethod.visitInvokeDynamicInsn(samName, invokedType, lambdaMetaFactoryBootstrapHandle, + bc.jmethod.visitInvokeDynamicInsn(samName, invokedType, lambdaMetaFactoryAltMetafactoryHandle, /* samMethodType = */ samMethodType, /* implMethod = */ implMethodHandle, /* instantiatedMethodType = */ constrainedType, diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 1a4590e7d175..383347a0d321 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -157,7 +157,8 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { def staticHandleFromSymbol(sym: Symbol): asm.Handle = { val owner = if (sym.owner.isModuleClass) sym.owner.linkedClassOfClass else sym.owner val descriptor = methodBTypeFromMethodType(sym.info, isConstructor = false).descriptor - new asm.Handle(asm.Opcodes.H_INVOKESTATIC, classBTypeFromSymbol(owner).internalName, sym.name.encoded, descriptor) + val ownerBType = classBTypeFromSymbol(owner) + new asm.Handle(asm.Opcodes.H_INVOKESTATIC, ownerBType.internalName, sym.name.encoded, descriptor, /* itf = */ ownerBType.isInterface.get) } /** diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala index d65380aa1fc7..c2010d282828 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala @@ -248,7 +248,22 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) { }) } - lazy val lambdaMetaFactoryBootstrapHandle = + lazy val lambdaMetaFactoryMetafactoryHandle = + new asm.Handle(asm.Opcodes.H_INVOKESTATIC, + coreBTypes.jliLambdaMetafactoryRef.internalName, sn.Metafactory.toString, + MethodBType( + List( + coreBTypes.jliMethodHandlesLookupRef, + coreBTypes.StringRef, + coreBTypes.jliMethodTypeRef, + coreBTypes.jliMethodTypeRef, + coreBTypes.jliMethodHandleRef, + coreBTypes.jliMethodTypeRef), + coreBTypes.jliCallSiteRef + ).descriptor, + /* itf = */ coreBTypes.jliLambdaMetafactoryRef.isInterface.get) + + lazy val lambdaMetaFactoryAltMetafactoryHandle = new asm.Handle(asm.Opcodes.H_INVOKESTATIC, coreBTypes.jliLambdaMetafactoryRef.internalName, sn.AltMetafactory.toString, MethodBType( @@ -258,7 +273,8 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) { coreBTypes.jliMethodTypeRef, ArrayBType(ObjectRef)), coreBTypes.jliCallSiteRef - ).descriptor) + ).descriptor, + /* itf = */ coreBTypes.jliLambdaMetafactoryRef.isInterface.get) lazy val lambdaDeserializeBootstrapHandle = new scala.tools.asm.Handle(scala.tools.asm.Opcodes.H_INVOKESTATIC, @@ -270,7 +286,8 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) { coreBTypes.jliMethodTypeRef ), coreBTypes.jliCallSiteRef - ).descriptor) + ).descriptor, + /* itf = */ coreBTypes.srLambdaDeserialize.isInterface.get) } /** @@ -299,6 +316,7 @@ trait CoreBTypesProxyGlobalIndependent[BTS <: 
BTypes] { def juHashMapRef : ClassBType def juMapRef : ClassBType def jliCallSiteRef : ClassBType + def jliLambdaMetafactoryRef : ClassBType def jliMethodTypeRef : ClassBType def jliSerializedLambdaRef : ClassBType def jliMethodHandleRef : ClassBType @@ -322,8 +340,9 @@ trait CoreBTypesProxyGlobalIndependent[BTS <: BTypes] { def srRefConstructors : Map[InternalName, MethodNameAndType] def tupleClassConstructors : Map[InternalName, MethodNameAndType] - def lambdaMetaFactoryBootstrapHandle : asm.Handle - def lambdaDeserializeBootstrapHandle : asm.Handle + def lambdaMetaFactoryMetafactoryHandle : asm.Handle + def lambdaMetaFactoryAltMetafactoryHandle : asm.Handle + def lambdaDeserializeBootstrapHandle : asm.Handle } /** @@ -405,6 +424,7 @@ final class CoreBTypesProxy[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: def String_valueOf: Symbol = _coreBTypes.String_valueOf - def lambdaMetaFactoryBootstrapHandle = _coreBTypes.lambdaMetaFactoryBootstrapHandle - def lambdaDeserializeBootstrapHandle = _coreBTypes.lambdaDeserializeBootstrapHandle + def lambdaMetaFactoryMetafactoryHandle : asm.Handle = _coreBTypes.lambdaMetaFactoryMetafactoryHandle + def lambdaMetaFactoryAltMetafactoryHandle : asm.Handle = _coreBTypes.lambdaMetaFactoryAltMetafactoryHandle + def lambdaDeserializeBootstrapHandle : asm.Handle = _coreBTypes.lambdaDeserializeBootstrapHandle } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala index 5248183337fa..b088b5ee481d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala @@ -413,22 +413,8 @@ class CallGraph[BT <: BTypes](val btypes: BT) { final case class LambdaMetaFactoryCall(indy: InvokeDynamicInsnNode, samMethodType: Type, implMethod: Handle, instantiatedMethodType: Type) object LambdaMetaFactoryCall { - private val lambdaMetaFactoryInternalName: InternalName = "java/lang/invoke/LambdaMetafactory" - - private val metafactoryHandle = { - val metafactoryMethodName: String = "metafactory" - val metafactoryDesc: String = "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite;" - new Handle(Opcodes.H_INVOKESTATIC, lambdaMetaFactoryInternalName, metafactoryMethodName, metafactoryDesc) - } - - private val altMetafactoryHandle = { - val altMetafactoryMethodName: String = "altMetafactory" - val altMetafactoryDesc: String = "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;" - new Handle(Opcodes.H_INVOKESTATIC, lambdaMetaFactoryInternalName, altMetafactoryMethodName, altMetafactoryDesc) - } - def unapply(insn: AbstractInsnNode): Option[(InvokeDynamicInsnNode, Type, Handle, Type)] = insn match { - case indy: InvokeDynamicInsnNode if indy.bsm == metafactoryHandle || indy.bsm == altMetafactoryHandle => + case indy: InvokeDynamicInsnNode if indy.bsm == coreBTypes.lambdaMetaFactoryMetafactoryHandle || indy.bsm == coreBTypes.lambdaMetaFactoryAltMetafactoryHandle => indy.bsmArgs match { case Array(samMethodType: Type, implMethod: Handle, instantiatedMethodType: Type, _@_*) => // LambdaMetaFactory performs a number of automatic adaptations when invoking the lambda diff --git a/src/partest-extras/scala/tools/partest/ASMConverters.scala b/src/partest-extras/scala/tools/partest/ASMConverters.scala 
index a3d849a9c12b..445d3c89c2b3 100644 --- a/src/partest-extras/scala/tools/partest/ASMConverters.scala +++ b/src/partest-extras/scala/tools/partest/ASMConverters.scala @@ -94,7 +94,7 @@ object ASMConverters { case class FrameEntry (`type`: Int, local: List[Any], stack: List[Any]) extends Instruction { def opcode: Int = -1 } case class LineNumber (line: Int, start: Label) extends Instruction { def opcode: Int = -1 } - case class MethodHandle(tag: Int, owner: String, name: String, desc: String) + case class MethodHandle(tag: Int, owner: String, name: String, desc: String, itf: Boolean) case class ExceptionHandler(start: Label, end: Label, handler: Label, desc: Option[String]) case class LocalVariable(name: String, desc: String, signature: Option[String], start: Label, end: Label, index: Int) @@ -147,7 +147,7 @@ object ASMConverters { case _ => a // can be: Class, method Type, primitive constant })(collection.breakOut) - private def convertMethodHandle(h: asm.Handle): MethodHandle = MethodHandle(h.getTag, h.getOwner, h.getName, h.getDesc) + private def convertMethodHandle(h: asm.Handle): MethodHandle = MethodHandle(h.getTag, h.getOwner, h.getName, h.getDesc, h.isInterface) private def convertHandlers(method: t.MethodNode): List[ExceptionHandler] = { method.tryCatchBlocks.asScala.map(h => ExceptionHandler(applyLabel(h.start), applyLabel(h.end), applyLabel(h.handler), Option(h.`type`)))(collection.breakOut) @@ -227,7 +227,7 @@ object ASMConverters { case x => x.asInstanceOf[Object] } - def unconvertMethodHandle(h: MethodHandle): asm.Handle = new asm.Handle(h.tag, h.owner, h.name, h.desc) + def unconvertMethodHandle(h: MethodHandle): asm.Handle = new asm.Handle(h.tag, h.owner, h.name, h.desc, h.itf) def unconvertBsmArgs(a: List[Object]): Array[Object] = a.map({ case h: MethodHandle => unconvertMethodHandle(h) case o => o diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 4f5a545c9559..11b5db979371 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -1171,6 +1171,7 @@ trait StdNames { final val Invoke: TermName = newTermName("invoke") final val InvokeExact: TermName = newTermName("invokeExact") + final val Metafactory: TermName = newTermName("metafactory") final val AltMetafactory: TermName = newTermName("altMetafactory") final val Bootstrap: TermName = newTermName("bootstrap") diff --git a/test/files/run/classfile-format-51.scala b/test/files/run/classfile-format-51.scala index 3a6c4861f193..40eebee19858 100644 --- a/test/files/run/classfile-format-51.scala +++ b/test/files/run/classfile-format-51.scala @@ -80,7 +80,7 @@ object Test extends DirectTest { val test = cw.visitMethod(ACC_PUBLIC + ACC_FINAL, "test", s"()Ljava/lang/String;", null, null) test.visitCode() - val bootstrapHandle = new Handle(H_INVOKESTATIC, invokerClassName, bootstrapMethodName, bootStrapMethodType) + val bootstrapHandle = new Handle(H_INVOKESTATIC, invokerClassName, bootstrapMethodName, bootStrapMethodType, /* itf = */ false) test.visitInvokeDynamicInsn("invoke", targetMethodType, bootstrapHandle) test.visitInsn(ARETURN) test.visitMaxs(1, 1) diff --git a/test/files/run/noInlineUnknownIndy/Test.scala b/test/files/run/noInlineUnknownIndy/Test.scala index c6d227b6f2c2..a666146f1588 100644 --- a/test/files/run/noInlineUnknownIndy/Test.scala +++ b/test/files/run/noInlineUnknownIndy/Test.scala @@ -15,7 +15,12 @@ object Test extends DirectTest { } def show(): Unit = { - val 
unknownBootstrapMethod = new Handle(Opcodes.H_INVOKESTATIC, "not/java/lang/SomeLambdaMetafactory", "notAMetaFactoryMethod", "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;") + val unknownBootstrapMethod = new Handle( + Opcodes.H_INVOKESTATIC, + "not/java/lang/SomeLambdaMetafactory", + "notAMetaFactoryMethod", + "(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;[Ljava/lang/Object;)Ljava/lang/invoke/CallSite;", + /* itf = */ false) modifyClassFile(new File(testOutput.toFile, "A_1.class"))((cn: ClassNode) => { val testMethod = cn.methods.iterator.asScala.find(_.name == "test").head val indy = testMethod.instructions.iterator.asScala.collect({ case i: InvokeDynamicInsnNode => i }).next() diff --git a/versions.properties b/versions.properties index 4d24e0d598af..ed90768780ad 100644 --- a/versions.properties +++ b/versions.properties @@ -27,7 +27,7 @@ scala-parser-combinators.version.number=1.0.4 scala-swing.version.number=2.0.0-M2 scala-swing.version.osgi=2.0.0.M2 jline.version=2.14.1 -scala-asm.version=5.0.4-scala-3 +scala-asm.version=5.1.0-scala-1 # external modules, used internally (not shipped) partest.version.number=1.0.17 From faa4110fc7d8b92cac1e50a6952cdf3bc85d1cdf Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Tue, 19 Jul 2016 15:14:34 +0200 Subject: [PATCH 0182/2793] Switch Windows CI build to sbt (w/ some sbt build improvements) - Use sbt in `integrate/windows`: This essentially combines the Unix CI jobs `validate/publish-core` and `validate/test`, first publishing a local release built with STARR and then building a new version with that and running all tests on it. - Unify repository handling across build scripts: A new function `generateRepositoriesConfig` in `common`, based on the existing code in `integrate/bootstrap`, writes the `repositories` file for sbt, either with or without an extra bootstrap repository for resolving a previously built version. It is used in all CI scripts to ensure that artifacts are only resolved through the sanctioned proxies and upstream repositories. - The repository URL arguments in `setupPublishCore` and `setupValidateTest` are now optional as well. These commands are used without a URL from `integrate/windows`, which publishes to `local` instead of a temporary remote repository. - `testAll` is now a task instead of a command. It runs the same sequence of sub-tasks as before but does not propagate failures immediately. It always runs all subtasks and reports errors at the end. - The `generateBuildCharacterPropertiesFile` task now includes all properties from `versions.properties` (whose values have potentially been overwritten with `-D` options) in `buildcharacter.properties`. 
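For illustration only (this is not part of the patch; the real sbt implementation follows in the diff below), here is a minimal, self-contained Scala sketch of the run-everything-then-report-failures-at-the-end pattern that the reworked `testAll` task uses. The task names and helper are made up for the example:

```scala
object FailAtTheEnd {
  // Run every task, remembering failures instead of aborting on the first one,
  // then report them all at the end -- the same shape as the new testAll task.
  def runAll(tasks: List[(String, () => Unit)]): Unit = {
    val failed = tasks.flatMap { case (name, body) =>
      try { body(); Nil }
      catch { case e: Exception => List(s"$name: ${e.getMessage}") }
    }
    if (failed.nonEmpty) {
      println(s"${failed.size} of ${tasks.length} test tasks failed:")
      failed.foreach(f => println(s" - $f"))
      throw new RuntimeException("some test tasks failed")
    }
  }

  def main(args: Array[String]): Unit = runAll(List(
    "junit/test"          -> (() => ()),
    "partest run pos neg" -> (() => sys.error("simulated failure")),
    "osgiTestFelix/test"  -> (() => ()) // still runs even though the previous task failed
  ))
}
```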
--- build.sbt | 27 ++++++++++++++ project/ScriptCommands.scala | 58 ++++++++++++++---------------- project/VersionUtil.scala | 2 +- scripts/common | 20 +++++++++++ scripts/jobs/integrate/bootstrap | 24 +++---------- scripts/jobs/integrate/windows | 22 +++++++----- scripts/jobs/validate/publish-core | 7 ++-- scripts/jobs/validate/test | 7 ++-- 8 files changed, 102 insertions(+), 65 deletions(-) diff --git a/build.sbt b/build.sbt index 1d4e208da2e3..c789849af6ad 100644 --- a/build.sbt +++ b/build.sbt @@ -769,6 +769,32 @@ lazy val root: Project = (project in file(".")) GenerateAnyVals.run(dir.getAbsoluteFile) state }, + testAll := { + val results = ScriptCommands.sequence[Result[Unit]](List( + (Keys.test in Test in junit).result, + (testOnly in IntegrationTest in testP).toTask(" -- run pos neg jvm").result, + (testOnly in IntegrationTest in testP).toTask(" -- res scalap specialized scalacheck").result, + (testOnly in IntegrationTest in testP).toTask(" -- instrumented presentation").result, + (testOnly in IntegrationTest in testP).toTask(" -- --srcpath scaladoc").result, + (Keys.test in Test in osgiTestFelix).result, + (Keys.test in Test in osgiTestEclipse).result, + (MiMa.mima in library).result, + (MiMa.mima in reflect).result, + Def.task(()).dependsOn( // Run these in parallel: + doc in Compile in library, + doc in Compile in reflect, + doc in Compile in compiler, + doc in Compile in scalap + ).result + )).value + val failed = results.map(_.toEither).collect { case Left(i) => i } + if(failed.nonEmpty) { + val log = streams.value.log + log.error(s"${failed.size} of ${results.length} test tasks failed:") + failed.foreach(i => log.error(s" - $i")) + throw new RuntimeException + } + }, antStyle := false, incOptions := incOptions.value.withNameHashing(!antStyle.value).withAntStyle(antStyle.value) ) @@ -838,6 +864,7 @@ lazy val buildDirectory = settingKey[File]("The directory where all build produc lazy val mkBin = taskKey[Seq[File]]("Generate shell script (bash or Windows batch).") lazy val mkQuick = taskKey[File]("Generate a full build, including scripts, in build/quick") lazy val mkPack = taskKey[File]("Generate a full build, including scripts, in build/pack") +lazy val testAll = taskKey[Unit]("Run all test tasks sequentially") // Defining these settings is somewhat redundant as we also redefine settings that depend on them. // However, IntelliJ's project import works better when these are set correctly. diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index accbadbbf66c..8d5d09943a56 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -7,24 +7,29 @@ object ScriptCommands { def all = Seq( setupPublishCore, setupValidateTest, - setupBootstrapStarr, setupBootstrapLocker, setupBootstrapQuick, setupBootstrapPublish, - testAll + setupBootstrapStarr, setupBootstrapLocker, setupBootstrapQuick, setupBootstrapPublish ) - /** Set up the environment for `validate/publish-core`. The argument is the Artifactory snapshot repository URL. */ - def setupPublishCore = setup("setupPublishCore") { case Seq(url) => + /** Set up the environment for `validate/publish-core`. + * The optional argument is the Artifactory snapshot repository URL. 
*/ + def setupPublishCore = setup("setupPublishCore") { args => Seq( baseVersionSuffix in Global := "SHA-SNAPSHOT" - ) ++ publishTarget(url) ++ noDocs ++ enableOptimizer + ) ++ (args match { + case Seq(url) => publishTarget(url) + case Nil => Nil + }) ++ noDocs ++ enableOptimizer } - /** Set up the environment for `validate/test`. The argument is the Artifactory snapshot repository URL. */ - def setupValidateTest = setup("setupValidateTest") { case Seq(url) => - //TODO When ant is gone, pass starr version as an argument to this command instead of using version.properties + /** Set up the environment for `validate/test`. + * The optional argument is the Artifactory snapshot repository URL. */ + def setupValidateTest = setup("setupValidateTest") { args => Seq( - resolvers in Global += "scala-pr" at url, testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) - ) ++ enableOptimizer + ) ++ (args match { + case Seq(url) => Seq(resolvers in Global += "scala-pr" at url) + case Nil => Nil + }) ++ enableOptimizer } /** Set up the environment for building STARR in `validate/bootstrap`. The arguments are: @@ -63,9 +68,7 @@ object ScriptCommands { /** Set up the environment for publishing in `validate/bootstrap`. The arguments are: * - Temporary bootstrap repository URL for resolving modules * - Version number to publish - * All artifacts are published to Sonatype. GPG signing has to be configured from the - * shell script after `setupBootstrapPublish` because we don't pull the GPG plugin in - * by default, so we cannot reference its keys statically. */ + * All artifacts are published to Sonatype. */ def setupBootstrapPublish = setup("setupBootstrapPublish") { case Seq(url, ver) => // Define a copy of the setting key here in case the plugin is not part of the build val pgpPassphrase = SettingKey[Option[Array[Char]]]("pgp-passphrase", "The passphrase associated with the secret used to sign artifacts.", KeyRanks.BSetting) @@ -95,25 +98,18 @@ object ScriptCommands { publishArtifact in (Compile, packageDoc) in ThisBuild := false ) - private[this] def publishTarget(url: String) = Seq( + private[this] def publishTarget(url: String) = { // Append build.timestamp to Artifactory URL to get consistent build numbers (see https://github.com/sbt/sbt/issues/2088): - publishTo in Global := Some("scala-pr-publish" at url.replaceAll("/$", "") + ";build.timestamp=" + System.currentTimeMillis) - ) + val url2 = if(url.startsWith("file:")) url else url.replaceAll("/$", "") + ";build.timestamp=" + System.currentTimeMillis + Seq(publishTo in Global := Some("scala-pr-publish" at url2)) + } - def testAll = Command.command("testAll") { state => - val cmds = Seq( - "test", - "partest run pos neg jvm", - "partest res scalap specialized scalacheck", - "partest instrumented presentation", - "partest --srcpath scaladoc", - "osgiTestFelix/test", - "osgiTestEclipse/test", - "library/mima", - "reflect/mima", - "doc" - ) - state.log.info(cmds.mkString("Running all tests: \"", "\", \"", "\"")) - cmds ::: state + /** Like `Def.sequential` but accumulate all results */ + def sequence[B](tasks: List[Def.Initialize[Task[B]]]): Def.Initialize[Task[List[B]]] = tasks match { + case Nil => Def.task { Nil } + case x :: xs => Def.taskDyn { + val v = x.value + sequence(xs).apply((t: Task[List[B]]) => t.map(l => v :: l)) + } } } diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala index 148fdfbc2343..6fe2b004f783 100644 --- a/project/VersionUtil.scala 
+++ b/project/VersionUtil.scala @@ -94,7 +94,7 @@ object VersionUtil { } private lazy val generateBuildCharacterPropertiesFileImpl: Def.Initialize[Task[File]] = Def.task { - writeProps(versionProperties.value.toMap, (baseDirectory in ThisBuild).value / "buildcharacter.properties") + writeProps(versionProperties.value.toMap ++ versionProps, (baseDirectory in ThisBuild).value / "buildcharacter.properties") } private def writeProps(m: Map[String, String], propFile: File): File = { diff --git a/scripts/common b/scripts/common index bfddf3d149f3..95389e5495c1 100644 --- a/scripts/common +++ b/scripts/common @@ -154,3 +154,23 @@ EOF fi popd } + +# Generate a repositories file with all allowed repositories in our build environment. +# Takes one optional argument, the private repository URL. +# See http://www.scala-sbt.org/0.13/docs/Proxy-Repositories.html +function generateRepositoriesConfig() { + jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"} + sbtRepositoryConfig="$scriptsDir/sbt-repositories-config" + echo > "$sbtRepositoryConfig" '[repositories]' + if [ -n "$1" ] + then + echo >> "$sbtRepositoryConfig" " private-repo: $1" + fi + cat >> "$sbtRepositoryConfig" << EOF + jcenter-cache: $jcenterCacheUrl + typesafe-ivy-releases: https://dl.bintray.com/typesafe/ivy-releases/, [organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly + sbt-plugin-releases: https://dl.bintray.com/sbt/sbt-plugin-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext] + maven-central + local +EOF +} diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index bdc50323a8c6..4d5dae89a201 100644 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -85,10 +85,8 @@ moduleVersioning=${moduleVersioning-"versions.properties"} publishPrivateTask=${publishPrivateTask-"publish"} -publishSonatypeTaskCore=${publishSonatypeTaskCore-"publish-signed"} -publishSonatypeTaskModules=${publishSonatypeTaskModules-"publish-signed"} -publishStarrPrivateTask=${publishStarrPrivateTask-$publishPrivateTask} # set to "init" to speed up testing of the script (if you already built STARR before) -publishLockerPrivateTask=${publishLockerPrivateTask-$publishPrivateTask} # set to "init" to speed up testing of the script (if you already built locker before) +publishSonatypeTaskCore=${publishSonatypeTaskCore-"publishSigned"} +publishSonatypeTaskModules=${publishSonatypeTaskModules-"publishSigned"} forceRebuild=${forceRebuild-no} @@ -111,21 +109,7 @@ mkdir -p $baseDir/resolutionScratch_ # repo used to publish "locker" scala to (to start the bootstrap) releaseTempRepoCred="private-repo" releaseTempRepoUrl=${releaseTempRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-release-temp/"} -jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"} - -# Used below in sbtArgs since we use a dedicated repository to share artifcacts between jobs, -# so we need to configure SBT to use these rather than its default, Maven Central. 
-# See http://www.scala-sbt.org/0.13/docs/Proxy-Repositories.html -sbtRepositoryConfig="$scriptsDir/repositories-scala-release" -cat > "$sbtRepositoryConfig" << EOF -[repositories] - private-repo: $releaseTempRepoUrl - jcenter-cache: $jcenterCacheUrl - typesafe-ivy-releases: https://dl.bintray.com/typesafe/ivy-releases/, [organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly - sbt-plugin-releases: https://dl.bintray.com/sbt/sbt-plugin-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext] - maven-central - local -EOF +generateRepositoriesConfig $releaseTempRepoUrl ##### git gfxd() { @@ -575,7 +559,7 @@ publishSonatype() { -Dstarr.version=$SCALA_VER \ ${updatedModuleVersions[@]} \ "setupBootstrapPublish $releaseTempRepoUrl $SCALA_VER" \ - publishSigned | grep -v "was too long to be displayed in the webview, and will be left out" + $publishSonatypeTaskCore | grep -v "was too long to be displayed in the webview, and will be left out" echo "### Publishing modules to sonatype" # build/test/publish scala core modules to sonatype (this will start a new staging repo) diff --git a/scripts/jobs/integrate/windows b/scripts/jobs/integrate/windows index ba48c5bc2541..5e04b0b38065 100755 --- a/scripts/jobs/integrate/windows +++ b/scripts/jobs/integrate/windows @@ -1,15 +1,19 @@ #!/bin/bash -./pull-binary-libs.sh - -export ANT_OPTS="-Dfile.encoding=UTF-8 -server -XX:+AggressiveOpts -XX:+UseParNewGC -Xmx2G -Xss1M -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=128M" - -# TODO: don't hardcode these paths -- should be in scala/scala-jenkins-infra, passed in through env vars from jenkins -export PATH='/cygdrive/c/Program Files/Java/jdk1.8.0_92/bin:/cygdrive/c/apache-ant-1.9.6/bin:/cygdrive/c/Program Files (x86)/Git-2.5.3/Cmd:/bin:/usr/bin:' -export JAVA_HOME='C:/Program Files/Java/jdk1.8.0_92' +baseDir=${WORKSPACE-`pwd`} +scriptsDir="$baseDir/scripts" +. $scriptsDir/common java -version javac -version -ant -version -ant test-opt +generateRepositoriesConfig + +SBT="java $JAVA_OPTS -Dsbt.ivy.home=$WORKSPACE/.ivy2 -jar $sbtLauncher -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig" + +# Build locker with STARR +$SBT --warn "setupPublishCore" generateBuildCharacterPropertiesFile publishLocal + +# Build quick and run the tests +parseScalaProperties buildcharacter.properties +$SBT -Dstarr.version=$maven_version_number --warn "setupValidateTest" testAll | grep -v "was too long to be displayed in the webview, and will be left out" diff --git a/scripts/jobs/validate/publish-core b/scripts/jobs/validate/publish-core index b0bfd480836a..c71fbd12b796 100755 --- a/scripts/jobs/validate/publish-core +++ b/scripts/jobs/validate/publish-core @@ -9,6 +9,9 @@ baseDir=${WORKSPACE-`pwd`} scriptsDir="$baseDir/scripts" . $scriptsDir/common +generateRepositoriesConfig $prRepoUrl +SBT="$SBT_CMD -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig" + case $prDryRun in yep) echo "DRY RUN" @@ -16,7 +19,7 @@ case $prDryRun in ;; *) echo ">>> Getting Scala version number." - $SBT_CMD --warn "setupPublishCore $prRepoUrl" generateBuildCharacterPropertiesFile + $SBT --warn "setupPublishCore $prRepoUrl" generateBuildCharacterPropertiesFile parseScalaProperties buildcharacter.properties # produce maven_version_number echo ">>> Checking availability of Scala ${maven_version_number} in $prRepoUrl." 
@@ -27,7 +30,7 @@ case $prDryRun in if $libraryAvailable && $reflectAvailable && $compilerAvailable; then echo "Scala core already built!" else - $SBT_CMD --warn "setupPublishCore $prRepoUrl" publish + $SBT --warn "setupPublishCore $prRepoUrl" publish fi mv buildcharacter.properties jenkins.properties # parsed by the jenkins job diff --git a/scripts/jobs/validate/test b/scripts/jobs/validate/test index dd36c5db18ce..9938319dd843 100755 --- a/scripts/jobs/validate/test +++ b/scripts/jobs/validate/test @@ -4,6 +4,9 @@ baseDir=${WORKSPACE-`pwd`} scriptsDir="$baseDir/scripts" . $scriptsDir/common +generateRepositoriesConfig $prRepoUrl +SBT="$SBT_CMD -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig" + case $prDryRun in yep) @@ -15,12 +18,12 @@ case $prDryRun in # build quick using STARR built upstream, as specified by scalaVersion # (in that sense it's locker, since it was built with starr by that upstream job); # and run JUnit tests, partest, OSGi tests, MiMa and scaladoc - $SBT_CMD \ + $SBT \ -Dstarr.version=$scalaVersion \ --warn \ "setupValidateTest $prRepoUrl" \ $testExtraArgs \ - testAll + testAll | grep -v "was too long to be displayed in the webview, and will be left out" ;; From 06f8b6244ae8e80152f25a81cc2b92afd14c62f4 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 21 Jul 2016 06:22:48 -0700 Subject: [PATCH 0183/2793] SI-9750 Spec check major.minor.security Don't assume spec is just major, but allow arbitrary version number for both spec value and user value to check. Only the first three dot-separated fields are considered, after skipping optional leading value "1" in legacy format. Minimal validity checks of user arg are applied. Leading three fields, if present, must be number values, but subsequent fields are ignored. Note that a version number is not a version string, which optionally includes pre and build info, `9-ea+109`. --- src/library/scala/util/Properties.scala | 68 ++++++++++++++------- test/junit/scala/util/SpecVersionTest.scala | 52 +++++++++++++--- 2 files changed, 91 insertions(+), 29 deletions(-) diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala index 1bdf50bac201..8722294ddef8 100644 --- a/src/library/scala/util/Properties.scala +++ b/src/library/scala/util/Properties.scala @@ -168,33 +168,59 @@ private[scala] trait PropertiesTrait { /** Compares the given specification version to the specification version of the platform. * - * @param version a specification version number (legacy forms acceptable) - * @return `true` iff the specification version of the current runtime - * is equal to or higher than the version denoted by the given string. - * @throws NumberFormatException if the given string is not a version string + * @param version a specification version number (legacy forms acceptable) + * @return `true` if the specification version of the current runtime + * is equal to or higher than the version denoted by the given string. + * @throws NumberFormatException if the given string is not a version string * - * @example {{{ - * // In this example, the runtime's Java specification is assumed to be at version 8. - * isJavaAtLeast("1.6") // true - * isJavaAtLeast("1.8") // true - * isJavaAtLeast("8") // true - * isJavaAtLeast("9") // false - * isJavaAtLeast("1.9") // throws - * }}} + * @example {{{ + * // In this example, the runtime's Java specification is assumed to be at version 8. 
+ * isJavaAtLeast("1.8") // true + * isJavaAtLeast("8") // true + * isJavaAtLeast("9") // false + * isJavaAtLeast("9.1") // false + * isJavaAtLeast("1.9") // throws + * }}} */ def isJavaAtLeast(version: String): Boolean = { - def versionOf(s: String): Int = s.indexOf('.') match { - case 1 if s.charAt(0) == '1' => - val v = s.substring(2).toInt - if (v < 9) v else -1 - case -1 => s.toInt - case _ => -1 + def versionOf(s: String, depth: Int): (Int, String) = + s.indexOf('.') match { + case 0 => + (-2, s.substring(1)) + case 1 if depth == 0 && s.charAt(0) == '1' => + val r0 = s.substring(2) + val (v, r) = versionOf(r0, 1) + val n = if (v > 8 || r0.isEmpty) -2 else v // accept 1.8, not 1.9 or 1. + (n, r) + case -1 => + val n = if (!s.isEmpty) s.toInt else if (depth == 0) -2 else 0 + (n, "") + case i => + val r = s.substring(i + 1) + val n = if (depth < 2 && r.isEmpty) -2 else s.substring(0, i).toInt + (n, r) + } + def compareVersions(s: String, v: String, depth: Int): Int = { + if (depth >= 3) 0 + else { + val (sn, srest) = versionOf(s, depth) + val (vn, vrest) = versionOf(v, depth) + if (vn < 0) -2 + else if (sn < vn) -1 + else if (sn > vn) 1 + else compareVersions(srest, vrest, depth + 1) + } + } + compareVersions(javaSpecVersion, version, 0) match { + case -2 => throw new NumberFormatException(s"Not a version: $version") + case i => i >= 0 } - val v = versionOf(version) - if (v < 0) throw new NumberFormatException(s"Not a version: $version") - versionOf(javaSpecVersion) >= v } + /** Tests whether the major version of the platform specification is at least the given value. + * + * @param version a major version number + */ def isJavaAtLeast(version: Int): Boolean = isJavaAtLeast(version.toString) // provide a main method so version info can be obtained by running this diff --git a/test/junit/scala/util/SpecVersionTest.scala b/test/junit/scala/util/SpecVersionTest.scala index 9232c4721b40..4639389dd960 100644 --- a/test/junit/scala/util/SpecVersionTest.scala +++ b/test/junit/scala/util/SpecVersionTest.scala @@ -34,6 +34,8 @@ class SpecVersionTest { assert(sut9 isJavaAtLeast "8") assert(sut9 isJavaAtLeast "9") assert(sut9.isJavaAtLeast(9)) + assertFalse(sut9.isJavaAtLeast(10)) + assertFalse(sut9.isJavaAtLeast("10")) } // SI-7265 @@ -53,15 +55,49 @@ class SpecVersionTest { } @Test def variousBadVersionStrings(): Unit = { - val sut7 = new TestProperties("1.7") - val sut9 = new TestProperties("9") - assertThrows[NumberFormatException](sut7.isJavaAtLeast("1.9"), _ == "Not a version: 1.9") - assertThrows[NumberFormatException] { sut9 isJavaAtLeast "1.9" } - assertThrows[NumberFormatException] { sut7 isJavaAtLeast "9.1" } - assertThrows[NumberFormatException] { sut9 isJavaAtLeast "9.1" } + val sut = new TestProperties("9") + assertThrows[NumberFormatException](sut.isJavaAtLeast("1.9"), _ == "Not a version: 1.9") + assertThrows[NumberFormatException](sut.isJavaAtLeast("1.")) + assertThrows[NumberFormatException](sut.isJavaAtLeast("1.8.")) + assertThrows[NumberFormatException](sut.isJavaAtLeast("1.a")) + assertThrows[NumberFormatException](sut.isJavaAtLeast("")) + assertThrows[NumberFormatException](sut.isJavaAtLeast(".")) + assertThrows[NumberFormatException](sut.isJavaAtLeast("..")) + assertThrows[NumberFormatException](sut.isJavaAtLeast(".5")) + assertThrows[NumberFormatException](sut.isJavaAtLeast("9-ea")) //version number, not version string + } - val badvs = List("1.1.8", "1.", "1.a", "", ".", ".5", "1.7.1") + @Test def `spec has minor or more`(): Unit = { + val sut = new TestProperties("9.2.5") 
+ assert(sut.isJavaAtLeast(9)) + assert(sut.isJavaAtLeast("9")) + assert(sut.isJavaAtLeast("9.0.1")) + assert(sut.isJavaAtLeast("9.2.1")) + assert(sut.isJavaAtLeast("8.3.1")) + assert(sut.isJavaAtLeast("8.3.1.1.1")) + assertFalse(sut.isJavaAtLeast("9.3.1")) + assertFalse(sut.isJavaAtLeast("10.3.1")) + } + + @Test def `compares only major minor security`(): Unit = { + val sut = new TestProperties("9.2.5.1.2.3") + assert(sut.isJavaAtLeast(9)) + assert(sut.isJavaAtLeast("9")) + assert(sut.isJavaAtLeast("9.0.1")) + assert(sut.isJavaAtLeast("9.2.5.9.9.9")) + assertFalse(sut.isJavaAtLeast("9.2.6")) + } - for (v <- badvs) assertThrows[NumberFormatException](sut7.isJavaAtLeast(v)) + @Test def `futurely proofed`(): Unit = { + val sut = new TestProperties("10.2.5") + assert(sut.isJavaAtLeast(9)) + assert(sut.isJavaAtLeast(10)) + assert(sut.isJavaAtLeast("9")) + assert(sut.isJavaAtLeast("9.0.1")) + assert(sut.isJavaAtLeast("9.2.1")) + assert(sut.isJavaAtLeast("8.3.1")) + assert(sut.isJavaAtLeast("8.3.1.1.1")) + assert(sut.isJavaAtLeast("9.3.1")) + assertFalse(sut.isJavaAtLeast("10.3.1")) } } From 9a2486087a9739108265e7830ebaa96373605d02 Mon Sep 17 00:00:00 2001 From: Carsten Varming Date: Wed, 20 Jul 2016 23:57:35 -0400 Subject: [PATCH 0184/2793] SI-8774 Null link fields in mutable hash maps on removal. --- src/library/scala/collection/mutable/HashTable.scala | 5 ++++- src/library/scala/collection/mutable/LinkedHashMap.scala | 2 ++ src/library/scala/collection/mutable/LinkedHashSet.scala | 2 ++ 3 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala index b48a32fa3749..bb15788bdf45 100644 --- a/src/library/scala/collection/mutable/HashTable.scala +++ b/src/library/scala/collection/mutable/HashTable.scala @@ -183,6 +183,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU table(h) = e.next tableSize = tableSize - 1 nnSizeMapRemove(h) + e.next = null return e } else { var e1 = e.next @@ -194,6 +195,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU e.next = e1.next tableSize = tableSize - 1 nnSizeMapRemove(h) + e1.next = null return e1 } } @@ -227,8 +229,9 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU var es = iterTable(idx) while (es != null) { + val next = es.next // Cache next in case f removes es. 
f(es.asInstanceOf[Entry]) - es = es.next + es = next while (es == null && idx > 0) { idx -= 1 diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala index 275f49067537..a731b1bbdc07 100644 --- a/src/library/scala/collection/mutable/LinkedHashMap.scala +++ b/src/library/scala/collection/mutable/LinkedHashMap.scala @@ -81,6 +81,8 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B] else e.earlier.later = e.later if (e.later eq null) lastEntry = e.earlier else e.later.earlier = e.earlier + e.earlier = null // Null references to prevent nepotism + e.later = null Some(e.value) } } diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala index 756a2f73c16c..f00cbd90dc7a 100644 --- a/src/library/scala/collection/mutable/LinkedHashSet.scala +++ b/src/library/scala/collection/mutable/LinkedHashSet.scala @@ -73,6 +73,8 @@ class LinkedHashSet[A] extends AbstractSet[A] else e.earlier.later = e.later if (e.later eq null) lastEntry = e.earlier else e.later.earlier = e.earlier + e.earlier = null // Null references to prevent nepotism + e.later = null true } } From 04649c74c47af1b6e21648e4dfe7db0417f3a9ba Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 22 Jul 2016 09:50:04 +1000 Subject: [PATCH 0185/2793] SD-186 Fix positions in trait method bytecode Concrete, non-private methods in traits are translated into a static method with an explicit `$this` parameter. After this translation, the references to `$this` (substituted for `this` in user written code) were being positioned at the position of the method, which makes debugging unpleasant. This commit leaves the `Ident($this)` trees unpositioned. This is analogous to what we do in the body of extension methods, which is the other user of `ThisSubstitutor`. It would be more correct to copy the position of each `This` tree over to the substituted tree. That would let us set a breakpoint on a line that _only_ contained `this`. But in 99% of cases users won't be able to spot the difference, so I've opted for the tried and tested approach here.
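As a rough, hypothetical illustration of the translation described above (example code only, not taken from the repository or this patch):

```scala
// A concrete, non-private trait method ...
trait Greeter {
  def greet(name: String): String = toString + " greets " + name
}
// ... is compiled, under the trait encoding, to roughly a static forwarder with an
// explicit $this parameter:
//
//   static def greet$($this: Greeter, name: String): String =
//     $this.toString + " greets " + name
//
// The $this references substituted for `this` are the trees this commit leaves
// unpositioned, so the debugger no longer attributes every such reference to the
// line of the method definition.
```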
--- .../scala/tools/nsc/ast/TreeGen.scala | 2 +- .../tools/nsc/backend/jvm/BytecodeTest.scala | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index bc89609a596c..2a4b1b6738cc 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -350,7 +350,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { case mt @ MethodType(params, res) => copyMethodType(mt, selfParamSym :: params, res) }) val selfParam = ValDef(selfParamSym) - val rhs = orig.rhs.substituteThis(newSym.owner, atPos(newSym.pos)(gen.mkAttributedIdent(selfParamSym))) + val rhs = orig.rhs.substituteThis(newSym.owner, gen.mkAttributedIdent(selfParamSym)) // SD-186 intentionally leaving Ident($this) is unpositioned .substituteSymbols(origParams, newSym.info.params.drop(1)).changeOwner(origSym -> newSym) treeCopy.DefDef(orig, orig.mods, orig.name, orig.tparams, (selfParam :: orig.vparamss.head) :: Nil, orig.tpt, rhs).setSymbol(newSym) } diff --git a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala index b2ee8b3a4570..5904cb2441dc 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala @@ -168,4 +168,23 @@ class BytecodeTest extends BytecodeTesting { assertEquals(x.start, labels(1)) assertEquals(x.end, labels(7)) } + + @Test // wrong line numbers for rewritten `this` references in trait static methods + def sd186_traitLineNumber(): Unit = { + val code = + """trait T { + | def t(): Unit = { + | toString + | toString + | } + |} + """.stripMargin + val t = compileClass(code) + val tMethod = getMethod(t, "t$") + val invoke = Invoke(INVOKEVIRTUAL, "java/lang/Object", "toString", "()Ljava/lang/String;", false) + assertSameCode(tMethod.instructions, + List(Label(0), LineNumber(3, Label(0)), VarOp(ALOAD, 0), invoke, Op(POP), + Label(5), LineNumber(4, Label(5)), VarOp(ALOAD, 0), invoke, Op(POP), Op(RETURN), Label(11)) + ) + } } From caeedcf7f158cb7043e988d0a0ce7a216d973237 Mon Sep 17 00:00:00 2001 From: Oscar Boykin Date: Thu, 7 Jul 2016 10:19:55 -1000 Subject: [PATCH 0186/2793] Disable stub warning by default. When we create class symbols from classpath elements, references to other classes that are absent from the classpath are represented as references to "stub symbols". This is not a fatal error; for instance, if these references are from the signature of a method that isn't called from the program being compiled, we don't need to know anything about them. A subsequent attempt to look at the type of a stub symbol will trigger a compile error. Currently, the creation of a stub symbol incurs a warning. This commit removes that warning on the basis that it isn't something users need to worry about. javac doesn't emit a comparable warning. The warning is still issued under any of `-verbose` / `-Xdev` / `-Ydebug`.
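To make the scenario concrete, here is a hypothetical two-step compilation sketch (class names invented for the example; the `t9268` test updated below happens to use a similarly named `Waiter`):

```scala
// Library.scala -- compiled first; suppose Waiter.class is then removed from the classpath.
class Waiter
class Cafe {
  def hire(w: Waiter): Unit = ()      // Cafe's signature mentions Waiter
  def open(): Unit = println("open")
}

// Client.scala -- compiled afterwards against Cafe.class only (no Waiter.class).
// The reference to Waiter in Cafe's signature becomes a stub symbol. Since open()
// never forces Waiter's type, compilation still succeeds; before this patch scalac
// also warned "Class Waiter not found - continuing with a stub.", afterwards the
// warning is only shown under -verbose / -Xdev / -Ydebug.
object Client {
  def main(args: Array[String]): Unit = (new Cafe).open()
}
```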
--- .../scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 2 +- test/files/run/t7439.check | 2 +- test/files/run/t8442.check | 2 +- test/files/run/t9268.check | 3 +-- 4 files changed, 4 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 0533d420cdb4..f8c1a0d08278 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -366,7 +366,7 @@ abstract class ClassfileParser { // - better owner than `NoSymbol` // - remove eager warning val msg = s"Class $name not found - continuing with a stub." - if (!settings.isScaladoc) warning(msg) + if ((!settings.isScaladoc) && (settings.verbose || settings.developer)) warning(msg) return NoSymbol.newStubSymbol(name.toTypeName, msg) } val completer = new loaders.ClassfileLoader(file) diff --git a/test/files/run/t7439.check b/test/files/run/t7439.check index 9ea09f9c4094..b95884311a71 100644 --- a/test/files/run/t7439.check +++ b/test/files/run/t7439.check @@ -1,2 +1,2 @@ Recompiling after deleting t7439-run.obj/A_1.class -pos: NoPosition Class A_1 not found - continuing with a stub. WARNING + diff --git a/test/files/run/t8442.check b/test/files/run/t8442.check index ce9e8b52ff21..8b137891791f 100644 --- a/test/files/run/t8442.check +++ b/test/files/run/t8442.check @@ -1 +1 @@ -pos: NoPosition Class A_1 not found - continuing with a stub. WARNING + diff --git a/test/files/run/t9268.check b/test/files/run/t9268.check index 90ef940eb336..60afcbb6484e 100644 --- a/test/files/run/t9268.check +++ b/test/files/run/t9268.check @@ -1,5 +1,4 @@ Compiling Client1 -pos: NoPosition Class Waiter not found - continuing with a stub. WARNING + Compiling Client2 -pos: NoPosition Class Waiter not found - continuing with a stub. WARNING pos: NoPosition Unable to locate class corresponding to inner class entry for Predicate in owner Waiter ERROR From 3e64fdda48b8f2506756fc458f01f2e549d71720 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 14 Jul 2016 11:02:36 +1000 Subject: [PATCH 0187/2793] SD-120 Non FunctionN lambdas should not be universally serializable Instead, we follow the example set by javac, and predicate serializability of bot anon-class and invokedynamic-based lambdas on whether or not the SAM type extends java.io.Serializable. 
Fixes https://github.com/scala/scala-dev/issues/120 --- .../scala/tools/nsc/ast/TreeGen.scala | 18 +++---- .../nsc/backend/jvm/BCodeBodyBuilder.scala | 38 +++++++-------- .../tools/nsc/transform/Delambdafy.scala | 6 ++- test/files/run/lambda-serialization.scala | 2 +- test/files/run/sammy_seriazable.scala | 47 +++++++++++++++++++ 5 files changed, 80 insertions(+), 31 deletions(-) create mode 100644 test/files/run/sammy_seriazable.scala diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index bc89609a596c..f53fb336e932 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -355,20 +355,22 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { treeCopy.DefDef(orig, orig.mods, orig.name, orig.tparams, (selfParam :: orig.vparamss.head) :: Nil, orig.tpt, rhs).setSymbol(newSym) } - // TODO: the rewrite to AbstractFunction is superfluous once we compile FunctionN to a SAM type (aka functional interface) - def functionClassType(fun: Function): Type = - if (isFunctionType(fun.tpe)) abstractFunctionType(fun.vparams.map(_.symbol.tpe), fun.body.tpe.deconst) - else fun.tpe - def expandFunction(localTyper: analyzer.Typer)(fun: Function, inConstructorFlag: Long): Tree = { - val parents = addSerializable(functionClassType(fun)) - val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) addAnnotation SerialVersionUIDAnnotation + val anonClass = fun.symbol.owner newAnonymousFunctionClass(fun.pos, inConstructorFlag) + val parents = if (isFunctionType(fun.tpe)) { + anonClass addAnnotation SerialVersionUIDAnnotation + addSerializable(abstractFunctionType(fun.vparams.map(_.symbol.tpe), fun.body.tpe.deconst)) + } else { + if (fun.tpe.typeSymbol.isSubClass(JavaSerializableClass)) + anonClass addAnnotation SerialVersionUIDAnnotation + fun.tpe :: Nil + } + anonClass setInfo ClassInfoType(parents, newScope, anonClass) // The original owner is used in the backend for the EnclosingMethod attribute. If fun is // nested in a value-class method, its owner was already changed to the extension method. // Saving the original owner allows getting the source structure from the class symbol. 
defineOriginalOwner(anonClass, fun.symbol.originalOwner) - anonClass setInfo ClassInfoType(parents, newScope, anonClass) val samDef = mkMethodFromFunction(localTyper)(anonClass, fun) anonClass.info.decls enter samDef.symbol diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index acedf8301665..d5c4b5e20161 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -15,7 +15,7 @@ import scala.tools.asm import GenBCode._ import BackendReporting._ import scala.tools.asm.Opcodes -import scala.tools.asm.tree.MethodInsnNode +import scala.tools.asm.tree.{MethodInsnNode, MethodNode} import scala.tools.nsc.backend.jvm.BCodeHelpers.{InvokeStyle, TestOp} /* @@ -630,7 +630,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { case Apply(fun, args) if app.hasAttachment[delambdafy.LambdaMetaFactoryCapable] => val attachment = app.attachments.get[delambdafy.LambdaMetaFactoryCapable].get genLoadArguments(args, paramTKs(app)) - genInvokeDynamicLambda(attachment.target, attachment.arity, attachment.functionalInterface, attachment.sam) + genInvokeDynamicLambda(attachment.target, attachment.arity, attachment.functionalInterface, attachment.sam, attachment.isSerializable, attachment.addScalaSerializableMarker) generatedType = methodBTypeFromSymbol(fun.symbol).returnType case Apply(fun, List(expr)) if currentRun.runDefinitions.isBox(fun.symbol) => @@ -1330,7 +1330,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { def genSynchronized(tree: Apply, expectedType: BType): BType def genLoadTry(tree: Try): BType - def genInvokeDynamicLambda(lambdaTarget: Symbol, arity: Int, functionalInterface: Symbol, sam: Symbol) { + def genInvokeDynamicLambda(lambdaTarget: Symbol, arity: Int, functionalInterface: Symbol, sam: Symbol, isSerializable: Boolean, addScalaSerializableMarker: Boolean) { val isStaticMethod = lambdaTarget.hasFlag(Flags.STATIC) def asmType(sym: Symbol) = classBTypeFromSymbol(sym).toASMType @@ -1343,26 +1343,24 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { /* itf = */ isInterface) val receiver = if (isStaticMethod) Nil else lambdaTarget.owner :: Nil val (capturedParams, lambdaParams) = lambdaTarget.paramss.head.splitAt(lambdaTarget.paramss.head.length - arity) - // Requires https://github.com/scala/scala-java8-compat on the runtime classpath val invokedType = asm.Type.getMethodDescriptor(asmType(functionalInterface), (receiver ::: capturedParams).map(sym => typeToBType(sym.info).toASMType): _*) - val constrainedType = new MethodBType(lambdaParams.map(p => typeToBType(p.tpe)), typeToBType(lambdaTarget.tpe.resultType)).toASMType - val samName = sam.name.toString val samMethodType = methodBTypeFromSymbol(sam).toASMType - - val flags = java.lang.invoke.LambdaMetafactory.FLAG_SERIALIZABLE | java.lang.invoke.LambdaMetafactory.FLAG_MARKERS - - val ScalaSerializable = classBTypeFromSymbol(definitions.SerializableClass).toASMType - bc.jmethod.visitInvokeDynamicInsn(samName, invokedType, lambdaMetaFactoryAltMetafactoryHandle, - /* samMethodType = */ samMethodType, - /* implMethod = */ implMethodHandle, - /* instantiatedMethodType = */ constrainedType, - /* flags = */ flags.asInstanceOf[AnyRef], - /* markerInterfaceCount = */ 1.asInstanceOf[AnyRef], - /* markerInterfaces[0] = */ ScalaSerializable, - /* bridgeCount = */ 0.asInstanceOf[AnyRef] - ) - indyLambdaHosts += cnode.name + val 
markers = if (addScalaSerializableMarker) classBTypeFromSymbol(definitions.SerializableClass).toASMType :: Nil else Nil + visitInvokeDynamicInsnLMF(bc.jmethod, sam.name.toString, invokedType, samMethodType, implMethodHandle, constrainedType, isSerializable, markers) + if (isSerializable) + indyLambdaHosts += cnode.name } } + + private def visitInvokeDynamicInsnLMF(jmethod: MethodNode, samName: String, invokedType: String, samMethodType: asm.Type, + implMethodHandle: asm.Handle, instantiatedMethodType: asm.Type, + serializable: Boolean, markerInterfaces: Seq[asm.Type]) = { + import java.lang.invoke.LambdaMetafactory.{FLAG_MARKERS, FLAG_SERIALIZABLE} + def flagIf(b: Boolean, flag: Int): Int = if (b) flag else 0 + val flags = FLAG_MARKERS | flagIf(serializable, FLAG_SERIALIZABLE) + val bsmArgs = Seq(samMethodType, implMethodHandle, instantiatedMethodType, Int.box(flags), Int.box(markerInterfaces.length)) ++ markerInterfaces + jmethod.visitInvokeDynamicInsn(samName, invokedType, lambdaMetaFactoryAltMetafactoryHandle, bsmArgs: _*) + } + } diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index 804bcddb7b88..5e5b9fd25058 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -28,7 +28,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre /** the following two members override abstract members in Transform */ val phaseName: String = "delambdafy" - final case class LambdaMetaFactoryCapable(target: Symbol, arity: Int, functionalInterface: Symbol, sam: Symbol) + final case class LambdaMetaFactoryCapable(target: Symbol, arity: Int, functionalInterface: Symbol, sam: Symbol, isSerializable: Boolean, addScalaSerializableMarker: Boolean) /** * Get the symbol of the target lifted lambda body method from a function. I.e. if @@ -95,6 +95,8 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // no need for adaptation when the implemented sam is of a specialized built-in function type val lambdaTarget = if (isSpecialized) target else createBoxingBridgeMethodIfNeeded(fun, target, functionalInterface, sam) + val isSerializable = samUserDefined == NoSymbol || samUserDefined.owner.isNonBottomSubClass(definitions.JavaSerializableClass) + val addScalaSerializableMarker = samUserDefined == NoSymbol // The backend needs to know the target of the lambda and the functional interface in order // to emit the invokedynamic instruction. We pass this information as tree attachment. 
@@ -102,7 +104,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // see https://docs.oracle.com/javase/8/docs/api/java/lang/invoke/LambdaMetafactory.html // instantiatedMethodType is derived from lambdaTarget's signature // samMethodType is derived from samOf(functionalInterface)'s signature - apply.updateAttachment(LambdaMetaFactoryCapable(lambdaTarget, fun.vparams.length, functionalInterface, sam)) + apply.updateAttachment(LambdaMetaFactoryCapable(lambdaTarget, fun.vparams.length, functionalInterface, sam, isSerializable, addScalaSerializableMarker)) apply } diff --git a/test/files/run/lambda-serialization.scala b/test/files/run/lambda-serialization.scala index 0eee1193d79d..08e235b1cb9b 100644 --- a/test/files/run/lambda-serialization.scala +++ b/test/files/run/lambda-serialization.scala @@ -1,6 +1,6 @@ import java.io.{ByteArrayInputStream, ObjectInputStream, ObjectOutputStream, ByteArrayOutputStream} -trait IntToString { def apply(i: Int): String } +trait IntToString extends java.io.Serializable { def apply(i: Int): String } object Test { def main(args: Array[String]): Unit = { diff --git a/test/files/run/sammy_seriazable.scala b/test/files/run/sammy_seriazable.scala new file mode 100644 index 000000000000..458b99238a23 --- /dev/null +++ b/test/files/run/sammy_seriazable.scala @@ -0,0 +1,47 @@ +import java.io._ + +trait NotSerializableInterface { def apply(a: Any): Any } +abstract class NotSerializableClass { def apply(a: Any): Any } +// SAM type that supports lambdas-as-invoke-dynamic +trait IsSerializableInterface extends java.io.Serializable { def apply(a: Any): Any } +// SAM type that still requires lambdas-as-anonhmous-classes +abstract class IsSerializableClass extends java.io.Serializable { def apply(a: Any): Any } + +object Test { + def main(args: Array[String]) { + val nsi: NotSerializableInterface = x => x + val nsc: NotSerializableClass = x => x + + import SerDes._ + assertNotSerializable(nsi) + assertNotSerializable(nsc) + assert(serializeDeserialize[IsSerializableInterface](x => x).apply("foo") == "foo") + assert(serializeDeserialize[IsSerializableClass](x => x).apply("foo") == "foo") + assert(ObjectStreamClass.lookup(((x => x): IsSerializableClass).getClass).getSerialVersionUID == 0) + } +} + +object SerDes { + def assertNotSerializable(a: AnyRef): Unit = { + try { + serialize(a) + assert(false) + } catch { + case _: NotSerializableException => // okay + } + } + + def serialize(obj: AnyRef): Array[Byte] = { + val buffer = new ByteArrayOutputStream + val out = new ObjectOutputStream(buffer) + out.writeObject(obj) + buffer.toByteArray + } + + def deserialize(a: Array[Byte]): AnyRef = { + val in = new ObjectInputStream(new ByteArrayInputStream(a)) + in.readObject + } + + def serializeDeserialize[T <: AnyRef](obj: T) = deserialize(serialize(obj)).asInstanceOf[T] +} From dc326cd8213ef15cb7647dddfc5144ae47b01b5e Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Thu, 19 Nov 2015 19:32:19 +0100 Subject: [PATCH 0188/2793] SI-9560 Remove dependency on parser-combinators/json Also update a few example IDE files for Eclipse and IntelliJ. This drops the dependency by integrating the bare minimum functionality to keep things working. 
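As a standalone illustration of the kind of "bare minimum" JSON support being inlined (a sketch only; the code actually added by the patch is the new `JSON.scala` shown in the diff below, and the object name here is made up):

```scala
object MiniJson {
  // Escape the characters that must not appear raw inside a JSON string literal.
  def quote(s: String): String = {
    val sb = new StringBuilder("\"")
    s.foreach {
      case '"'  => sb ++= "\\\""
      case '\\' => sb ++= "\\\\"
      case '\b' => sb ++= "\\b"
      case '\f' => sb ++= "\\f"
      case '\n' => sb ++= "\\n"
      case '\r' => sb ++= "\\r"
      case '\t' => sb ++= "\\t"
      case c    => sb += c
    }
    sb.append('"').toString
  }

  def main(args: Array[String]): Unit =
    // Prints: "Index.PACKAGES = {\"name\": \"scala\"};"
    println(quote("""Index.PACKAGES = {"name": "scala"};"""))
}
```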
--- build.sbt | 7 +- build.xml | 13 ++-- dbuild-meta.json | 5 -- src/build/bnd/scala-compiler.bnd | 1 - src/build/dbuild-meta-json-gen.scala | 5 +- src/build/maven/scala-compiler-doc-pom.xml | 5 -- src/build/maven/scala-compiler-pom.xml | 5 -- src/eclipse/interactive/.classpath | 2 +- src/eclipse/partest/.classpath | 4 +- src/eclipse/repl/.classpath | 2 +- src/eclipse/scala-compiler/.classpath | 2 +- src/eclipse/scaladoc/.classpath | 7 +- src/eclipse/test-junit/.classpath | 4 +- src/intellij/scala.ipr.SAMPLE | 70 ++++++++----------- .../tools/nsc/doc/html/page/IndexScript.scala | 21 +++--- .../scala/tools/nsc/doc/html/page/JSON.scala | 56 +++++++++++++++ 16 files changed, 116 insertions(+), 93 deletions(-) create mode 100644 src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala diff --git a/build.sbt b/build.sbt index 1d4e208da2e3..78f30c220fd1 100644 --- a/build.sbt +++ b/build.sbt @@ -56,9 +56,9 @@ import VersionUtil._ // Scala dependencies: -val scalaParserCombinatorsDep = scalaDep("org.scala-lang.modules", "scala-parser-combinators") val scalaSwingDep = scalaDep("org.scala-lang.modules", "scala-swing") val scalaXmlDep = scalaDep("org.scala-lang.modules", "scala-xml") +val scalaParserCombinatorsDep = scalaDep("org.scala-lang.modules", "scala-parser-combinators") val partestDep = scalaDep("org.scala-lang.modules", "scala-partest", versionProp = "partest") // Non-Scala dependencies: @@ -398,7 +398,7 @@ lazy val compiler = configureAsSubproject(project) description := "Scala Compiler", libraryDependencies ++= Seq(antDep, asmDep), // These are only needed for the POM: - libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, jlineDep % "optional"), + libraryDependencies ++= Seq(scalaXmlDep, jlineDep % "optional"), // this a way to make sure that classes from interactive and scaladoc projects // end up in compiler jar (that's what Ant build does) // we need to use LocalProject references (with strings) to deal with mutual recursion @@ -427,7 +427,6 @@ lazy val compiler = configureAsSubproject(project) Osgi.headers ++= Seq( "Import-Package" -> ("jline.*;resolution:=optional," + "org.apache.tools.ant.*;resolution:=optional," + - "scala.util.parsing.*;version=\"${range;[====,====];"+versionNumber("scala-parser-combinators")+"}\";resolution:=optional," + "scala.xml.*;version=\"${range;[====,====];"+versionNumber("scala-xml")+"}\";resolution:=optional," + "scala.*;version=\"${range;[==,=+);${ver}}\"," + "*"), @@ -521,7 +520,7 @@ lazy val scaladoc = configureAsSubproject(project) .settings( name := "scala-compiler-doc", description := "Scala Documentation Generator", - libraryDependencies ++= Seq(scalaXmlDep, scalaParserCombinatorsDep, partestDep), + libraryDependencies ++= Seq(scalaXmlDep, partestDep), includeFilter in unmanagedResources in Compile := "*.html" | "*.css" | "*.gif" | "*.png" | "*.js" | "*.txt" | "*.svg" | "*.eot" | "*.woff" | "*.ttf" ) .dependsOn(compiler) diff --git a/build.xml b/build.xml index 6b2c9ade0da4..7c6f525c1cf9 100644 --- a/build.xml +++ b/build.xml @@ -367,7 +367,7 @@ TODO: - + @@ -791,7 +791,6 @@ TODO: - @@ -804,7 +803,9 @@ TODO: - + + + @@ -913,8 +914,7 @@ TODO: + scala-xml via external-modules-nocore, as part of `partest.classpath` --> @@ -922,7 +922,6 @@ TODO: - @@ -1278,8 +1277,6 @@ TODO: --> - - diff --git a/dbuild-meta.json b/dbuild-meta.json index 4806f9fa5a9e..ca3ce2a1104b 100644 --- a/dbuild-meta.json +++ b/dbuild-meta.json @@ -48,11 +48,6 @@ "extension" : "jar", "name" : "scala-xml", "organization" : "org.scala-lang.modules" - }, - { - 
"extension" : "jar", - "name" : "scala-parser-combinators", - "organization" : "org.scala-lang.modules" } ], "name" : "scala-compiler", diff --git a/src/build/bnd/scala-compiler.bnd b/src/build/bnd/scala-compiler.bnd index 3e60c4973c8c..c12c84c3f944 100644 --- a/src/build/bnd/scala-compiler.bnd +++ b/src/build/bnd/scala-compiler.bnd @@ -5,7 +5,6 @@ Bundle-Version: ${ver} Export-Package: *;version=${ver} Import-Package: jline.*;resolution:=optional, \ org.apache.tools.ant.*;resolution:=optional, \ - scala.util.parsing.*;version="${range;[====,====];@PARSER_COMBINATORS_VERSION@}";resolution:=optional, \ scala.xml.*;version="${range;[====,====];@XML_VERSION@}";resolution:=optional, \ scala.*;version="${range;[==,=+);${ver}}", \ * diff --git a/src/build/dbuild-meta-json-gen.scala b/src/build/dbuild-meta-json-gen.scala index 043ad19b2e70..6405650d881c 100644 --- a/src/build/dbuild-meta-json-gen.scala +++ b/src/build/dbuild-meta-json-gen.scala @@ -23,8 +23,7 @@ val meta = Project("scala-compiler", "org.scala-lang", Seq(ProjectRef("scala-compiler", "org.scala-lang")), Seq(ProjectRef("scala-reflect", "org.scala-lang"), - ProjectRef("scala-xml", "org.scala-lang.modules"), - ProjectRef("scala-parser-combinators", "org.scala-lang.modules") + ProjectRef("scala-xml", "org.scala-lang.modules") )), // Project("scala-repl", "org.scala-lang", @@ -37,7 +36,7 @@ val meta = // Project("scaladoc", "org.scala-lang", // Seq(ProjectRef("scaladoc", "org.scala-lang")), - // Seq(ProjectRef("scala-compiler", "org.scala-lang"),ProjectRef("scala-partest", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"), ProjectRef("scala-parser-combinators", "org.scala-lang"))), + // Seq(ProjectRef("scala-compiler", "org.scala-lang"),ProjectRef("scala-partest", "org.scala-lang"), ProjectRef("scala-xml", "org.scala-lang"))), Project("scalap", "org.scala-lang", Seq(ProjectRef("scalap", "org.scala-lang")), diff --git a/src/build/maven/scala-compiler-doc-pom.xml b/src/build/maven/scala-compiler-doc-pom.xml index 0c33d23d6150..86ca3f865baa 100644 --- a/src/build/maven/scala-compiler-doc-pom.xml +++ b/src/build/maven/scala-compiler-doc-pom.xml @@ -39,11 +39,6 @@ scala-xml_@SCALA_BINARY_VERSION@ @XML_VERSION@
- - org.scala-lang.modules - scala-parser-combinators_@SCALA_BINARY_VERSION@ - @PARSER_COMBINATORS_VERSION@ - diff --git a/src/build/maven/scala-compiler-pom.xml b/src/build/maven/scala-compiler-pom.xml index 15546109c8e3..9c157d17d97e 100644 --- a/src/build/maven/scala-compiler-pom.xml +++ b/src/build/maven/scala-compiler-pom.xml @@ -45,11 +45,6 @@ scala-xml_@SCALA_BINARY_VERSION@ @XML_VERSION@ - - org.scala-lang.modules - scala-parser-combinators_@SCALA_BINARY_VERSION@ - @PARSER_COMBINATORS_VERSION@ - jline jline diff --git a/src/eclipse/interactive/.classpath b/src/eclipse/interactive/.classpath index 721351a20775..9c02e9bb1aab 100644 --- a/src/eclipse/interactive/.classpath +++ b/src/eclipse/interactive/.classpath @@ -1,7 +1,7 @@ - + diff --git a/src/eclipse/partest/.classpath b/src/eclipse/partest/.classpath index 22afd65d439f..74eb47032dd0 100644 --- a/src/eclipse/partest/.classpath +++ b/src/eclipse/partest/.classpath @@ -1,7 +1,7 @@ - + @@ -9,6 +9,6 @@ - + diff --git a/src/eclipse/repl/.classpath b/src/eclipse/repl/.classpath index 141f84e6bb21..4b5369096254 100644 --- a/src/eclipse/repl/.classpath +++ b/src/eclipse/repl/.classpath @@ -1,7 +1,7 @@ - + diff --git a/src/eclipse/scala-compiler/.classpath b/src/eclipse/scala-compiler/.classpath index 625b9b2e4b8f..c4683059346b 100644 --- a/src/eclipse/scala-compiler/.classpath +++ b/src/eclipse/scala-compiler/.classpath @@ -4,7 +4,7 @@ - + diff --git a/src/eclipse/scaladoc/.classpath b/src/eclipse/scaladoc/.classpath index b4450df4ef19..9ae1a1c1dd41 100644 --- a/src/eclipse/scaladoc/.classpath +++ b/src/eclipse/scaladoc/.classpath @@ -2,12 +2,11 @@ - + - - - + + diff --git a/src/eclipse/test-junit/.classpath b/src/eclipse/test-junit/.classpath index 1e1b51066312..af112840b7d3 100644 --- a/src/eclipse/test-junit/.classpath +++ b/src/eclipse/test-junit/.classpath @@ -1,7 +1,7 @@ - + @@ -10,7 +10,7 @@ - + diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index 79ad2808f692..01f08c4efdc0 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -74,9 +74,8 @@ - - - + + @@ -86,9 +85,8 @@ - - - + + @@ -97,14 +95,13 @@ - - - + + - + - + @@ -112,7 +109,7 @@ - + @@ -123,11 +120,10 @@ - - - + + - + @@ -136,7 +132,7 @@ - + @@ -145,9 +141,8 @@ - - - + + @@ -156,9 +151,8 @@ - - - + + @@ -259,10 +253,9 @@ - - - - + + + @@ -273,9 +266,8 @@ - - - + + @@ -284,12 +276,11 @@ @@ -300,11 +291,10 @@ - - - + + - + diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala index 6b24c0f5687c..8f58a7b84563 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala @@ -10,7 +10,6 @@ package page import scala.tools.nsc.doc import scala.tools.nsc.doc.model.{Package, DocTemplateEntity} import scala.tools.nsc.doc.html.{Page, HtmlFactory} -import scala.util.parsing.json.{JSONObject, JSONArray, JSONType} class IndexScript(universe: doc.Universe) extends Page { import model._ @@ -21,7 +20,7 @@ class IndexScript(universe: doc.Universe) extends Page { override def writeFor(site: HtmlFactory) { writeFile(site) { - _.write("Index.PACKAGES = " + packages.toString() + ";") + _.write(s"Index.PACKAGES = $packages;") } } @@ -30,7 +29,7 @@ class IndexScript(universe: doc.Universe) extends Page { case (pack, templates) => { val merged = mergeByQualifiedName(templates) - val ary = merged.keys.toList.sortBy(_.toLowerCase).map { key => + val ary = 
merged.keys.toVector.sortBy(_.toLowerCase).map { key => /** One pair is generated for the class/trait and one for the * companion object, both will have the same {"name": key} * @@ -44,7 +43,7 @@ class IndexScript(universe: doc.Universe) extends Page { Seq( kind -> relativeLinkTo(t), "kind" -> kind, - s"members_$kind" -> membersToJSON(t.members.filter(!_.isShadowedOrAmbiguousImplicit), t), + s"members_$kind" -> membersToJSON(t.members.toVector.filter(!_.isShadowedOrAmbiguousImplicit), t), "shortDescription" -> shortDesc(t)) } @@ -58,18 +57,18 @@ class IndexScript(universe: doc.Universe) extends Page { JSONObject(Map(pairs : _*)) } - def mergeByQualifiedName(source: List[DocTemplateEntity]) = { - var result = Map[String, List[DocTemplateEntity]]() + private def mergeByQualifiedName(source: List[DocTemplateEntity]): collection.mutable.Map[String, List[DocTemplateEntity]] = { + val result = collection.mutable.Map[String, List[DocTemplateEntity]]() for (t <- source) { val k = t.qualifiedName - result += k -> (result.getOrElse(k, List()) :+ t) + result += k -> (result.getOrElse(k, Nil) :+ t) } result } - def allPackages = { + def allPackages: List[Package] = { def f(parent: Package): List[Package] = { parent.packages.flatMap( p => f(p) :+ p @@ -78,7 +77,7 @@ class IndexScript(universe: doc.Universe) extends Page { f(universe.rootPackage).sortBy(_.toString) } - def allPackagesWithTemplates = { + def allPackagesWithTemplates: Map[Package, List[DocTemplateEntity]] = { Map(allPackages.map((key) => { key -> key.templates.collect { case t: DocTemplateEntity if !t.isPackage && !universe.settings.hardcoded.isExcluded(t.qualifiedName) => t @@ -92,8 +91,8 @@ class IndexScript(universe: doc.Universe) extends Page { } /** Returns the json representation of the supplied members */ - def membersToJSON(entities: List[MemberEntity], parent: DocTemplateEntity): JSONType = - JSONArray(entities map (memberToJSON(_, parent))) + def membersToJSON(entities: Vector[MemberEntity], parent: DocTemplateEntity): JSONArray = + JSONArray(entities.map(memberToJSON(_, parent))) private def memberToJSON(mbr: MemberEntity, parent: DocTemplateEntity): JSONObject = { /** This function takes a member and gets eventual parameters and the diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala new file mode 100644 index 000000000000..5f6cb7e7995d --- /dev/null +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala @@ -0,0 +1,56 @@ +package scala.tools.nsc.doc.html.page + +import JSONFormat.format + +private[page] object JSONFormat { + + def format(x: Any): String = x match { + case s: String => s"""\"${quoteString(s)}\"""" + case jo: JSONObject => jo.toString + case ja: JSONArray => ja.toString + case other => throw new UnsupportedOperationException(s"Value $other of class ${other.getClass} cannot be formatted.") + } + + /** This function can be used to properly quote Strings for JSON output. */ + def quoteString(s: String): String = { + val len: Int = s.length + val buf = new StringBuilder(len + len/4) + var i: Int = 0 + while (i < len) { + s.apply(i) match { + case '"' => buf ++= "\\\"" + case '\\' => buf ++= "\\\\" + case '/' => buf ++= "\\/" + case '\b' => buf ++= "\\b" + case '\f' => buf ++= "\\f" + case '\n' => buf ++= "\\n" + case '\r' => buf ++= "\\r" + case '\t' => buf ++= "\\t" + /* We'll unicode escape any control characters. 
These include: + * 0x00 -> 0x1f : ASCII Control (C0 Control Codes) + * 0x7f : ASCII DELETE + * 0x80 -> 0x9f : C1 Control Codes + * + * Per RFC4627, section 2.5, we're not technically required to + * encode the C1 codes, but we do to be safe. + */ + case c if ((c >= '\u0000' && c <= '\u001f') || (c >= '\u007f' && c <= '\u009f')) => + val cint = c.toInt + buf ++= f"\\u$cint%04x" + case c => buf += c + } + i += 1 + } + buf.toString() + } +} + +/** Represents a JSON Object (map). */ +private[page] case class JSONObject(obj: Map[String,Any]) { + override def toString = obj.map({ case (k,v) => format(k) + " : " + format(v) }).mkString("{", ", ", "}") +} + +/** Represents a JSON Array (vector). */ +private[page] case class JSONArray(vector: Vector[Any]) { + override def toString = vector.map(format).mkString("[", ", ", "]") +} From e4a978d06b410700b2ee85696b9ad6f6aa7ff183 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 22 Jul 2016 14:19:19 +1000 Subject: [PATCH 0189/2793] SD-121 Remove now-unneeded J{Function,Proc}N functional interfaces Non-specialized functions can directly use `scala.FunctionN` as the functional interface, now that mixin generates default methods in the new trait encoding. Unfortunately we can't do this for specialized functions as things stand: specialization leaves the wrong method abstract. In principle, we could/should amend the specialization transform to fix this. But my earlier attempts at this weren't successful, so for now we have to stick with the fallback plan of keeping some hand-rolled functional interfaces around. This commit reduces the surface area of `scala.runtime.java8` to the minimal requirement: one functional interface for each specialized variant of `Function{0,1,2}`. --- .../tools/nsc/transform/Delambdafy.scala | 8 +- .../scala/runtime/java8/JFunction.java | 146 ----- .../runtime/java8/JFunction0$mcB$sp.java | 2 +- .../runtime/java8/JFunction0$mcC$sp.java | 2 +- .../runtime/java8/JFunction0$mcD$sp.java | 2 +- .../runtime/java8/JFunction0$mcF$sp.java | 2 +- .../runtime/java8/JFunction0$mcI$sp.java | 2 +- .../runtime/java8/JFunction0$mcJ$sp.java | 2 +- .../runtime/java8/JFunction0$mcS$sp.java | 2 +- .../runtime/java8/JFunction0$mcV$sp.java | 2 +- .../runtime/java8/JFunction0$mcZ$sp.java | 2 +- .../scala/runtime/java8/JFunction0.java | 39 -- .../runtime/java8/JFunction1$mcDD$sp.java | 2 +- .../runtime/java8/JFunction1$mcDF$sp.java | 2 +- .../runtime/java8/JFunction1$mcDI$sp.java | 2 +- .../runtime/java8/JFunction1$mcDJ$sp.java | 2 +- .../runtime/java8/JFunction1$mcFD$sp.java | 2 +- .../runtime/java8/JFunction1$mcFF$sp.java | 2 +- .../runtime/java8/JFunction1$mcFI$sp.java | 2 +- .../runtime/java8/JFunction1$mcFJ$sp.java | 2 +- .../runtime/java8/JFunction1$mcID$sp.java | 2 +- .../runtime/java8/JFunction1$mcIF$sp.java | 2 +- .../runtime/java8/JFunction1$mcII$sp.java | 2 +- .../runtime/java8/JFunction1$mcIJ$sp.java | 2 +- .../runtime/java8/JFunction1$mcJD$sp.java | 2 +- .../runtime/java8/JFunction1$mcJF$sp.java | 2 +- .../runtime/java8/JFunction1$mcJI$sp.java | 2 +- .../runtime/java8/JFunction1$mcJJ$sp.java | 2 +- .../runtime/java8/JFunction1$mcVD$sp.java | 2 +- .../runtime/java8/JFunction1$mcVF$sp.java | 2 +- .../runtime/java8/JFunction1$mcVI$sp.java | 2 +- .../runtime/java8/JFunction1$mcVJ$sp.java | 2 +- .../runtime/java8/JFunction1$mcZD$sp.java | 2 +- .../runtime/java8/JFunction1$mcZF$sp.java | 2 +- .../runtime/java8/JFunction1$mcZI$sp.java | 2 +- .../runtime/java8/JFunction1$mcZJ$sp.java | 2 +- .../scala/runtime/java8/JFunction1.java | 228 --------
.../scala/runtime/java8/JFunction10.java | 10 - .../scala/runtime/java8/JFunction11.java | 10 - .../scala/runtime/java8/JFunction12.java | 10 - .../scala/runtime/java8/JFunction13.java | 10 - .../scala/runtime/java8/JFunction14.java | 10 - .../scala/runtime/java8/JFunction15.java | 10 - .../scala/runtime/java8/JFunction16.java | 10 - .../scala/runtime/java8/JFunction17.java | 10 - .../scala/runtime/java8/JFunction18.java | 10 - .../scala/runtime/java8/JFunction19.java | 10 - .../runtime/java8/JFunction2$mcDDD$sp.java | 2 +- .../runtime/java8/JFunction2$mcDDI$sp.java | 2 +- .../runtime/java8/JFunction2$mcDDJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcDID$sp.java | 2 +- .../runtime/java8/JFunction2$mcDII$sp.java | 2 +- .../runtime/java8/JFunction2$mcDIJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcDJD$sp.java | 2 +- .../runtime/java8/JFunction2$mcDJI$sp.java | 2 +- .../runtime/java8/JFunction2$mcDJJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcFDD$sp.java | 2 +- .../runtime/java8/JFunction2$mcFDI$sp.java | 2 +- .../runtime/java8/JFunction2$mcFDJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcFID$sp.java | 2 +- .../runtime/java8/JFunction2$mcFII$sp.java | 2 +- .../runtime/java8/JFunction2$mcFIJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcFJD$sp.java | 2 +- .../runtime/java8/JFunction2$mcFJI$sp.java | 2 +- .../runtime/java8/JFunction2$mcFJJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcIDD$sp.java | 2 +- .../runtime/java8/JFunction2$mcIDI$sp.java | 2 +- .../runtime/java8/JFunction2$mcIDJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcIID$sp.java | 2 +- .../runtime/java8/JFunction2$mcIII$sp.java | 2 +- .../runtime/java8/JFunction2$mcIIJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcIJD$sp.java | 2 +- .../runtime/java8/JFunction2$mcIJI$sp.java | 2 +- .../runtime/java8/JFunction2$mcIJJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcJDD$sp.java | 2 +- .../runtime/java8/JFunction2$mcJDI$sp.java | 2 +- .../runtime/java8/JFunction2$mcJDJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcJID$sp.java | 2 +- .../runtime/java8/JFunction2$mcJII$sp.java | 2 +- .../runtime/java8/JFunction2$mcJIJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcJJD$sp.java | 2 +- .../runtime/java8/JFunction2$mcJJI$sp.java | 2 +- .../runtime/java8/JFunction2$mcJJJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcVDD$sp.java | 2 +- .../runtime/java8/JFunction2$mcVDI$sp.java | 2 +- .../runtime/java8/JFunction2$mcVDJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcVID$sp.java | 2 +- .../runtime/java8/JFunction2$mcVII$sp.java | 2 +- .../runtime/java8/JFunction2$mcVIJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcVJD$sp.java | 2 +- .../runtime/java8/JFunction2$mcVJI$sp.java | 2 +- .../runtime/java8/JFunction2$mcVJJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcZDD$sp.java | 2 +- .../runtime/java8/JFunction2$mcZDI$sp.java | 2 +- .../runtime/java8/JFunction2$mcZDJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcZID$sp.java | 2 +- .../runtime/java8/JFunction2$mcZII$sp.java | 2 +- .../runtime/java8/JFunction2$mcZIJ$sp.java | 2 +- .../runtime/java8/JFunction2$mcZJD$sp.java | 2 +- .../runtime/java8/JFunction2$mcZJI$sp.java | 2 +- .../runtime/java8/JFunction2$mcZJJ$sp.java | 2 +- .../scala/runtime/java8/JFunction2.java | 498 ------------------ .../scala/runtime/java8/JFunction20.java | 10 - .../scala/runtime/java8/JFunction21.java | 10 - .../scala/runtime/java8/JFunction22.java | 10 - .../scala/runtime/java8/JFunction3.java | 10 - .../scala/runtime/java8/JFunction4.java | 10 - .../scala/runtime/java8/JFunction5.java | 10 - 
.../scala/runtime/java8/JFunction6.java | 10 - .../scala/runtime/java8/JFunction7.java | 10 - .../scala/runtime/java8/JFunction8.java | 10 - .../scala/runtime/java8/JFunction9.java | 10 - .../scala/runtime/java8/JProcedure0.java | 21 - .../scala/runtime/java8/JProcedure1.java | 21 - .../scala/runtime/java8/JProcedure10.java | 21 - .../scala/runtime/java8/JProcedure11.java | 21 - .../scala/runtime/java8/JProcedure12.java | 21 - .../scala/runtime/java8/JProcedure13.java | 21 - .../scala/runtime/java8/JProcedure14.java | 21 - .../scala/runtime/java8/JProcedure15.java | 21 - .../scala/runtime/java8/JProcedure16.java | 21 - .../scala/runtime/java8/JProcedure17.java | 21 - .../scala/runtime/java8/JProcedure18.java | 21 - .../scala/runtime/java8/JProcedure19.java | 21 - .../scala/runtime/java8/JProcedure2.java | 21 - .../scala/runtime/java8/JProcedure20.java | 21 - .../scala/runtime/java8/JProcedure21.java | 21 - .../scala/runtime/java8/JProcedure22.java | 21 - .../scala/runtime/java8/JProcedure3.java | 21 - .../scala/runtime/java8/JProcedure4.java | 21 - .../scala/runtime/java8/JProcedure5.java | 21 - .../scala/runtime/java8/JProcedure6.java | 21 - .../scala/runtime/java8/JProcedure7.java | 21 - .../scala/runtime/java8/JProcedure8.java | 21 - .../scala/runtime/java8/JProcedure9.java | 21 - 135 files changed, 93 insertions(+), 1683 deletions(-) delete mode 100644 src/library/scala/runtime/java8/JFunction.java delete mode 100644 src/library/scala/runtime/java8/JFunction0.java delete mode 100644 src/library/scala/runtime/java8/JFunction1.java delete mode 100644 src/library/scala/runtime/java8/JFunction10.java delete mode 100644 src/library/scala/runtime/java8/JFunction11.java delete mode 100644 src/library/scala/runtime/java8/JFunction12.java delete mode 100644 src/library/scala/runtime/java8/JFunction13.java delete mode 100644 src/library/scala/runtime/java8/JFunction14.java delete mode 100644 src/library/scala/runtime/java8/JFunction15.java delete mode 100644 src/library/scala/runtime/java8/JFunction16.java delete mode 100644 src/library/scala/runtime/java8/JFunction17.java delete mode 100644 src/library/scala/runtime/java8/JFunction18.java delete mode 100644 src/library/scala/runtime/java8/JFunction19.java delete mode 100644 src/library/scala/runtime/java8/JFunction2.java delete mode 100644 src/library/scala/runtime/java8/JFunction20.java delete mode 100644 src/library/scala/runtime/java8/JFunction21.java delete mode 100644 src/library/scala/runtime/java8/JFunction22.java delete mode 100644 src/library/scala/runtime/java8/JFunction3.java delete mode 100644 src/library/scala/runtime/java8/JFunction4.java delete mode 100644 src/library/scala/runtime/java8/JFunction5.java delete mode 100644 src/library/scala/runtime/java8/JFunction6.java delete mode 100644 src/library/scala/runtime/java8/JFunction7.java delete mode 100644 src/library/scala/runtime/java8/JFunction8.java delete mode 100644 src/library/scala/runtime/java8/JFunction9.java delete mode 100644 src/library/scala/runtime/java8/JProcedure0.java delete mode 100644 src/library/scala/runtime/java8/JProcedure1.java delete mode 100644 src/library/scala/runtime/java8/JProcedure10.java delete mode 100644 src/library/scala/runtime/java8/JProcedure11.java delete mode 100644 src/library/scala/runtime/java8/JProcedure12.java delete mode 100644 src/library/scala/runtime/java8/JProcedure13.java delete mode 100644 src/library/scala/runtime/java8/JProcedure14.java delete mode 100644 src/library/scala/runtime/java8/JProcedure15.java delete mode 100644 
src/library/scala/runtime/java8/JProcedure16.java delete mode 100644 src/library/scala/runtime/java8/JProcedure17.java delete mode 100644 src/library/scala/runtime/java8/JProcedure18.java delete mode 100644 src/library/scala/runtime/java8/JProcedure19.java delete mode 100644 src/library/scala/runtime/java8/JProcedure2.java delete mode 100644 src/library/scala/runtime/java8/JProcedure20.java delete mode 100644 src/library/scala/runtime/java8/JProcedure21.java delete mode 100644 src/library/scala/runtime/java8/JProcedure22.java delete mode 100644 src/library/scala/runtime/java8/JProcedure3.java delete mode 100644 src/library/scala/runtime/java8/JProcedure4.java delete mode 100644 src/library/scala/runtime/java8/JProcedure5.java delete mode 100644 src/library/scala/runtime/java8/JProcedure6.java delete mode 100644 src/library/scala/runtime/java8/JProcedure7.java delete mode 100644 src/library/scala/runtime/java8/JProcedure8.java delete mode 100644 src/library/scala/runtime/java8/JProcedure9.java diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index 804bcddb7b88..88837842fa3b 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -241,8 +241,12 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre exitingErasure(target.info.paramTypes).map(reboxValueClass) :+ reboxValueClass(exitingErasure(target.info.resultType))).toTypeName val isSpecialized = specializedName != funSym.name - val functionalInterface = // TODO: this is no longer needed, right? we can just use the regular function classes - if (isSpecialized) currentRun.runDefinitions.Scala_Java8_CompatPackage.info.decl(specializedName.prepend("J")) + val functionalInterface = + if (isSpecialized) { + // Unfortunately we still need to use custom functional interfaces for specialized functions so that the + // unboxed apply method is left abstract for us to implement. + currentRun.runDefinitions.Scala_Java8_CompatPackage.info.decl(specializedName.prepend("J")) + } else FunctionClass(originalFunction.vparams.length) (functionalInterface, isSpecialized) diff --git a/src/library/scala/runtime/java8/JFunction.java b/src/library/scala/runtime/java8/JFunction.java deleted file mode 100644 index 326aad3fecc9..000000000000 --- a/src/library/scala/runtime/java8/JFunction.java +++ /dev/null @@ -1,146 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -public final class JFunction { - private JFunction() {} - public static scala.Function0 func(JFunction0 f) { return f; } - public static scala.Function0 proc(JProcedure0 p) { return p; } - public static scala.Function0 procSpecialized(JFunction0$mcV$sp f) { return f; } - public static scala.Function0 funcSpecialized(JFunction0$mcB$sp f) { return f; } - public static scala.Function0 funcSpecialized(JFunction0$mcS$sp f) { return f; } - public static scala.Function0 funcSpecialized(JFunction0$mcI$sp f) { return f; } - public static scala.Function0 funcSpecialized(JFunction0$mcJ$sp f) { return f; } - public static scala.Function0 funcSpecialized(JFunction0$mcC$sp f) { return f; } - public static scala.Function0 funcSpecialized(JFunction0$mcF$sp f) { return f; } - public static scala.Function0 funcSpecialized(JFunction0$mcD$sp f) { return f; } - public static scala.Function0 funcSpecialized(JFunction0$mcZ$sp f) { return f; } - public static scala.Function1 func(JFunction1 f) { return f; } - public static scala.Function1 proc(JProcedure1 p) { return p; } - public static scala.Function1 procSpecialized(JFunction1$mcVI$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcZI$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcII$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcFI$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcJI$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcDI$sp f) { return f; } - public static scala.Function1 procSpecialized(JFunction1$mcVJ$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcZJ$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcIJ$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcFJ$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcJJ$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcDJ$sp f) { return f; } - public static scala.Function1 procSpecialized(JFunction1$mcVF$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcZF$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcIF$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcFF$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcJF$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcDF$sp f) { return f; } - public static scala.Function1 procSpecialized(JFunction1$mcVD$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcZD$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcID$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcFD$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcJD$sp f) { return f; } - public static scala.Function1 funcSpecialized(JFunction1$mcDD$sp f) { return f; } - public static scala.Function2 func(JFunction2 f) { return f; } - public static scala.Function2 proc(JProcedure2 p) { return p; } - public static scala.Function2 procSpecialized(JFunction2$mcVII$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcZII$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcIII$sp f) { return f; } - public 
static scala.Function2 funcSpecialized(JFunction2$mcFII$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcJII$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcDII$sp f) { return f; } - public static scala.Function2 procSpecialized(JFunction2$mcVIJ$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcZIJ$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcIIJ$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcFIJ$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcJIJ$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcDIJ$sp f) { return f; } - public static scala.Function2 procSpecialized(JFunction2$mcVID$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcZID$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcIID$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcFID$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcJID$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcDID$sp f) { return f; } - public static scala.Function2 procSpecialized(JFunction2$mcVJI$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcZJI$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcIJI$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcFJI$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcJJI$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcDJI$sp f) { return f; } - public static scala.Function2 procSpecialized(JFunction2$mcVJJ$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcZJJ$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcIJJ$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcFJJ$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcJJJ$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcDJJ$sp f) { return f; } - public static scala.Function2 procSpecialized(JFunction2$mcVJD$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcZJD$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcIJD$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcFJD$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcJJD$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcDJD$sp f) { return f; } - public static scala.Function2 procSpecialized(JFunction2$mcVDI$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcZDI$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcIDI$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcFDI$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcJDI$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcDDI$sp f) { return f; } - public static scala.Function2 procSpecialized(JFunction2$mcVDJ$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcZDJ$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcIDJ$sp f) { 
return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcFDJ$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcJDJ$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcDDJ$sp f) { return f; } - public static scala.Function2 procSpecialized(JFunction2$mcVDD$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcZDD$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcIDD$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcFDD$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcJDD$sp f) { return f; } - public static scala.Function2 funcSpecialized(JFunction2$mcDDD$sp f) { return f; } - public static scala.Function3 func(JFunction3 f) { return f; } - public static scala.Function3 proc(JProcedure3 p) { return p; } - public static scala.Function4 func(JFunction4 f) { return f; } - public static scala.Function4 proc(JProcedure4 p) { return p; } - public static scala.Function5 func(JFunction5 f) { return f; } - public static scala.Function5 proc(JProcedure5 p) { return p; } - public static scala.Function6 func(JFunction6 f) { return f; } - public static scala.Function6 proc(JProcedure6 p) { return p; } - public static scala.Function7 func(JFunction7 f) { return f; } - public static scala.Function7 proc(JProcedure7 p) { return p; } - public static scala.Function8 func(JFunction8 f) { return f; } - public static scala.Function8 proc(JProcedure8 p) { return p; } - public static scala.Function9 func(JFunction9 f) { return f; } - public static scala.Function9 proc(JProcedure9 p) { return p; } - public static scala.Function10 func(JFunction10 f) { return f; } - public static scala.Function10 proc(JProcedure10 p) { return p; } - public static scala.Function11 func(JFunction11 f) { return f; } - public static scala.Function11 proc(JProcedure11 p) { return p; } - public static scala.Function12 func(JFunction12 f) { return f; } - public static scala.Function12 proc(JProcedure12 p) { return p; } - public static scala.Function13 func(JFunction13 f) { return f; } - public static scala.Function13 proc(JProcedure13 p) { return p; } - public static scala.Function14 func(JFunction14 f) { return f; } - public static scala.Function14 proc(JProcedure14 p) { return p; } - public static scala.Function15 func(JFunction15 f) { return f; } - public static scala.Function15 proc(JProcedure15 p) { return p; } - public static scala.Function16 func(JFunction16 f) { return f; } - public static scala.Function16 proc(JProcedure16 p) { return p; } - public static scala.Function17 func(JFunction17 f) { return f; } - public static scala.Function17 proc(JProcedure17 p) { return p; } - public static scala.Function18 func(JFunction18 f) { return f; } - public static scala.Function18 proc(JProcedure18 p) { return p; } - public static scala.Function19 func(JFunction19 f) { return f; } - public static scala.Function19 proc(JProcedure19 p) { return p; } - public static scala.Function20 func(JFunction20 f) { return f; } - public static scala.Function20 proc(JProcedure20 p) { return p; } - public static scala.Function21 func(JFunction21 f) { return f; } - public static scala.Function21 proc(JProcedure21 p) { return p; } - public static scala.Function22 func(JFunction22 f) { return f; } - public static scala.Function22 proc(JProcedure22 p) { return p; } -} - diff --git a/src/library/scala/runtime/java8/JFunction0$mcB$sp.java 
b/src/library/scala/runtime/java8/JFunction0$mcB$sp.java index c88275763001..622dbabcf111 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcB$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcB$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction0$mcB$sp extends JFunction0 { +public interface JFunction0$mcB$sp extends scala.Function0, java.io.Serializable { byte apply$mcB$sp(); default Object apply() { return scala.runtime.BoxesRunTime.boxToByte(apply$mcB$sp()); } diff --git a/src/library/scala/runtime/java8/JFunction0$mcC$sp.java b/src/library/scala/runtime/java8/JFunction0$mcC$sp.java index c804529f718b..ad9a14ffa8fc 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcC$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcC$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction0$mcC$sp extends JFunction0 { +public interface JFunction0$mcC$sp extends scala.Function0, java.io.Serializable { char apply$mcC$sp(); default Object apply() { return scala.runtime.BoxesRunTime.boxToCharacter(apply$mcC$sp()); } diff --git a/src/library/scala/runtime/java8/JFunction0$mcD$sp.java b/src/library/scala/runtime/java8/JFunction0$mcD$sp.java index dacf50237c5b..291b50db4bd0 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcD$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction0$mcD$sp extends JFunction0 { +public interface JFunction0$mcD$sp extends scala.Function0, java.io.Serializable { double apply$mcD$sp(); default Object apply() { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcD$sp()); } diff --git a/src/library/scala/runtime/java8/JFunction0$mcF$sp.java b/src/library/scala/runtime/java8/JFunction0$mcF$sp.java index 2a9f8249246e..73b31dea0f0f 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcF$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcF$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction0$mcF$sp extends JFunction0 { +public interface JFunction0$mcF$sp extends scala.Function0, java.io.Serializable { float apply$mcF$sp(); default Object apply() { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcF$sp()); } diff --git a/src/library/scala/runtime/java8/JFunction0$mcI$sp.java b/src/library/scala/runtime/java8/JFunction0$mcI$sp.java index 75c612f9165f..f9b2d659ad31 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcI$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction0$mcI$sp extends JFunction0 { +public interface JFunction0$mcI$sp extends scala.Function0, java.io.Serializable { int apply$mcI$sp(); default Object apply() { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcI$sp()); } diff --git a/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java b/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java index d08984c794cc..73c41976b7a3 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction0$mcJ$sp extends JFunction0 { +public interface JFunction0$mcJ$sp extends scala.Function0, java.io.Serializable { long apply$mcJ$sp(); default Object apply() { return 
scala.runtime.BoxesRunTime.boxToLong(apply$mcJ$sp()); } diff --git a/src/library/scala/runtime/java8/JFunction0$mcS$sp.java b/src/library/scala/runtime/java8/JFunction0$mcS$sp.java index d9e36a39f0c0..5fbabb2358e2 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcS$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcS$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction0$mcS$sp extends JFunction0 { +public interface JFunction0$mcS$sp extends scala.Function0, java.io.Serializable { short apply$mcS$sp(); default Object apply() { return scala.runtime.BoxesRunTime.boxToShort(apply$mcS$sp()); } diff --git a/src/library/scala/runtime/java8/JFunction0$mcV$sp.java b/src/library/scala/runtime/java8/JFunction0$mcV$sp.java index abd5e6ebbe47..735843796ce8 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcV$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcV$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction0$mcV$sp extends JFunction0 { +public interface JFunction0$mcV$sp extends scala.Function0, java.io.Serializable { void apply$mcV$sp(); default Object apply() { apply$mcV$sp(); return scala.runtime.BoxedUnit.UNIT; } diff --git a/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java b/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java index e1cd62a913b9..01234c1728a2 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction0$mcZ$sp extends JFunction0 { +public interface JFunction0$mcZ$sp extends scala.Function0, java.io.Serializable { boolean apply$mcZ$sp(); default Object apply() { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZ$sp()); } diff --git a/src/library/scala/runtime/java8/JFunction0.java b/src/library/scala/runtime/java8/JFunction0.java deleted file mode 100644 index 13426cc8af61..000000000000 --- a/src/library/scala/runtime/java8/JFunction0.java +++ /dev/null @@ -1,39 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction0 extends scala.Function0, java.io.Serializable { - default void $init$() { - }; - default void apply$mcV$sp() { - apply(); - } - default byte apply$mcB$sp() { - return scala.runtime.BoxesRunTime.unboxToByte(apply()); - } - default short apply$mcS$sp() { - return scala.runtime.BoxesRunTime.unboxToShort(apply()); - } - default int apply$mcI$sp() { - return scala.runtime.BoxesRunTime.unboxToInt(apply()); - } - default long apply$mcJ$sp() { - return scala.runtime.BoxesRunTime.unboxToLong(apply()); - } - default char apply$mcC$sp() { - return scala.runtime.BoxesRunTime.unboxToChar(apply()); - } - default float apply$mcF$sp() { - return scala.runtime.BoxesRunTime.unboxToFloat(apply()); - } - default double apply$mcD$sp() { - return scala.runtime.BoxesRunTime.unboxToDouble(apply()); - } - default boolean apply$mcZ$sp() { - return scala.runtime.BoxesRunTime.unboxToBoolean(apply()); - } -} diff --git a/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java index 4fbb370b8bff..07b85eed59a9 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcDD$sp extends JFunction1 { +public interface JFunction1$mcDD$sp extends scala.Function1, java.io.Serializable { double apply$mcDD$sp(double v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java index ce45666dd1d7..f09edd2ce25f 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcDF$sp extends JFunction1 { +public interface JFunction1$mcDF$sp extends scala.Function1, java.io.Serializable { double apply$mcDF$sp(float v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java index 09cac947c967..3cf40cb74998 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcDI$sp extends JFunction1 { +public interface JFunction1$mcDI$sp extends scala.Function1, java.io.Serializable { double apply$mcDI$sp(int v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java index f5154c3854e3..4023f30bc052 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcDJ$sp extends JFunction1 { +public interface JFunction1$mcDJ$sp extends scala.Function1, java.io.Serializable { double apply$mcDJ$sp(long v1); default Object 
apply(Object t) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java index 758b432d99ba..d4608958383a 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcFD$sp extends JFunction1 { +public interface JFunction1$mcFD$sp extends scala.Function1, java.io.Serializable { float apply$mcFD$sp(double v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java index 7e13e287a51d..6c591800cadc 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcFF$sp extends JFunction1 { +public interface JFunction1$mcFF$sp extends scala.Function1, java.io.Serializable { float apply$mcFF$sp(float v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java index e3c4a203c78a..666919591438 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcFI$sp extends JFunction1 { +public interface JFunction1$mcFI$sp extends scala.Function1, java.io.Serializable { float apply$mcFI$sp(int v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java index d989fa1ea8e6..cd953677aec6 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcFJ$sp extends JFunction1 { +public interface JFunction1$mcFJ$sp extends scala.Function1, java.io.Serializable { float apply$mcFJ$sp(long v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcID$sp.java b/src/library/scala/runtime/java8/JFunction1$mcID$sp.java index bde5d88d4636..37f686493681 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcID$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcID$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcID$sp extends JFunction1 { +public interface JFunction1$mcID$sp extends scala.Function1, java.io.Serializable { int apply$mcID$sp(double v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcID$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java 
b/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java index d1d235aef111..8a7656a286d5 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcIF$sp extends JFunction1 { +public interface JFunction1$mcIF$sp extends scala.Function1, java.io.Serializable { int apply$mcIF$sp(float v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcII$sp.java b/src/library/scala/runtime/java8/JFunction1$mcII$sp.java index ef44b3830cf3..792627b4005e 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcII$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcII$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcII$sp extends JFunction1 { +public interface JFunction1$mcII$sp extends scala.Function1, java.io.Serializable { int apply$mcII$sp(int v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcII$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java index 373d13cd46cf..01c47a67dac7 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcIJ$sp extends JFunction1 { +public interface JFunction1$mcIJ$sp extends scala.Function1, java.io.Serializable { int apply$mcIJ$sp(long v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java index 86fd7b7779b1..d8d5274ca1db 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcJD$sp extends JFunction1 { +public interface JFunction1$mcJD$sp extends scala.Function1, java.io.Serializable { long apply$mcJD$sp(double v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java index 3bcf264034db..cc1fad36d04a 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcJF$sp extends JFunction1 { +public interface JFunction1$mcJF$sp extends scala.Function1, java.io.Serializable { long apply$mcJF$sp(float v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java index 11bc15ef6e96..fe941dd61a7e 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java +++ 
b/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcJI$sp extends JFunction1 { +public interface JFunction1$mcJI$sp extends scala.Function1, java.io.Serializable { long apply$mcJI$sp(int v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java index 2e1ad7878fb9..7034115bad79 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcJJ$sp extends JFunction1 { +public interface JFunction1$mcJJ$sp extends scala.Function1, java.io.Serializable { long apply$mcJJ$sp(long v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java index c8077e12680c..dde9f557226b 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcVD$sp extends JFunction1 { +public interface JFunction1$mcVD$sp extends scala.Function1, java.io.Serializable { void apply$mcVD$sp(double v1); default Object apply(Object t) { apply$mcVD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t)); return scala.runtime.BoxedUnit.UNIT; } diff --git a/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java index e7be77f8e3d8..0ffd80621f13 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcVF$sp extends JFunction1 { +public interface JFunction1$mcVF$sp extends scala.Function1, java.io.Serializable { void apply$mcVF$sp(float v1); default Object apply(Object t) { apply$mcVF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t)); return scala.runtime.BoxedUnit.UNIT; } diff --git a/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java index 7597ca529451..2543d23e3134 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcVI$sp extends JFunction1 { +public interface JFunction1$mcVI$sp extends scala.Function1, java.io.Serializable { void apply$mcVI$sp(int v1); default Object apply(Object t) { apply$mcVI$sp(scala.runtime.BoxesRunTime.unboxToInt(t)); return scala.runtime.BoxedUnit.UNIT; } diff --git a/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java index 55c6c3997fc4..7564175402df 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcVJ$sp extends JFunction1 { +public interface JFunction1$mcVJ$sp extends 
scala.Function1, java.io.Serializable { void apply$mcVJ$sp(long v1); default Object apply(Object t) { apply$mcVJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t)); return scala.runtime.BoxedUnit.UNIT; } diff --git a/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java index 883a0e84fa94..ce5bd300297d 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcZD$sp extends JFunction1 { +public interface JFunction1$mcZD$sp extends scala.Function1, java.io.Serializable { boolean apply$mcZD$sp(double v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZD$sp(scala.runtime.BoxesRunTime.unboxToDouble(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java index 884832ca37bd..baa691e5480f 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcZF$sp extends JFunction1 { +public interface JFunction1$mcZF$sp extends scala.Function1, java.io.Serializable { boolean apply$mcZF$sp(float v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZF$sp(scala.runtime.BoxesRunTime.unboxToFloat(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java index 8a51aa99a2a1..bf04b5922b93 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcZI$sp extends JFunction1 { +public interface JFunction1$mcZI$sp extends scala.Function1, java.io.Serializable { boolean apply$mcZI$sp(int v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZI$sp(scala.runtime.BoxesRunTime.unboxToInt(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java index dc619666dcbc..808eea87b8c7 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction1$mcZJ$sp extends JFunction1 { +public interface JFunction1$mcZJ$sp extends scala.Function1, java.io.Serializable { boolean apply$mcZJ$sp(long v1); default Object apply(Object t) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJ$sp(scala.runtime.BoxesRunTime.unboxToLong(t))); } diff --git a/src/library/scala/runtime/java8/JFunction1.java b/src/library/scala/runtime/java8/JFunction1.java deleted file mode 100644 index e1f886dad748..000000000000 --- a/src/library/scala/runtime/java8/JFunction1.java +++ /dev/null @@ -1,228 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction1 extends scala.Function1, java.io.Serializable { - default void apply$mcVI$sp(int v1) { - apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1)); - } - default boolean apply$mcZI$sp(int v1) { - return scala.runtime.BoxesRunTime.unboxToBoolean(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1))); - } - default int apply$mcII$sp(int v1) { - return scala.runtime.BoxesRunTime.unboxToInt(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1))); - } - default float apply$mcFI$sp(int v1) { - return scala.runtime.BoxesRunTime.unboxToFloat(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1))); - } - default long apply$mcJI$sp(int v1) { - return scala.runtime.BoxesRunTime.unboxToLong(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1))); - } - default double apply$mcDI$sp(int v1) { - return scala.runtime.BoxesRunTime.unboxToDouble(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1))); - } - default void apply$mcVJ$sp(long v1) { - apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1)); - } - default boolean apply$mcZJ$sp(long v1) { - return scala.runtime.BoxesRunTime.unboxToBoolean(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1))); - } - default int apply$mcIJ$sp(long v1) { - return scala.runtime.BoxesRunTime.unboxToInt(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1))); - } - default float apply$mcFJ$sp(long v1) { - return scala.runtime.BoxesRunTime.unboxToFloat(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1))); - } - default long apply$mcJJ$sp(long v1) { - return scala.runtime.BoxesRunTime.unboxToLong(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1))); - } - default double apply$mcDJ$sp(long v1) { - return scala.runtime.BoxesRunTime.unboxToDouble(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1))); - } - default void apply$mcVF$sp(float v1) { - apply((T1) scala.runtime.BoxesRunTime.boxToFloat(v1)); - } - default boolean apply$mcZF$sp(float v1) { - return scala.runtime.BoxesRunTime.unboxToBoolean(apply((T1) scala.runtime.BoxesRunTime.boxToFloat(v1))); - } - default int apply$mcIF$sp(float v1) { - return scala.runtime.BoxesRunTime.unboxToInt(apply((T1) scala.runtime.BoxesRunTime.boxToFloat(v1))); - } - default float apply$mcFF$sp(float v1) { - return scala.runtime.BoxesRunTime.unboxToFloat(apply((T1) scala.runtime.BoxesRunTime.boxToFloat(v1))); - } - default long apply$mcJF$sp(float v1) { - return scala.runtime.BoxesRunTime.unboxToLong(apply((T1) scala.runtime.BoxesRunTime.boxToFloat(v1))); - } - default double apply$mcDF$sp(float v1) { - return scala.runtime.BoxesRunTime.unboxToDouble(apply((T1) scala.runtime.BoxesRunTime.boxToFloat(v1))); - } - default void apply$mcVD$sp(double v1) { - apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1)); - } - default boolean apply$mcZD$sp(double v1) { - return scala.runtime.BoxesRunTime.unboxToBoolean(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1))); - } - default int apply$mcID$sp(double v1) { - return scala.runtime.BoxesRunTime.unboxToInt(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1))); - } - default float apply$mcFD$sp(double v1) { - return scala.runtime.BoxesRunTime.unboxToFloat(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1))); - } - default long apply$mcJD$sp(double v1) { - return scala.runtime.BoxesRunTime.unboxToLong(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1))); - } - default double apply$mcDD$sp(double v1) { - return scala.runtime.BoxesRunTime.unboxToDouble(apply((T1) 
scala.runtime.BoxesRunTime.boxToDouble(v1))); - } - - default scala.Function1 compose$mcVI$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcZI$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcII$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcFI$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcJI$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcDI$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcVJ$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcZJ$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcIJ$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcFJ$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcJJ$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcDJ$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcVF$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcZF$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcIF$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcFF$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcJF$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcDF$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcVD$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcZD$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcID$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcFD$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcJD$sp(scala.Function1 g) { - return compose(g); - } - default scala.Function1 compose$mcDD$sp(scala.Function1 g) { - return compose(g); - } - - default scala.Function1 andThen$mcVI$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcZI$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcII$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcFI$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcJI$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcDI$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcVJ$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcZJ$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcIJ$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcFJ$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcJJ$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcDJ$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcVF$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcZF$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcIF$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcFF$sp(scala.Function1 g) { - 
return andThen(g); - } - default scala.Function1 andThen$mcJF$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcDF$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcVD$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcZD$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcID$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcFD$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcJD$sp(scala.Function1 g) { - return andThen(g); - } - default scala.Function1 andThen$mcDD$sp(scala.Function1 g) { - return andThen(g); - } -} diff --git a/src/library/scala/runtime/java8/JFunction10.java b/src/library/scala/runtime/java8/JFunction10.java deleted file mode 100644 index f7a25c0df2ae..000000000000 --- a/src/library/scala/runtime/java8/JFunction10.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction10 extends scala.Function10, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction11.java b/src/library/scala/runtime/java8/JFunction11.java deleted file mode 100644 index 9a548b8fc990..000000000000 --- a/src/library/scala/runtime/java8/JFunction11.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction11 extends scala.Function11, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction12.java b/src/library/scala/runtime/java8/JFunction12.java deleted file mode 100644 index 12fb73faafba..000000000000 --- a/src/library/scala/runtime/java8/JFunction12.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction12 extends scala.Function12, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction13.java b/src/library/scala/runtime/java8/JFunction13.java deleted file mode 100644 index c85c63448ab5..000000000000 --- a/src/library/scala/runtime/java8/JFunction13.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction13 extends scala.Function13, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction14.java b/src/library/scala/runtime/java8/JFunction14.java deleted file mode 100644 index 9a578833aaad..000000000000 --- a/src/library/scala/runtime/java8/JFunction14.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction14 extends scala.Function14, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction15.java b/src/library/scala/runtime/java8/JFunction15.java deleted file mode 100644 index e993643953ce..000000000000 --- a/src/library/scala/runtime/java8/JFunction15.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction15 extends scala.Function15, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction16.java b/src/library/scala/runtime/java8/JFunction16.java deleted file mode 100644 index a252cb5303bd..000000000000 --- a/src/library/scala/runtime/java8/JFunction16.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction16 extends scala.Function16, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction17.java b/src/library/scala/runtime/java8/JFunction17.java deleted file mode 100644 index 045aa7196fa9..000000000000 --- a/src/library/scala/runtime/java8/JFunction17.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction17 extends scala.Function17, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction18.java b/src/library/scala/runtime/java8/JFunction18.java deleted file mode 100644 index ba2bf31206c1..000000000000 --- a/src/library/scala/runtime/java8/JFunction18.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction18 extends scala.Function18, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction19.java b/src/library/scala/runtime/java8/JFunction19.java deleted file mode 100644 index dde48242930f..000000000000 --- a/src/library/scala/runtime/java8/JFunction19.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction19 extends scala.Function19, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java index 1c11fb52522d..80ab5203d954 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcDDD$sp extends JFunction2 { +public interface JFunction2$mcDDD$sp extends scala.Function2, java.io.Serializable { double apply$mcDDD$sp(double v1, double v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java index e080bc87fa20..8e92338b825b 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcDDI$sp extends JFunction2 { +public interface JFunction2$mcDDI$sp extends scala.Function2, java.io.Serializable { double apply$mcDDI$sp(double v1, int v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java index f96b19dff7ed..3d4f4a7cded9 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcDDJ$sp extends JFunction2 { +public interface JFunction2$mcDDJ$sp extends scala.Function2, java.io.Serializable { double apply$mcDDJ$sp(double v1, long v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java index 944f469a6dba..bd6652e51ac1 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcDID$sp extends JFunction2 { +public interface JFunction2$mcDID$sp extends scala.Function2, java.io.Serializable { double apply$mcDID$sp(int v1, double v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java index a04f616b5a7c..d06a246d3385 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcDII$sp extends 
JFunction2 { +public interface JFunction2$mcDII$sp extends scala.Function2, java.io.Serializable { double apply$mcDII$sp(int v1, int v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java index 3a7d33d4a50e..cda23c4dcd74 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcDIJ$sp extends JFunction2 { +public interface JFunction2$mcDIJ$sp extends scala.Function2, java.io.Serializable { double apply$mcDIJ$sp(int v1, long v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java index 86b48486e648..723efd8451eb 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcDJD$sp extends JFunction2 { +public interface JFunction2$mcDJD$sp extends scala.Function2, java.io.Serializable { double apply$mcDJD$sp(long v1, double v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java index b9375c787025..c90352ef301b 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcDJI$sp extends JFunction2 { +public interface JFunction2$mcDJI$sp extends scala.Function2, java.io.Serializable { double apply$mcDJI$sp(long v1, int v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java index 4adbd17e148f..33612197878e 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcDJJ$sp extends JFunction2 { +public interface JFunction2$mcDJJ$sp extends scala.Function2, java.io.Serializable { double apply$mcDJJ$sp(long v1, long v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToDouble(apply$mcDJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java index 7e53d117c7f0..2b9236b5d1b4 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java +++ 
b/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcFDD$sp extends JFunction2 { +public interface JFunction2$mcFDD$sp extends scala.Function2, java.io.Serializable { float apply$mcFDD$sp(double v1, double v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java index 64c4b2f133e9..2c564962a716 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcFDI$sp extends JFunction2 { +public interface JFunction2$mcFDI$sp extends scala.Function2, java.io.Serializable { float apply$mcFDI$sp(double v1, int v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java index c7ffcbc66a9e..a0785f4cd2c6 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcFDJ$sp extends JFunction2 { +public interface JFunction2$mcFDJ$sp extends scala.Function2, java.io.Serializable { float apply$mcFDJ$sp(double v1, long v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java index 43944751e68a..ba67ddb5931d 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcFID$sp extends JFunction2 { +public interface JFunction2$mcFID$sp extends scala.Function2, java.io.Serializable { float apply$mcFID$sp(int v1, double v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java index a9a4540ca380..d58284b7522d 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcFII$sp extends JFunction2 { +public interface JFunction2$mcFII$sp extends scala.Function2, java.io.Serializable { float apply$mcFII$sp(int v1, int v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } diff --git 
a/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java index 217615c7a3ab..4bc6eeb9085f 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcFIJ$sp extends JFunction2 { +public interface JFunction2$mcFIJ$sp extends scala.Function2, java.io.Serializable { float apply$mcFIJ$sp(int v1, long v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java index 8400e478769b..f2435e23f7fd 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcFJD$sp extends JFunction2 { +public interface JFunction2$mcFJD$sp extends scala.Function2, java.io.Serializable { float apply$mcFJD$sp(long v1, double v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java index e6b6259f96b9..1362d00e9408 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcFJI$sp extends JFunction2 { +public interface JFunction2$mcFJI$sp extends scala.Function2, java.io.Serializable { float apply$mcFJI$sp(long v1, int v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java index 68a4c8ecc0ec..c9bcf515b736 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcFJJ$sp extends JFunction2 { +public interface JFunction2$mcFJJ$sp extends scala.Function2, java.io.Serializable { float apply$mcFJJ$sp(long v1, long v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToFloat(apply$mcFJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java index 76fe0b6ead42..28693910a57c 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcIDD$sp extends JFunction2 { +public interface JFunction2$mcIDD$sp extends scala.Function2, java.io.Serializable { int apply$mcIDD$sp(double v1, double v2); default Object apply(Object v1, Object v2) { 
return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java index 908078f73528..50c775fbd9fc 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcIDI$sp extends JFunction2 { +public interface JFunction2$mcIDI$sp extends scala.Function2, java.io.Serializable { int apply$mcIDI$sp(double v1, int v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java index 35c943e32466..3231aa7a888b 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcIDJ$sp extends JFunction2 { +public interface JFunction2$mcIDJ$sp extends scala.Function2, java.io.Serializable { int apply$mcIDJ$sp(double v1, long v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java index f245ec8788ae..01568b2fd626 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcIID$sp extends JFunction2 { +public interface JFunction2$mcIID$sp extends scala.Function2, java.io.Serializable { int apply$mcIID$sp(int v1, double v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java index f3a7a56dffe3..e0fba76675cc 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcIII$sp extends JFunction2 { +public interface JFunction2$mcIII$sp extends scala.Function2, java.io.Serializable { int apply$mcIII$sp(int v1, int v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java index 9736196b9ee6..7155548e9f01 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcIIJ$sp extends JFunction2 { 
+public interface JFunction2$mcIIJ$sp extends scala.Function2, java.io.Serializable { int apply$mcIIJ$sp(int v1, long v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java index 3211432ccb2a..f541cfdef405 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcIJD$sp extends JFunction2 { +public interface JFunction2$mcIJD$sp extends scala.Function2, java.io.Serializable { int apply$mcIJD$sp(long v1, double v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java index 74f76404e060..e484efe42778 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcIJI$sp extends JFunction2 { +public interface JFunction2$mcIJI$sp extends scala.Function2, java.io.Serializable { int apply$mcIJI$sp(long v1, int v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java index 7b9060bcb830..ec3538779cb6 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcIJJ$sp extends JFunction2 { +public interface JFunction2$mcIJJ$sp extends scala.Function2, java.io.Serializable { int apply$mcIJJ$sp(long v1, long v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToInteger(apply$mcIJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java index b4595cdf6ae9..b13502de5b30 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcJDD$sp extends JFunction2 { +public interface JFunction2$mcJDD$sp extends scala.Function2, java.io.Serializable { long apply$mcJDD$sp(double v1, double v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java index 59aad669e79a..9ec9adda6003 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java +++ 
b/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcJDI$sp extends JFunction2 { +public interface JFunction2$mcJDI$sp extends scala.Function2, java.io.Serializable { long apply$mcJDI$sp(double v1, int v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java index 8111e0361715..68ef9ead143a 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcJDJ$sp extends JFunction2 { +public interface JFunction2$mcJDJ$sp extends scala.Function2, java.io.Serializable { long apply$mcJDJ$sp(double v1, long v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java index 8a06a40a4ab2..29c9c5e3d300 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcJID$sp extends JFunction2 { +public interface JFunction2$mcJID$sp extends scala.Function2, java.io.Serializable { long apply$mcJID$sp(int v1, double v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java index 3d2e03ddbc70..bb23086125bf 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcJII$sp extends JFunction2 { +public interface JFunction2$mcJII$sp extends scala.Function2, java.io.Serializable { long apply$mcJII$sp(int v1, int v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java index 32408269c8b7..649fe2432562 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcJIJ$sp extends JFunction2 { +public interface JFunction2$mcJIJ$sp extends scala.Function2, java.io.Serializable { long apply$mcJIJ$sp(int v1, long v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java 
b/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java index cf75bc5c197f..8e6071d44817 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcJJD$sp extends JFunction2 { +public interface JFunction2$mcJJD$sp extends scala.Function2, java.io.Serializable { long apply$mcJJD$sp(long v1, double v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java index eddcea671dc1..61366ac26de1 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcJJI$sp extends JFunction2 { +public interface JFunction2$mcJJI$sp extends scala.Function2, java.io.Serializable { long apply$mcJJI$sp(long v1, int v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java index 4f5626a3e695..a44e97318e11 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcJJJ$sp extends JFunction2 { +public interface JFunction2$mcJJJ$sp extends scala.Function2, java.io.Serializable { long apply$mcJJJ$sp(long v1, long v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToLong(apply$mcJJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java index 45b9739c91fc..8e7cbd7d1bd2 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcVDD$sp extends JFunction2 { +public interface JFunction2$mcVDD$sp extends scala.Function2, java.io.Serializable { void apply$mcVDD$sp(double v1, double v2); default Object apply(Object v1, Object v2) { apply$mcVDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2)); return scala.runtime.BoxedUnit.UNIT; } diff --git a/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java index c344ea501751..1dee353d6b32 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcVDI$sp extends JFunction2 { +public interface JFunction2$mcVDI$sp extends scala.Function2, java.io.Serializable { void apply$mcVDI$sp(double v1, int v2); default Object apply(Object v1, Object v2) { apply$mcVDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), 
scala.runtime.BoxesRunTime.unboxToInt(v2)); return scala.runtime.BoxedUnit.UNIT; } diff --git a/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java index 94b01d59d58d..0b9560868472 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcVDJ$sp extends JFunction2 { +public interface JFunction2$mcVDJ$sp extends scala.Function2, java.io.Serializable { void apply$mcVDJ$sp(double v1, long v2); default Object apply(Object v1, Object v2) { apply$mcVDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2)); return scala.runtime.BoxedUnit.UNIT; } diff --git a/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java index 47c29525a75f..f0ed7e7e9789 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcVID$sp extends JFunction2 { +public interface JFunction2$mcVID$sp extends scala.Function2, java.io.Serializable { void apply$mcVID$sp(int v1, double v2); default Object apply(Object v1, Object v2) { apply$mcVID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2)); return scala.runtime.BoxedUnit.UNIT; } diff --git a/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java index 546a994cb990..52d7922cc1aa 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcVII$sp extends JFunction2 { +public interface JFunction2$mcVII$sp extends scala.Function2, java.io.Serializable { void apply$mcVII$sp(int v1, int v2); default Object apply(Object v1, Object v2) { apply$mcVII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2)); return scala.runtime.BoxedUnit.UNIT; } diff --git a/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java index d9871efee33a..ac256bf163d6 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcVIJ$sp extends JFunction2 { +public interface JFunction2$mcVIJ$sp extends scala.Function2, java.io.Serializable { void apply$mcVIJ$sp(int v1, long v2); default Object apply(Object v1, Object v2) { apply$mcVIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2)); return scala.runtime.BoxedUnit.UNIT; } diff --git a/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java index 525c8ee059fc..6e2dea3fbfef 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcVJD$sp extends JFunction2 { +public interface JFunction2$mcVJD$sp extends scala.Function2, java.io.Serializable { void apply$mcVJD$sp(long v1, 
double v2); default Object apply(Object v1, Object v2) { apply$mcVJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2)); return scala.runtime.BoxedUnit.UNIT; } diff --git a/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java index 98f33bf9420e..d1cba439e667 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcVJI$sp extends JFunction2 { +public interface JFunction2$mcVJI$sp extends scala.Function2, java.io.Serializable { void apply$mcVJI$sp(long v1, int v2); default Object apply(Object v1, Object v2) { apply$mcVJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2)); return scala.runtime.BoxedUnit.UNIT; } diff --git a/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java index adb8934b57eb..67f848a60e7c 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcVJJ$sp extends JFunction2 { +public interface JFunction2$mcVJJ$sp extends scala.Function2, java.io.Serializable { void apply$mcVJJ$sp(long v1, long v2); default Object apply(Object v1, Object v2) { apply$mcVJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2)); return scala.runtime.BoxedUnit.UNIT; } diff --git a/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java index 9272e025a652..b430c5f1343f 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcZDD$sp extends JFunction2 { +public interface JFunction2$mcZDD$sp extends scala.Function2, java.io.Serializable { boolean apply$mcZDD$sp(double v1, double v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZDD$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java index 4406e00abdc2..01fb8ba003e6 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcZDI$sp extends JFunction2 { +public interface JFunction2$mcZDI$sp extends scala.Function2, java.io.Serializable { boolean apply$mcZDI$sp(double v1, int v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZDI$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java index 1f92dddfafcb..a7d28e3cfc71 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface 
JFunction2$mcZDJ$sp extends JFunction2 { +public interface JFunction2$mcZDJ$sp extends scala.Function2, java.io.Serializable { boolean apply$mcZDJ$sp(double v1, long v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZDJ$sp(scala.runtime.BoxesRunTime.unboxToDouble(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java index 06b73f9897fc..e77719bf7568 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcZID$sp extends JFunction2 { +public interface JFunction2$mcZID$sp extends scala.Function2, java.io.Serializable { boolean apply$mcZID$sp(int v1, double v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZID$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java index 729f86063f9d..5f1f83aaf8b2 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcZII$sp extends JFunction2 { +public interface JFunction2$mcZII$sp extends scala.Function2, java.io.Serializable { boolean apply$mcZII$sp(int v1, int v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZII$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java index 38da681cd196..38fabd6f691c 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcZIJ$sp extends JFunction2 { +public interface JFunction2$mcZIJ$sp extends scala.Function2, java.io.Serializable { boolean apply$mcZIJ$sp(int v1, long v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZIJ$sp(scala.runtime.BoxesRunTime.unboxToInt(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java index 6dc9534811f5..59c82cb01e60 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcZJD$sp extends JFunction2 { +public interface JFunction2$mcZJD$sp extends scala.Function2, java.io.Serializable { boolean apply$mcZJD$sp(long v1, double v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJD$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToDouble(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java index a86f63be366a..3e73b8a794e8 100644 --- 
a/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcZJI$sp extends JFunction2 { +public interface JFunction2$mcZJI$sp extends scala.Function2, java.io.Serializable { boolean apply$mcZJI$sp(long v1, int v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJI$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToInt(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java index 728a781e8ecf..96a14e98a531 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java @@ -6,7 +6,7 @@ package scala.runtime.java8; @FunctionalInterface -public interface JFunction2$mcZJJ$sp extends JFunction2 { +public interface JFunction2$mcZJJ$sp extends scala.Function2, java.io.Serializable { boolean apply$mcZJJ$sp(long v1, long v2); default Object apply(Object v1, Object v2) { return scala.runtime.BoxesRunTime.boxToBoolean(apply$mcZJJ$sp(scala.runtime.BoxesRunTime.unboxToLong(v1), scala.runtime.BoxesRunTime.unboxToLong(v2))); } diff --git a/src/library/scala/runtime/java8/JFunction2.java b/src/library/scala/runtime/java8/JFunction2.java deleted file mode 100644 index 548ff60cf618..000000000000 --- a/src/library/scala/runtime/java8/JFunction2.java +++ /dev/null @@ -1,498 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction2 extends scala.Function2, java.io.Serializable { - default void apply$mcVII$sp(int v1, int v2) { - apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2)); - } - default boolean apply$mcZII$sp(int v1, int v2) { - return scala.runtime.BoxesRunTime.unboxToBoolean(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2))); - } - default int apply$mcIII$sp(int v1, int v2) { - return scala.runtime.BoxesRunTime.unboxToInt(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2))); - } - default float apply$mcFII$sp(int v1, int v2) { - return scala.runtime.BoxesRunTime.unboxToFloat(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2))); - } - default long apply$mcJII$sp(int v1, int v2) { - return scala.runtime.BoxesRunTime.unboxToLong(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2))); - } - default double apply$mcDII$sp(int v1, int v2) { - return scala.runtime.BoxesRunTime.unboxToDouble(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2))); - } - default void apply$mcVIJ$sp(int v1, long v2) { - apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2)); - } - default boolean apply$mcZIJ$sp(int v1, long v2) { - return scala.runtime.BoxesRunTime.unboxToBoolean(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2))); - } - default int apply$mcIIJ$sp(int v1, long v2) { - return scala.runtime.BoxesRunTime.unboxToInt(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2))); - } - 
default float apply$mcFIJ$sp(int v1, long v2) { - return scala.runtime.BoxesRunTime.unboxToFloat(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2))); - } - default long apply$mcJIJ$sp(int v1, long v2) { - return scala.runtime.BoxesRunTime.unboxToLong(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2))); - } - default double apply$mcDIJ$sp(int v1, long v2) { - return scala.runtime.BoxesRunTime.unboxToDouble(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2))); - } - default void apply$mcVID$sp(int v1, double v2) { - apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2)); - } - default boolean apply$mcZID$sp(int v1, double v2) { - return scala.runtime.BoxesRunTime.unboxToBoolean(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2))); - } - default int apply$mcIID$sp(int v1, double v2) { - return scala.runtime.BoxesRunTime.unboxToInt(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2))); - } - default float apply$mcFID$sp(int v1, double v2) { - return scala.runtime.BoxesRunTime.unboxToFloat(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2))); - } - default long apply$mcJID$sp(int v1, double v2) { - return scala.runtime.BoxesRunTime.unboxToLong(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2))); - } - default double apply$mcDID$sp(int v1, double v2) { - return scala.runtime.BoxesRunTime.unboxToDouble(apply((T1) scala.runtime.BoxesRunTime.boxToInteger(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2))); - } - default void apply$mcVJI$sp(long v1, int v2) { - apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2)); - } - default boolean apply$mcZJI$sp(long v1, int v2) { - return scala.runtime.BoxesRunTime.unboxToBoolean(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2))); - } - default int apply$mcIJI$sp(long v1, int v2) { - return scala.runtime.BoxesRunTime.unboxToInt(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2))); - } - default float apply$mcFJI$sp(long v1, int v2) { - return scala.runtime.BoxesRunTime.unboxToFloat(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2))); - } - default long apply$mcJJI$sp(long v1, int v2) { - return scala.runtime.BoxesRunTime.unboxToLong(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2))); - } - default double apply$mcDJI$sp(long v1, int v2) { - return scala.runtime.BoxesRunTime.unboxToDouble(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2))); - } - default void apply$mcVJJ$sp(long v1, long v2) { - apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2)); - } - default boolean apply$mcZJJ$sp(long v1, long v2) { - return scala.runtime.BoxesRunTime.unboxToBoolean(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2))); - } - default int apply$mcIJJ$sp(long v1, long v2) { - return scala.runtime.BoxesRunTime.unboxToInt(apply((T1) 
scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2))); - } - default float apply$mcFJJ$sp(long v1, long v2) { - return scala.runtime.BoxesRunTime.unboxToFloat(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2))); - } - default long apply$mcJJJ$sp(long v1, long v2) { - return scala.runtime.BoxesRunTime.unboxToLong(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2))); - } - default double apply$mcDJJ$sp(long v1, long v2) { - return scala.runtime.BoxesRunTime.unboxToDouble(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2))); - } - default void apply$mcVJD$sp(long v1, double v2) { - apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2)); - } - default boolean apply$mcZJD$sp(long v1, double v2) { - return scala.runtime.BoxesRunTime.unboxToBoolean(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2))); - } - default int apply$mcIJD$sp(long v1, double v2) { - return scala.runtime.BoxesRunTime.unboxToInt(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2))); - } - default float apply$mcFJD$sp(long v1, double v2) { - return scala.runtime.BoxesRunTime.unboxToFloat(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2))); - } - default long apply$mcJJD$sp(long v1, double v2) { - return scala.runtime.BoxesRunTime.unboxToLong(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2))); - } - default double apply$mcDJD$sp(long v1, double v2) { - return scala.runtime.BoxesRunTime.unboxToDouble(apply((T1) scala.runtime.BoxesRunTime.boxToLong(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2))); - } - default void apply$mcVDI$sp(double v1, int v2) { - apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2)); - } - default boolean apply$mcZDI$sp(double v1, int v2) { - return scala.runtime.BoxesRunTime.unboxToBoolean(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2))); - } - default int apply$mcIDI$sp(double v1, int v2) { - return scala.runtime.BoxesRunTime.unboxToInt(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2))); - } - default float apply$mcFDI$sp(double v1, int v2) { - return scala.runtime.BoxesRunTime.unboxToFloat(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2))); - } - default long apply$mcJDI$sp(double v1, int v2) { - return scala.runtime.BoxesRunTime.unboxToLong(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2))); - } - default double apply$mcDDI$sp(double v1, int v2) { - return scala.runtime.BoxesRunTime.unboxToDouble(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToInteger(v2))); - } - default void apply$mcVDJ$sp(double v1, long v2) { - apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2)); - } - default boolean apply$mcZDJ$sp(double v1, long v2) { - return scala.runtime.BoxesRunTime.unboxToBoolean(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2))); - } - default int 
apply$mcIDJ$sp(double v1, long v2) { - return scala.runtime.BoxesRunTime.unboxToInt(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2))); - } - default float apply$mcFDJ$sp(double v1, long v2) { - return scala.runtime.BoxesRunTime.unboxToFloat(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2))); - } - default long apply$mcJDJ$sp(double v1, long v2) { - return scala.runtime.BoxesRunTime.unboxToLong(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2))); - } - default double apply$mcDDJ$sp(double v1, long v2) { - return scala.runtime.BoxesRunTime.unboxToDouble(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToLong(v2))); - } - default void apply$mcVDD$sp(double v1, double v2) { - apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2)); - } - default boolean apply$mcZDD$sp(double v1, double v2) { - return scala.runtime.BoxesRunTime.unboxToBoolean(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2))); - } - default int apply$mcIDD$sp(double v1, double v2) { - return scala.runtime.BoxesRunTime.unboxToInt(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2))); - } - default float apply$mcFDD$sp(double v1, double v2) { - return scala.runtime.BoxesRunTime.unboxToFloat(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2))); - } - default long apply$mcJDD$sp(double v1, double v2) { - return scala.runtime.BoxesRunTime.unboxToLong(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2))); - } - default double apply$mcDDD$sp(double v1, double v2) { - return scala.runtime.BoxesRunTime.unboxToDouble(apply((T1) scala.runtime.BoxesRunTime.boxToDouble(v1), (T2) scala.runtime.BoxesRunTime.boxToDouble(v2))); - } - - default scala.Function1 curried$mcVII$sp() { - return curried(); - } - default scala.Function1 curried$mcZII$sp() { - return curried(); - } - default scala.Function1 curried$mcIII$sp() { - return curried(); - } - default scala.Function1 curried$mcFII$sp() { - return curried(); - } - default scala.Function1 curried$mcJII$sp() { - return curried(); - } - default scala.Function1 curried$mcDII$sp() { - return curried(); - } - default scala.Function1 curried$mcVIJ$sp() { - return curried(); - } - default scala.Function1 curried$mcZIJ$sp() { - return curried(); - } - default scala.Function1 curried$mcIIJ$sp() { - return curried(); - } - default scala.Function1 curried$mcFIJ$sp() { - return curried(); - } - default scala.Function1 curried$mcJIJ$sp() { - return curried(); - } - default scala.Function1 curried$mcDIJ$sp() { - return curried(); - } - default scala.Function1 curried$mcVID$sp() { - return curried(); - } - default scala.Function1 curried$mcZID$sp() { - return curried(); - } - default scala.Function1 curried$mcIID$sp() { - return curried(); - } - default scala.Function1 curried$mcFID$sp() { - return curried(); - } - default scala.Function1 curried$mcJID$sp() { - return curried(); - } - default scala.Function1 curried$mcDID$sp() { - return curried(); - } - default scala.Function1 curried$mcVJI$sp() { - return curried(); - } - default scala.Function1 curried$mcZJI$sp() { - return curried(); - } - default scala.Function1 curried$mcIJI$sp() { - return curried(); - } - 
default scala.Function1 curried$mcFJI$sp() { - return curried(); - } - default scala.Function1 curried$mcJJI$sp() { - return curried(); - } - default scala.Function1 curried$mcDJI$sp() { - return curried(); - } - default scala.Function1 curried$mcVJJ$sp() { - return curried(); - } - default scala.Function1 curried$mcZJJ$sp() { - return curried(); - } - default scala.Function1 curried$mcIJJ$sp() { - return curried(); - } - default scala.Function1 curried$mcFJJ$sp() { - return curried(); - } - default scala.Function1 curried$mcJJJ$sp() { - return curried(); - } - default scala.Function1 curried$mcDJJ$sp() { - return curried(); - } - default scala.Function1 curried$mcVJD$sp() { - return curried(); - } - default scala.Function1 curried$mcZJD$sp() { - return curried(); - } - default scala.Function1 curried$mcIJD$sp() { - return curried(); - } - default scala.Function1 curried$mcFJD$sp() { - return curried(); - } - default scala.Function1 curried$mcJJD$sp() { - return curried(); - } - default scala.Function1 curried$mcDJD$sp() { - return curried(); - } - default scala.Function1 curried$mcVDI$sp() { - return curried(); - } - default scala.Function1 curried$mcZDI$sp() { - return curried(); - } - default scala.Function1 curried$mcIDI$sp() { - return curried(); - } - default scala.Function1 curried$mcFDI$sp() { - return curried(); - } - default scala.Function1 curried$mcJDI$sp() { - return curried(); - } - default scala.Function1 curried$mcDDI$sp() { - return curried(); - } - default scala.Function1 curried$mcVDJ$sp() { - return curried(); - } - default scala.Function1 curried$mcZDJ$sp() { - return curried(); - } - default scala.Function1 curried$mcIDJ$sp() { - return curried(); - } - default scala.Function1 curried$mcFDJ$sp() { - return curried(); - } - default scala.Function1 curried$mcJDJ$sp() { - return curried(); - } - default scala.Function1 curried$mcDDJ$sp() { - return curried(); - } - default scala.Function1 curried$mcVDD$sp() { - return curried(); - } - default scala.Function1 curried$mcZDD$sp() { - return curried(); - } - default scala.Function1 curried$mcIDD$sp() { - return curried(); - } - default scala.Function1 curried$mcFDD$sp() { - return curried(); - } - default scala.Function1 curried$mcJDD$sp() { - return curried(); - } - default scala.Function1 curried$mcDDD$sp() { - return curried(); - } - - default scala.Function1 tupled$mcVII$sp() { - return tupled(); - } - default scala.Function1 tupled$mcZII$sp() { - return tupled(); - } - default scala.Function1 tupled$mcIII$sp() { - return tupled(); - } - default scala.Function1 tupled$mcFII$sp() { - return tupled(); - } - default scala.Function1 tupled$mcJII$sp() { - return tupled(); - } - default scala.Function1 tupled$mcDII$sp() { - return tupled(); - } - default scala.Function1 tupled$mcVIJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcZIJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcIIJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcFIJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcJIJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcDIJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcVID$sp() { - return tupled(); - } - default scala.Function1 tupled$mcZID$sp() { - return tupled(); - } - default scala.Function1 tupled$mcIID$sp() { - return tupled(); - } - default scala.Function1 tupled$mcFID$sp() { - return tupled(); - } - default scala.Function1 tupled$mcJID$sp() { - return tupled(); - } - default scala.Function1 
tupled$mcDID$sp() { - return tupled(); - } - default scala.Function1 tupled$mcVJI$sp() { - return tupled(); - } - default scala.Function1 tupled$mcZJI$sp() { - return tupled(); - } - default scala.Function1 tupled$mcIJI$sp() { - return tupled(); - } - default scala.Function1 tupled$mcFJI$sp() { - return tupled(); - } - default scala.Function1 tupled$mcJJI$sp() { - return tupled(); - } - default scala.Function1 tupled$mcDJI$sp() { - return tupled(); - } - default scala.Function1 tupled$mcVJJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcZJJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcIJJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcFJJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcJJJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcDJJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcVJD$sp() { - return tupled(); - } - default scala.Function1 tupled$mcZJD$sp() { - return tupled(); - } - default scala.Function1 tupled$mcIJD$sp() { - return tupled(); - } - default scala.Function1 tupled$mcFJD$sp() { - return tupled(); - } - default scala.Function1 tupled$mcJJD$sp() { - return tupled(); - } - default scala.Function1 tupled$mcDJD$sp() { - return tupled(); - } - default scala.Function1 tupled$mcVDI$sp() { - return tupled(); - } - default scala.Function1 tupled$mcZDI$sp() { - return tupled(); - } - default scala.Function1 tupled$mcIDI$sp() { - return tupled(); - } - default scala.Function1 tupled$mcFDI$sp() { - return tupled(); - } - default scala.Function1 tupled$mcJDI$sp() { - return tupled(); - } - default scala.Function1 tupled$mcDDI$sp() { - return tupled(); - } - default scala.Function1 tupled$mcVDJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcZDJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcIDJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcFDJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcJDJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcDDJ$sp() { - return tupled(); - } - default scala.Function1 tupled$mcVDD$sp() { - return tupled(); - } - default scala.Function1 tupled$mcZDD$sp() { - return tupled(); - } - default scala.Function1 tupled$mcIDD$sp() { - return tupled(); - } - default scala.Function1 tupled$mcFDD$sp() { - return tupled(); - } - default scala.Function1 tupled$mcJDD$sp() { - return tupled(); - } - default scala.Function1 tupled$mcDDD$sp() { - return tupled(); - } -} diff --git a/src/library/scala/runtime/java8/JFunction20.java b/src/library/scala/runtime/java8/JFunction20.java deleted file mode 100644 index 5505743c20e4..000000000000 --- a/src/library/scala/runtime/java8/JFunction20.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction20 extends scala.Function20, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction21.java b/src/library/scala/runtime/java8/JFunction21.java deleted file mode 100644 index 80e96d37150f..000000000000 --- a/src/library/scala/runtime/java8/JFunction21.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction21 extends scala.Function21, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction22.java b/src/library/scala/runtime/java8/JFunction22.java deleted file mode 100644 index 45e689458b5a..000000000000 --- a/src/library/scala/runtime/java8/JFunction22.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction22 extends scala.Function22, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction3.java b/src/library/scala/runtime/java8/JFunction3.java deleted file mode 100644 index 6d81bb3a189f..000000000000 --- a/src/library/scala/runtime/java8/JFunction3.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction3 extends scala.Function3, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction4.java b/src/library/scala/runtime/java8/JFunction4.java deleted file mode 100644 index 6c5cd3b61d3c..000000000000 --- a/src/library/scala/runtime/java8/JFunction4.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction4 extends scala.Function4, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction5.java b/src/library/scala/runtime/java8/JFunction5.java deleted file mode 100644 index eca1a406a63f..000000000000 --- a/src/library/scala/runtime/java8/JFunction5.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction5 extends scala.Function5, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction6.java b/src/library/scala/runtime/java8/JFunction6.java deleted file mode 100644 index 1c9daed5aa05..000000000000 --- a/src/library/scala/runtime/java8/JFunction6.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction6 extends scala.Function6, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction7.java b/src/library/scala/runtime/java8/JFunction7.java deleted file mode 100644 index c1aa130ba1d1..000000000000 --- a/src/library/scala/runtime/java8/JFunction7.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction7 extends scala.Function7, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction8.java b/src/library/scala/runtime/java8/JFunction8.java deleted file mode 100644 index 425e694df8cc..000000000000 --- a/src/library/scala/runtime/java8/JFunction8.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction8 extends scala.Function8, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JFunction9.java b/src/library/scala/runtime/java8/JFunction9.java deleted file mode 100644 index 21c3c8c6e30c..000000000000 --- a/src/library/scala/runtime/java8/JFunction9.java +++ /dev/null @@ -1,10 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -@FunctionalInterface -public interface JFunction9 extends scala.Function9, java.io.Serializable { -} diff --git a/src/library/scala/runtime/java8/JProcedure0.java b/src/library/scala/runtime/java8/JProcedure0.java deleted file mode 100644 index 6004364d0326..000000000000 --- a/src/library/scala/runtime/java8/JProcedure0.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure0 extends JFunction0 { - default void $init$() { - } - - void applyVoid(); - - default BoxedUnit apply() { - applyVoid(); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure1.java b/src/library/scala/runtime/java8/JProcedure1.java deleted file mode 100644 index 184d943042d4..000000000000 --- a/src/library/scala/runtime/java8/JProcedure1.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure1 extends JFunction1 { - default void $init$() { - } - - void applyVoid(T1 t1); - - default BoxedUnit apply(T1 t1) { - applyVoid(t1); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure10.java b/src/library/scala/runtime/java8/JProcedure10.java deleted file mode 100644 index 2aadd7d21596..000000000000 --- a/src/library/scala/runtime/java8/JProcedure10.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure10 extends JFunction10 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10) { - applyVoid(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure11.java b/src/library/scala/runtime/java8/JProcedure11.java deleted file mode 100644 index c29853be1f57..000000000000 --- a/src/library/scala/runtime/java8/JProcedure11.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure11 extends JFunction11 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11) { - applyVoid(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure12.java b/src/library/scala/runtime/java8/JProcedure12.java deleted file mode 100644 index 0607600c3373..000000000000 --- a/src/library/scala/runtime/java8/JProcedure12.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure12 extends JFunction12 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12) { - applyVoid(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure13.java b/src/library/scala/runtime/java8/JProcedure13.java deleted file mode 100644 index c390fed2a510..000000000000 --- a/src/library/scala/runtime/java8/JProcedure13.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure13 extends JFunction13 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13) { - applyVoid(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure14.java b/src/library/scala/runtime/java8/JProcedure14.java deleted file mode 100644 index d67cff1b5ae7..000000000000 --- a/src/library/scala/runtime/java8/JProcedure14.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure14 extends JFunction14 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14) { - applyVoid(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure15.java b/src/library/scala/runtime/java8/JProcedure15.java deleted file mode 100644 index 81e0f524f552..000000000000 --- a/src/library/scala/runtime/java8/JProcedure15.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure15 extends JFunction15 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15) { - applyVoid(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure16.java b/src/library/scala/runtime/java8/JProcedure16.java deleted file mode 100644 index 3d29ae25c56e..000000000000 --- a/src/library/scala/runtime/java8/JProcedure16.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure16 extends JFunction16 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16) { - applyVoid(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure17.java b/src/library/scala/runtime/java8/JProcedure17.java deleted file mode 100644 index 85f40b2cd56e..000000000000 --- a/src/library/scala/runtime/java8/JProcedure17.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure17 extends JFunction17 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17) { - applyVoid(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure18.java b/src/library/scala/runtime/java8/JProcedure18.java deleted file mode 100644 index fe2ab6f22c16..000000000000 --- a/src/library/scala/runtime/java8/JProcedure18.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure18 extends JFunction18 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18) { - applyVoid(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure19.java b/src/library/scala/runtime/java8/JProcedure19.java deleted file mode 100644 index 9289d639a581..000000000000 --- a/src/library/scala/runtime/java8/JProcedure19.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure19 extends JFunction19 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19) { - applyVoid(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure2.java b/src/library/scala/runtime/java8/JProcedure2.java deleted file mode 100644 index 273357a3b0a4..000000000000 --- a/src/library/scala/runtime/java8/JProcedure2.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure2 extends JFunction2 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2); - - default BoxedUnit apply(T1 t1, T2 t2) { - applyVoid(t1, t2); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure20.java b/src/library/scala/runtime/java8/JProcedure20.java deleted file mode 100644 index 8701e9d4225b..000000000000 --- a/src/library/scala/runtime/java8/JProcedure20.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure20 extends JFunction20 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20) { - applyVoid(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure21.java b/src/library/scala/runtime/java8/JProcedure21.java deleted file mode 100644 index f8e38f6c70d7..000000000000 --- a/src/library/scala/runtime/java8/JProcedure21.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure21 extends JFunction21 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20, T21 t21); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20, T21 t21) { - applyVoid(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure22.java b/src/library/scala/runtime/java8/JProcedure22.java deleted file mode 100644 index 8bae4d7e0d91..000000000000 --- a/src/library/scala/runtime/java8/JProcedure22.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure22 extends JFunction22 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20, T21 t21, T22 t22); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9, T10 t10, T11 t11, T12 t12, T13 t13, T14 t14, T15 t15, T16 t16, T17 t17, T18 t18, T19 t19, T20 t20, T21 t21, T22 t22) { - applyVoid(t1, t2, t3, t4, t5, t6, t7, t8, t9, t10, t11, t12, t13, t14, t15, t16, t17, t18, t19, t20, t21, t22); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure3.java b/src/library/scala/runtime/java8/JProcedure3.java deleted file mode 100644 index 7c53187f3118..000000000000 --- a/src/library/scala/runtime/java8/JProcedure3.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure3 extends JFunction3 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3) { - applyVoid(t1, t2, t3); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure4.java b/src/library/scala/runtime/java8/JProcedure4.java deleted file mode 100644 index 33161bc1517c..000000000000 --- a/src/library/scala/runtime/java8/JProcedure4.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure4 extends JFunction4 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4) { - applyVoid(t1, t2, t3, t4); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure5.java b/src/library/scala/runtime/java8/JProcedure5.java deleted file mode 100644 index c834c48bf693..000000000000 --- a/src/library/scala/runtime/java8/JProcedure5.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure5 extends JFunction5 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5) { - applyVoid(t1, t2, t3, t4, t5); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure6.java b/src/library/scala/runtime/java8/JProcedure6.java deleted file mode 100644 index 995bdd6734f1..000000000000 --- a/src/library/scala/runtime/java8/JProcedure6.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure6 extends JFunction6 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6) { - applyVoid(t1, t2, t3, t4, t5, t6); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure7.java b/src/library/scala/runtime/java8/JProcedure7.java deleted file mode 100644 index 1821d8d406ff..000000000000 --- a/src/library/scala/runtime/java8/JProcedure7.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure7 extends JFunction7 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7) { - applyVoid(t1, t2, t3, t4, t5, t6, t7); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure8.java b/src/library/scala/runtime/java8/JProcedure8.java deleted file mode 100644 index 4b9dd0929a3c..000000000000 --- a/src/library/scala/runtime/java8/JProcedure8.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. - */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure8 extends JFunction8 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8) { - applyVoid(t1, t2, t3, t4, t5, t6, t7, t8); - return BoxedUnit.UNIT; - } -} diff --git a/src/library/scala/runtime/java8/JProcedure9.java b/src/library/scala/runtime/java8/JProcedure9.java deleted file mode 100644 index c4cbc65b6c9d..000000000000 --- a/src/library/scala/runtime/java8/JProcedure9.java +++ /dev/null @@ -1,21 +0,0 @@ - -/* - * Copyright (C) 2012-2015 Typesafe Inc. 
- */ - -package scala.runtime.java8; - -import scala.runtime.BoxedUnit; - -@FunctionalInterface -public interface JProcedure9 extends JFunction9 { - default void $init$() { - } - - void applyVoid(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9); - - default BoxedUnit apply(T1 t1, T2 t2, T3 t3, T4 t4, T5 t5, T6 t6, T7 t7, T8 t8, T9 t9) { - applyVoid(t1, t2, t3, t4, t5, t6, t7, t8, t9); - return BoxedUnit.UNIT; - } -} From 3205091c5530b41072dc0624a3362809bdadf6ee Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Pociecha?= Date: Fri, 22 Jul 2016 12:11:55 +0200 Subject: [PATCH 0190/2793] Improve sbt-based IntelliJ integration on Windows Use File.pathSeparator when processing the classpath instead of just : so we don't end up with a classpath like "C:\sth\a.jar:C:\sth\b.jar", which was causing problems after split(":"). Display questions first and then wait for user input. I tested on 3 computers, and without flush the printed questions were always displayed just after the user replied - never before. --- build.sbt | 6 +++++- project/plugins.sbt | 4 ++-- src/intellij/README.md | 2 +- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/build.sbt b/build.sbt index 1d4e208da2e3..e53f15a3326c 100644 --- a/build.sbt +++ b/build.sbt @@ -936,7 +936,7 @@ intellij := { val modules: List[(String, Seq[File])] = { // for the sbt build module, the dependencies are fetched from the project's build using sbt-buildinfo - val buildModule = ("scala-build", scalabuild.BuildInfo.buildClasspath.split(":").toSeq.map(new File(_))) + val buildModule = ("scala-build", scalabuild.BuildInfo.buildClasspath.split(java.io.File.pathSeparator).toSeq.map(new File(_))) // `sbt projects` lists all modules in the build buildModule :: List( moduleDeps(compilerP).value, @@ -1015,12 +1015,14 @@ intellij := { var continue = false if (!ipr.exists) { scala.Console.print(s"Could not find src/intellij/scala.ipr. Create new project files from src/intellij/*.SAMPLE (y/N)? ") + scala.Console.flush() if (scala.Console.readLine() == "y") { intellijCreateFromSample((baseDirectory in ThisBuild).value) continue = true } } else { scala.Console.print("Update library classpaths in the current src/intellij/scala.ipr (y/N)? ") + scala.Console.flush() continue = scala.Console.readLine() == "y" } if (continue) { @@ -1045,6 +1047,7 @@ lazy val intellijFromSample = taskKey[Unit]("Create fresh IntelliJ project files intellijFromSample := { val s = streams.value scala.Console.print(s"Create new project files from src/intellij/*.SAMPLE (y/N)? ") + scala.Console.flush() if (scala.Console.readLine() == "y") intellijCreateFromSample((baseDirectory in ThisBuild).value) else @@ -1062,6 +1065,7 @@ lazy val intellijToSample = taskKey[Unit]("Update src/intellij/*.SAMPLE using th intellijToSample := { val s = streams.value scala.Console.print(s"Update src/intellij/*.SAMPLE using the current IntelliJ project files (y/N)? 
") + scala.Console.flush() if (scala.Console.readLine() == "y") { val basedir = (baseDirectory in ThisBuild).value val existing = basedir / "src/intellij" * "*.SAMPLE" diff --git a/project/plugins.sbt b/project/plugins.sbt index 98ec8f16ed7a..0a5b8f3dd428 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -11,9 +11,9 @@ enablePlugins(BuildInfoPlugin) // configure sbt-buildinfo to send the externalDependencyClasspath to the main build, which allows using it for the IntelliJ project config -lazy val buildClasspath = taskKey[String]("Colon-separated list of entries on the sbt build classpath.") +lazy val buildClasspath = taskKey[String]("Colon-separated (or semicolon-separated in case of Windows) list of entries on the sbt build classpath.") -buildClasspath := (externalDependencyClasspath in Compile).value.map(_.data).mkString(":") +buildClasspath := (externalDependencyClasspath in Compile).value.map(_.data).mkString(java.io.File.pathSeparator) buildInfoKeys := Seq[BuildInfoKey](buildClasspath) diff --git a/src/intellij/README.md b/src/intellij/README.md index 650d91e5d104..7bd990288b3d 100644 --- a/src/intellij/README.md +++ b/src/intellij/README.md @@ -17,7 +17,7 @@ are ignored. ## Dependencies -For every module in the IntelliJ project there is a corresponding `-deps` library, for exmaple `compiler-deps` provides `ant.jar` for the compiler codebase. +For every module in the IntelliJ project there is a corresponding `-deps` library, for example `compiler-deps` provides `ant.jar` for the compiler codebase. The `.jar` files in these `-deps` libraries can be easily kept up-to-date by running `sbt intellij` again. This is necessary whenever the dependencies in the sbt build change, for example when the `starr` version is updated. From ce262e45141db642c2d5c7e4c7427f84fd08f854 Mon Sep 17 00:00:00 2001 From: Miles Sabin Date: Fri, 22 Jul 2016 17:38:46 +0100 Subject: [PATCH 0191/2793] Added tests for SI-482/SI-4914 --- test/files/pos/t482.scala | 7 +++++++ test/files/pos/t4914.scala | 20 ++++++++++++++++++++ 2 files changed, 27 insertions(+) create mode 100644 test/files/pos/t482.scala create mode 100644 test/files/pos/t4914.scala diff --git a/test/files/pos/t482.scala b/test/files/pos/t482.scala new file mode 100644 index 000000000000..b121c933377a --- /dev/null +++ b/test/files/pos/t482.scala @@ -0,0 +1,7 @@ +object Test { + class Foo { val z = "foo"; val y : z.type = z } + + val x : ({ val y : z.type } forSome { val z : String }) = new Foo + + val x2 : ({ val y : T } forSome { type T <: String with Singleton }) = new Foo +} diff --git a/test/files/pos/t4914.scala b/test/files/pos/t4914.scala new file mode 100644 index 000000000000..a6c8ef5a4e22 --- /dev/null +++ b/test/files/pos/t4914.scala @@ -0,0 +1,20 @@ +trait Type { + type S +} + +class ConcreteType extends Type { + type S = Double +} + +trait Base { + type T <: Type + val m: Map[t#S, t#S] forSome { type t <: T with Singleton } + val n: Map[x.type#S, x.type#S] forSome { val x: T } +} + +abstract class Derived extends Base { + override type T = ConcreteType + override val m = Map[Double, Double]() + /** This does not work. §3.2.10 indicates that types n is shorthand for type of m. 
*/ + override val n = Map[Double, Double]() +} From a2cba53e18864a5b9092f1e329c6e0afb09566c5 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 25 Jul 2016 10:21:26 +1000 Subject: [PATCH 0192/2793] SD-167 Fine tuning constructor pattern translation - Avoid calling NoSymbol.owner when checking whether we're dealing with a case class constructor pattern or a general extractor. Tested manually with the test case in the ticket; no more output is produced under `-Xdev`. - Be more conservative about the conversion to a case class pattern: rather than looking just at the type of the pattern tree, also look at the tree itself to ensure it's safe to elide. This change is analogous to SI-4859, which restricted rewrites of case apply calls to case constructors. I've manually tested that case class patterns are still efficiently translated: ``` object Test { def main(args: Array[String]) { Some(1) match { case Some(x) => } } } ``` ``` % qscalac -Xprint:patmat sandbox/test.scala [[syntax trees at end of patmat]] // test.scala package { object Test extends scala.AnyRef { def (): Test.type = { Test.super.(); () }; def main(args: Array[String]): Unit = { case val x1: Some[Int] = scala.Some.apply[Int](1); case4(){ if (x1.ne(null)) matchEnd3(()) else case5() }; case5(){ matchEnd3(throw new MatchError(x1)) }; matchEnd3(x: Unit){ x } } } } ``` --- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 2 +- .../scala/tools/nsc/typechecker/PatternTypers.scala | 3 ++- test/files/run/sd167.check | 1 + test/files/run/sd167.scala | 8 ++++++++ 4 files changed, 12 insertions(+), 2 deletions(-) create mode 100644 test/files/run/sd167.check create mode 100644 test/files/run/sd167.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index ad9377f8b4bd..caad4a907b00 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1764,7 +1764,7 @@ trait Namers extends MethodSynthesis { * bugs waiting to be reported? If not, why not? When exactly do we need to * call this method? */ - def companionSymbolOf(original: Symbol, ctx: Context): Symbol = { + def companionSymbolOf(original: Symbol, ctx: Context): Symbol = if (original == NoSymbol) NoSymbol else { val owner = original.owner // SI-7264 Force the info of owners from previous compilation runs. // Doing this generally would trigger cycles; that's what we also diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index f90e61ff9248..1df3449ce66e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -79,6 +79,7 @@ trait PatternTypers { // do not update the symbol if the tree's symbol's type does not define an unapply member // (e.g. since it's some method that returns an object with an unapply member) val fun = inPlaceAdHocOverloadingResolution(fun0)(hasUnapplyMember) + val canElide = treeInfo.isQualifierSafeToElide(fun) val caseClass = companionSymbolOf(fun.tpe.typeSymbol.sourceModule, context) val member = unapplyMember(fun.tpe) def resultType = (fun.tpe memberType member).finalResultType @@ -94,7 +95,7 @@ trait PatternTypers { // Dueling test cases: pos/overloaded-unapply.scala, run/case-class-23.scala, pos/t5022.scala // A case class with 23+ params has no unapply method. 
// A case class constructor may be overloaded with unapply methods in the companion. - if (caseClass.isCase && !member.isOverloaded) + if (canElide && caseClass.isCase && !member.isOverloaded) logResult(s"convertToCaseConstructor($fun, $caseClass, pt=$pt)")(convertToCaseConstructor(fun, caseClass, pt)) else if (!reallyExists(member)) CaseClassConstructorError(fun, s"${fun.symbol} is not a case class, nor does it have an unapply/unapplySeq member") diff --git a/test/files/run/sd167.check b/test/files/run/sd167.check new file mode 100644 index 000000000000..587be6b4c3f9 --- /dev/null +++ b/test/files/run/sd167.check @@ -0,0 +1 @@ +x diff --git a/test/files/run/sd167.scala b/test/files/run/sd167.scala new file mode 100644 index 000000000000..5095e772ad78 --- /dev/null +++ b/test/files/run/sd167.scala @@ -0,0 +1,8 @@ +object Test { + implicit class ToExtractor(val s: StringContext) { + def x = {println("x"); Some } + } + def main(args: Array[String]) { + Some(1) match { case x"${a}" => } // used to convert to `case Some(a) =>` and omit side effects + } +} From a84c7b9e8894f54d05e5f5f5f6e470f8db784090 Mon Sep 17 00:00:00 2001 From: Dima Tkach Date: Wed, 27 Jul 2016 10:14:48 -0400 Subject: [PATCH 0193/2793] Fixed some style issues --- src/library/scala/collection/mutable/ArrayOps.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala index b384decbfb11..3329e350424d 100644 --- a/src/library/scala/collection/mutable/ArrayOps.scala +++ b/src/library/scala/collection/mutable/ArrayOps.scala @@ -44,9 +44,9 @@ sealed trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomPara override def slice(from: Int, until: Int): Array[T] = { val lo = math.max(from, 0) val hi = math.min(math.max(until, 0), repr.length) - val size = math.max(hi-lo, 0) + val size = math.max(hi - lo, 0) val result = java.lang.reflect.Array.newInstance(elementClass, size) - if(size > 0) { + if (size > 0) { Array.copy(repr, lo, result, 0, size) } result.asInstanceOf[Array[T]] From 1ae579cae866ea91f340ef8de1f795e8236ad648 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 29 Jul 2016 12:20:06 -0700 Subject: [PATCH 0194/2793] Make sure sbt's exit code is seen as script exit code ... 
and not grep's exit code, which would mean the test suite's result is determined by whether grep fails or not, instead of partest/junit's hard work --- scripts/jobs/integrate/bootstrap | 4 ++-- scripts/jobs/integrate/windows | 2 +- scripts/jobs/validate/test | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index 4d5dae89a201..86ba67bd8b6b 100644 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -523,7 +523,7 @@ bootstrap() { $clean \ $sbtBuildTask \ dist/mkQuick \ - publish | grep -v "was too long to be displayed in the webview, and will be left out" + publish # clear ivy cache (and to be sure, local as well), so the next round of sbt builds sees the fresh scala rm -rf $baseDir/ivy2 @@ -559,7 +559,7 @@ publishSonatype() { -Dstarr.version=$SCALA_VER \ ${updatedModuleVersions[@]} \ "setupBootstrapPublish $releaseTempRepoUrl $SCALA_VER" \ - $publishSonatypeTaskCore | grep -v "was too long to be displayed in the webview, and will be left out" + $publishSonatypeTaskCore echo "### Publishing modules to sonatype" # build/test/publish scala core modules to sonatype (this will start a new staging repo) diff --git a/scripts/jobs/integrate/windows b/scripts/jobs/integrate/windows index 5e04b0b38065..f5e068684e3b 100755 --- a/scripts/jobs/integrate/windows +++ b/scripts/jobs/integrate/windows @@ -16,4 +16,4 @@ $SBT --warn "setupPublishCore" generateBuildCharacterPropertiesFile publishLocal # Build quick and run the tests parseScalaProperties buildcharacter.properties -$SBT -Dstarr.version=$maven_version_number --warn "setupValidateTest" testAll | grep -v "was too long to be displayed in the webview, and will be left out" +$SBT -Dstarr.version=$maven_version_number --warn "setupValidateTest" testAll diff --git a/scripts/jobs/validate/test b/scripts/jobs/validate/test index 9938319dd843..7b00356390e6 100755 --- a/scripts/jobs/validate/test +++ b/scripts/jobs/validate/test @@ -23,7 +23,7 @@ case $prDryRun in --warn \ "setupValidateTest $prRepoUrl" \ $testExtraArgs \ - testAll | grep -v "was too long to be displayed in the webview, and will be left out" + testAll ;; From 10336958aba9b8af5f9127a4dc21c0899836ff8d Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 29 Jul 2016 23:51:27 -0700 Subject: [PATCH 0195/2793] SI-9750 Remove isJavaAtLeast from util.StackTracing Formatting suppressed exceptions required reflection for platform compatibility. No longer, since Java 8 is assumed. Minor tidying. --- .../scala/tools/nsc/util/StackTracing.scala | 44 +++++++------------ 1 file changed, 17 insertions(+), 27 deletions(-) diff --git a/src/compiler/scala/tools/nsc/util/StackTracing.scala b/src/compiler/scala/tools/nsc/util/StackTracing.scala index 0765bb923f0d..c6749a13f328 100644 --- a/src/compiler/scala/tools/nsc/util/StackTracing.scala +++ b/src/compiler/scala/tools/nsc/util/StackTracing.scala @@ -8,7 +8,7 @@ private[util] trait StackTracing extends Any { /** Format a stack trace, returning the prefix consisting of frames that satisfy * a given predicate. - * The format is similar to the typical case described in the JavaDoc + * The format is similar to the typical case described in the Javadoc * for [[java.lang.Throwable#printStackTrace]]. * If a stack trace is truncated, it will be followed by a line of the form * `... 3 elided`, by analogy to the lines `... 
3 more` which indicate @@ -19,25 +19,18 @@ private[util] trait StackTracing extends Any { def stackTracePrefixString(e: Throwable)(p: StackTraceElement => Boolean): String = { import collection.mutable.{ ArrayBuffer, ListBuffer } import compat.Platform.EOL - import scala.util.Properties.isJavaAtLeast - - val sb = ListBuffer.empty[String] type TraceRelation = String val Self = new TraceRelation("") val CausedBy = new TraceRelation("Caused by: ") val Suppressed = new TraceRelation("Suppressed: ") - val suppressable = isJavaAtLeast("1.7") - - def clazz(e: Throwable) = e.getClass.getName + def clazz(e: Throwable): String = e.getClass.getName def because(e: Throwable): String = e.getCause match { case null => null ; case c => header(c) } def msg(e: Throwable): String = e.getMessage match { case null => because(e) ; case s => s } def txt(e: Throwable): String = msg(e) match { case null => "" ; case s => s": $s" } def header(e: Throwable): String = s"${clazz(e)}${txt(e)}" - val indent = "\u0020\u0020" - val seen = new ArrayBuffer[Throwable](16) def unseen(t: Throwable) = { def inSeen = seen exists (_ eq t) @@ -46,28 +39,25 @@ private[util] trait StackTracing extends Any { interesting } + val sb = ListBuffer.empty[String] + + // format the stack trace, skipping the shared trace def print(e: Throwable, r: TraceRelation, share: Array[StackTraceElement], indents: Int): Unit = if (unseen(e)) { val trace = e.getStackTrace - val frames = ( - if (share.nonEmpty) { - val spare = share.reverseIterator - val trimmed = trace.reverse dropWhile (spare.hasNext && spare.next == _) - trimmed.reverse - } else trace - ) - val prefix = frames takeWhile p - val margin = indent * indents - val indented = margin + indent + val frames = if (share.isEmpty) trace else { + val spare = share.reverseIterator + val trimmed = trace.reverse dropWhile (spare.hasNext && spare.next == _) + trimmed.reverse + } + val prefix = frames takeWhile p + val margin = " " * indents + val indent = margin + " " sb append s"${margin}${r}${header(e)}" - prefix foreach (f => sb append s"${indented}at $f") - if (frames.size < trace.size) sb append s"$indented... ${trace.size - frames.size} more" - if (r == Self && prefix.size < frames.size) sb append s"$indented... ${frames.size - prefix.size} elided" + prefix foreach (f => sb append s"${margin} at $f") + if (frames.size < trace.size) sb append s"${margin} ... ${trace.size - frames.size} more" + if (r == Self && prefix.size < frames.size) sb append s"${margin} ... ${frames.size - prefix.size} elided" print(e.getCause, CausedBy, trace, indents) - if (suppressable) { - import scala.language.reflectiveCalls - type Suppressing = { def getSuppressed(): Array[Throwable] } - for (s <- e.asInstanceOf[Suppressing].getSuppressed) print(s, Suppressed, frames, indents + 1) - } + e.getSuppressed foreach (t => print(t, Suppressed, frames, indents + 1)) } print(e, Self, share = Array.empty, indents = 0) From c1bd857318b01b223b3f5dbd99aa2e71b1b232d4 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 31 Jul 2016 17:13:09 -0700 Subject: [PATCH 0196/2793] Fix race condition in lambda deserialization Review of the code made me aware that concurrent calls to `$deserializeLambda$` for some lambda hosting class could result in concurrent calls to operations on `j.u.HashMap`. I've added a synchronized block to avoid this problem. I don't think this is likely to be a bottleneck in practical use cases, but if so we could come up with a lock-free scheme in the future. 
--- .../scala/runtime/LambdaDeserializer.scala | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/library/scala/runtime/LambdaDeserializer.scala b/src/library/scala/runtime/LambdaDeserializer.scala index ad7d12ba5d75..a6e08e6e614f 100644 --- a/src/library/scala/runtime/LambdaDeserializer.scala +++ b/src/library/scala/runtime/LambdaDeserializer.scala @@ -94,13 +94,15 @@ object LambdaDeserializer { val key = serialized.getImplMethodName + " : " + serialized.getImplMethodSignature val factory: MethodHandle = if (cache == null) { makeCallSite.getTarget - } else cache.get(key) match { - case null => - val callSite = makeCallSite - val temp = callSite.getTarget - cache.put(key, temp) - temp - case target => target + } else cache.synchronized{ + cache.get(key) match { + case null => + val callSite = makeCallSite + val temp = callSite.getTarget + cache.put(key, temp) + temp + case target => target + } } val captures = Array.tabulate(serialized.getCapturedArgCount)(n => serialized.getCapturedArg(n)) From adb3c010d3b95094161bd6725b3e2077cad98879 Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Tue, 26 Jul 2016 09:49:12 +0200 Subject: [PATCH 0197/2793] Reduce deprecations and warnings --- .../tools/nsc/backend/jvm/opt/LocalOpt.scala | 8 ++++++++ .../scala/tools/nsc/plugins/Plugin.scala | 2 +- .../scala/tools/nsc/util/ClassPath.scala | 12 +++++------ .../scala/collection/JavaConversions.scala | 2 +- .../scala/collection/convert/WrapAsJava.scala | 4 ++-- .../collection/convert/WrapAsScala.scala | 4 ++-- .../scala/collection/convert/package.scala | 12 +++++------ .../scala/collection/mutable/BitSet.scala | 2 +- src/library/scala/deprecated.scala | 20 +++++++++++-------- src/library/scala/deprecatedInheritance.scala | 10 ++++++++-- src/library/scala/deprecatedName.scala | 11 +++++++--- src/library/scala/deprecatedOverriding.scala | 10 ++++++++-- src/library/scala/util/Random.scala | 3 --- .../scala/tools/nsc/interpreter/ILoop.scala | 10 +++++----- .../tools/scalap/scalax/rules/Rules.scala | 2 +- .../scalasig/SourceFileAttributeParser.scala | 3 +-- test/files/jvm/serialization-new.check | 2 +- test/files/jvm/serialization.check | 2 +- test/files/neg/t9684.check | 4 ++-- test/files/run/bitsets.check | 2 +- 20 files changed, 75 insertions(+), 50 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala index 447ee209b593..fedacdac4185 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala @@ -828,8 +828,10 @@ object LocalOptImpls { /** * Replace jumps to a sequence of GOTO instructions by a jump to the final destination. * + * {{{ * Jump l; [any ops]; l: GOTO m; [any ops]; m: GOTO n; [any ops]; n: NotGOTO; [...] * => Jump n; [rest unchanged] + * }}} * * If there's a loop of GOTOs, the initial jump is replaced by one of the labels in the loop. */ @@ -848,8 +850,10 @@ object LocalOptImpls { /** * Eliminates unnecessary jump instructions * + * {{{ * Jump l; [nops]; l: [...] * => POP*; [nops]; l: [...] + * }}} * * Introduces 0, 1 or 2 POP instructions, depending on the number of values consumed by the Jump. */ @@ -865,8 +869,10 @@ object LocalOptImpls { * If the "else" part of a conditional branch is a simple GOTO, negates the conditional branch * and eliminates the GOTO. * + * {{{ * CondJump l; [nops, no jump targets]; GOTO m; [nops]; l: [...] 
* => NegatedCondJump m; [nops, no jump targets]; [nops]; l: [...] + * }}} * * Note that no jump targets are allowed in the first [nops] section. Otherwise, there could * be some other jump to the GOTO, and eliminating it would change behavior. @@ -893,8 +899,10 @@ object LocalOptImpls { /** * Inlines xRETURN and ATHROW * + * {{{ * GOTO l; [any ops]; l: xRETURN/ATHROW * => xRETURN/ATHROW; [any ops]; l: xRETURN/ATHROW + * }}} * * inlining is only done if the GOTO instruction is not part of a try block, otherwise the * rewrite might change the behavior. For xRETURN, the reason is that return instructions may throw diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index 5caf7e41bf11..ed1675e4cc18 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -64,7 +64,7 @@ abstract class Plugin { true } - @deprecated("use Plugin#init instead", since="2.11") + @deprecated("use Plugin#init instead", since="2.11.0") def processOptions(options: List[String], error: String => Unit): Unit = { if (!options.isEmpty) error(s"Error: $name takes no options") } diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index cef2fc4bbf50..f286cfe2467f 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -52,7 +52,7 @@ trait ClassPath { */ def asClassPathString: String = ClassPath.join(asClassPathStrings: _*) // for compatibility purposes - @deprecated("Use asClassPathString instead of this one", "2.11.5") + @deprecated("use asClassPathString instead of this one", "2.11.5") def asClasspathString: String = asClassPathString /** The whole sourcepath in the form of one String. 
@@ -128,10 +128,10 @@ object ClassPath { resources.asScala.filter(_.getProtocol == "jar").toList } - @deprecated("Shim for sbt's compiler interface", since = "2.12") + @deprecated("shim for sbt's compiler interface", since = "2.12.0") sealed abstract class ClassPathContext - @deprecated("Shim for sbt's compiler interface", since = "2.12") + @deprecated("shim for sbt's compiler interface", since = "2.12.0") sealed abstract class JavaContext } @@ -141,11 +141,11 @@ trait ClassRepresentation { def source: Option[AbstractFile] } -@deprecated("Shim for sbt's compiler interface", since = "2.12") +@deprecated("shim for sbt's compiler interface", since = "2.12.0") sealed abstract class DirectoryClassPath -@deprecated("Shim for sbt's compiler interface", since = "2.12") +@deprecated("shim for sbt's compiler interface", since = "2.12.0") sealed abstract class MergedClassPath -@deprecated("Shim for sbt's compiler interface", since = "2.12") +@deprecated("shim for sbt's compiler interface", since = "2.12.0") sealed abstract class JavaClassPath diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala index 960e452cdfed..93994d80bf01 100644 --- a/src/library/scala/collection/JavaConversions.scala +++ b/src/library/scala/collection/JavaConversions.scala @@ -56,5 +56,5 @@ import convert._ * @author Martin Odersky * @since 2.8 */ -@deprecated("Use JavaConverters", since="2.12") +@deprecated("use JavaConverters", since="2.12.0") object JavaConversions extends WrapAsScala with WrapAsJava diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala index e45c1666a5ed..e3a064b79dca 100644 --- a/src/library/scala/collection/convert/WrapAsJava.scala +++ b/src/library/scala/collection/convert/WrapAsJava.scala @@ -13,7 +13,7 @@ package convert import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } import scala.language.implicitConversions -@deprecated("Use JavaConverters or consider ToJavaImplicits", since="2.12") +@deprecated("use JavaConverters or consider ToJavaImplicits", since="2.12.0") trait WrapAsJava extends LowPriorityWrapAsJava { // provide higher-priority implicits with names that don't exist in JavaConverters for the case // when importing both JavaConverters._ and JavaConversions._. otherwise implicit conversions @@ -286,5 +286,5 @@ private[convert] trait LowPriorityWrapAsJava { } } -@deprecated("Use JavaConverters or consider ImplicitConversionsToJava", since="2.12") +@deprecated("use JavaConverters or consider ImplicitConversionsToJava", since="2.12.0") object WrapAsJava extends WrapAsJava diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala index 514490e34856..fbaafde79875 100644 --- a/src/library/scala/collection/convert/WrapAsScala.scala +++ b/src/library/scala/collection/convert/WrapAsScala.scala @@ -13,7 +13,7 @@ package convert import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } import scala.language.implicitConversions -@deprecated("Use JavaConverters or consider ToScalaImplicits", since="2.12") +@deprecated("use JavaConverters or consider ToScalaImplicits", since="2.12.0") trait WrapAsScala extends LowPriorityWrapAsScala { // provide higher-priority implicits with names that don't exist in JavaConverters for the case // when importing both JavaConverters._ and JavaConversions._. 
otherwise implicit conversions @@ -225,5 +225,5 @@ private[convert] trait LowPriorityWrapAsScala { } } -@deprecated("Use JavaConverters or consider ImplicitConversionsToScala", since="2.12") +@deprecated("use JavaConverters or consider ImplicitConversionsToScala", since="2.12.0") object WrapAsScala extends WrapAsScala diff --git a/src/library/scala/collection/convert/package.scala b/src/library/scala/collection/convert/package.scala index fe1951b6cf70..810d112cd5a5 100644 --- a/src/library/scala/collection/convert/package.scala +++ b/src/library/scala/collection/convert/package.scala @@ -10,17 +10,17 @@ package scala package collection package object convert { - @deprecated("use JavaConverters", since="2.12") + @deprecated("use JavaConverters", since="2.12.0") val decorateAsJava = new DecorateAsJava { } - @deprecated("use JavaConverters", since="2.12") + @deprecated("use JavaConverters", since="2.12.0") val decorateAsScala = new DecorateAsScala { } - @deprecated("use JavaConverters", since="2.12") + @deprecated("use JavaConverters", since="2.12.0") val decorateAll = JavaConverters - @deprecated("use JavaConverters or consider ImplicitConversionsToJava", since="2.12") + @deprecated("use JavaConverters or consider ImplicitConversionsToJava", since="2.12.0") val wrapAsJava = new WrapAsJava { } - @deprecated("use JavaConverters or consider ImplicitConversionsToScala", since="2.12") + @deprecated("use JavaConverters or consider ImplicitConversionsToScala", since="2.12.0") val wrapAsScala = new WrapAsScala { } - @deprecated("use JavaConverters or consider ImplicitConversions", since="2.12") + @deprecated("use JavaConverters or consider ImplicitConversions", since="2.12.0") val wrapAll = new WrapAsJava with WrapAsScala { } } diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala index feef694e0199..e74ee65dda88 100644 --- a/src/library/scala/collection/mutable/BitSet.scala +++ b/src/library/scala/collection/mutable/BitSet.scala @@ -164,7 +164,7 @@ class BitSet(protected final var elems: Array[Long]) extends AbstractSet[Int] */ @deprecated("If this BitSet contains a value that is 128 or greater, the result of this method is an 'immutable' " + "BitSet that shares state with this mutable BitSet. Thus, if the mutable BitSet is modified, it will violate the " + - "immutability of the result.", "2.11.6") + "immutability of the result.", "2.12.0") def toImmutable = immutable.BitSet.fromBitMaskNoCopy(elems) override def clone(): BitSet = { diff --git a/src/library/scala/deprecated.scala b/src/library/scala/deprecated.scala index 7338dffb8dd0..60f085755007 100644 --- a/src/library/scala/deprecated.scala +++ b/src/library/scala/deprecated.scala @@ -29,26 +29,30 @@ import scala.annotation.meta._ * {{{ * oldMethod(1) * oldMethod(2) - * aDeprecatedMethodFromBarLibrary(3, 4) + * aDeprecatedMethodFromLibraryBar(3, 4) * - * // warning: there were two deprecation warnings (since FooLib 12.0) * // warning: there was one deprecation warning (since BarLib 3.2) + * // warning: there were two deprecation warnings (since FooLib 12.0) * // warning: there were three deprecation warnings in total; re-run with -deprecation for details * }}} * + * '''`@deprecated` in the Scala language and its standard library'''
+ * * A deprecated element of the Scala language or a definition in the Scala standard library will * be preserved or at least another major version. * - * This means that an element deprecated since 2.12 will be preserved in 2.13 and will very likely - * not be part of 2.14, though sometimes a deprecated element might be kept for more than a major + * This means that an element deprecated since 2.12 will be preserved in 2.13, but will very likely + * not be part of 2.14. Sometimes a deprecated element might be kept for more than a major * release to ease migration and upgrades from older Scala versions.
* Developers should not rely on this. * - * @note The Scala team has decided to enact a special deprecation policy for the 2.12 release:
+ * '''Special deprecation policy for Scala 2.12'''
+ * The Scala team has decided to enact a special deprecation policy for the 2.12 release:
* - * As an upgrade from Scala 2.11 to Scala 2.12 also requires upgrading from Java 6 to Java 8, - * no deprecated elements will be removed in this release to ease migration and upgrades - * from older Scala versions. + * As an upgrade from Scala 2.11 to Scala 2.12 also requires upgrading from Java 6 to Java 8, + * no deprecated elements will be removed in this release to ease migration and upgrades + * from older Scala versions. This means that elements deprecated since 2.11 (or earlier) + * will not be removed in Scala 2.12. * * @see The official documentation on [[http://www.scala-lang.org/news/2.11.0/#binary-compatibility binary compatibility]]. * @param message the message to print during compilation if the definition is accessed diff --git a/src/library/scala/deprecatedInheritance.scala b/src/library/scala/deprecatedInheritance.scala index b85d07b0bdd5..bd5daf5de015 100644 --- a/src/library/scala/deprecatedInheritance.scala +++ b/src/library/scala/deprecatedInheritance.scala @@ -15,15 +15,21 @@ package scala * * No warnings are generated if the subclass is in the same compilation unit. * + * Library authors should state the library's deprecation policy in their documentation to give + * developers guidance on when a type annotated with `@deprecatedInheritance` will be `final`ized. + * + * Library authors should prepend the name of their library to the version number to help + * developers distinguish deprecations coming from different libraries: + * * {{{ - * @deprecatedInheritance("this class will be made final", "2.12") + * @deprecatedInheritance("this class will be made final", "FooLib 12.0") * class Foo * }}} * * {{{ * val foo = new Foo // no deprecation warning * class Bar extends Foo - * // warning: inheritance from class Foo is deprecated (since 2.12): this class will be made final + * // warning: inheritance from class Foo is deprecated (since FooLib 12.0): this class will be made final * // class Bar extends Foo * // ^ * }}} diff --git a/src/library/scala/deprecatedName.scala b/src/library/scala/deprecatedName.scala index e2322f03639b..f8c6bd32ad77 100644 --- a/src/library/scala/deprecatedName.scala +++ b/src/library/scala/deprecatedName.scala @@ -15,14 +15,19 @@ import scala.annotation.meta._ * * Using this name in a named argument generates a deprecation warning. * - * For instance, evaluating the code below in the Scala interpreter (with `-deprecation`) + * Library authors should state the library's deprecation policy in their documentation to give + * developers guidance on how long a deprecated name will be preserved. + * + * Library authors should prepend the name of their library to the version number to help + * developers distinguish deprecations coming from different libraries: + * * {{{ - * def inc(x: Int, @deprecatedName('y, "2.12") n: Int): Int = x + n + * def inc(x: Int, @deprecatedName('y, "FooLib 12.0") n: Int): Int = x + n * inc(1, y = 2) * }}} * will produce the following warning: * {{{ - * warning: the parameter name y is deprecated (since 2.12): use n instead + * warning: the parameter name y is deprecated (since FooLib 12.0): use n instead * inc(1, y = 2) * ^ * }}} diff --git a/src/library/scala/deprecatedOverriding.scala b/src/library/scala/deprecatedOverriding.scala index ee887db220c6..46639986c02f 100644 --- a/src/library/scala/deprecatedOverriding.scala +++ b/src/library/scala/deprecatedOverriding.scala @@ -12,9 +12,15 @@ package scala * * Overriding such a member in a sub-class then generates a warning. 
* + * Library authors should state the library's deprecation policy in their documentation to give + * developers guidance on when a method annotated with `@deprecatedOverriding` will be `final`ized. + * + * Library authors should prepend the name of their library to the version number to help + * developers distinguish deprecations coming from different libraries: + * * {{{ * class Foo { - * @deprecatedOverriding("this method will be made final", "2.12") + * @deprecatedOverriding("this method will be made final", "FooLib 12.0") * def add(x: Int, y: Int) = x + y * } * }}} @@ -24,7 +30,7 @@ package scala * class Baz extends Foo { * override def add(x: Int, y: Int) = x - y * } - * // warning: overriding method add in class Foo is deprecated (since 2.12): this method will be made final + * // warning: overriding method add in class Foo is deprecated (since FooLib 12.0): this method will be made final * // override def add(x: Int, y: Int) = x - y * // ^ * }}} diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala index 2d38c9d4a0fa..16d18d7d6df0 100644 --- a/src/library/scala/util/Random.scala +++ b/src/library/scala/util/Random.scala @@ -121,9 +121,6 @@ class Random(val self: java.util.Random) extends AnyRef with Serializable { (bf(xs) ++= buf).result() } - @deprecated("Preserved for backwards binary compatibility. To remove in 2.12.x.", "2.11.6") - final def `scala$util$Random$$isAlphaNum$1`(c: Char) = (c >= 'A' && c <= 'Z') || (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9') - /** Returns a Stream of pseudorandomly chosen alphanumeric characters, * equally chosen from A-Z, a-z, and 0-9. * diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index ea6ab6aad5d4..0dd96b26161c 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -45,8 +45,8 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) def this(in0: BufferedReader, out: JPrintWriter) = this(Some(in0), out) def this() = this(None, new JPrintWriter(Console.out, true)) - @deprecated("Use `intp` instead.", "2.9.0") def interpreter = intp - @deprecated("Use `intp` instead.", "2.9.0") def interpreter_= (i: Interpreter): Unit = intp = i + @deprecated("use `intp` instead.", "2.9.0") def interpreter = intp + @deprecated("use `intp` instead.", "2.9.0") def interpreter_= (i: Interpreter): Unit = intp = i var in: InteractiveReader = _ // the input stream from which commands come var settings: Settings = _ @@ -73,7 +73,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) def history = in.history // classpath entries added via :cp - @deprecated("Use reset, replay or require to update class path", since = "2.11") + @deprecated("use reset, replay or require to update class path", since = "2.11.0") var addedClasspath: String = "" /** A reverse list of commands to replay if the user requests a :replay */ @@ -594,7 +594,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) else File(filename).printlnAll(replayCommands: _*) ) - @deprecated("Use reset, replay or require to update class path", since = "2.11") + @deprecated("use reset, replay or require to update class path", since = "2.11.0") def addClasspath(arg: String): Unit = { val f = File(arg).normalize if (f.exists) { @@ -1000,7 +1000,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) } } - @deprecated("Use `process` instead", "2.9.0") + 
@deprecated("use `process` instead", "2.9.0") def main(settings: Settings): Unit = process(settings) //used by sbt } diff --git a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala index dd17c46f79fb..00d86adc2924 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/Rules.scala @@ -79,7 +79,7 @@ trait Rules { /** A factory for rules that apply to a particular context. * - * @requires S the context to which rules apply. + * @tparam S the context to which rules apply. * * @author Andrew Foggin * diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala index fc5a75c046c7..0595234addad 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala @@ -22,7 +22,6 @@ object SourceFileAttributeParser extends ByteCodeReader { } * * Contains only file index in ConstantPool, first two fields are already treated - * by {@link scalax.rules.scalasig.ClassFile.attribute#attribute} + * by {@link scalax.rules.scalasig.ClassFileParser#attribute} */ case class SourceFileInfo(sourceFileIndex: Int) - diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check index 5b8a08da82e0..964c68e52819 100644 --- a/test/files/jvm/serialization-new.check +++ b/test/files/jvm/serialization-new.check @@ -1,5 +1,5 @@ warning: there were two deprecation warnings (since 2.11.0) -warning: there was one deprecation warning (since 2.11.6) +warning: there was one deprecation warning (since 2.12.0) warning: there were three deprecation warnings in total; re-run with -deprecation for details a1 = Array[1,2,3] _a1 = Array[1,2,3] diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check index 5b8a08da82e0..964c68e52819 100644 --- a/test/files/jvm/serialization.check +++ b/test/files/jvm/serialization.check @@ -1,5 +1,5 @@ warning: there were two deprecation warnings (since 2.11.0) -warning: there was one deprecation warning (since 2.11.6) +warning: there was one deprecation warning (since 2.12.0) warning: there were three deprecation warnings in total; re-run with -deprecation for details a1 = Array[1,2,3] _a1 = Array[1,2,3] diff --git a/test/files/neg/t9684.check b/test/files/neg/t9684.check index ab36479a472a..bb5669733d8f 100644 --- a/test/files/neg/t9684.check +++ b/test/files/neg/t9684.check @@ -1,7 +1,7 @@ -t9684.scala:6: warning: object JavaConversions in package collection is deprecated (since 2.12): Use JavaConverters +t9684.scala:6: warning: object JavaConversions in package collection is deprecated (since 2.12.0): use JavaConverters null.asInstanceOf[java.util.List[Int]] : Buffer[Int] ^ -t9684.scala:8: warning: object JavaConversions in package collection is deprecated (since 2.12): Use JavaConverters +t9684.scala:8: warning: object JavaConversions in package collection is deprecated (since 2.12.0): use JavaConverters null.asInstanceOf[Iterable[Int]] : java.util.Collection[Int] ^ error: No warnings can be incurred under -Xfatal-warnings. 
diff --git a/test/files/run/bitsets.check b/test/files/run/bitsets.check index 770d9b5e3ffe..89e51f9a7834 100644 --- a/test/files/run/bitsets.check +++ b/test/files/run/bitsets.check @@ -1,4 +1,4 @@ -warning: there were three deprecation warnings (since 2.11.6); re-run with -deprecation for details +warning: there were three deprecation warnings (since 2.12.0); re-run with -deprecation for details ms0 = BitSet(2) ms1 = BitSet(2) ms2 = BitSet(2) From afc730bb53a9dd44a592d30c222a28dace86c1ea Mon Sep 17 00:00:00 2001 From: Simon Ochsenreither Date: Tue, 26 Jul 2016 10:59:01 +0200 Subject: [PATCH 0198/2793] Deprecate values that had to be public in older versions... ... so we can make them private later. --- src/library/scala/Predef.scala | 12 ++++++++++-- test/files/run/array-charSeq.check | 1 + 2 files changed, 11 insertions(+), 2 deletions(-) diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index 8de9754b50c2..5e82062b447c 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -337,8 +337,16 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { @deprecated("use Throwable#getStackTrace", "2.11.0") def getStackTraceString = self.getStackTrace().mkString("", EOL, EOL) } + // Sadly we have to do `@deprecatedName(null, "2.12.0")` because + // `@deprecatedName(since="2.12.0")` incurs a warning about + // Usage of named or default arguments transformed this annotation constructor call into a block. + // The corresponding AnnotationInfo will contain references to local values and default getters + // instead of the actual argument trees + // and `@deprecatedName(Symbol(""), "2.12.0")` crashes scalac with + // scala.reflect.internal.Symbols$CyclicReference: illegal cyclic reference involving object Symbol + // in run/repl-no-imports-no-predef-power.scala. 
/** @group implicit-classes-char */ - implicit final class SeqCharSequence(val __sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence { + implicit final class SeqCharSequence(@deprecated("will be made private", "2.12.0") @deprecatedName(null, "2.12.0") val __sequenceOfChars: scala.collection.IndexedSeq[Char]) extends CharSequence { def length: Int = __sequenceOfChars.length def charAt(index: Int): Char = __sequenceOfChars(index) def subSequence(start: Int, end: Int): CharSequence = new SeqCharSequence(__sequenceOfChars.slice(start, end)) @@ -346,7 +354,7 @@ object Predef extends LowPriorityImplicits with DeprecatedPredef { } /** @group implicit-classes-char */ - implicit final class ArrayCharSequence(val __arrayOfChars: Array[Char]) extends CharSequence { + implicit final class ArrayCharSequence(@deprecated("will be made private", "2.12.0") @deprecatedName(null, "2.12.0") val __arrayOfChars: Array[Char]) extends CharSequence { def length: Int = __arrayOfChars.length def charAt(index: Int): Char = __arrayOfChars(index) def subSequence(start: Int, end: Int): CharSequence = new runtime.ArrayCharSequence(__arrayOfChars, start, end) diff --git a/test/files/run/array-charSeq.check b/test/files/run/array-charSeq.check index f1f374f63ec5..3ccf493ceeb3 100644 --- a/test/files/run/array-charSeq.check +++ b/test/files/run/array-charSeq.check @@ -1,3 +1,4 @@ +warning: there were two deprecation warnings (since 2.12.0); re-run with -deprecation for details [check 'abcdefghi'] len = 9 sub(0, 9) == 'abcdefghi' From 0022e59b8656a2fbb44f97498e67b9a35de3f29f Mon Sep 17 00:00:00 2001 From: Steven Mitchell Date: Tue, 2 Aug 2016 15:29:02 -0700 Subject: [PATCH 0199/2793] Typo fix in scala.sys.process.ProcessBuilder. --- src/library/scala/sys/process/ProcessBuilder.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala index 35f3f4d7a5f4..9713b712fc67 100644 --- a/src/library/scala/sys/process/ProcessBuilder.scala +++ b/src/library/scala/sys/process/ProcessBuilder.scala @@ -15,8 +15,8 @@ import ProcessBuilder._ /** Represents a sequence of one or more external processes that can be * executed. A `ProcessBuilder` can be a single external process, or a - * combination of other `ProcessBuilder`. One can control where a - * the output of an external process will go to, and where its input will come + * combination of other `ProcessBuilder`. One can control where the + * output of an external process will go to, and where its input will come * from, or leave that decision to whoever starts it. * * One creates a `ProcessBuilder` through factories provided in From 498a2ce7397b909c0bebf36affeb1ee5a1c03d6a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 4 Aug 2016 01:58:33 -0700 Subject: [PATCH 0200/2793] SD-193 Lock down lambda deserialization The old design allowed a forged `SerializedLambda` to be deserialized into a lambda that could call any private method in the host class. This commit passes through the list of all lambda impl methods to the bootstrap method and verifies that you are deserializing one of these. The new test case shows that a forged lambda can no longer call the private method, and that the new encoding is okay with a large number of lambdas in a file. 
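The check itself boils down to a lookup in a map keyed by the implementation method's name and descriptor, populated from the handles passed to the bootstrap method. A simplified sketch of that idea (names are illustrative; the actual changes are in `LambdaDeserialize`/`LambdaDeserializer` below):

```scala
import java.lang.invoke.{MethodHandle, SerializedLambda}

object DeserializeWhitelistSketch {
  private def key(name: String, descriptor: String): String = name + descriptor

  // Only implementation methods registered at the indy call site may be used;
  // anything else (e.g. a forged SerializedLambda naming a private method) is rejected.
  def lookupImplMethod(allowed: java.util.Map[String, MethodHandle],
                       serialized: SerializedLambda): MethodHandle = {
    val k = key(serialized.getImplMethodName, serialized.getImplMethodSignature)
    val handle = allowed.get(k)
    if (handle == null) throw new IllegalArgumentException("Illegal lambda deserialization")
    handle
  }
}
```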
We already have method handle constants in the constant pool to support the invokedynamic through LambdaMetafactory, so the only additional cost will be referring to these in the boostrap args for `LambdaDeserialize`, 2 bytes per lambda. I checked this with an example: https://gist.github.com/retronym/e343d211f7536d06f1fef4b499a0a177 Fixes SD-193 --- .../nsc/backend/jvm/BCodeBodyBuilder.scala | 3 +- .../nsc/backend/jvm/BCodeSkelBuilder.scala | 8 --- .../scala/tools/nsc/backend/jvm/BTypes.scala | 11 ++- .../tools/nsc/backend/jvm/CoreBTypes.scala | 16 ++++- .../tools/nsc/backend/jvm/GenBCode.scala | 3 + .../backend/jvm/analysis/BackendUtils.scala | 17 ++--- .../tools/nsc/backend/jvm/opt/Inliner.scala | 7 +- .../scala/runtime/LambdaDeserialize.java | 25 +++++-- .../scala/runtime/LambdaDeserializer.scala | 15 +++- .../run/lambda-serialization-security.scala | 47 ++++++++++++ test/files/run/lambda-serialization.scala | 71 +++++++++++-------- .../scala/runtime/LambdaDeserializerTest.java | 4 +- 12 files changed, 164 insertions(+), 63 deletions(-) create mode 100644 test/files/run/lambda-serialization-security.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index d5c4b5e20161..6f9682f4348d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -14,6 +14,7 @@ import scala.reflect.internal.Flags import scala.tools.asm import GenBCode._ import BackendReporting._ +import scala.collection.mutable import scala.tools.asm.Opcodes import scala.tools.asm.tree.{MethodInsnNode, MethodNode} import scala.tools.nsc.backend.jvm.BCodeHelpers.{InvokeStyle, TestOp} @@ -1349,7 +1350,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { val markers = if (addScalaSerializableMarker) classBTypeFromSymbol(definitions.SerializableClass).toASMType :: Nil else Nil visitInvokeDynamicInsnLMF(bc.jmethod, sam.name.toString, invokedType, samMethodType, implMethodHandle, constrainedType, isSerializable, markers) if (isSerializable) - indyLambdaHosts += cnode.name + addIndyLambdaImplMethod(cnode.name, implMethodHandle :: Nil) } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index 1bff8519eca3..d4d532f4df18 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -112,14 +112,6 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { gen(cd.impl) - val shouldAddLambdaDeserialize = ( - settings.target.value == "jvm-1.8" - && settings.Ydelambdafy.value == "method" - && indyLambdaHosts.contains(cnode.name)) - - if (shouldAddLambdaDeserialize) - backendUtils.addLambdaDeserialize(cnode) - cnode.visitAttribute(thisBType.inlineInfoAttribute.get) if (AsmUtils.traceClassEnabled && cnode.name.contains(AsmUtils.traceClassPattern)) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index 7b2686e7a9be..0845e440d725 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -122,7 +122,16 @@ abstract class BTypes { * inlining: when inlining an indyLambda instruction into a class, we need to make sure the class * has the method. 
*/ - val indyLambdaHosts: mutable.Set[InternalName] = recordPerRunCache(mutable.Set.empty) + val indyLambdaImplMethods: mutable.AnyRefMap[InternalName, mutable.LinkedHashSet[asm.Handle]] = recordPerRunCache(mutable.AnyRefMap()) + def addIndyLambdaImplMethod(hostClass: InternalName, handle: Seq[asm.Handle]): Unit = { + indyLambdaImplMethods.getOrElseUpdate(hostClass, mutable.LinkedHashSet()) ++= handle + } + def getIndyLambdaImplMethods(hostClass: InternalName): List[asm.Handle] = { + indyLambdaImplMethods.getOrNull(hostClass) match { + case null => Nil + case xs => xs.toList.distinct + } + } /** * Obtain the BType for a type descriptor or internal name. For class descriptors, the ClassBType diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala index c2010d282828..1dbb18722ffe 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala @@ -283,7 +283,21 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) { List( coreBTypes.jliMethodHandlesLookupRef, coreBTypes.StringRef, - coreBTypes.jliMethodTypeRef + coreBTypes.jliMethodTypeRef, + ArrayBType(jliMethodHandleRef) + ), + coreBTypes.jliCallSiteRef + ).descriptor, + /* itf = */ coreBTypes.srLambdaDeserialize.isInterface.get) + lazy val lambdaDeserializeAddTargets = + new scala.tools.asm.Handle(scala.tools.asm.Opcodes.H_INVOKESTATIC, + coreBTypes.srLambdaDeserialize.internalName, "bootstrapAddTargets", + MethodBType( + List( + coreBTypes.jliMethodHandlesLookupRef, + coreBTypes.StringRef, + coreBTypes.jliMethodTypeRef, + ArrayBType(coreBTypes.jliMethodHandleRef) ), coreBTypes.jliCallSiteRef ).descriptor, diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala index 584b11d4edac..0a54767f7664 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala @@ -266,6 +266,9 @@ abstract class GenBCode extends BCodeSyncAndTry { try { localOptimizations(item.plain) setInnerClasses(item.plain) + val lambdaImplMethods = getIndyLambdaImplMethods(item.plain.name) + if (lambdaImplMethods.nonEmpty) + backendUtils.addLambdaDeserialize(item.plain, lambdaImplMethods) setInnerClasses(item.mirror) setInnerClasses(item.bean) addToQ3(item) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index 83615abc31bb..d85d85003df0 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -76,7 +76,7 @@ class BackendUtils[BT <: BTypes](val btypes: BT) { * host a static field in the enclosing class. This allows us to add this method to interfaces * that define lambdas in default methods. 
*/ - def addLambdaDeserialize(classNode: ClassNode): Unit = { + def addLambdaDeserialize(classNode: ClassNode, implMethods: List[Handle]): Unit = { val cw = classNode // Make sure to reference the ClassBTypes of all types that are used in the code generated @@ -87,12 +87,13 @@ class BackendUtils[BT <: BTypes](val btypes: BT) { val nilLookupDesc = MethodBType(Nil, jliMethodHandlesLookupRef).descriptor val serlamObjDesc = MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor + val addTargetMethodsObjDesc = MethodBType(ObjectRef :: Nil, UNIT).descriptor { val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serlamObjDesc, null, null) mv.visitCode() mv.visitVarInsn(ALOAD, 0) - mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, lambdaDeserializeBootstrapHandle) + mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, lambdaDeserializeBootstrapHandle, implMethods: _*) mv.visitInsn(ARETURN) mv.visitEnd() } @@ -104,16 +105,16 @@ class BackendUtils[BT <: BTypes](val btypes: BT) { * a boolean indicating if the instruction list contains an instantiation of a serializable SAM * type. */ - def cloneInstructions(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode], keepLineNumbers: Boolean): (InsnList, Map[AbstractInsnNode, AbstractInsnNode], Boolean) = { + def cloneInstructions(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode], keepLineNumbers: Boolean): (InsnList, Map[AbstractInsnNode, AbstractInsnNode], List[Handle]) = { val javaLabelMap = labelMap.asJava val result = new InsnList var map = Map.empty[AbstractInsnNode, AbstractInsnNode] - var hasSerializableClosureInstantiation = false + var inlinedTargetHandles = mutable.ListBuffer[Handle]() for (ins <- methodNode.instructions.iterator.asScala) { - if (!hasSerializableClosureInstantiation) ins match { + ins match { case callGraph.LambdaMetaFactoryCall(indy, _, _, _) => indy.bsmArgs match { - case Array(_, _, _, flags: Integer, xs@_*) if (flags.intValue & LambdaMetafactory.FLAG_SERIALIZABLE) != 0 => - hasSerializableClosureInstantiation = true + case Array(_, targetHandle: Handle, _, flags: Integer, xs@_*) if (flags.intValue & LambdaMetafactory.FLAG_SERIALIZABLE) != 0 => + inlinedTargetHandles += targetHandle case _ => } case _ => @@ -124,7 +125,7 @@ class BackendUtils[BT <: BTypes](val btypes: BT) { map += ((ins, cloned)) } } - (result, map, hasSerializableClosureInstantiation) + (result, map, inlinedTargetHandles.toList) } def getBoxedUnit: FieldInsnNode = new FieldInsnNode(GETSTATIC, srBoxedUnitRef.internalName, "UNIT", srBoxedUnitRef.descriptor) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala index 9c5a1a9f980b..a7916f9c2483 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala @@ -277,7 +277,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { } case _ => false } - val (clonedInstructions, instructionMap, hasSerializableClosureInstantiation) = cloneInstructions(callee, labelsMap, keepLineNumbers = sameSourceFile) + val (clonedInstructions, instructionMap, targetHandles) = cloneInstructions(callee, labelsMap, keepLineNumbers = sameSourceFile) // local vars in the callee are shifted by the number of locals at the callsite val localVarShift = callsiteMethod.maxLocals @@ -405,10 +405,7 @@ class Inliner[BT <: BTypes](val btypes: BT) { callsiteMethod.maxStack = math.max(callsiteMethod.maxStack, 
math.max(stackHeightAtNullCheck, maxStackOfInlinedCode)) - if (hasSerializableClosureInstantiation && !indyLambdaHosts(callsiteClass.internalName)) { - indyLambdaHosts += callsiteClass.internalName - addLambdaDeserialize(byteCodeRepository.classNode(callsiteClass.internalName).get) - } + addIndyLambdaImplMethod(callsiteClass.internalName, targetHandles) callGraph.addIfMissing(callee, calleeDeclarationClass) diff --git a/src/library/scala/runtime/LambdaDeserialize.java b/src/library/scala/runtime/LambdaDeserialize.java index e239debf25bc..a3df868517f2 100644 --- a/src/library/scala/runtime/LambdaDeserialize.java +++ b/src/library/scala/runtime/LambdaDeserialize.java @@ -2,28 +2,41 @@ import java.lang.invoke.*; +import java.util.ArrayList; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; public final class LambdaDeserialize { + public static final MethodType DESERIALIZE_LAMBDA_MT = MethodType.fromMethodDescriptorString("(Ljava/lang/invoke/SerializedLambda;)Ljava/lang/Object;", LambdaDeserialize.class.getClassLoader()); + public static final MethodType ADD_TARGET_METHODS_MT = MethodType.fromMethodDescriptorString("([Ljava/lang/invoke/MethodHandle;)V", LambdaDeserialize.class.getClassLoader()); private MethodHandles.Lookup lookup; private final HashMap cache = new HashMap<>(); private final LambdaDeserializer$ l = LambdaDeserializer$.MODULE$; + private final HashMap targetMethodMap; - private LambdaDeserialize(MethodHandles.Lookup lookup) { + private LambdaDeserialize(MethodHandles.Lookup lookup, MethodHandle[] targetMethods) { this.lookup = lookup; + targetMethodMap = new HashMap<>(targetMethods.length); + for (MethodHandle targetMethod : targetMethods) { + MethodHandleInfo info = lookup.revealDirect(targetMethod); + String key = nameAndDescriptorKey(info.getName(), info.getMethodType().toMethodDescriptorString()); + targetMethodMap.put(key, targetMethod); + } } public Object deserializeLambda(SerializedLambda serialized) { - return l.deserializeLambda(lookup, cache, serialized); + return l.deserializeLambda(lookup, cache, targetMethodMap, serialized); } public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName, - MethodType invokedType) throws Throwable { - MethodType type = MethodType.fromMethodDescriptorString("(Ljava/lang/invoke/SerializedLambda;)Ljava/lang/Object;", lookup.getClass().getClassLoader()); - MethodHandle deserializeLambda = lookup.findVirtual(LambdaDeserialize.class, "deserializeLambda", type); - MethodHandle exact = deserializeLambda.bindTo(new LambdaDeserialize(lookup)).asType(invokedType); + MethodType invokedType, MethodHandle... targetMethods) throws Throwable { + MethodHandle deserializeLambda = lookup.findVirtual(LambdaDeserialize.class, "deserializeLambda", DESERIALIZE_LAMBDA_MT); + MethodHandle exact = deserializeLambda.bindTo(new LambdaDeserialize(lookup, targetMethods)).asType(invokedType); return new ConstantCallSite(exact); } + public static String nameAndDescriptorKey(String name, String descriptor) { + return name + " " + descriptor; + } } diff --git a/src/library/scala/runtime/LambdaDeserializer.scala b/src/library/scala/runtime/LambdaDeserializer.scala index ad7d12ba5d75..eb168fe445e9 100644 --- a/src/library/scala/runtime/LambdaDeserializer.scala +++ b/src/library/scala/runtime/LambdaDeserializer.scala @@ -31,10 +31,12 @@ object LambdaDeserializer { * member of the anonymous class created by `LambdaMetaFactory`. 
* @return An instance of the functional interface */ - def deserializeLambda(lookup: MethodHandles.Lookup, cache: java.util.Map[String, MethodHandle], serialized: SerializedLambda): AnyRef = { + def deserializeLambda(lookup: MethodHandles.Lookup, cache: java.util.Map[String, MethodHandle], + targetMethodMap: java.util.Map[String, MethodHandle], serialized: SerializedLambda): AnyRef = { def slashDot(name: String) = name.replaceAll("/", ".") val loader = lookup.lookupClass().getClassLoader val implClass = loader.loadClass(slashDot(serialized.getImplClass)) + val key = LambdaDeserialize.nameAndDescriptorKey(serialized.getImplMethodName, serialized.getImplMethodSignature) def makeCallSite: CallSite = { import serialized._ @@ -69,7 +71,15 @@ object LambdaDeserializer { // Lookup the implementation method val implMethod: MethodHandle = try { - findMember(lookup, getImplMethodKind, implClass, getImplMethodName, implMethodSig) + if (targetMethodMap != null) { + if (targetMethodMap.containsKey(key)) { + targetMethodMap.get(key) + } else { + throw new IllegalArgumentException("Illegal lambda deserialization") + } + } else { + findMember(lookup, getImplMethodKind, implClass, getImplMethodName, implMethodSig) + } } catch { case e: ReflectiveOperationException => throw new IllegalArgumentException("Illegal lambda deserialization", e) } @@ -91,7 +101,6 @@ object LambdaDeserializer { ) } - val key = serialized.getImplMethodName + " : " + serialized.getImplMethodSignature val factory: MethodHandle = if (cache == null) { makeCallSite.getTarget } else cache.get(key) match { diff --git a/test/files/run/lambda-serialization-security.scala b/test/files/run/lambda-serialization-security.scala new file mode 100644 index 000000000000..08e235b1cb9b --- /dev/null +++ b/test/files/run/lambda-serialization-security.scala @@ -0,0 +1,47 @@ +import java.io.{ByteArrayInputStream, ObjectInputStream, ObjectOutputStream, ByteArrayOutputStream} + +trait IntToString extends java.io.Serializable { def apply(i: Int): String } + +object Test { + def main(args: Array[String]): Unit = { + roundTrip() + roundTripIndySam() + } + + def roundTrip(): Unit = { + val c = new Capture("Capture") + val lambda = (p: Param) => ("a", p, c) + val reconstituted1 = serializeDeserialize(lambda).asInstanceOf[Object => Any] + val p = new Param + assert(reconstituted1.apply(p) == ("a", p, c)) + val reconstituted2 = serializeDeserialize(lambda).asInstanceOf[Object => Any] + assert(reconstituted1.getClass == reconstituted2.getClass) + + val reconstituted3 = serializeDeserialize(reconstituted1) + assert(reconstituted3.apply(p) == ("a", p, c)) + + val specializedLambda = (p: Int) => List(p, c).length + assert(serializeDeserialize(specializedLambda).apply(42) == 2) + assert(serializeDeserialize(serializeDeserialize(specializedLambda)).apply(42) == 2) + } + + // lambda targeting a SAM, not a FunctionN (should behave the same way) + def roundTripIndySam(): Unit = { + val lambda: IntToString = (x: Int) => "yo!" 
* x + val reconstituted1 = serializeDeserialize(lambda).asInstanceOf[IntToString] + val reconstituted2 = serializeDeserialize(reconstituted1).asInstanceOf[IntToString] + assert(reconstituted1.apply(2) == "yo!yo!") + assert(reconstituted1.getClass == reconstituted2.getClass) + } + + def serializeDeserialize[T <: AnyRef](obj: T) = { + val buffer = new ByteArrayOutputStream + val out = new ObjectOutputStream(buffer) + out.writeObject(obj) + val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray)) + in.readObject.asInstanceOf[T] + } +} + +case class Capture(s: String) extends Serializable +class Param diff --git a/test/files/run/lambda-serialization.scala b/test/files/run/lambda-serialization.scala index 08e235b1cb9b..78b4c5d58b47 100644 --- a/test/files/run/lambda-serialization.scala +++ b/test/files/run/lambda-serialization.scala @@ -1,37 +1,54 @@ -import java.io.{ByteArrayInputStream, ObjectInputStream, ObjectOutputStream, ByteArrayOutputStream} +import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream} +import java.lang.invoke.{MethodHandleInfo, SerializedLambda} + +import scala.tools.nsc.util + +class C extends java.io.Serializable { + val fs = List( + () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (), + () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (), + () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (), + () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (), + () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (), + () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (), + () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (), + () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (), + () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (), + () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (), + () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (), + () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (), + () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (), + () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => (), + () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => (), () => () ,() => (), () => (), () => (), () => (), () => () + ) + private def foo(): Unit = { + 
assert(false, "should not be called!!!") + } +} -trait IntToString extends java.io.Serializable { def apply(i: Int): String } +trait FakeSam { def apply(): Unit } object Test { def main(args: Array[String]): Unit = { - roundTrip() - roundTripIndySam() + allRealLambdasRoundTrip() + fakeLambdaFailsToDeserialize() } - def roundTrip(): Unit = { - val c = new Capture("Capture") - val lambda = (p: Param) => ("a", p, c) - val reconstituted1 = serializeDeserialize(lambda).asInstanceOf[Object => Any] - val p = new Param - assert(reconstituted1.apply(p) == ("a", p, c)) - val reconstituted2 = serializeDeserialize(lambda).asInstanceOf[Object => Any] - assert(reconstituted1.getClass == reconstituted2.getClass) - - val reconstituted3 = serializeDeserialize(reconstituted1) - assert(reconstituted3.apply(p) == ("a", p, c)) - - val specializedLambda = (p: Int) => List(p, c).length - assert(serializeDeserialize(specializedLambda).apply(42) == 2) - assert(serializeDeserialize(serializeDeserialize(specializedLambda)).apply(42) == 2) + def allRealLambdasRoundTrip(): Unit = { + new C().fs.map(x => serializeDeserialize(x).apply()) } - // lambda targeting a SAM, not a FunctionN (should behave the same way) - def roundTripIndySam(): Unit = { - val lambda: IntToString = (x: Int) => "yo!" * x - val reconstituted1 = serializeDeserialize(lambda).asInstanceOf[IntToString] - val reconstituted2 = serializeDeserialize(reconstituted1).asInstanceOf[IntToString] - assert(reconstituted1.apply(2) == "yo!yo!") - assert(reconstituted1.getClass == reconstituted2.getClass) + def fakeLambdaFailsToDeserialize(): Unit = { + val fake = new SerializedLambda(classOf[C], classOf[FakeSam].getName, "apply", "()V", + MethodHandleInfo.REF_invokeVirtual, classOf[C].getName, "foo", "()V", "()V", Array(new C)) + try { + serializeDeserialize(fake).asInstanceOf[FakeSam].apply() + assert(false) + } catch { + case ex: Exception => + val stackTrace = util.stackTraceString(ex) + assert(stackTrace.contains("Illegal lambda deserialization"), stackTrace) + } } def serializeDeserialize[T <: AnyRef](obj: T) = { @@ -43,5 +60,3 @@ object Test { } } -case class Capture(s: String) extends Serializable -class Param diff --git a/test/junit/scala/runtime/LambdaDeserializerTest.java b/test/junit/scala/runtime/LambdaDeserializerTest.java index 069eb4aab6db..ba52e979ccf9 100644 --- a/test/junit/scala/runtime/LambdaDeserializerTest.java +++ b/test/junit/scala/runtime/LambdaDeserializerTest.java @@ -97,7 +97,7 @@ public void implMethodSignatureChanged() { private void checkIllegalAccess(SerializedLambda serialized) { try { - LambdaDeserializer.deserializeLambda(MethodHandles.lookup(), null, serialized); + LambdaDeserializer.deserializeLambda(MethodHandles.lookup(), null, null, serialized); throw new AssertionError(); } catch (IllegalArgumentException iae) { if (!iae.getMessage().contains("Illegal lambda deserialization")) { @@ -130,7 +130,7 @@ private A reconstitute(A f1) { @SuppressWarnings("unchecked") private A reconstitute(A f1, java.util.HashMap cache) { try { - return (A) LambdaDeserializer.deserializeLambda(LambdaHost.lookup(), cache, writeReplace(f1)); + return (A) LambdaDeserializer.deserializeLambda(LambdaHost.lookup(), cache, null, writeReplace(f1)); } catch (Exception e) { throw new RuntimeException(e); } From aab103eb999e2816c87c5010e7f7c79ed993fb90 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 3 Jun 2016 15:57:22 -0700 Subject: [PATCH 0201/2793] Do not add `@TraitSetter` -- not sure what it's for Also deprecate the TraitSetter annotation. 
--- src/compiler/scala/tools/nsc/transform/Mixin.scala | 3 --- src/library/scala/runtime/TraitSetter.java | 1 + src/reflect/scala/reflect/internal/Definitions.scala | 2 +- src/reflect/scala/reflect/runtime/JavaUniverseForce.scala | 1 - 4 files changed, 2 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index d62b77dac26e..b5084cffe149 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -479,9 +479,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { tree case _ => - if (currentOwner.isTrait && sym.isSetter && !enteringPickler(sym.isDeferred)) { - sym.addAnnotation(TraitSetterAnnotationClass) - } tree } } diff --git a/src/library/scala/runtime/TraitSetter.java b/src/library/scala/runtime/TraitSetter.java index d9907c0ac052..d8dd8c6b04ba 100644 --- a/src/library/scala/runtime/TraitSetter.java +++ b/src/library/scala/runtime/TraitSetter.java @@ -2,5 +2,6 @@ /** A marker annotation to tag a setter of a mutable variable in a trait */ +@Deprecated public @interface TraitSetter { } \ No newline at end of file diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 0342daf11390..3dec73da5801 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -362,7 +362,6 @@ trait Definitions extends api.StandardDefinitions { // classes with special meanings lazy val StringAddClass = requiredClass[scala.runtime.StringAdd] lazy val ScalaNumberClass = requiredClass[scala.math.ScalaNumber] - lazy val TraitSetterAnnotationClass = requiredClass[scala.runtime.TraitSetter] lazy val DelayedInitClass = requiredClass[scala.DelayedInit] def delayedInitMethod = getMemberMethod(DelayedInitClass, nme.delayedInit) @@ -1470,6 +1469,7 @@ trait Definitions extends api.StandardDefinitions { lazy val StringAdd_+ = getMemberMethod(StringAddClass, nme.PLUS) // The given symbol represents either String.+ or StringAdd.+ + // TODO: this misses Predef.any2stringadd def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+ lazy val StringContext_f = getMemberMethod(StringContextClass, nme.f) diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index 0a90a141d3c0..caef5535b4d9 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -247,7 +247,6 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.SymbolModule definitions.StringAddClass definitions.ScalaNumberClass - definitions.TraitSetterAnnotationClass definitions.DelayedInitClass definitions.TypeConstraintClass definitions.SingletonClass From 44a22d7cc0c315b9feaee1d4cb5df7a66578b1ea Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Tue, 5 Jul 2016 16:58:21 +0200 Subject: [PATCH 0202/2793] SI-9068 Deprecate scala.collection.mutable.Stack --- src/library/scala/collection/mutable/Stack.scala | 1 + test/files/jvm/serialization-new.check | 4 ++-- test/files/run/collection-stacks.check | 4 +++- 3 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala index 1a92f23b7b19..28d50af1f979 100644 --- a/src/library/scala/collection/mutable/Stack.scala +++ 
b/src/library/scala/collection/mutable/Stack.scala @@ -54,6 +54,7 @@ object Stack extends SeqFactory[Stack] { * @define mayNotTerminateInf * @define willNotTerminateInf */ +@deprecated("Stack is an inelegant and potentially poorly-performing wrapper around List. Use a List assigned to a var instead.", "2.12.0") class Stack[A] private (var elems: List[A]) extends AbstractSeq[A] with Seq[A] diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check index 964c68e52819..90da8a085de6 100644 --- a/test/files/jvm/serialization-new.check +++ b/test/files/jvm/serialization-new.check @@ -1,6 +1,6 @@ warning: there were two deprecation warnings (since 2.11.0) -warning: there was one deprecation warning (since 2.12.0) -warning: there were three deprecation warnings in total; re-run with -deprecation for details +warning: there were three deprecation warnings (since 2.12.0) +warning: there were 5 deprecation warnings in total; re-run with -deprecation for details a1 = Array[1,2,3] _a1 = Array[1,2,3] arrayEquals(a1, _a1): true diff --git a/test/files/run/collection-stacks.check b/test/files/run/collection-stacks.check index 826e3a87f835..cd87cc61e471 100644 --- a/test/files/run/collection-stacks.check +++ b/test/files/run/collection-stacks.check @@ -1,4 +1,6 @@ -warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details +warning: there was one deprecation warning (since 2.11.0) +warning: there were two deprecation warnings (since 2.12.0) +warning: there were three deprecation warnings in total; re-run with -deprecation for details 3-2-1: true 3-2-1: true apply From 131402fd5fe8c064ef5cfffbe568507cbdf37990 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 10 Aug 2016 21:20:24 +1000 Subject: [PATCH 0203/2793] Cleanups after code review - Remove unused references to "addTargetMethods" - Require that `targetMethodMap` is provided --- .../scala/tools/nsc/backend/jvm/BTypes.scala | 7 +- .../tools/nsc/backend/jvm/CoreBTypes.scala | 13 ---- .../backend/jvm/analysis/BackendUtils.scala | 9 ++- .../scala/runtime/LambdaDeserialize.java | 6 +- .../scala/runtime/LambdaDeserializer.scala | 25 ++------ .../scala/runtime/LambdaDeserializerTest.java | 64 ++++++++++++++++--- 6 files changed, 68 insertions(+), 56 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index 0845e440d725..bff58b426e64 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -124,12 +124,13 @@ abstract class BTypes { */ val indyLambdaImplMethods: mutable.AnyRefMap[InternalName, mutable.LinkedHashSet[asm.Handle]] = recordPerRunCache(mutable.AnyRefMap()) def addIndyLambdaImplMethod(hostClass: InternalName, handle: Seq[asm.Handle]): Unit = { - indyLambdaImplMethods.getOrElseUpdate(hostClass, mutable.LinkedHashSet()) ++= handle + if (handle.nonEmpty) + indyLambdaImplMethods.getOrElseUpdate(hostClass, mutable.LinkedHashSet()) ++= handle } - def getIndyLambdaImplMethods(hostClass: InternalName): List[asm.Handle] = { + def getIndyLambdaImplMethods(hostClass: InternalName): Iterable[asm.Handle] = { indyLambdaImplMethods.getOrNull(hostClass) match { case null => Nil - case xs => xs.toList.distinct + case xs => xs } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala index 1dbb18722ffe..acb950929f1c 100644 --- 
a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala @@ -289,19 +289,6 @@ class CoreBTypes[BTFS <: BTypesFromSymbols[_ <: Global]](val bTypes: BTFS) { coreBTypes.jliCallSiteRef ).descriptor, /* itf = */ coreBTypes.srLambdaDeserialize.isInterface.get) - lazy val lambdaDeserializeAddTargets = - new scala.tools.asm.Handle(scala.tools.asm.Opcodes.H_INVOKESTATIC, - coreBTypes.srLambdaDeserialize.internalName, "bootstrapAddTargets", - MethodBType( - List( - coreBTypes.jliMethodHandlesLookupRef, - coreBTypes.StringRef, - coreBTypes.jliMethodTypeRef, - ArrayBType(coreBTypes.jliMethodHandleRef) - ), - coreBTypes.jliCallSiteRef - ).descriptor, - /* itf = */ coreBTypes.srLambdaDeserialize.isInterface.get) } /** diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index d85d85003df0..e25b55e7ab36 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -76,7 +76,7 @@ class BackendUtils[BT <: BTypes](val btypes: BT) { * host a static field in the enclosing class. This allows us to add this method to interfaces * that define lambdas in default methods. */ - def addLambdaDeserialize(classNode: ClassNode, implMethods: List[Handle]): Unit = { + def addLambdaDeserialize(classNode: ClassNode, implMethods: Iterable[Handle]): Unit = { val cw = classNode // Make sure to reference the ClassBTypes of all types that are used in the code generated @@ -87,13 +87,12 @@ class BackendUtils[BT <: BTypes](val btypes: BT) { val nilLookupDesc = MethodBType(Nil, jliMethodHandlesLookupRef).descriptor val serlamObjDesc = MethodBType(jliSerializedLambdaRef :: Nil, ObjectRef).descriptor - val addTargetMethodsObjDesc = MethodBType(ObjectRef :: Nil, UNIT).descriptor { val mv = cw.visitMethod(ACC_PRIVATE + ACC_STATIC + ACC_SYNTHETIC, "$deserializeLambda$", serlamObjDesc, null, null) mv.visitCode() mv.visitVarInsn(ALOAD, 0) - mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, lambdaDeserializeBootstrapHandle, implMethods: _*) + mv.visitInvokeDynamicInsn("lambdaDeserialize", serlamObjDesc, lambdaDeserializeBootstrapHandle, implMethods.toArray: _*) mv.visitInsn(ARETURN) mv.visitEnd() } @@ -102,8 +101,8 @@ class BackendUtils[BT <: BTypes](val btypes: BT) { /** * Clone the instructions in `methodNode` into a new [[InsnList]], mapping labels according to * the `labelMap`. Returns the new instruction list and a map from old to new instructions, and - * a boolean indicating if the instruction list contains an instantiation of a serializable SAM - * type. + * a list of lambda implementation methods references by invokedynamic[LambdaMetafactory] for a + * serializable SAM types. 
*/ def cloneInstructions(methodNode: MethodNode, labelMap: Map[LabelNode, LabelNode], keepLineNumbers: Boolean): (InsnList, Map[AbstractInsnNode, AbstractInsnNode], List[Handle]) = { val javaLabelMap = labelMap.asJava diff --git a/src/library/scala/runtime/LambdaDeserialize.java b/src/library/scala/runtime/LambdaDeserialize.java index a3df868517f2..4c5198cc483c 100644 --- a/src/library/scala/runtime/LambdaDeserialize.java +++ b/src/library/scala/runtime/LambdaDeserialize.java @@ -2,14 +2,10 @@ import java.lang.invoke.*; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; public final class LambdaDeserialize { public static final MethodType DESERIALIZE_LAMBDA_MT = MethodType.fromMethodDescriptorString("(Ljava/lang/invoke/SerializedLambda;)Ljava/lang/Object;", LambdaDeserialize.class.getClassLoader()); - public static final MethodType ADD_TARGET_METHODS_MT = MethodType.fromMethodDescriptorString("([Ljava/lang/invoke/MethodHandle;)V", LambdaDeserialize.class.getClassLoader()); private MethodHandles.Lookup lookup; private final HashMap cache = new HashMap<>(); @@ -37,6 +33,6 @@ public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName return new ConstantCallSite(exact); } public static String nameAndDescriptorKey(String name, String descriptor) { - return name + " " + descriptor; + return name + descriptor; } } diff --git a/src/library/scala/runtime/LambdaDeserializer.scala b/src/library/scala/runtime/LambdaDeserializer.scala index eb168fe445e9..e120f0e3085d 100644 --- a/src/library/scala/runtime/LambdaDeserializer.scala +++ b/src/library/scala/runtime/LambdaDeserializer.scala @@ -33,6 +33,7 @@ object LambdaDeserializer { */ def deserializeLambda(lookup: MethodHandles.Lookup, cache: java.util.Map[String, MethodHandle], targetMethodMap: java.util.Map[String, MethodHandle], serialized: SerializedLambda): AnyRef = { + assert(targetMethodMap != null) def slashDot(name: String) = name.replaceAll("/", ".") val loader = lookup.lookupClass().getClassLoader val implClass = loader.loadClass(slashDot(serialized.getImplClass)) @@ -71,14 +72,10 @@ object LambdaDeserializer { // Lookup the implementation method val implMethod: MethodHandle = try { - if (targetMethodMap != null) { - if (targetMethodMap.containsKey(key)) { - targetMethodMap.get(key) - } else { - throw new IllegalArgumentException("Illegal lambda deserialization") - } + if (targetMethodMap.containsKey(key)) { + targetMethodMap.get(key) } else { - findMember(lookup, getImplMethodKind, implClass, getImplMethodName, implMethodSig) + throw new IllegalArgumentException("Illegal lambda deserialization") } } catch { case e: ReflectiveOperationException => throw new IllegalArgumentException("Illegal lambda deserialization", e) @@ -124,18 +121,4 @@ object LambdaDeserializer { // is cleaner if we uniformly add a single marker, so I'm leaving it in place. 
"java.io.Serializable" } - - private def findMember(lookup: MethodHandles.Lookup, kind: Int, owner: Class[_], - name: String, signature: MethodType): MethodHandle = { - kind match { - case MethodHandleInfo.REF_invokeStatic => - lookup.findStatic(owner, name, signature) - case MethodHandleInfo.REF_newInvokeSpecial => - lookup.findConstructor(owner, signature) - case MethodHandleInfo.REF_invokeVirtual | MethodHandleInfo.REF_invokeInterface => - lookup.findVirtual(owner, name, signature) - case MethodHandleInfo.REF_invokeSpecial => - lookup.findSpecial(owner, name, signature, owner) - } - } } diff --git a/test/junit/scala/runtime/LambdaDeserializerTest.java b/test/junit/scala/runtime/LambdaDeserializerTest.java index ba52e979ccf9..3ed1ae1365ae 100644 --- a/test/junit/scala/runtime/LambdaDeserializerTest.java +++ b/test/junit/scala/runtime/LambdaDeserializerTest.java @@ -4,9 +4,7 @@ import org.junit.Test; import java.io.Serializable; -import java.lang.invoke.MethodHandle; -import java.lang.invoke.MethodHandles; -import java.lang.invoke.SerializedLambda; +import java.lang.invoke.*; import java.lang.reflect.Method; import java.util.Arrays; import java.util.HashMap; @@ -85,19 +83,20 @@ public void cachedStatic() { public void implMethodNameChanged() { F1 f1 = lambdaHost.lambdaBackedByStaticImplMethod(); SerializedLambda sl = writeReplace(f1); - checkIllegalAccess(copySerializedLambda(sl, sl.getImplMethodName() + "___", sl.getImplMethodSignature())); + checkIllegalAccess(sl, copySerializedLambda(sl, sl.getImplMethodName() + "___", sl.getImplMethodSignature())); } @Test public void implMethodSignatureChanged() { F1 f1 = lambdaHost.lambdaBackedByStaticImplMethod(); SerializedLambda sl = writeReplace(f1); - checkIllegalAccess(copySerializedLambda(sl, sl.getImplMethodName(), sl.getImplMethodSignature().replace("Boolean", "Integer"))); + checkIllegalAccess(sl, copySerializedLambda(sl, sl.getImplMethodName(), sl.getImplMethodSignature().replace("Boolean", "Integer"))); } - private void checkIllegalAccess(SerializedLambda serialized) { + private void checkIllegalAccess(SerializedLambda allowed, SerializedLambda requested) { try { - LambdaDeserializer.deserializeLambda(MethodHandles.lookup(), null, null, serialized); + HashMap allowedMap = createAllowedMap(LambdaHost.lookup(), allowed); + LambdaDeserializer.deserializeLambda(MethodHandles.lookup(), null, allowedMap, requested); throw new AssertionError(); } catch (IllegalArgumentException iae) { if (!iae.getMessage().contains("Illegal lambda deserialization")) { @@ -123,6 +122,7 @@ private Class loadClass(String className) { throw new RuntimeException(e); } } + private A reconstitute(A f1) { return reconstitute(f1, null); } @@ -130,12 +130,56 @@ private A reconstitute(A f1) { @SuppressWarnings("unchecked") private A reconstitute(A f1, java.util.HashMap cache) { try { - return (A) LambdaDeserializer.deserializeLambda(LambdaHost.lookup(), cache, null, writeReplace(f1)); + return deserizalizeLambdaCreatingAllowedMap(f1, cache, LambdaHost.lookup()); } catch (Exception e) { throw new RuntimeException(e); } } + private A deserizalizeLambdaCreatingAllowedMap(A f1, HashMap cache, MethodHandles.Lookup lookup) { + SerializedLambda serialized = writeReplace(f1); + HashMap allowed = createAllowedMap(lookup, serialized); + return (A) LambdaDeserializer.deserializeLambda(lookup, cache, allowed, serialized); + } + + private HashMap createAllowedMap(MethodHandles.Lookup lookup, SerializedLambda serialized) { + Class implClass = 
classForName(serialized.getImplClass().replace("/", "."), lookup.lookupClass().getClassLoader()); + MethodHandle implMethod = findMember(lookup, serialized.getImplMethodKind(), implClass, serialized.getImplMethodName(), MethodType.fromMethodDescriptorString(serialized.getImplMethodSignature(), lookup.lookupClass().getClassLoader())); + HashMap allowed = new HashMap<>(); + allowed.put(LambdaDeserialize.nameAndDescriptorKey(serialized.getImplMethodName(), serialized.getImplMethodSignature()), implMethod); + return allowed; + } + + private Class classForName(String className, ClassLoader classLoader) { + try { + return Class.forName(className, true, classLoader); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + + private MethodHandle findMember(MethodHandles.Lookup lookup, int kind, Class owner, + String name, MethodType signature) { + try { + switch (kind) { + case MethodHandleInfo.REF_invokeStatic: + return lookup.findStatic(owner, name, signature); + case MethodHandleInfo.REF_newInvokeSpecial: + return lookup.findConstructor(owner, signature); + case MethodHandleInfo.REF_invokeVirtual: + case MethodHandleInfo.REF_invokeInterface: + return lookup.findVirtual(owner, name, signature); + case MethodHandleInfo.REF_invokeSpecial: + return lookup.findSpecial(owner, name, signature, owner); + default: + throw new IllegalArgumentException(); + } + } catch (NoSuchMethodException | IllegalAccessException e) { + throw new RuntimeException(e); + } + } + + private SerializedLambda writeReplace(A f1) { try { Method writeReplace = f1.getClass().getDeclaredMethod("writeReplace"); @@ -189,5 +233,7 @@ public static MethodHandles.Lookup lookup() { } interface I { - default String i() { return "i"; }; + default String i() { + return "i"; + } } From 43ba65fa11456899b3c45be14bd3895d8d6b6b5a Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 28 Jul 2016 15:08:43 -0700 Subject: [PATCH 0204/2793] SI-7187 deprecate eta-expansion of zero-arg method values For backwards compatiblity with 2.11, we already don't adapt a zero-arg method value to a SAM. In 2.13, we won't do any eta-expansion for zero-arg method values, but we should deprecate first. --- .../scala/tools/nsc/typechecker/Typers.scala | 47 ++++++++++++------- test/files/neg/t7187.check | 6 +++ test/files/neg/t7187.flags | 1 + test/files/neg/t7187.scala | 6 +++ test/files/pos/t9178.flags | 1 - test/files/pos/t9178.scala | 13 ----- test/files/run/Course-2002-08.scala | 20 ++++---- test/files/run/runtime.scala | 2 +- 8 files changed, 54 insertions(+), 42 deletions(-) create mode 100644 test/files/neg/t7187.check create mode 100644 test/files/neg/t7187.flags create mode 100644 test/files/neg/t7187.scala delete mode 100644 test/files/pos/t9178.flags delete mode 100644 test/files/pos/t9178.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index ba104fb7a6b1..91cff54bc7bc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -872,16 +872,32 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case Block(_, tree1) => tree1.symbol case _ => tree.symbol } - def shouldEtaExpandToSam: Boolean = { - // SI-9536 don't adapt parameterless method types to a to SAM's, fall through to empty application - // instead for backwards compatiblity with 2.11. See comments of that ticket and SI-7187 - // for analogous trouble with non-SAM eta expansion. 
Suggestions there are: a) deprecate eta expansion to Function0, - // or b) switch the order of eta-expansion and empty application in this adaptation. - !mt.params.isEmpty && samOf(pt).exists - } - if (!meth.isConstructor && (isFunctionType(pt) || shouldEtaExpandToSam)) { // (4.2) + + def cantAdapt = + if (context.implicitsEnabled) MissingArgsForMethodTpeError(tree, meth) + else setError(tree) + + // constructors do not eta-expand + if (meth.isConstructor) cantAdapt + // (4.2) eta-expand method value when function or sam type is expected + else if (isFunctionType(pt) || (!mt.params.isEmpty && samOf(pt).exists)) { + // SI-9536 `!mt.params.isEmpty &&`: for backwards compatiblity with 2.11, + // we don't adapt a zero-arg method value to a SAM + // In 2.13, we won't do any eta-expansion for zero-arg method values, but we should deprecate first + debuglog(s"eta-expanding $tree: ${tree.tpe} to $pt") checkParamsConvertible(tree, tree.tpe) + + // SI-7187 eta-expansion of zero-arg method value is deprecated, switch order of (4.3) and (4.2) in 2.13 + def isExplicitEtaExpansion = original match { + case Typed(_, Function(Nil, EmptyTree)) => true // tree shape for `f _` + case _ => false + } + if (mt.params.isEmpty && !isExplicitEtaExpansion) { + currentRun.reporting.deprecationWarning(tree.pos, NoSymbol, + s"Eta-expansion of zero-argument method values is deprecated. Did you intend to write ${Apply(tree, Nil)}?", "2.12.0") + } + val tree0 = etaExpand(context.unit, tree, this) // #2624: need to infer type arguments for eta expansion of a polymorphic method @@ -895,12 +911,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else typed(tree0, mode, pt) } - else if (!meth.isConstructor && mt.params.isEmpty) // (4.3) - adapt(typed(Apply(tree, Nil) setPos tree.pos), mode, pt, original) - else if (context.implicitsEnabled) - MissingArgsForMethodTpeError(tree, meth) - else - setError(tree) + // (4.3) apply to empty argument list -- TODO 2.13: move this one case up to avoid eta-expanding at arity 0 + else if (mt.params.isEmpty) adapt(typed(Apply(tree, Nil) setPos tree.pos), mode, pt, original) + else cantAdapt } def adaptType(): Tree = { @@ -4398,11 +4411,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * (2) If $e$ is a parameterless method or call-by-name parameter of type `=>$T$`, `$e$ _` represents * the function of type `() => $T$`, which evaluates $e$ when it is applied to the empty parameterlist `()`. 
*/ - def typedEta(methodValue: Tree): Tree = methodValue.tpe match { + def typedEta(methodValue: Tree, original: Tree): Tree = methodValue.tpe match { case tp@(MethodType(_, _) | PolyType(_, MethodType(_, _))) => // (1) val formals = tp.params if (isFunctionType(pt) || samMatchesFunctionBasedOnArity(samOf(pt), formals)) methodValue - else adapt(methodValue, mode, checkArity(methodValue)(functionTypeWildcard(formals.length))) + else adapt(methodValue, mode, checkArity(methodValue)(functionTypeWildcard(formals.length)), original) case TypeRef(_, ByNameParamClass, _) | NullaryMethodType(_) => // (2) val pos = methodValue.pos @@ -5106,7 +5119,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case Typed(expr, Function(Nil, EmptyTree)) => typed1(suppressMacroExpansion(expr), mode, pt) match { case macroDef if treeInfo.isMacroApplication(macroDef) => MacroEtaError(macroDef) - case methodValue => typedEta(checkDead(methodValue)) + case methodValue => typedEta(checkDead(methodValue), tree) } case Typed(expr, tpt) => val tpt1 = typedType(tpt, mode) // type the ascribed type first diff --git a/test/files/neg/t7187.check b/test/files/neg/t7187.check new file mode 100644 index 000000000000..a30803c74689 --- /dev/null +++ b/test/files/neg/t7187.check @@ -0,0 +1,6 @@ +t7187.scala:3: warning: Eta-expansion of zero-argument method values is deprecated. Did you intend to write EtaExpandZeroArg.this.foo()? + val f: () => Any = foo + ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found +one error found diff --git a/test/files/neg/t7187.flags b/test/files/neg/t7187.flags new file mode 100644 index 000000000000..c6bfaf1f64a4 --- /dev/null +++ b/test/files/neg/t7187.flags @@ -0,0 +1 @@ +-deprecation -Xfatal-warnings diff --git a/test/files/neg/t7187.scala b/test/files/neg/t7187.scala new file mode 100644 index 000000000000..45d33f06af31 --- /dev/null +++ b/test/files/neg/t7187.scala @@ -0,0 +1,6 @@ +class EtaExpandZeroArg { + def foo(): () => String = () => "" + val f: () => Any = foo + + // f() would evaluate to instead of "" +} diff --git a/test/files/pos/t9178.flags b/test/files/pos/t9178.flags deleted file mode 100644 index 7de3c0f3eea0..000000000000 --- a/test/files/pos/t9178.flags +++ /dev/null @@ -1 +0,0 @@ --Xfatal-warnings -deprecation diff --git a/test/files/pos/t9178.scala b/test/files/pos/t9178.scala deleted file mode 100644 index f2cf20a7781f..000000000000 --- a/test/files/pos/t9178.scala +++ /dev/null @@ -1,13 +0,0 @@ -// eta expansion to Function0 is problematic (as shown here). -// Perhaps we should we deprecate it? See discussion in the comments of -// on SI-9178. -// -// This test encodes the status quo: no deprecation. 
-object Test { - def foo(): () => String = () => "" - val f: () => Any = foo - - def main(args: Array[String]): Unit = { - println(f()) // - } -} diff --git a/test/files/run/Course-2002-08.scala b/test/files/run/Course-2002-08.scala index 5e21edaba353..1d0e02262df7 100644 --- a/test/files/run/Course-2002-08.scala +++ b/test/files/run/Course-2002-08.scala @@ -205,7 +205,7 @@ object M5 { val inputSig = input.getSignal; afterDelay(InverterDelay) {() => output.setSignal(!inputSig) }; } - input addAction invertAction + input addAction invertAction _ } def andGate(a1: Wire, a2: Wire, output: Wire): Unit = { @@ -214,8 +214,8 @@ object M5 { val a2Sig = a2.getSignal; afterDelay(AndGateDelay) {() => output.setSignal(a1Sig & a2Sig) }; } - a1 addAction andAction; - a2 addAction andAction; + a1 addAction andAction _ + a2 addAction andAction _ } def orGate(o1: Wire, o2: Wire, output: Wire): Unit = { @@ -224,8 +224,8 @@ object M5 { val o2Sig = o2.getSignal; afterDelay(OrGateDelay) {() => output.setSignal(o1Sig | o2Sig) }; } - o1 addAction orAction; - o2 addAction orAction; + o1 addAction orAction _ + o2 addAction orAction _ } def probe(name: String, wire: Wire): Unit = { @@ -479,7 +479,7 @@ abstract class BasicCircuitSimulator() extends Simulator() { val inputSig = input.getSignal; afterDelay(InverterDelay) {() => output.setSignal(!inputSig) }; } - input addAction invertAction + input addAction invertAction _ } def andGate(a1: Wire, a2: Wire, output: Wire) = { @@ -488,8 +488,8 @@ abstract class BasicCircuitSimulator() extends Simulator() { val a2Sig = a2.getSignal; afterDelay(AndGateDelay) {() => output.setSignal(a1Sig & a2Sig) }; } - a1 addAction andAction; - a2 addAction andAction + a1 addAction andAction _ + a2 addAction andAction _ } def orGate(a1: Wire, a2: Wire, output: Wire) = { @@ -498,8 +498,8 @@ abstract class BasicCircuitSimulator() extends Simulator() { val a2Sig = a2.getSignal; afterDelay(OrGateDelay) {() => output.setSignal(a1Sig | a2Sig) }; } - a1 addAction orAction; - a2 addAction orAction + a1 addAction orAction _ + a2 addAction orAction _ } def orGate2(a1: Wire, a2: Wire, output: Wire) = { diff --git a/test/files/run/runtime.scala b/test/files/run/runtime.scala index 89348b294db8..468a80fc0c87 100644 --- a/test/files/run/runtime.scala +++ b/test/files/run/runtime.scala @@ -73,7 +73,7 @@ object Test1Test { // {System.out.print(22); test1.bar}.System.out.println(); {Console.print(23); test1.bar.System}.out.println(); {Console.print(24); test1.bar.System.out}.println(); - {Console.print(25); test1.bar.System.out.println:(() => Unit)} apply (); + {Console.print(25); test1.bar.System.out.println _ : (() => Unit)} apply (); {Console.print(26); test1.bar.System.out.println()}; } From 0d2760dce189cdcb363e54868381175af4b2646f Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 5 Jul 2016 14:22:00 +0200 Subject: [PATCH 0205/2793] SI-8786 fix generic signature for @varargs forwarder methods When generating a varargs forwarder for def foo[T](a: T*) the parameter type of the forwarder needs to be Array[Object]. If we gnerate Array[T] in UnCurry, that would be erased to plain Object, and the method would not be a valid varargs. Unfortunately, setting the parameter type to Array[Object] lead to an invalid generic signature - the generic signature should reflect the real signature. This change adds an attachment to the parameter symbol in the varargs forwarder method and special-cases signature generation. Also cleanes up the code to produce the varargs forwarder. 
For example, type parameter and parameter symbols in the forwarder's method type were not clones, but the same symbols from the original method were re-used. --- README.md | 2 +- .../scala/tools/nsc/transform/Erasure.scala | 14 ++- .../scala/tools/nsc/transform/UnCurry.scala | 118 ++++++++++-------- test/files/jvm/t8786-sig.scala | 116 +++++++++++++++++ test/files/jvm/t8786/A_1.scala | 3 + test/files/jvm/t8786/B_2.java | 22 ++++ test/files/jvm/t8786/Test_2.scala | 3 + test/files/jvm/varargs/JavaClass.java | 26 ++-- test/files/jvm/varargs/VaClass.scala | 9 +- test/files/jvm/varargs/varargs.scala | 16 --- 10 files changed, 237 insertions(+), 92 deletions(-) create mode 100644 test/files/jvm/t8786-sig.scala create mode 100644 test/files/jvm/t8786/A_1.scala create mode 100644 test/files/jvm/t8786/B_2.java create mode 100644 test/files/jvm/t8786/Test_2.scala diff --git a/README.md b/README.md index 6ebb4531765c..ed42eadaaa79 100644 --- a/README.md +++ b/README.md @@ -134,7 +134,7 @@ codebase and re-compiles too many files, resulting in long build times (check [sbt#1104](https://github.com/sbt/sbt/issues/1104) for progress on that front). In the meantime you can: - Enable "ant mode" in which sbt only re-compiles source files that were modified. - Create a file `local.sbt` containing the line `(incOptions in ThisBuild) := (incOptions in ThisBuild).value.withNameHashing(false).withAntStyle(true)`. + Create a file `local.sbt` containing the line `antStyle := true`. Add an entry `local.sbt` to your `~/.gitignore`. - Use IntelliJ IDEA for incremental compiles (see [IDE Setup](#ide-setup) below) - its incremental compiler is a bit less conservative, but usually correct. diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index db8e203c1cdf..6678b565d59a 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -343,7 +343,18 @@ abstract class Erasure extends AddInterfaces case MethodType(params, restpe) => val buf = new StringBuffer("(") - params foreach (p => buf append jsig(p.tpe)) + params foreach (p => { + val tp = p.attachments.get[TypeParamVarargsAttachment] match { + case Some(att) => + // For @varargs forwarders, a T* parameter has type Array[Object] in the forwarder + // instead of Array[T], as the latter would erase to Object (instead of Array[Object]). + // To make the generic signature correct ("[T", not "[Object"), an attachment on the + // parameter symbol stores the type T that was replaced by Object. 
+ buf.append("["); att.typeParamRef + case _ => p.tpe + } + buf append jsig(tp) + }) buf append ")" buf append (if (restpe.typeSymbol == UnitClass || sym0.isConstructor) VOID_TAG.toString else jsig(restpe)) buf.toString @@ -1227,4 +1238,5 @@ abstract class Erasure extends AddInterfaces } private class TypeRefAttachment(val tpe: TypeRef) + class TypeParamVarargsAttachment(val typeParamRef: Type) } diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 374e8430d819..3047b8f89a06 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -744,72 +744,88 @@ abstract class UnCurry extends InfoTransform if (!dd.symbol.hasAnnotation(VarargsClass) || !enteringUncurry(mexists(dd.symbol.paramss)(sym => definitions.isRepeatedParamType(sym.tpe)))) return flatdd - def toArrayType(tp: Type): Type = { - val arg = elementType(SeqClass, tp) - // to prevent generation of an `Object` parameter from `Array[T]` parameter later - // as this would crash the Java compiler which expects an `Object[]` array for varargs - // e.g. def foo[T](a: Int, b: T*) - // becomes def foo[T](a: Int, b: Array[Object]) - // instead of def foo[T](a: Int, b: Array[T]) ===> def foo[T](a: Int, b: Object) - arrayType( - if (arg.typeSymbol.isTypeParameterOrSkolem) ObjectTpe - else arg - ) - } + val forwSym = currentClass.newMethod(dd.name.toTermName, dd.pos, VARARGS | SYNTHETIC | flatdd.symbol.flags) - val theTyper = typer.atOwner(dd, currentClass) - val flatparams = flatdd.symbol.paramss.head val isRepeated = enteringUncurry(dd.symbol.info.paramss.flatten.map(sym => definitions.isRepeatedParamType(sym.tpe))) - // create the type - val forwformals = map2(flatparams, isRepeated) { - case (p, true) => toArrayType(p.tpe) - case (p, false)=> p.tpe - } - val forwresult = dd.symbol.tpe_*.finalResultType - val forwformsyms = map2(forwformals, flatparams)((tp, oldparam) => - currentClass.newValueParameter(oldparam.name.toTermName, oldparam.pos).setInfo(tp) - ) - def mono = MethodType(forwformsyms, forwresult) - val forwtype = dd.symbol.tpe match { - case MethodType(_, _) => mono - case PolyType(tps, _) => PolyType(tps, mono) - } + val oldPs = flatdd.symbol.paramss.head + + // see comment in method toArrayType below + val arrayTypesMappedToObject = mutable.Map.empty[Symbol, Type] - // create the symbol - val forwsym = currentClass.newMethod(dd.name.toTermName, dd.pos, VARARGS | SYNTHETIC | flatdd.symbol.flags) setInfo forwtype - def forwParams = forwsym.info.paramss.flatten - - // create the tree - val forwtree = theTyper.typedPos(dd.pos) { - val locals = map3(forwParams, flatparams, isRepeated) { - case (_, fp, false) => null - case (argsym, fp, true) => - Block(Nil, - gen.mkCast( - gen.mkWrapArray(Ident(argsym), elementType(ArrayClass, argsym.tpe)), - seqType(elementType(SeqClass, fp.tpe)) - ) - ) + val forwTpe = { + val (oldTps, tps) = dd.symbol.tpe match { + case PolyType(oldTps, _) => + val newTps = oldTps.map(_.cloneSymbol(forwSym)) + (oldTps, newTps) + + case _ => (Nil, Nil) } - val seqargs = map2(locals, forwParams) { - case (null, argsym) => Ident(argsym) - case (l, _) => l + + def toArrayType(tp: Type, newParam: Symbol): Type = { + val arg = elementType(SeqClass, tp) + val elem = if (arg.typeSymbol.isTypeParameterOrSkolem && !(arg <:< AnyRefTpe)) { + // To prevent generation of an `Object` parameter from `Array[T]` parameter later + // as this would crash the Java compiler which expects an 
`Object[]` array for varargs + // e.g. def foo[T](a: Int, b: T*) + // becomes def foo[T](a: Int, b: Array[Object]) + // instead of def foo[T](a: Int, b: Array[T]) ===> def foo[T](a: Int, b: Object) + // + // In order for the forwarder method to type check we need to insert a cast: + // def foo'[T'](a: Int, b: Array[Object]) = foo[T'](a, wrapRefArray(b).asInstanceOf[Seq[T']]) + // The target element type for that cast (T') is stored in the `arrayTypesMappedToObject` map. + val originalArg = arg.substSym(oldTps, tps) + arrayTypesMappedToObject(newParam) = originalArg + // Store the type parameter that was replaced by Object to emit the correct generic signature + newParam.updateAttachment(new erasure.TypeParamVarargsAttachment(originalArg)) + ObjectTpe + } else + arg + arrayType(elem) } - val end = if (forwsym.isConstructor) List(UNIT) else Nil - DefDef(forwsym, BLOCK(Apply(gen.mkAttributedRef(flatdd.symbol), seqargs) :: end : _*)) + val ps = map2(oldPs, isRepeated)((oldParam, isRep) => { + val newParam = oldParam.cloneSymbol(forwSym) + val tp = if (isRep) toArrayType(oldParam.tpe, newParam) else oldParam.tpe + newParam.setInfo(tp) + }) + + val resTp = dd.symbol.tpe_*.finalResultType.substSym(oldPs, ps) + val mt = MethodType(ps, resTp) + val r = if (tps.isEmpty) mt else PolyType(tps, mt) + r.substSym(oldTps, tps) + } + + forwSym.setInfo(forwTpe) + val newPs = forwTpe.params + + val theTyper = typer.atOwner(dd, currentClass) + val forwTree = theTyper.typedPos(dd.pos) { + val seqArgs = map3(newPs, oldPs, isRepeated)((param, oldParam, isRep) => { + if (!isRep) Ident(param) + else { + val parTp = elementType(ArrayClass, param.tpe) + val wrap = gen.mkWrapArray(Ident(param), parTp) + arrayTypesMappedToObject.get(param) match { + case Some(tp) => gen.mkCast(wrap, seqType(tp)) + case _ => wrap + } + } + }) + + val forwCall = Apply(gen.mkAttributedRef(flatdd.symbol), seqArgs) + DefDef(forwSym, if (forwSym.isConstructor) Block(List(forwCall), UNIT) else forwCall) } // check if the method with that name and those arguments already exists in the template - currentClass.info.member(forwsym.name).alternatives.find(s => s != forwsym && s.tpe.matches(forwsym.tpe)) match { + currentClass.info.member(forwSym.name).alternatives.find(s => s != forwSym && s.tpe.matches(forwSym.tpe)) match { case Some(s) => reporter.error(dd.symbol.pos, "A method with a varargs annotation produces a forwarder method with the same signature " + s.tpe + " as an existing method.") case None => // enter symbol into scope - currentClass.info.decls enter forwsym - addNewMember(forwtree) + currentClass.info.decls enter forwSym + addNewMember(forwTree) } flatdd diff --git a/test/files/jvm/t8786-sig.scala b/test/files/jvm/t8786-sig.scala new file mode 100644 index 000000000000..0745b650e607 --- /dev/null +++ b/test/files/jvm/t8786-sig.scala @@ -0,0 +1,116 @@ +class A[U] { + @annotation.varargs def m1[T] (a: T*): T = a.head + @annotation.varargs def m2[T <: AnyRef](a: T*): T = a.head + @annotation.varargs def m3[T <: AnyVal](a: T*): T = a.head + @annotation.varargs def m4[T <: Int] (a: T*): T = a.head + @annotation.varargs def m5[T <: String](a: T*): T = a.head + @annotation.varargs def m6 (a: String*): String = a.head + @annotation.varargs def m7 (a: Int*): Int = a.head + @annotation.varargs def m8 (a: U*): U = a.head + + def n1[T] (a: Array[T]): T = a(0) + def n2[T <: AnyRef](a: Array[T]): T = a(0) + def n3[T <: AnyVal](a: Array[T]): T = a(0) + def n4[T <: Int] (a: Array[T]): T = a(0) + def n5[T <: String](a: Array[T]): T = a(0) + def n6 
(a: Array[String]): String = a(0) + def n7 (a: Array[Int]): Int = a(0) + def n8 (a: Array[U]): U = a(0) +} + +object Test extends App { + val a = classOf[A[_]] + + def sig (method: String, tp: Class[_]) = a.getDeclaredMethod(method, tp).toString + def genSig(method: String, tp: Class[_]) = a.getDeclaredMethod(method, tp).toGenericString + def bound (method: String, tp: Class[_]) = { + val m = a.getDeclaredMethod(method, tp) + m.getGenericParameterTypes.apply(0) match { + case _: Class[_] => "" + case gat: java.lang.reflect.GenericArrayType => + val compTp = gat.getGenericComponentType.asInstanceOf[java.lang.reflect.TypeVariable[_]] + compTp.getBounds.apply(0).toString + } + } + + def check(a: String, b: String) = { + assert(a == b, s"found: $a\nexpected: $b") + } + + val sq = classOf[Seq[_]] + val ob = classOf[Object] + val ao = classOf[Array[Object]] + val as = classOf[Array[String]] + val ai = classOf[Array[Int]] + + check(sig("m1", sq) , "public java.lang.Object A.m1(scala.collection.Seq)") + check(sig("m2", sq) , "public java.lang.Object A.m2(scala.collection.Seq)") + check(sig("m3", sq) , "public java.lang.Object A.m3(scala.collection.Seq)") + check(sig("m4", sq) , "public int A.m4(scala.collection.Seq)") + check(sig("m5", sq) , "public java.lang.String A.m5(scala.collection.Seq)") + check(sig("m6", sq) , "public java.lang.String A.m6(scala.collection.Seq)") + check(sig("m7", sq) , "public int A.m7(scala.collection.Seq)") + check(sig("m8", sq) , "public java.lang.Object A.m8(scala.collection.Seq)") + + check(genSig("m1", sq), "public T A.m1(scala.collection.Seq)") + check(genSig("m2", sq), "public T A.m2(scala.collection.Seq)") + check(genSig("m3", sq), "public T A.m3(scala.collection.Seq)") + // TODO: the signature for is wrong for T <: Int, SI-9846. The signature should be + // `public int A.m4(scala.collection.Seq)`. This is testing the status quo. 
+ check(genSig("m4", sq), "public T A.m4(scala.collection.Seq)") + check(genSig("m5", sq), "public T A.m5(scala.collection.Seq)") + check(genSig("m6", sq), "public java.lang.String A.m6(scala.collection.Seq)") + check(genSig("m7", sq), "public int A.m7(scala.collection.Seq)") + check(genSig("m8", sq), "public U A.m8(scala.collection.Seq)") + + + // varargs forwarder + + check(sig("m1", ao) , "public java.lang.Object A.m1(java.lang.Object[])") + check(sig("m2", ao) , "public java.lang.Object A.m2(java.lang.Object[])") + check(sig("m3", ao) , "public java.lang.Object A.m3(java.lang.Object[])") + check(sig("m4", ao) , "public int A.m4(java.lang.Object[])") + check(sig("m5", as) , "public java.lang.String A.m5(java.lang.String[])") + check(sig("m6", as) , "public java.lang.String A.m6(java.lang.String[])") + check(sig("m7", ai) , "public int A.m7(int[])") + check(sig("m8", ao) , "public java.lang.Object A.m8(java.lang.Object[])") + + check(genSig("m1", ao), "public T A.m1(T...)") + check(genSig("m2", ao), "public T A.m2(T...)") + check(genSig("m3", ao), "public T A.m3(T...)") + // testing status quo: signature is wrong for T <: Int, SI-9846 + check(genSig("m4", ao), "public T A.m4(T...)") + check(genSig("m5", as), "public T A.m5(T...)") + check(genSig("m6", as), "public java.lang.String A.m6(java.lang.String...)") + check(genSig("m7", ai), "public int A.m7(int...)") + check(genSig("m8", ao), "public U A.m8(U...)") + + check(bound("m1", ao) , "class java.lang.Object") + check(bound("m2", ao) , "class java.lang.Object") + check(bound("m3", ao) , "class java.lang.Object") + check(bound("m4", ao) , "class java.lang.Object") + check(bound("m5", as) , "class java.lang.String") + check(bound("m6", as) , "") + check(bound("m7", ai) , "") + check(bound("m8", ao) , "class java.lang.Object") + + + check(sig("n1", ob) , "public java.lang.Object A.n1(java.lang.Object)") + check(sig("n2", ao) , "public java.lang.Object A.n2(java.lang.Object[])") + check(sig("n3", ob) , "public java.lang.Object A.n3(java.lang.Object)") + check(sig("n4", ob) , "public int A.n4(java.lang.Object)") + check(sig("n5", as) , "public java.lang.String A.n5(java.lang.String[])") + check(sig("n6", as) , "public java.lang.String A.n6(java.lang.String[])") + check(sig("n7", ai) , "public int A.n7(int[])") + check(sig("n8", ob) , "public java.lang.Object A.n8(java.lang.Object)") + + check(genSig("n1", ob), "public T A.n1(java.lang.Object)") + check(genSig("n2", ao), "public T A.n2(T[])") + check(genSig("n3", ob), "public T A.n3(java.lang.Object)") + // testing status quo: signature is wrong for T <: Int, SI-9846 + check(genSig("n4", ob), "public T A.n4(java.lang.Object)") + check(genSig("n5", as), "public T A.n5(T[])") + check(genSig("n6", as), "public java.lang.String A.n6(java.lang.String[])") + check(genSig("n7", ai), "public int A.n7(int[])") + check(genSig("n8", ob), "public U A.n8(java.lang.Object)") +} diff --git a/test/files/jvm/t8786/A_1.scala b/test/files/jvm/t8786/A_1.scala new file mode 100644 index 000000000000..13c0ad191d29 --- /dev/null +++ b/test/files/jvm/t8786/A_1.scala @@ -0,0 +1,3 @@ +class A { + @annotation.varargs def foo[T](a: Int, b: T*): T = b.head +} diff --git a/test/files/jvm/t8786/B_2.java b/test/files/jvm/t8786/B_2.java new file mode 100644 index 000000000000..dc155a290f93 --- /dev/null +++ b/test/files/jvm/t8786/B_2.java @@ -0,0 +1,22 @@ +public class B_2 { + private static int res = 0; + + public static void m(char a[]) { res += 10; } + public static void m(String a) { res += 100; } + public static void 
m(Object a) { res += 1000; } + + public static T foo(int a, T... b) { return b[0]; } + + public static T bar(T b[]) { return b[0]; } + + public static void main(String[] args) { + m(foo(15, "a", "b", "c")); + if (res != 100) + throw new Error("bad: "+ res); + + A a = new A(); + m(a.foo(16, "a", "b", "c")); + if (res != 200) + throw new Error("bad: " + res); + } +} diff --git a/test/files/jvm/t8786/Test_2.scala b/test/files/jvm/t8786/Test_2.scala new file mode 100644 index 000000000000..76ccb4c3ed84 --- /dev/null +++ b/test/files/jvm/t8786/Test_2.scala @@ -0,0 +1,3 @@ +object Test extends App { + B_2.main(null) +} diff --git a/test/files/jvm/varargs/JavaClass.java b/test/files/jvm/varargs/JavaClass.java index 6928ee5adc4b..0cc3587c5e88 100644 --- a/test/files/jvm/varargs/JavaClass.java +++ b/test/files/jvm/varargs/JavaClass.java @@ -1,16 +1,12 @@ - - - public class JavaClass { - public static void varargz(int i, T... v) { - } - - public static void callSomeAnnotations() { - VaClass va = new VaClass(); - va.vs(4, "", "", ""); - va.vi(1, 2, 3, 4); - varargz(5, 1.0, 2.0, 3.0); - va.vt(16, "", "", ""); - System.out.println(va.vt1(16, "a", "b", "c")); - } -} \ No newline at end of file + public static void varargz(int i, T... v) { } + + public static void callSomeAnnotations() { + VaClass va = new VaClass(); + va.vs(4, "", "", ""); + va.vi(1, 2, 3, 4); + varargz(5, 1.0, 2.0, 3.0); + va.vt(16, "", "", ""); + System.out.println(va.vt1(16, "a", "b", "c")); + } +} diff --git a/test/files/jvm/varargs/VaClass.scala b/test/files/jvm/varargs/VaClass.scala index d83e63ace1e3..ee8c288a16ab 100644 --- a/test/files/jvm/varargs/VaClass.scala +++ b/test/files/jvm/varargs/VaClass.scala @@ -1,15 +1,8 @@ - - import annotation.varargs - - class VaClass { - @varargs def vs(a: Int, b: String*) = println(a + b.length) @varargs def vi(a: Int, b: Int*) = println(a + b.sum) @varargs def vt[T](a: Int, b: T*) = println(a + b.length) - - // TODO remove type bound after fixing SI-8786, see also https://github.com/scala/scala/pull/3961 - @varargs def vt1[T <: String](a: Int, b: T*): T = b.head + @varargs def vt1[T](a: Int, b: T*): T = b.head } diff --git a/test/files/jvm/varargs/varargs.scala b/test/files/jvm/varargs/varargs.scala index 6d2e707bdf3f..b09818f46f27 100644 --- a/test/files/jvm/varargs/varargs.scala +++ b/test/files/jvm/varargs/varargs.scala @@ -1,21 +1,5 @@ - - - - - - object Test { def main(args: Array[String]) { JavaClass.callSomeAnnotations } } - - - - - - - - - - From 366f45b54938895be74b248af4ba69f5976d639a Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Thu, 11 Aug 2016 18:27:39 +0200 Subject: [PATCH 0206/2793] Improve log output of the `testAll` task MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit It’s a lot of code for little benefit but makes the output more useful when test tasks fail. Unfortunately there doesn’t seem to be any way to get the `summary` reported by a test framework at this point. The arguments of `toTask` for InputTasks with applied arguments have also been lost, so we keep track of the commands separately. 
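To illustrate the approach, here is a simplified, self-contained sketch (hypothetical code, not the actual sbt wiring in build.sbt): since the converted task results carry no memory of the command that produced them, a hand-maintained vector of descriptions is zipped with the results to name the tasks that failed.

```
// Hypothetical, simplified model of the reporting logic (plain Scala, no sbt APIs).
object TestAllReport {
  def main(args: Array[String]): Unit = {
    // One human-readable description per aggregated test task, maintained by hand
    // because the original input-task arguments are no longer recoverable.
    val descriptions = Vector("junit/test", "partest run pos neg jvm", "doc")
    // Stand-in for the task results converted to Either (Left = failure cause).
    val results: Vector[Either[String, Unit]] =
      Vector(Right(()), Left("compilation failed"), Right(()))
    val failed = results.zip(descriptions).collect { case (Left(cause), d) => (cause, d) }
    if (failed.nonEmpty) {
      println(s"${failed.size} of ${results.length} test tasks failed:")
      failed.foreach { case (cause, d) => println(s"- $d\n  - $cause") }
      throw new RuntimeException
    }
  }
}
```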
--- build.sbt | 44 ++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 42 insertions(+), 2 deletions(-) diff --git a/build.sbt b/build.sbt index 10cca3531a27..cff60c1fa37b 100644 --- a/build.sbt +++ b/build.sbt @@ -786,11 +786,51 @@ lazy val root: Project = (project in file(".")) doc in Compile in scalap ).result )).value - val failed = results.map(_.toEither).collect { case Left(i) => i } + // All attempts to define these together with the actual tasks due to the applicative rewriting of `.value` + val descriptions = Vector( + "junit/test", + "partest run pos neg jvm", + "partest res scalap specialized scalacheck", + "partest instrumented presentation", + "partest --srcpath scaladoc", + "osgiTestFelix/test", + "osgiTestEclipse/test", + "library/mima", + "reflect/mima", + "doc" + ) + val failed = results.map(_.toEither).zip(descriptions).collect { case (Left(i: Incomplete), d) => (i, d) } if(failed.nonEmpty) { val log = streams.value.log + def showScopedKey(k: Def.ScopedKey[_]): String = + Vector( + k.scope.project.toOption.map { + case p: ProjectRef => p.project + case p => p + }.map(_ + "/"), + k.scope.config.toOption.map(_.name + ":"), + k.scope.task.toOption.map(_.label + "::") + ).flatten.mkString + k.key + def logIncomplete(i: Incomplete, prefix: String): Unit = { + val sk = i.node match { + case Some(t: Task[_]) => + t.info.attributes.entries.collect { case e if e.key == Keys.taskDefinitionKey => e.value.asInstanceOf[Def.ScopedKey[_]] } + .headOption.map(showScopedKey) + case _ => None + } + val childCount = (if(i.directCause.isDefined) 1 else 0) + i.causes.length + val skip = childCount <= 1 && sk.isEmpty + if(!skip) log.error(s"$prefix- ${sk.getOrElse("?")}") + i.directCause match { + case Some(e) => log.error(s"$prefix - $e") + case None => i.causes.foreach(i => logIncomplete(i, prefix + (if(skip) "" else " "))) + } + } log.error(s"${failed.size} of ${results.length} test tasks failed:") - failed.foreach(i => log.error(s" - $i")) + failed.foreach { case (i, d) => + log.error(s"- $d") + logIncomplete(i, " ") + } throw new RuntimeException } }, From a97297d7d253eb7573c995ce936f364b56d9bfe9 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 28 Apr 2016 22:43:46 -0700 Subject: [PATCH 0207/2793] Fields phase One step towards teasing apart the mixin phase, making each phase that adds members to traits responsible for mixing in those members into subclasses of said traits. Another design tenet is to not emit symbols or trees only to later remove them. Therefore, we model a val in a trait as its accessor. The underlying field is an implementation detail. It must be mixed into subclasses, but has no business in a trait (an interface). Also trying to reduce tree creation by changing less in subtrees during tree transforms. A lot of nice fixes fall out from this rework: - Correct bridges and more precise generic signatures for mixed in accessors, since they are now created before erasure. - Correct enclosing method attribute for classes nested in trait fields. Trait fields are now created as MethodSymbol (no longer TermSymbol). This symbol shows up in the `originalOwner` chain of a class declared within the field initializer. This promoted the field getter to being the enclosing method of the nested class, which it is not (the EnclosingMethod attribute is a source-level property). - Signature inference is now more similar between vals and defs - No more field for constant-typed vals, or mixed in accessors for subclasses. A constant val can be fully implemented in a trait. 
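To make the encoding described above concrete, here is a minimal sketch (hypothetical example, not taken from this patch's test suite) of what happens to a strict val defined in a trait:

```
// Hypothetical illustration of the val-in-trait encoding described above.
trait T {
  val x: Int = 42        // in the trait itself: only a getter `def x: Int`, no underlying field
  final val c = "const"  // constant-typed val: fully implemented in the trait, no field anywhere
}

// The fields phase mixes the underlying field (and a getter reading it) into each
// non-trait subclass; the constructors phase then turns the rhs of `x` into an
// assignment executed in C's constructor.
class C extends T

object Demo extends App {
  println((new C).x) // 42, read through the mixed-in accessor
  println((new C).c) // "const"
}
```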
TODO:
 - give same treatment to trait lazy vals (only accessors, no fields)
 - remove support for presuper vals in traits (they don't have the right init semantics in traits anyway)
 - lambdalift should emit accessors for captured vals in traits, not a field

Assorted notes from the full git history before squashing below.

Unit-typed vals: don't suppress the field
  it affects the memory model -- even a write of unit to a field is relevant...

Unit-typed lazy vals should never receive a field
  this need was unmasked by test/files/run/t7843-jsr223-service.scala,
  which no longer printed the output expected from the `0 to 10 foreach`

Use getter.referenced to track the trait setter
  reify's toolbox compiler changes the name of the trait that owns the accessor
  between fields and constructors (`$` suffix), so that the trait setter cannot
  be found when doing mkAssign in constructors. This could be solved by creating
  the mkAssign tree immediately during fields; anyway, as a first experiment,
  use `referenced` now that fields runs closer to the constructors phase
  (I tried this before and something broke).

Infer result type for `val`s, like we do for `def`s
  The lack of result type inference caused pos/t6780 to fail in the new field
  encoding for traits, as there is no separate accessor, and method synthesis
  computes the type signature based on the ValDef tree. This caused a cyclic
  error in implicit search, because now the implicit val's result type was not
  inferred from the super member, and inferring it from the RHS would cause
  implicit search to consider the member in question, so that a cycle is
  detected and type checking fails...

  Regardless of the new encoding, we should consistently infer result types
  for `def`s and `val`s (a small sketch follows these notes).

  Removed test/files/run/t4287inferredMethodTypes.scala and
  test/files/presentation/t4287c, since they were relying on inferring argument
  types from "overridden" constructors in a test for range positions of default
  arguments. Constructors don't override, so that was a mis-feature of
  -Yinfer-argument-types.

  Had to slightly refactor test/files/presentation/doc, as it was relying on
  scalac inferring a big intersection type to approximate the anonymous class
  that's instantiated for `override lazy val analyzer`. Now that we infer
  `Global` as the expected type based on the overridden val, we make
  `getComment` private, navigating between good old Skylla and Charybdis.
  I'm not sure why we need this restriction for anonymous classes though; only
  structural calls are restricted in the way that we're trying to avoid.
  The old behavior is maintained under -Xsource:2.11.

  Tests:
   - test/files/{pos,neg}/val_infer.scala
   - test/files/neg/val_sig_infer_match.scala
   - test/files/neg/val_sig_infer_struct.scala

Need NMT when inferring sig for accessor
  Q: why are we calling valDefSig and not methodSig?
  A: traits use defs for vals, but still use valDefSig...
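A small sketch of the val result-type inference mentioned in the notes above (hypothetical example, not one of the test files added by this patch):

```
// Hypothetical example: the val's result type is inferred from the overridden
// member (Option[Int]), just as it would be for a def, rather than from the rhs.
trait HasValue { def value: Option[Int] }

class Impl extends HasValue {
  val value = None // inferred as Option[Int] from HasValue.value
}

object Check extends App {
  val v: Option[Int] = (new Impl).value
  println(v) // None
}
```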
keep accessor and field info in synch --- spec/05-classes-and-objects.md | 4 +- src/compiler/scala/tools/nsc/Global.scala | 18 +- .../scala/tools/nsc/ast/TreeGen.scala | 10 + .../tools/nsc/backend/jvm/BCodeHelpers.scala | 2 +- .../nsc/backend/jvm/BTypesFromSymbols.scala | 7 +- .../tools/nsc/transform/AddInterfaces.scala | 6 - .../tools/nsc/transform/Constructors.scala | 197 ++++---- .../scala/tools/nsc/transform/Erasure.scala | 2 + .../scala/tools/nsc/transform/Fields.scala | 449 ++++++++++++++++++ .../scala/tools/nsc/transform/Mixin.scala | 132 ++--- .../tools/nsc/transform/OverridingPairs.scala | 4 +- .../scala/tools/nsc/transform/Statics.scala | 2 +- .../tools/nsc/typechecker/ContextErrors.scala | 23 +- .../nsc/typechecker/MethodSynthesis.scala | 60 ++- .../scala/tools/nsc/typechecker/Namers.scala | 395 ++++++++------- .../tools/nsc/typechecker/RefChecks.scala | 84 ++-- .../nsc/typechecker/TypeDiagnostics.scala | 16 +- .../scala/tools/nsc/typechecker/Typers.scala | 8 +- src/library/scala/deprecatedInheritance.scala | 3 + src/library/scala/deprecatedOverriding.scala | 3 + .../reflect/internal/AnnotationInfos.scala | 11 +- .../scala/reflect/internal/Definitions.scala | 12 +- .../scala/reflect/internal/Flags.scala | 12 +- .../scala/reflect/internal/Phase.scala | 2 + .../reflect/internal/ReificationSupport.scala | 1 + .../scala/reflect/internal/SymbolPairs.scala | 28 +- .../scala/reflect/internal/Symbols.scala | 121 +++-- test/files/neg/overloaded-unapply.check | 4 +- test/files/neg/t1960.check | 2 +- test/files/neg/t200.check | 4 +- test/files/neg/t2779.check | 4 +- test/files/neg/t278.check | 4 +- test/files/neg/t3871.check | 2 +- test/files/neg/t4541.check | 2 +- test/files/neg/t4541b.check | 2 +- test/files/neg/t5429.check | 2 +- test/files/neg/t591.check | 4 +- test/files/neg/t591.scala | 3 +- test/files/neg/t6335.check | 8 +- test/files/neg/t6446-additional.check | 31 +- test/files/neg/t6446-missing.check | 29 +- test/files/neg/t6446-show-phases.check | 29 +- test/files/neg/t6666.check | 4 +- test/files/neg/t7494-no-options.check | 31 +- test/files/neg/t7602.check | 4 +- test/files/neg/t7622-cyclic-dependency.check | 2 +- test/files/neg/t800.check | 12 +- test/files/neg/t8849.check | 2 +- test/files/neg/trait_fields_conflicts.check | 273 +++++++++++ test/files/neg/trait_fields_conflicts.scala | 87 ++++ .../trait_fields_deprecated_overriding.check | 6 + .../trait_fields_deprecated_overriding.flags | 1 + .../trait_fields_deprecated_overriding.scala | 11 + test/files/neg/val_infer.check | 6 + test/files/neg/val_infer.scala | 4 + test/files/neg/val_sig_infer_match.check | 4 + test/files/neg/val_sig_infer_match.scala | 22 + test/files/neg/val_sig_infer_struct.check | 4 + test/files/neg/val_sig_infer_struct.scala | 8 + test/files/neg/warn-unused-privates.check | 7 +- .../infer_override_def_args.flags} | 0 test/files/pos/infer_override_def_args.scala | 5 + .../pos/trait_fields_dependent_conflict.scala | 20 + .../pos/trait_fields_dependent_rebind.scala | 15 + .../pos/trait_fields_inherit_double_def.scala | 20 + test/files/pos/trait_fields_lambdalift.scala | 22 + .../trait_fields_nested_private_object.scala | 8 + .../trait_fields_nested_public_object.scala | 5 + test/files/pos/trait_fields_owners.scala | 19 + .../files/pos/trait_fields_private_this.scala | 5 + test/files/pos/trait_fields_static_fwd.scala | 10 + test/files/pos/val_infer.scala | 5 + test/files/presentation/doc/doc.scala | 2 +- .../presentation/scope-completion-3.check | 14 +- test/files/presentation/t4287c.check | 11 - 
test/files/presentation/t4287c/Test.scala | 3 - test/files/presentation/t4287c/src/Foo.scala | 9 - test/files/run/SymbolsTest.scala | 18 +- test/files/run/analyzerPlugins.check | 37 +- test/files/run/programmatic-main.check | 29 +- .../reflection-fieldsymbol-navigation.check | 6 +- test/files/run/repl-colon-type.check | 2 +- test/files/run/showdecl.check | 2 +- test/files/run/showdecl/Macros_1.scala | 2 +- test/files/run/showraw_mods.check | 2 +- test/files/run/t4287inferredMethodTypes.check | 30 -- test/files/run/t4287inferredMethodTypes.scala | 25 - test/files/run/t6733.check | 15 +- test/files/run/t7533.check | 51 +- test/files/run/t7533.scala | 34 +- test/files/run/t8549.scala | 6 +- test/files/run/trait_fields_bytecode.scala | 23 + test/files/run/trait_fields_final.scala | 21 + test/files/run/trait_fields_init.check | 21 + test/files/run/trait_fields_init.scala | 55 +++ test/files/run/trait_fields_repl.check | 11 + test/files/run/trait_fields_repl.scala | 10 + .../trait_fields_three_layer_overrides.check | 2 + .../trait_fields_three_layer_overrides.scala | 25 + test/files/run/trait_fields_volatile.scala | 13 + .../scala/reflect/internal/PrintersTest.scala | 6 +- .../backend/jvm/opt/ScalaInlineInfoTest.scala | 3 +- test/pending/run/origins.check | 6 +- 103 files changed, 2064 insertions(+), 794 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/transform/Fields.scala create mode 100644 test/files/neg/trait_fields_conflicts.check create mode 100644 test/files/neg/trait_fields_conflicts.scala create mode 100644 test/files/neg/trait_fields_deprecated_overriding.check create mode 100644 test/files/neg/trait_fields_deprecated_overriding.flags create mode 100644 test/files/neg/trait_fields_deprecated_overriding.scala create mode 100644 test/files/neg/val_infer.check create mode 100644 test/files/neg/val_infer.scala create mode 100644 test/files/neg/val_sig_infer_match.check create mode 100644 test/files/neg/val_sig_infer_match.scala create mode 100644 test/files/neg/val_sig_infer_struct.check create mode 100644 test/files/neg/val_sig_infer_struct.scala rename test/files/{presentation/t4287c.flags => pos/infer_override_def_args.flags} (100%) create mode 100644 test/files/pos/infer_override_def_args.scala create mode 100644 test/files/pos/trait_fields_dependent_conflict.scala create mode 100644 test/files/pos/trait_fields_dependent_rebind.scala create mode 100644 test/files/pos/trait_fields_inherit_double_def.scala create mode 100644 test/files/pos/trait_fields_lambdalift.scala create mode 100644 test/files/pos/trait_fields_nested_private_object.scala create mode 100644 test/files/pos/trait_fields_nested_public_object.scala create mode 100644 test/files/pos/trait_fields_owners.scala create mode 100644 test/files/pos/trait_fields_private_this.scala create mode 100644 test/files/pos/trait_fields_static_fwd.scala create mode 100644 test/files/pos/val_infer.scala delete mode 100644 test/files/presentation/t4287c.check delete mode 100644 test/files/presentation/t4287c/Test.scala delete mode 100644 test/files/presentation/t4287c/src/Foo.scala delete mode 100644 test/files/run/t4287inferredMethodTypes.check delete mode 100644 test/files/run/t4287inferredMethodTypes.scala create mode 100644 test/files/run/trait_fields_bytecode.scala create mode 100644 test/files/run/trait_fields_final.scala create mode 100644 test/files/run/trait_fields_init.check create mode 100644 test/files/run/trait_fields_init.scala create mode 100644 test/files/run/trait_fields_repl.check create mode 100644 
test/files/run/trait_fields_repl.scala create mode 100644 test/files/run/trait_fields_three_layer_overrides.check create mode 100644 test/files/run/trait_fields_three_layer_overrides.scala create mode 100644 test/files/run/trait_fields_volatile.scala diff --git a/spec/05-classes-and-objects.md b/spec/05-classes-and-objects.md index 69828ec7fec9..f92e88788ac7 100644 --- a/spec/05-classes-and-objects.md +++ b/spec/05-classes-and-objects.md @@ -344,8 +344,8 @@ $M'$: - If $M$ and $M'$ are both concrete value definitions, then either none of them is marked `lazy` or both must be marked `lazy`. -A stable member can only be overridden by a stable member. -For example, this is not allowed: +- A stable member can only be overridden by a stable member. + For example, this is not allowed: ```scala class X { val stable = 1} diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index d4c2896c5c65..c2d92ce7f975 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -476,10 +476,22 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val runsRightAfter = None } with TailCalls + // phaseName = "fields" + object fields extends { + val global: Global.this.type = Global.this + // after refchecks, so it doesn't have to make weird exceptions for synthetic accessors + // after uncurry as it produces more work for the fields phase as well as being confused by it: + // - sam expansion synthesizes classes, which may need trait fields mixed in + // - the fields phase adds synthetic abstract methods to traits that should not disqualify them from being a SAM type + // before erasure: correct signatures & bridges for accessors + val runsAfter = List("uncurry") + val runsRightAfter = None + } with Fields + // phaseName = "explicitouter" object explicitOuter extends { val global: Global.this.type = Global.this - val runsAfter = List("tailcalls") + val runsAfter = List("fields") val runsRightAfter = None } with ExplicitOuter @@ -595,7 +607,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) * This implementation creates a description map at the same time. */ protected def computeInternalPhases(): Unit = { - // Note: this fits -Xshow-phases into 80 column width, which it is + // Note: this fits -Xshow-phases into 80 column width, which is // desirable to preserve. 
val phs = List( syntaxAnalyzer -> "parse source into ASTs, perform simple desugaring", @@ -608,6 +620,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) pickler -> "serialize symbol tables", refChecks -> "reference/override checking, translate nested objects", uncurry -> "uncurry, translate function values to anonymous classes", + fields -> "synthesize accessors and fields", tailCalls -> "replace tail calls by jumps", specializeTypes -> "@specialized-driven class and method specialization", explicitOuter -> "this refs to outer pointers", @@ -1239,6 +1252,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) val picklerPhase = phaseNamed("pickler") val refchecksPhase = phaseNamed("refchecks") val uncurryPhase = phaseNamed("uncurry") + // val fieldsPhase = phaseNamed("fields") // val tailcallsPhase = phaseNamed("tailcalls") val specializePhase = phaseNamed("specialize") val explicitouterPhase = phaseNamed("explicitouter") diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index bb695500cc67..5dddf30c96b1 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -290,6 +290,16 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { } + // the result type of a function or corresponding SAM type + private def functionResultType(tp: Type): Type = { + val dealiased = tp.dealiasWiden + if (isFunctionTypeDirect(dealiased)) dealiased.typeArgs.last + else samOf(tp) match { + case samSym if samSym.exists => tp.memberInfo(samSym).resultType.deconst + case _ => NoType + } + } + /** * Lift a Function's body to a method. For use during Uncurry, where Function nodes have type FunctionN[T1, ..., Tn, R] * diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index d779490ba84e..27a4cbd1346f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -164,7 +164,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { def enclosingMethod(sym: Symbol): Option[Symbol] = { if (sym.isClass || sym == NoSymbol) None - else if (sym.isMethod) { + else if (sym.isMethod && !sym.isGetter) { if (doesNotExist(sym)) None else Some(sym) } else enclosingMethod(nextEnclosing(sym)) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 383347a0d321..836893a98b17 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -549,7 +549,10 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { if (classSym.isEffectivelyFinal) None else { // Phase travel necessary. For example, nullary methods (getter of an abstract val) get an - // empty parameter list in later phases and would therefore be picked as SAM. + // empty parameter list in uncurry and would therefore be picked as SAM. 
+ // Similarly, the fields phases adds abstract trait setters, which should not be considered + // abstract for SAMs (they do disqualify the SAM from LMF treatment, + // but an anonymous subclasss can be spun up by scalac after making just the single abstract method concrete) val samSym = exitingPickler(definitions.samOf(classSym.tpe)) if (samSym == NoSymbol) None else Some(samSym.javaSimpleName.toString + methodBTypeFromSymbol(samSym).descriptor) @@ -724,7 +727,7 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { (((sym.rawflags & symtab.Flags.FINAL) != 0) || isTopLevelModuleClass(sym)) && !sym.enclClass.isTrait && !sym.isClassConstructor - && !sym.isMutable // lazy vals and vars both + && (!sym.isMutable || nme.isTraitSetterName(sym.name)) // lazy vals and vars and their setters cannot be final, but trait setters are ) // Primitives are "abstract final" to prohibit instantiation diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index 9a8eca152fe2..104e2e8c937f 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -50,12 +50,6 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure => } } - private def mkAssign(clazz: Symbol, assignSym: Symbol, rhs: Tree): Tree = { - val qual = Select(This(clazz), assignSym) - if (assignSym.isSetter) Apply(qual, List(rhs)) - else Assign(qual, rhs) - } - /** Add calls to supermixin constructors * `super[mix].$init$()` * to tree, which is assumed to be the body of a constructor of class clazz. diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index 971a55f763c8..ec8dc6883447 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -13,7 +13,7 @@ import symtab.Flags._ /** This phase converts classes with parameters into Java-like classes with * fields, which are assigned to from constructors. */ -abstract class Constructors extends Statics with Transform with ast.TreeDSL { +abstract class Constructors extends Statics with Transform with TypingTransformers with ast.TreeDSL { import global._ import definitions._ @@ -26,7 +26,7 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { private val guardedCtorStats: mutable.Map[Symbol, List[Tree]] = perRunCaches.newMap[Symbol, List[Tree]]() private val ctorParams: mutable.Map[Symbol, List[Symbol]] = perRunCaches.newMap[Symbol, List[Symbol]]() - class ConstructorTransformer(unit: CompilationUnit) extends Transformer { + class ConstructorTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { /* * Inspect for obvious out-of-order initialization; concrete, eager vals or vars, declared in this class, * for which a reference to the member precedes its definition. 
@@ -80,7 +80,10 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { else { checkUninitializedReads(cd) val tplTransformer = new TemplateTransformer(unit, impl0) - treeCopy.ClassDef(cd, mods0, name0, tparams0, tplTransformer.transformed) + tplTransformer.localTyper = this.localTyper + tplTransformer.atOwner(impl0, cd.symbol) { + treeCopy.ClassDef(cd, mods0, name0, tparams0, tplTransformer.transformed) + } } case _ => super.transform(tree) @@ -442,13 +445,14 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { } // GuardianOfCtorStmts private class TemplateTransformer(val unit: CompilationUnit, val impl: Template) - extends StaticsTransformer + extends TypingTransformer(unit) + with StaticsTransformer with DelayedInitHelper with OmittablesHelper - with GuardianOfCtorStmts { + with GuardianOfCtorStmts + { val clazz = impl.symbol.owner // the transformed class - val localTyper = typer.atOwner(impl, clazz) val isDelayedInitSubclass = clazz isSubClass DelayedInitClass @@ -544,12 +548,15 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { else transform(tree.changeOwner(oldOwner -> newOwner)) } - // Create an assignment to class field `to` with rhs `from` - def mkAssign(to: Symbol, from: Tree): Tree = - localTyper.typedPos(to.pos) { - Assign(Select(This(clazz), to), from) + // Assign `rhs` to class field / trait setter `assignSym` + def mkAssign(assignSym: Symbol, rhs: Tree): Tree = + localTyper.typedPos(assignSym.pos) { + val qual = Select(This(clazz), assignSym) + if (assignSym.isSetter) Apply(qual, List(rhs)) + else Assign(qual, rhs) } + // Create code to copy parameter to parameter accessor field. // If parameter is $outer, check that it is not null so that we NPE // here instead of at some unknown future $outer access. @@ -565,9 +572,6 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { } } - // Constant typed vals are not memoized. - def memoizeValue(sym: Symbol) = !sym.info.resultType.isInstanceOf[ConstantType] - /** Triage definitions and statements in this template into the following categories. * The primary constructor is treated separately, as it is assembled in part from these pieces. 
* @@ -577,84 +581,113 @@ abstract class Constructors extends Statics with Transform with ast.TreeDSL { * - `constrStats`: statements that go into the constructor after and including the superclass constructor call * - `classInitStats`: statements that go into the class initializer */ - def triageStats = { - val defBuf, auxConstructorBuf, constrPrefixBuf, constrStatBuf, classInitStatBuf = new mutable.ListBuffer[Tree] - - // The early initialized field definitions of the class (these are the class members) - val presupers = treeInfo.preSuperFields(stats) - - // generate code to copy pre-initialized fields - for (stat <- primaryConstrBody.stats) { - constrStatBuf += stat - stat match { - case ValDef(mods, name, _, _) if mods.hasFlag(PRESUPER) => - // stat is the constructor-local definition of the field value - val fields = presupers filter (_.getterName == name) - assert(fields.length == 1, s"expected exactly one field by name $name in $presupers of $clazz's early initializers") - val to = fields.head.symbol - - if (memoizeValue(to)) constrStatBuf += mkAssign(to, Ident(stat.symbol)) - case _ => + class Triage { + private val defBuf, auxConstructorBuf, constrPrefixBuf, constrStatBuf, classInitStatBuf = new mutable.ListBuffer[Tree] + + triage() + + val defs = defBuf.toList + val auxConstructors = auxConstructorBuf.toList + val constructorPrefix = constrPrefixBuf.toList + val constructorStats = constrStatBuf.toList + val classInitStats = classInitStatBuf.toList + + private def triage() = { + // Constant typed vals are not memoized. + def memoizeValue(sym: Symbol) = !sym.info.resultType.isInstanceOf[ConstantType] + + // The early initialized field definitions of the class (these are the class members) + val presupers = treeInfo.preSuperFields(stats) + + // generate code to copy pre-initialized fields + for (stat <- primaryConstrBody.stats) { + constrStatBuf += stat + stat match { + case ValDef(mods, name, _, _) if mods.hasFlag(PRESUPER) => // TODO trait presupers + // stat is the constructor-local definition of the field value + val fields = presupers filter (_.getterName == name) + assert(fields.length == 1, s"expected exactly one field by name $name in $presupers of $clazz's early initializers") + val to = fields.head.symbol + + if (memoizeValue(to)) constrStatBuf += mkAssign(to, Ident(stat.symbol)) + case _ => + } } - } - for (stat <- stats) { - val statSym = stat.symbol - - // Move the RHS of a ValDef to the appropriate part of the ctor. - // If the val is an early initialized or a parameter accessor, - // it goes before the superclass constructor call, otherwise it goes after. - // A lazy val's effect is not moved to the constructor, as it is delayed. - // Returns `true` when a `ValDef` is needed. - def moveEffectToCtor(mods: Modifiers, rhs: Tree, assignSym: Symbol): Unit = { - val initializingRhs = - if ((assignSym eq NoSymbol) || statSym.isLazy) EmptyTree // not memoized, or effect delayed (for lazy val) - else if (!mods.hasStaticFlag) intoConstructor(statSym, primaryConstr.symbol)(rhs) - else rhs - - if (initializingRhs ne EmptyTree) { - val initPhase = - if (mods hasFlag STATIC) classInitStatBuf - else if (mods hasFlag PRESUPER | PARAMACCESSOR) constrPrefixBuf - else constrStatBuf - - initPhase += mkAssign(assignSym, initializingRhs) + val primaryConstrSym = primaryConstr.symbol + + for (stat <- stats) { + val statSym = stat.symbol + + // Move the RHS of a ValDef to the appropriate part of the ctor. 
+ // If the val is an early initialized or a parameter accessor, + // it goes before the superclass constructor call, otherwise it goes after. + // A lazy val's effect is not moved to the constructor, as it is delayed. + // Returns `true` when a `ValDef` is needed. + def moveEffectToCtor(mods: Modifiers, rhs: Tree, assignSym: Symbol): Unit = { + val initializingRhs = + if ((assignSym eq NoSymbol) || statSym.isLazy) EmptyTree // not memoized, or effect delayed (for lazy val) + else if (!mods.hasStaticFlag) intoConstructor(statSym, primaryConstrSym)(rhs) + else rhs + + if (initializingRhs ne EmptyTree) { + val initPhase = + if (mods hasFlag STATIC) classInitStatBuf + else if (mods hasFlag PRESUPER | PARAMACCESSOR) constrPrefixBuf + else constrStatBuf + + initPhase += mkAssign(assignSym, initializingRhs) + } } - } - stat match { - // recurse on class definition, store in defBuf - case _: ClassDef if !stat.symbol.isInterface => defBuf += new ConstructorTransformer(unit).transform(stat) - - // Triage methods -- they all end up in the template -- - // regular ones go to `defBuf`, secondary contructors go to `auxConstructorBuf`. - // The primary constructor is dealt with separately (we're massaging it here). - case _: DefDef if statSym.isPrimaryConstructor || statSym.isMixinConstructor => () - case _: DefDef if statSym.isConstructor => auxConstructorBuf += stat - case _: DefDef => defBuf += stat - - // If a val needs a field, an empty valdef goes into the template. - // Except for lazy and ConstantTyped vals, the field is initialized by an assignment in: - // - the class initializer (static), - // - the constructor, before the super call (early initialized or a parameter accessor), - // - the constructor, after the super call (regular val). - case ValDef(mods, _, _, rhs) => - if (rhs ne EmptyTree) { - val emitField = memoizeValue(statSym) - moveEffectToCtor(mods, rhs, if (emitField) statSym else NoSymbol) - if (emitField) defBuf += deriveValDef(stat)(_ => EmptyTree) - } else defBuf += stat - - // all other statements go into the constructor - case _ => constrStatBuf += intoConstructor(impl.symbol, primaryConstr.symbol)(stat) + stat match { + // recurse on class definition, store in defBuf + case _: ClassDef if !statSym.isInterface => + defBuf += new ConstructorTransformer(unit).transform(stat) + + // primary constructor is already tracked as `primaryConstr` + // non-primary constructors go to auxConstructorBuf + // mixin constructors are suppressed (!?!?) + case _: DefDef if statSym.isConstructor => + if ((statSym ne primaryConstrSym) && !statSym.isMixinConstructor) auxConstructorBuf += stat + + // If a val needs a field, an empty valdef goes into the template. + // Except for lazy and ConstantTyped vals, the field is initialized by an assignment in: + // - the class initializer (static), + // - the constructor, before the super call (early initialized or a parameter accessor), + // - the constructor, after the super call (regular val). 
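As a rough source-level illustration of the buckets described above (schematic example, not part of the patch; `Base` and all member names are made up):

```scala
class Base
class C(val p: Int) extends Base {
  val x: Int = p + 1        // regular val: empty ValDef stays in the template (defBuf),
                            // its assignment moves after the super call (constrStatBuf)
  final val K = 10          // ConstantType val: not memoized, no field, nothing moved
  lazy val l: Int = x * 2   // lazy val: its effect stays in the lazy accessor, not the ctor
  println("constructing C") // plain statement: moved into the primary constructor
  def m(): Int = x + p      // ordinary method: stays in the template (defBuf)
}
// `p` is a parameter accessor: its assignment lands in the constructor prefix,
// i.e. before the superclass constructor call (constrPrefixBuf).
```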
+ case vd: ValDef => + if (vd.rhs eq EmptyTree) { defBuf += vd } + else { + val emitField = memoizeValue(statSym) + + if (emitField) { + moveEffectToCtor(vd.mods, vd.rhs, statSym) + defBuf += deriveValDef(stat)(_ => EmptyTree) + } + } + + case dd: DefDef => + // either move the RHS to ctor (for getter of stored field) or just drop it (for corresponding setter) + def shouldMoveRHS = + clazz.isTrait && statSym.isAccessor && !statSym.isLazy && (statSym.isSetter || memoizeValue(statSym)) + + if ((dd.rhs eq EmptyTree) || !shouldMoveRHS) { defBuf += dd } + else { + if (statSym.isGetter) moveEffectToCtor(dd.mods, dd.rhs, statSym.asTerm.referenced orElse statSym.setterIn(clazz)) + defBuf += deriveDefDef(stat)(_ => EmptyTree) + } + + // all other statements go into the constructor + case _ => + constrStatBuf += intoConstructor(impl.symbol, primaryConstrSym)(stat) + } } } - - (defBuf.toList, auxConstructorBuf.toList, constrPrefixBuf.toList, constrStatBuf.toList, classInitStatBuf.toList) } def transformed = { - val (defs, auxConstructors, constructorPrefix, constructorStats, classInitStats) = triageStats + val triage = new Triage; import triage._ // omit unused outers val omittableAccessor: Set[Symbol] = diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index db8e203c1cdf..289ac0cc023c 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -1156,6 +1156,8 @@ abstract class Erasure extends AddInterfaces treeCopy.ArrayValue( tree1, elemtpt setType specialScalaErasure.applyInArray(elemtpt.tpe), trees map transform).clearType() case DefDef(_, _, _, _, tpt, _) => + fields.dropFieldAnnotationsFromGetter(tree.symbol) // TODO: move this in some post-processing transform in the fields phase? + try super.transform(tree1).clearType() finally tpt setType specialErasure(tree1.symbol)(tree1.symbol.tpe).resultType case ApplyDynamic(qual, Literal(Constant(boostrapMethodRef: Symbol)) :: _) => diff --git a/src/compiler/scala/tools/nsc/transform/Fields.scala b/src/compiler/scala/tools/nsc/transform/Fields.scala new file mode 100644 index 000000000000..0dd7b1fee025 --- /dev/null +++ b/src/compiler/scala/tools/nsc/transform/Fields.scala @@ -0,0 +1,449 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author + */ + +package scala.tools.nsc +package transform + +import scala.annotation.tailrec +import symtab.Flags._ + + +/** Synthesize accessors and field for each (strict) val owned by a trait. + * + * For traits: + * + * - Namers translates a definition `val x = rhs` into a getter `def x = rhs` -- no underlying field is created. + * - This phase synthesizes accessors and fields for any vals mixed into a non-trait class. + * - Constructors will move the rhs to an assignment in the template body. + * and those statements then move to the template into the constructor, + * which means it will initialize the fields defined in this template (and execute the corresponding side effects). + * We need to maintain the connection between getter and rhs until after specialization so that it can duplicate vals. + * + * Runs before erasure (to get bridges), and thus before lambdalift/flatten, so that nested functions/definitions must be considered. + * We run after uncurry because it can introduce subclasses of traits with fields (SAMs with vals). 
+ * Lambdalift also introduces new fields (paramaccessors for captured vals), but runs too late in the pipeline + * (mixins still synthesizes implementations for accessors that need to be mixed into subclasses of local traits that capture). + * + * In the future, would like to get closer to dotty, which lifts a val's RHS (a similar thing is done for template-level statements) + * to a method `$_initialize_$1$x` instead of a block, which is used in the constructor to initialize the val. + * This makes for a nice unification of strict and lazy vals, in that the RHS is lifted to a method for both, + * with the corresponding compute method called at the appropriate time.) + * + * This only reduces the required number of methods per field declaration in traits, + * if we encode the name (and place in initialisation order) of the field + * in the name of its initializing method, to allow separate compilation. + * (The name mangling must include ordering, and thus complicate incremental compilation: + * ideally, we'd avoid renumbering unchanged methods, but that would result in + * different bytecode between clean recompiles and incremental ones). + * + * In the even longer term (Scala 3?), I agree with @DarkDimius that it would make sense + * to hide the difference between strict and lazy vals. All vals are lazy, + * but the memoization overhead is removed when we statically know they are forced during initialiation. + * We could still expose the low-level field semantics through `private[this] val`s. + * + * In any case, the current behavior of overriding vals is pretty surprising. + * An overridden val's side-effect is still performed. + * The only change due to overriding is that its value is never written to the field + * (the overridden val's value is, of course, stored in the field in addition to its side-effect being performed). + */ +abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransformers { + + import global._ + import definitions._ + + /** the following two members override abstract members in Transform */ + val phaseName: String = "fields" + + protected def newTransformer(unit: CompilationUnit): Transformer = new FieldsTransformer(unit) + override def transformInfo(sym: Symbol, tp: Type): Type = + if (sym.isJavaDefined || sym.isPackageClass || !sym.isClass) tp + else synthFieldsAndAccessors(tp) + + // we leave lazy vars/accessors and early-init vals alone for now + private def excludedAccessorOrFieldByFlags(statSym: Symbol): Boolean = statSym hasFlag LAZY | PRESUPER + + // used for internal communication between info and tree transform of this phase -- not pickled, not in initialflags + // TODO: reuse MIXEDIN for NEEDS_TREES? 
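To make the lowering described in the comment above concrete, a schematic sketch (not part of the patch; the field name and the trait-setter mangling shown are only indicative):

```scala
trait T {
  val x: Int = 42   // namer keeps only a getter in the trait: def x: Int = 42
}

class C extends T
// Conceptually, the fields phase gives C:
//   private[this] var x: Int                       // synthesized storage
//   def x: Int = this.x                            // mixed-in getter reading the field
//   def T$_setter_$x_=(v: Int): Unit = this.x = v  // implementation of the synthesized trait setter
// and the constructors phase arranges for T's initialization code to call that setter.
```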
+ override def phaseNewFlags: Long = NEEDS_TREES | OVERRIDDEN_TRAIT_SETTER + + private final val OVERRIDDEN_TRAIT_SETTER = TRANS_FLAG + + final val TRAIT_SETTER_FLAGS = NEEDS_TREES | DEFERRED | ProtectedLocal + + private def accessorImplementedInSubclass(accessor: Symbol) = + (accessor hasFlag SYNTHESIZE_IMPL_IN_SUBCLASS) && (accessor hasFlag (ACCESSOR)) + + private def concreteOrSynthImpl(sym: Symbol): Boolean = !(sym hasFlag DEFERRED) || (sym hasFlag SYNTHESIZE_IMPL_IN_SUBCLASS) + + private def synthesizeImplInSubclasses(accessor: Symbol): Unit = + accessor setFlag lateDEFERRED | SYNTHESIZE_IMPL_IN_SUBCLASS + + private def setClonedTraitSetterFlags(clazz: Symbol, correspondingGetter: Symbol, cloneInSubclass: Symbol): Unit = { + val overridden = isOverriddenAccessor(correspondingGetter, clazz) + if (overridden) cloneInSubclass setFlag OVERRIDDEN_TRAIT_SETTER + else if (correspondingGetter.isEffectivelyFinal) cloneInSubclass setFlag FINAL + } + + // TODO: add MIXEDIN (see e.g., `accessed` on `Symbol`) + private def setMixedinAccessorFlags(orig: Symbol, cloneInSubclass: Symbol): Unit = + cloneInSubclass setFlag OVERRIDE | NEEDS_TREES resetFlag DEFERRED | lateDEFERRED | SYNTHESIZE_IMPL_IN_SUBCLASS + + private def setFieldFlags(accessor: Symbol, fieldInSubclass: TermSymbol): Unit = + fieldInSubclass setFlag (NEEDS_TREES | + PrivateLocal + | (accessor getFlag MUTABLE | LAZY) + | (if (accessor hasFlag STABLE) 0 else MUTABLE) + ) + + + def checkAndClearOverridden(setter: Symbol) = checkAndClear(OVERRIDDEN_TRAIT_SETTER)(setter) + def checkAndClearNeedsTrees(setter: Symbol) = checkAndClear(NEEDS_TREES)(setter) + def checkAndClear(flag: Long)(sym: Symbol) = + sym.hasFlag(flag) match { + case overridden => + sym resetFlag flag + overridden + } + + + private def isOverriddenAccessor(member: Symbol, site: Symbol): Boolean = { + val pre = site.thisType + @tailrec def loop(bcs: List[Symbol]): Boolean = { + // println(s"checking ${bcs.head} for member overriding $member (of ${member.owner})") + bcs.nonEmpty && bcs.head != member.owner && (matchingAccessor(pre, member, bcs.head) != NoSymbol || loop(bcs.tail)) + } + + member.exists && loop(site.info.baseClasses) + } + + + def matchingAccessor(pre: Type, member: Symbol, clazz: Symbol) = { + val res = member.matchingSymbol(clazz, pre) filter (sym => (sym hasFlag ACCESSOR) && concreteOrSynthImpl(sym)) + // if (res != NoSymbol) println(s"matching accessor for $member in $clazz = $res (under $pre)") + // else println(s"no matching accessor for $member in $clazz (under $pre) among ${clazz.info.decls}") + res + } + + + class FieldMemoization(accessorOrField: Symbol, site: Symbol) { + val tp = fieldTypeOfAccessorIn(accessorOrField, site.thisType) + // not stored, no side-effect + val pureConstant = tp.isInstanceOf[ConstantType] + + // if !stored, may still have a side-effect + // (currently not distinguished -- used to think we could drop unit-typed vals, + // but the memory model cares about writes to unit-typed fields) + val stored = !pureConstant // || isUnitType(tp)) + } + + private def fieldTypeForGetterIn(getter: Symbol, pre: Type): Type = getter.info.finalResultType.asSeenFrom(pre, getter.owner) + private def fieldTypeForSetterIn(setter: Symbol, pre: Type): Type = setter.info.paramTypes.head.asSeenFrom(pre, setter.owner) + + // TODO: is there a more elegant way? 
+ def fieldTypeOfAccessorIn(accessor: Symbol, pre: Type) = + if (accessor.isSetter) fieldTypeForSetterIn(accessor, pre) + else fieldTypeForGetterIn(accessor, pre) + + + // Constant/unit typed vals are not memoized (their value is so cheap it doesn't make sense to store it in a field) + // for a unit-typed getter, we perform the effect at the appropriate time (constructor for eager ones, lzyCompute for lazy), + // and have the getter just return Unit (who does that!?) + // NOTE: this only considers type, filter on flags first! + def fieldMemoizationIn(accessorOrField: Symbol, site: Symbol) = new FieldMemoization(accessorOrField, site) + + // drop field-targeting annotations from getters + // (in traits, getters must also hold annotations that target the underlying field, + // because the latter won't be created until the trait is mixed into a class) + // TODO do bean getters need special treatment to suppress field-targeting annotations in traits? + def dropFieldAnnotationsFromGetter(sym: Symbol) = + if (sym.isGetter && sym.owner.isTrait) { + sym setAnnotations (sym.annotations filter AnnotationInfo.mkFilter(GetterTargetClass, defaultRetention = false)) + } + + private object synthFieldsAndAccessors extends TypeMap { + private def newTraitSetter(getter: Symbol, clazz: Symbol) = { + // Add setter for an immutable, memoizing getter + // (can't emit during namers because we don't yet know whether it's going to be memoized or not) + val setterFlags = (getter.flags & ~(STABLE | PrivateLocal | OVERRIDE | IMPLICIT | FINAL)) | MUTABLE | ACCESSOR | TRAIT_SETTER_FLAGS + val setterName = nme.expandedSetterName(getter.name.setterName, clazz) + val setter = clazz.newMethod(setterName, getter.pos.focus, setterFlags) + val fieldTp = fieldTypeForGetterIn(getter, clazz.thisType) + // println(s"newTraitSetter in $clazz for $getter = $setterName : $fieldTp") + + getter.asTerm.referenced = setter + + setter setInfo MethodType(List(setter.newSyntheticValueParam(fieldTp)), UnitTpe) + setter + } + + def apply(tp0: Type): Type = tp0 match { + // TODO: make less destructive (name changes, decl additions, flag setting -- + // none of this is actually undone when travelling back in time using atPhase) + case tp@ClassInfoType(parents, decls, clazz) if clazz.isTrait => + // setters for trait vars or module accessor + val newDecls = collection.mutable.ListBuffer[Symbol]() + val origDecls = decls.toList + + // strict, memoized accessors will receive an implementation in first real class to extend this trait + origDecls.foreach { member => + if (member hasFlag ACCESSOR) { + val fieldMemoization = fieldMemoizationIn(member, clazz) + // check flags before calling makeNotPrivate + val accessorUnderConsideration = !(member hasFlag (DEFERRED | LAZY)) + + // destructively mangle accessor's name (which may cause rehashing of decls), also sets flags + if (member hasFlag PRIVATE) member makeNotPrivate clazz + + // Need to mark as notPROTECTED, so that it's carried over to the synthesized member in subclasses, + // since the trait member will receive this flag later in ExplicitOuter, but the synthetic subclass member will not. + // If we don't add notPROTECTED to the synthesized one, the member will not be seen as overriding the trait member. + // Therefore, addForwarders's call to membersBasedOnFlags would see the deferred member in the trait, + // instead of the concrete (desired) one in the class + // TODO: encapsulate as makeNotProtected, similar to makeNotPrivate (also do moduleClass, e.g.) 
+ if (member hasFlag PROTECTED) member setFlag notPROTECTED + + // must not reset LOCAL, as we must maintain protected[this]ness to allow that variance hole + // (not sure why this only problem only arose when we started setting the notPROTECTED flag) + + // derive trait setter after calling makeNotPrivate (so that names are mangled consistently) + if (accessorUnderConsideration && fieldMemoization.stored) { + synthesizeImplInSubclasses(member) + + if (member hasFlag STABLE) // TODO: check isGetter? + newDecls += newTraitSetter(member, clazz) + } + } + } + + if (newDecls nonEmpty) { + val allDecls = newScope + origDecls foreach allDecls.enter + newDecls foreach allDecls.enter + ClassInfoType(parents, allDecls, clazz) + } else tp + + // mix in fields & accessors for all mixed in traits + + case tp@ClassInfoType(parents, oldDecls, clazz) if !clazz.isPackageClass => + val site = clazz.thisType + // TODO (1): improve logic below, which is used to avoid mixing in anything that would result in an error in refchecks + // (a reason to run after refchecks? we should run before pickler, though, I think, so that the synthesized stats are pickled) + + val membersNeedingSynthesis = clazz.mixinClasses.flatMap { mixin => + // afterOwnPhase, so traits receive trait setters for vals + afterOwnPhase {mixin.info}.decls.toList.filter(accessorImplementedInSubclass) + } + +// println(s"mixing in for $clazz: $membersNeedingSynthesis from ${clazz.mixinClasses}") + + // TODO: setter conflicts? + def accessorConflictsExistingVal(accessor: Symbol): Boolean = { + val existingGetter = oldDecls.lookup(accessor.name.getterName) + // println(s"$existingGetter from $accessor to ${accessor.name.getterName}") + val tp = fieldTypeOfAccessorIn(accessor, site) + (existingGetter ne NoSymbol) && (tp matches (site memberInfo existingGetter).resultType) // !existingGetter.isDeferred && -- see (3) + } + + // mixin field accessors -- + // invariant: (accessorsMaybeNeedingImpl, mixedInAccessorAndFields).zipped.forall(case (acc, clone :: _) => `clone` is clone of `acc` case _ => true) + val synthAccessorAndFields = membersNeedingSynthesis map { member => + def cloneAccessor() = { + val clonedAccessor = (member cloneSymbol clazz) setPos clazz.pos + setMixedinAccessorFlags(member, clonedAccessor) + + if (clonedAccessor.isGetter) + clonedAccessor setAnnotations (clonedAccessor.annotations filter AnnotationInfo.mkFilter(GetterTargetClass, defaultRetention = false)) + + // if we don't cloneInfo, method argument symbols are shared between trait and subclasses --> lambalift proxy crash + // TODO: use derive symbol variant? 
+ // println(s"cloning accessor $accessor to $clazz / $clonedInfo -> $relativeInfo") + clonedAccessor setInfo ((clazz.thisType memberType member) cloneInfo clonedAccessor) // accessor.info.cloneInfo(clonedAccessor).asSeenFrom(clazz.thisType, accessor.owner) + } + + // when considering whether to mix in the trait setter, forget about conflicts -- they will be reported for the getter + // a trait setter for an overridden val will receive a unit body in the tree transform + if (nme.isTraitSetterName(member.name)) { + val getter = member.getterIn(member.owner) + val clone = cloneAccessor() + + setClonedTraitSetterFlags(clazz, getter, clone) + // println(s"mixed in trait setter ${clone.defString}") + + List(clone) + } + // avoid creating early errors in case of conflicts (wait until refchecks); + // also, skip overridden accessors contributed by supertraits (only act on the last overriding one) + else if (accessorConflictsExistingVal(member) || isOverriddenAccessor(member, clazz)) Nil + else if (member.isGetter && fieldMemoizationIn(member, clazz).stored) { + // add field if needed + val field = clazz.newValue(member.localName, member.pos) setInfo fieldTypeForGetterIn(member, clazz.thisType) + + setFieldFlags(member, field) + + // filter getter's annotations to exclude those only meant for the field + // we must keep them around long enough to see them here, though, when we create the field + field setAnnotations (member.annotations filter AnnotationInfo.mkFilter(FieldTargetClass, defaultRetention = true)) + + List(cloneAccessor(), field) + } else List(cloneAccessor()) + } + + // println(s"new decls for $clazz: $mixedInAccessorAndFields") + + // omit fields that are not memoized, retain all other members + def omittableField(sym: Symbol) = sym.isValue && !sym.isMethod && !fieldMemoizationIn(sym, clazz).stored + + val newDecls = + if (synthAccessorAndFields.isEmpty) oldDecls.filterNot(omittableField) + else { + // must not alter `decls` directly + val newDecls = newScope + val enter = newDecls enter (_: Symbol) + val enterAll = (_: List[Symbol]) foreach enter + + oldDecls foreach { d => if (!omittableField(d)) enter(d) } + synthAccessorAndFields foreach enterAll + + newDecls + } + + // println(s"new decls: $newDecls") + + if (newDecls eq oldDecls) tp + else ClassInfoType(parents, newDecls, clazz) + + case tp => mapOver(tp) + } + } + + + + class FieldsTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { + def mkTypedUnit(pos: Position) = localTyper.typedPos(pos)(CODE.UNIT) + def deriveUnitDef(stat: Tree) = deriveDefDef(stat)(_ => mkTypedUnit(stat.pos)) + + def mkAccessor(accessor: Symbol)(body: Tree) = localTyper.typedPos(accessor.pos)(DefDef(accessor, body)).asInstanceOf[DefDef] + + def mkField(sym: Symbol) = localTyper.typedPos(sym.pos)(ValDef(sym)).asInstanceOf[ValDef] + + + // synth trees for accessors/fields and trait setters when they are mixed into a class + def fieldsAndAccessors(exprOwner: Symbol): List[ValOrDefDef] = { + if (exprOwner.isLocalDummy) { + val clazz = exprOwner.owner + def fieldAccess(accessor: Symbol): Option[Tree] = { + val fieldName = accessor.localName + val field = clazz.info.decl(fieldName) + // The `None` result denotes an error, but we defer to refchecks to report it. + // This is the result of overriding a val with a def, so that no field is found in the subclass. 
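The "overriding a val with a def" situation mentioned above, in source form (deliberately ill-formed; the diagnostic itself comes from refchecks, not from this phase):

```scala
trait T { val x: Int = 1 }

class C extends T {
  override def x: Int = 2   // no field is synthesized for a def, so fieldAccess finds nothing;
                            // refchecks rejects overriding a stable val with a def
}
```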
+ if (field.exists) Some(Select(This(clazz), field)) + else None + } + + def getterBody(getter: Symbol): Option[Tree] = { + val fieldMemoization = fieldMemoizationIn(getter, clazz) + if (fieldMemoization.pureConstant) Some(gen.mkAttributedQualifier(fieldMemoization.tp)) // TODO: drop when we no longer care about producing identical bytecode + else fieldAccess(getter) + } + + // println(s"accessorsAndFieldsNeedingTrees for $templateSym: $accessorsAndFieldsNeedingTrees") + def setterBody(setter: Symbol): Option[Tree] = { + // trait setter in trait + if (clazz.isTrait) Some(EmptyTree) + // trait setter for overridden val in class + else if (checkAndClearOverridden(setter)) Some(mkTypedUnit(setter.pos)) + // trait val/var setter mixed into class + else fieldAccess(setter) map (fieldSel => Assign(fieldSel, Ident(setter.firstParam))) + } + + + clazz.info.decls.toList.filter(checkAndClearNeedsTrees) flatMap { + case setter if setter.isSetter => setterBody(setter) map mkAccessor(setter) + case getter if getter.isAccessor => getterBody(getter) map mkAccessor(getter) + case field if !(field hasFlag METHOD) => Some(mkField(field)) // vals/vars and module vars (cannot have flags PACKAGE | JAVA since those never receive NEEDS_TREES) + case _ => None + } + } else { +// println(s"$exprOwner : ${exprOwner.info} --> ${exprOwner.info.decls}") + Nil + } + } + + def rhsAtOwner(stat: ValOrDefDef, newOwner: Symbol): Tree = + atOwner(newOwner)(super.transform(stat.rhs.changeOwner(stat.symbol -> newOwner))) + + private def transformStat(exprOwner: Symbol)(stat: Tree): List[Tree] = { + val clazz = currentOwner + val statSym = stat.symbol + + // println(s"transformStat $statSym in ${exprOwner.ownerChain}") + // currentRun.trackerFactory.snapshot() + + /* + For traits, the getter has the val's RHS, which is already constant-folded. There is no valdef. + For classes, we still have the classic scheme of private[this] valdef + getter & setter that read/assign to the field. + + There are two axes: (1) is there a side-effect to the val (2) does the val need storage? + For a ConstantType, both answers are "no". (For a unit-typed field, there's a side-effect, but no storage needed.) + + All others (getter for trait field, valdef for class field) have their rhs moved to an initialization statement. + Trait accessors for stored fields are made abstract (there can be no field in a trait). + (In some future version, accessors for non-stored, but effectful fields, + would receive a constant rhs, as the effect is performed by the initialization statement. + We could do this for unit-typed fields, but have chosen not to for backwards compatibility.) + */ + stat match { + // TODO: consolidate with ValDef case + case stat@DefDef(_, _, _, _, _, rhs) if (statSym hasFlag ACCESSOR) && !excludedAccessorOrFieldByFlags(statSym) => + /* TODO: defer replacing ConstantTyped tree by the corresponding constant until erasure + (until then, trees should not be constant-folded -- only their type tracks the resulting constant) + TODO: also remove ACCESSOR flag since there won't be an underlying field to access? + */ + def statInlinedConstantRhs = + if (clazz.isTrait) stat // we've already done this for traits.. 
the asymmetry will be solved by the above todo + else deriveDefDef(stat)(_ => gen.mkAttributedQualifier(rhs.tpe)) + + if (rhs ne EmptyTree) { + val fieldMemoization = fieldMemoizationIn(statSym, clazz) + + // if we decide to have non-stored fields with initialization effects, the stat's RHS should be replaced by unit + // if (!fieldMemoization.stored) deriveUnitDef(stat) else stat + + if (fieldMemoization.pureConstant) statInlinedConstantRhs :: Nil + else super.transform(stat) :: Nil + } else { + stat :: Nil + } + + case stat@ValDef(mods, _, _, rhs) if !excludedAccessorOrFieldByFlags(statSym) => + if (rhs ne EmptyTree) { + val fieldMemoization = fieldMemoizationIn(statSym, clazz) + + // drop the val for (a) constant (pure & not-stored) and (b) not-stored (but still effectful) fields + if (fieldMemoization.pureConstant) Nil // (a) + else super.transform(stat) :: Nil // if (fieldMemoization.stored) + // else rhsAtOwner(transformStat, exprOwner) :: Nil // (b) -- not used currently + } else { + stat :: Nil + } + + + case tree => List( + if (exprOwner != currentOwner && tree.isTerm) atOwner(exprOwner)(super.transform(tree)) + else super.transform(tree) + ) + } + } + + // TODO flatMapConserve or something like it + // TODO use thicket encoding of multi-tree transformStat? + // if (!currentOwner.isClass || currentOwner.isPackageClass || currentOwner.isInterface) stats flatMap transformStat(exprOwner) // for the ModuleDef case, the only top-level case in that method + // else + override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = + afterOwnPhase { + fieldsAndAccessors(exprOwner) ++ (stats flatMap transformStat(exprOwner)) + } + } +} diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index b5084cffe149..d98daf0ffb31 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -45,8 +45,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { * methods in the impl class (because they can have arbitrary initializers) */ private def isImplementedStatically(sym: Symbol) = ( - sym.isMethod - && (!sym.hasFlag(DEFERRED | SUPERACCESSOR) || (sym hasFlag lateDEFERRED)) + sym.isMethod + && notDeferredOrLate(sym) && sym.owner.isTrait && (!sym.isModule || sym.hasFlag(PRIVATE | LIFTED)) && (!(sym hasFlag (ACCESSOR | SUPERACCESSOR)) || sym.isLazy) @@ -109,16 +109,16 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { // --------- type transformation ----------------------------------------------- - def isConcreteAccessor(member: Symbol) = - member.hasAccessorFlag && (!member.isDeferred || (member hasFlag lateDEFERRED)) + private def notDeferredOrLate(sym: Symbol) = !sym.hasFlag(DEFERRED) || sym.hasFlag(lateDEFERRED) /** Is member overridden (either directly or via a bridge) in base class sequence `bcs`? */ def isOverriddenAccessor(member: Symbol, bcs: List[Symbol]): Boolean = beforeOwnPhase { def hasOverridingAccessor(clazz: Symbol) = { clazz.info.nonPrivateDecl(member.name).alternatives.exists( sym => - isConcreteAccessor(sym) && + sym.hasFlag(ACCESSOR) && !sym.hasFlag(MIXEDIN) && + notDeferredOrLate(sym) && matchesType(sym.tpe, member.tpe, alwaysMatchSimple = true)) } ( bcs.head != member.owner @@ -126,6 +126,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { ) } + private def isUnitGetter(sym: Symbol) = sym.tpe.resultType.typeSymbol == UnitClass + /** Add given member to given class, and mark member as mixed-in. 
*/ def addMember(clazz: Symbol, member: Symbol): Symbol = { @@ -202,6 +204,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { clazz.info // make sure info is up to date, so that implClass is set. + // TODO: is this needed? can there be fields in a class that don't have accessors yet but need them??? + // can we narrow this down to just getters for lazy vals? param accessors? for (member <- clazz.info.decls) { if (!member.isMethod && !member.isModule && !member.isModuleVar) { assert(member.isTerm && !member.isDeferred, member) @@ -297,49 +301,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { def mixinTraitMembers(mixinClass: Symbol) { // For all members of a trait's interface do: for (mixinMember <- mixinClass.info.decls) { - if (isConcreteAccessor(mixinMember)) { - if (isOverriddenAccessor(mixinMember, clazz.info.baseClasses)) - devWarning(s"Overridden concrete accessor: ${mixinMember.fullLocationString}") - else { - // mixin field accessors - val mixedInAccessor = cloneAndAddMixinMember(mixinClass, mixinMember) - if (mixinMember.isLazy) { - initializer(mixedInAccessor) = ( - mixinClass.info.decl(mixinMember.name) - orElse abort("Could not find initializer for " + mixinMember.name) - ) - } - if (!mixinMember.isSetter) - mixinMember.tpe match { - case MethodType(Nil, ConstantType(_)) => - // mixinMember is a constant; only getter is needed - ; - case MethodType(Nil, TypeRef(_, UnitClass, _)) => - // mixinMember is a value of type unit. No field needed - ; - case _ => // otherwise mixin a field as well - // enteringPhase: the private field is moved to the implementation class by erasure, - // so it can no longer be found in the mixinMember's owner (the trait) - val accessed = enteringPickler(mixinMember.accessed) - // #3857, need to retain info before erasure when cloning (since cloning only - // carries over the current entry in the type history) - val sym = enteringErasure { - // so we have a type history entry before erasure - clazz.newValue(mixinMember.localName, mixinMember.pos).setInfo(mixinMember.tpe.resultType) - } - sym updateInfo mixinMember.tpe.resultType // info at current phase - - val newFlags = ( - ( PrivateLocal ) - | ( mixinMember getFlag MUTABLE | LAZY) - | ( if (mixinMember.hasStableFlag) 0 else MUTABLE ) - ) - - addMember(clazz, sym setFlag newFlags setAnnotations accessed.annotations) - } - } - } - else if (mixinMember.isSuperAccessor) { // mixin super accessors + if (mixinMember.hasFlag(SUPERACCESSOR)) { // mixin super accessors val superAccessor = addMember(clazz, mixinMember.cloneSymbol(clazz)) setPos clazz.pos assert(superAccessor.alias != NoSymbol, superAccessor) @@ -355,10 +317,53 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { superAccessor.asInstanceOf[TermSymbol] setAlias alias1 } } - else if (mixinMember.isMethod && mixinMember.isModule && mixinMember.hasNoFlags(LIFTED | BRIDGE)) { + else if (mixinMember.hasAllFlags(METHOD | MODULE) && mixinMember.hasNoFlags(LIFTED | BRIDGE)) { // mixin objects: todo what happens with abstract objects? addMember(clazz, mixinMember.cloneSymbol(clazz, mixinMember.flags & ~(DEFERRED | lateDEFERRED)) setPos clazz.pos) } + else if (mixinMember.hasFlag(ACCESSOR) && notDeferredOrLate(mixinMember) + && (mixinMember hasFlag (LAZY | PARAMACCESSOR)) + && !isOverriddenAccessor(mixinMember, clazz.info.baseClasses)) { + // pick up where `fields` left off -- it already mixed in fields and accessors for regular vals. 
+ // but has ignored lazy vals and constructor parameter accessors + // TODO: captures added by lambdalift for local traits? + // + // mixin accessor for lazy val or constructor parameter + // (note that a paramaccessor cannot have a constant type as it must have a user-defined type) + val mixedInAccessor = cloneAndAddMixinMember(mixinClass, mixinMember) + val name = mixinMember.name + + if (mixinMember.isLazy) + initializer(mixedInAccessor) = + (mixinClass.info.decl(name) orElse abort(s"Could not find initializer for lazy val $name!")) + + // Add field while we're mixing in the getter (unless it's a Unit-typed lazy val) + // + // lazy val of type Unit doesn't need a field -- the bitmap is enough. + // TODO: constant-typed lazy vals... it's an extreme corner case, but we could also suppress the field in: + // `trait T { final lazy val a = "a" }; class C extends T`, but who writes code like that!? :) + // we'd also have to change the lazyvals logic if we do this + if (!nme.isSetterName(name) && !(mixinMember.isLazy && isUnitGetter(mixinMember))) { + // enteringPhase: the private field is moved to the implementation class by erasure, + // so it can no longer be found in the mixinMember's owner (the trait) + val accessed = enteringPickler(mixinMember.accessed) + // #3857, need to retain info before erasure when cloning (since cloning only + // carries over the current entry in the type history) + val sym = enteringErasure { + // so we have a type history entry before erasure + clazz.newValue(mixinMember.localName, mixinMember.pos).setInfo(mixinMember.tpe.resultType) + } + sym updateInfo mixinMember.tpe.resultType // info at current phase + + val newFlags = ( + (PrivateLocal) + | (mixinMember getFlag MUTABLE | LAZY) + | (if (mixinMember.hasStableFlag) 0 else MUTABLE) + ) + + addMember(clazz, sym setFlag newFlags setAnnotations accessed.annotations) + } + } } } @@ -478,8 +483,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { tree - case _ => - tree + case _ => tree } } @@ -763,13 +767,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { def addCheckedGetters(clazz: Symbol, stats: List[Tree]): List[Tree] = { def dd(stat: DefDef) = { val sym = stat.symbol - def isUnit = sym.tpe.resultType.typeSymbol == UnitClass def isEmpty = stat.rhs == EmptyTree if (!clazz.isTrait && sym.isLazy && !isEmpty) { assert(fieldOffset contains sym, sym) deriveDefDef(stat) { - case t if isUnit => mkLazyDef(clazz, sym, List(t), UNIT, fieldOffset(sym)) + case t if isUnitGetter(sym) => mkLazyDef(clazz, sym, List(t), UNIT, fieldOffset(sym)) case Block(stats, res) => mkLazyDef(clazz, sym, stats, Select(This(clazz), res.symbol), fieldOffset(sym)) @@ -781,8 +784,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { assert(fieldOffset contains sym, sym) deriveDefDef(stat)(rhs => (mkCheckedAccessor(clazz, _: Tree, fieldOffset(sym), stat.pos, sym))( - if (sym.tpe.resultType.typeSymbol == UnitClass) UNIT - else rhs + if (isUnitGetter(sym)) UNIT else rhs ) ) } @@ -908,7 +910,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { } } - def isUnitGetter(getter: Symbol) = getter.tpe.resultType.typeSymbol == UnitClass def fieldAccess(accessor: Symbol) = Select(This(clazz), accessor.accessed) def isOverriddenSetter(sym: Symbol) = @@ -924,7 +925,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { addDefDef(sym) } else { // if class is not a trait add accessor definitions - if (isConcreteAccessor(sym)) { + // used to include `sym` with `sym hasFlag lateDEFERRED` as 
not deferred, + // but I don't think MIXEDIN members ever get this flag + assert(!sym.hasFlag(lateDEFERRED), s"mixedin $sym from $clazz has lateDEFERRED flag?!") + if (sym.hasFlag(ACCESSOR) && !sym.hasFlag(DEFERRED)) { + assert(sym hasFlag (LAZY | PARAMACCESSOR), s"mixed in $sym from $clazz is not lazy/param?!?") + // add accessor definitions addDefDef(sym, { if (sym.isSetter) { @@ -1006,20 +1012,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { val parents1 = currentOwner.info.parents map (t => TypeTree(t) setPos tree.pos) // mark fields which can be nulled afterward lazyValNullables = nullableFields(templ) withDefaultValue Set() - // Remove bodies of accessors in traits - TODO: after PR #5141 (fields refactoring), this might be a no-op - val bodyEmptyAccessors = if (!sym.enclClass.isTrait) body else body mapConserve { - case dd: DefDef if dd.symbol.isAccessor && !dd.symbol.isLazy => - deriveDefDef(dd)(_ => EmptyTree) - case tree => tree - } // add all new definitions to current class or interface - val body1 = addNewDefs(currentOwner, bodyEmptyAccessors) - body1 foreach { + val statsWithNewDefs = addNewDefs(currentOwner, body) + statsWithNewDefs foreach { case dd: DefDef if isTraitMethodRequiringStaticImpl(dd) => dd.symbol.updateAttachment(NeedStaticImpl) case _ => } - treeCopy.Template(tree, parents1, self, body1) + treeCopy.Template(tree, parents1, self, statsWithNewDefs) case Select(qual, name) if sym.owner.isTrait && !sym.isMethod => // refer to fields in some trait an abstract getter in the interface. diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index e4082eb3769f..a861115cabb9 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -18,8 +18,6 @@ abstract class OverridingPairs extends SymbolPairs { import global._ class Cursor(base: Symbol) extends super.Cursor(base) { - lazy val relatively = new RelativeTo(base.thisType) - /** Symbols to exclude: Here these are constructors and private/artifact symbols, * including bridges. But it may be refined in subclasses. */ @@ -37,7 +35,7 @@ abstract class OverridingPairs extends SymbolPairs { (lo.owner != high.owner) // don't try to form pairs from overloaded members && !high.isPrivate // private or private[this] members never are overridden && !exclude(lo) // this admits private, as one can't have a private member that matches a less-private member. - && relatively.matches(lo, high) + && ((self memberType lo) matches (self memberType high)) ) // TODO we don't call exclude(high), should we? 
} } diff --git a/src/compiler/scala/tools/nsc/transform/Statics.scala b/src/compiler/scala/tools/nsc/transform/Statics.scala index 9ab00f1a831d..776805fd9f1c 100644 --- a/src/compiler/scala/tools/nsc/transform/Statics.scala +++ b/src/compiler/scala/tools/nsc/transform/Statics.scala @@ -4,7 +4,7 @@ package transform abstract class Statics extends Transform with ast.TreeDSL { import global._ - class StaticsTransformer extends Transformer { + trait StaticsTransformer extends Transformer { /** generate a static constructor with symbol fields inits, or an augmented existing static ctor */ def staticConstructor(body: List[Tree], localTyper: analyzer.Typer, pos: Position)(newStaticInits: List[Tree]): Tree = diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 81a465ef2f68..fcfcc8feb92e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -13,6 +13,7 @@ import scala.reflect.macros.runtime.AbortMacroException import scala.util.control.NonFatal import scala.tools.nsc.util.stackTraceString import scala.reflect.io.NoAbstractFile +import scala.reflect.internal.util.NoSourceFile trait ContextErrors { self: Analyzer => @@ -757,22 +758,18 @@ trait ContextErrors { } def DefDefinedTwiceError(sym0: Symbol, sym1: Symbol) = { + val addPref = s";\n the conflicting $sym1 was defined" + val bugNote = "\n Note: this may be due to a bug in the compiler involving wildcards in package objects" + // Most of this hard work is associated with SI-4893. val isBug = sym0.isAbstractType && sym1.isAbstractType && (sym0.name startsWith "_$") - val addendums = List( - if (sym0.associatedFile eq sym1.associatedFile) - Some("conflicting symbols both originated in file '%s'".format(sym0.associatedFile.canonicalPath)) - else if ((sym0.associatedFile ne NoAbstractFile) && (sym1.associatedFile ne NoAbstractFile)) - Some("conflicting symbols originated in files '%s' and '%s'".format(sym0.associatedFile.canonicalPath, sym1.associatedFile.canonicalPath)) - else None , - if (isBug) Some("Note: this may be due to a bug in the compiler involving wildcards in package objects") else None - ) - val addendum = addendums.flatten match { - case Nil => "" - case xs => xs.mkString("\n ", "\n ", "") - } + val addendum = ( + if (sym0.pos.source eq sym1.pos.source) s"$addPref at line ${sym1.pos.line}:${sym1.pos.column}" + else if (sym1.pos.source ne NoSourceFile) s"$addPref at line ${sym1.pos.line}:${sym1.pos.column} of '${sym1.pos.source.path}'" + else if (sym1.associatedFile ne NoAbstractFile) s"$addPref in '${sym1.associatedFile.canonicalPath}'" + else "") + (if (isBug) bugNote else "") - issueSymbolTypeError(sym0, sym1+" is defined twice" + addendum) + issueSymbolTypeError(sym0, s"$sym0 is defined twice$addendum") } // cyclic errors diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index c03094bc6a41..408b457d5b77 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -121,6 +121,7 @@ trait MethodSynthesis { } // TODO: see if we can link symbol creation & tree derivation by sharing the Field/Getter/Setter factories + // maybe we can at least reuse some variant of standardAccessors? 
def enterGetterSetter(tree: ValDef): Unit = { tree.symbol = if (tree.mods.isLazy) { @@ -131,15 +132,14 @@ trait MethodSynthesis { val getterSym = getter.createAndEnterSymbol() // Create the setter if necessary. - if (getter.needsSetter) - Setter(tree).createAndEnterSymbol() + if (getter.needsSetter) Setter(tree).createAndEnterSymbol() - // If the getter's abstract the tree gets the getter's symbol, - // otherwise, create a field (assume the getter requires storage). + // If the getter's abstract, the tree gets the getter's symbol, + // otherwise, create a field (we have to assume the getter requires storage for now). // NOTE: we cannot look at symbol info, since we're in the process of deriving them // (luckily, they only matter for lazy vals, which we've ruled out in this else branch, // and `doNotDeriveField` will skip them if `!mods.isLazy`) - if (Field.noFieldFor(tree)) getterSym setPos tree.pos + if (Field.noFieldFor(tree)) getterSym setPos tree.pos // TODO: why do setPos? `createAndEnterSymbol` already gave `getterSym` the position `tree.pos.focus` else enterStrictVal(tree) } @@ -282,14 +282,15 @@ trait MethodSynthesis { final def enclClass = basisSym.enclClass - /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */ - final def completer(sym: Symbol) = namerOf(sym).accessorTypeCompleter(tree, isSetter) + // There's no reliable way to detect all kinds of setters from flags or name!!! + // A BeanSetter's name does not end in `_=` -- it does begin with "set", but so could the getter + // for a regular Scala field... TODO: can we add a flag to distinguish getter/setter accessors? + final def completer(sym: Symbol) = namerOf(sym).accessorTypeCompleter(tree, this.isInstanceOf[DerivedSetter]) final def fieldSelection = Select(This(enclClass), basisSym) def derivedSym: Symbol = tree.symbol def derivedTree: Tree = EmptyTree - def isSetter = false def isDeferred = mods.isDeferred def validate() { } def createAndEnterSymbol(): MethodSymbol = { @@ -304,6 +305,7 @@ trait MethodSynthesis { result } + final def derive(initial: List[AnnotationInfo]): Tree = { validate() @@ -311,7 +313,9 @@ trait MethodSynthesis { // Annotations on ValDefs can be targeted towards the following: field, getter, setter, beanGetter, beanSetter, param. // The defaults are: // - (`val`-, `var`- or plain) constructor parameter annotations end up on the parameter, not on any other entity. - // - val/var member annotations solely end up on the underlying field. + // - val/var member annotations solely end up on the underlying field, except in traits (@since 2.12), + // where there is no field, and the getter thus holds annotations targetting both getter & field. + // As soon as there is a field/getter (in subclasses mixing in the trait), we triage the annotations. // // TODO: these defaults can be surprising for annotations not meant for accessors/fields -- should we revisit? // (In order to have `@foo val X` result in the X getter being annotated with `@foo`, foo needs to be meta-annotated with @getter) @@ -319,9 +323,11 @@ trait MethodSynthesis { case _: Param => annotationFilter(ParamTargetClass, defaultRetention = true) // By default annotations go to the field, except if the field is generated for a class parameter (PARAMACCESSOR). 
case _: Field => annotationFilter(FieldTargetClass, defaultRetention = !mods.isParamAccessor) + case _: BaseGetter if owner.isTrait => annotationFilter(List(FieldTargetClass, GetterTargetClass), defaultRetention = true) case _: BaseGetter => annotationFilter(GetterTargetClass, defaultRetention = false) case _: Setter => annotationFilter(SetterTargetClass, defaultRetention = false) case _: BeanSetter => annotationFilter(BeanSetterTargetClass, defaultRetention = false) + // TODO do bean getters need special treatment to collect field-targeting annotations in traits? case _: AnyBeanGetter => annotationFilter(BeanGetterTargetClass, defaultRetention = false) } @@ -329,21 +335,23 @@ trait MethodSynthesis { // should be propagated to this kind of accessor. derivedSym setAnnotations (initial filter annotFilter) + if (derivedSym.isSetter && owner.isTrait && !isDeferred) + derivedSym addAnnotation TraitSetterAnnotationClass + logDerived(derivedTree) } } + sealed trait DerivedGetter extends DerivedFromValDef { - // A getter must be accompanied by a setter if the ValDef is mutable. def needsSetter = mods.isMutable } sealed trait DerivedSetter extends DerivedFromValDef { - override def isSetter = true - private def setterParam = derivedSym.paramss match { + protected def setterParam = derivedSym.paramss match { case (p :: Nil) :: _ => p case _ => NoSymbol } - private def setterRhs = { + protected def setterRhs = { assert(!derivedSym.isOverloaded, s"Unexpected overloaded setter $derivedSym for $basisSym in $enclClass") if (Field.noFieldFor(tree) || derivedSym.isOverloaded) EmptyTree else Assign(fieldSelection, Ident(setterParam)) @@ -390,6 +398,7 @@ trait MethodSynthesis { override def derivedSym = if (Field.noFieldFor(tree)) basisSym else basisSym.getterIn(enclClass) private def derivedRhs = if (Field.noFieldFor(tree)) tree.rhs else fieldSelection + // TODO: more principled approach -- this is a bit bizarre private def derivedTpt = { // For existentials, don't specify a type for the getter, even one derived // from the symbol! This leads to incompatible existentials for the field and @@ -457,6 +466,7 @@ trait MethodSynthesis { def flagsMask = SetterFlags def flagsExtra = ACCESSOR + // TODO: double check logic behind need for name expansion in context of new fields phase override def derivedSym = basisSym.setterIn(enclClass) } @@ -464,17 +474,25 @@ trait MethodSynthesis { // No field for these vals (either never emitted or eliminated later on): // - abstract vals have no value we could store (until they become concrete, potentially) // - lazy vals of type Unit - // - [Emitted, later removed during AddInterfaces/Mixins] concrete vals in traits can't have a field - // - [Emitted, later removed during Constructors] a concrete val with a statically known value (Unit / ConstantType) + // - concrete vals in traits don't yield a field here either (their getter's RHS has the initial value) + // Constructors will move the assignment to the constructor, abstracting over the field using the field setter, + // and Fields will add a field to the class that mixes in the trait, implementing the accessors in terms of it + // - [Emitted, later removed during Constructors] a concrete val with a statically known value (ConstantType) // performs its side effect according to lazy/strict semantics, but doesn't need to store its value // each access will "evaluate" the RHS (a literal) again // We would like to avoid emitting unnecessary fields, but the required knowledge isn't available until after typer. 
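A schematic summary of which vals end up without a field under the conditions above (illustrative only):

```scala
trait T {
  val a: Int              // deferred: no field
  val b: Int = 1          // strict trait val: no field in the trait (the getter keeps the rhs)
  lazy val c: Int = 2     // lazy val in a trait: still gets a field here (cf. the neg/t5455.scala note)
}

class D {
  lazy val u: Unit = println("effect")  // unit-typed lazy val in a class: no field, only the bitmap
  val v: Int = 3                        // ordinary class val: field + accessors as before
}
```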
// The only way to avoid emitting & suppressing, is to not emit at all until we are sure to need the field, as dotty does. // NOTE: do not look at `vd.symbol` when called from `enterGetterSetter` (luckily, that call-site implies `!mods.isLazy`), + // similarly, the `def field` call-site breaks when you add `|| vd.symbol.owner.isTrait` (detected in test suite) // as the symbol info is in the process of being created then. // TODO: harmonize tree & symbol creation - // TODO: the `def field` call-site breaks when you add `|| vd.symbol.owner.isTrait` (detected in test suite) - def noFieldFor(vd: ValDef) = vd.mods.isDeferred || (vd.mods.isLazy && isUnitType(vd.symbol.info)) + // the middle `&& !owner.isTrait` is needed after `isLazy` because non-unit-typed lazy vals in traits still get a field -- see neg/t5455.scala + def noFieldFor(vd: ValDef) = (vd.mods.isDeferred + || (vd.mods.isLazy && !owner.isTrait && isUnitType(vd.symbol.info)) + || (owner.isTrait && !traitFieldFor(vd))) + + // TODO: never emit any fields in traits -- only use getter for lazy/presuper ones as well + private def traitFieldFor(vd: ValDef): Boolean = vd.mods.hasFlag(PRESUPER | LAZY) } case class Field(tree: ValDef) extends DerivedFromValDef { @@ -482,6 +500,9 @@ trait MethodSynthesis { def flagsMask = FieldFlags def flagsExtra = PrivateLocal + // TODO: override def createAndEnterSymbol (currently never called on Field) + // and do `enterStrictVal(tree)`, so that enterGetterSetter and addDerivedTrees can share some logic... + // handle lazy val first for now (we emit a Field even though we probably shouldn't...) override def derivedTree = if (mods.isLazy) copyValDef(tree)(mods = mods | flagsExtra, name = this.name, rhs = EmptyTree).setPos(tree.pos.focus) @@ -528,7 +549,10 @@ trait MethodSynthesis { } case class BooleanBeanGetter(tree: ValDef) extends BeanAccessor("is") with AnyBeanGetter { } case class BeanGetter(tree: ValDef) extends BeanAccessor("get") with AnyBeanGetter { } - case class BeanSetter(tree: ValDef) extends BeanAccessor("set") with DerivedSetter + case class BeanSetter(tree: ValDef) extends BeanAccessor("set") with DerivedSetter { + // TODO: document, motivate + override protected def setterRhs = Apply(Ident(tree.name.setterName), List(Ident(setterParam))) + } // No Symbols available. 
private def beanAccessorsFromNames(tree: ValDef) = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index caad4a907b00..784b43ab8431 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -6,6 +6,7 @@ package scala.tools.nsc package typechecker +import scala.annotation.tailrec import scala.collection.mutable import symtab.Flags._ import scala.language.postfixOps @@ -116,10 +117,10 @@ trait Namers extends MethodSynthesis { } // All lazy vals need accessors, including those owned by terms (e.g., in method) or private[this] in a class - def deriveAccessors(vd: ValDef) = vd.mods.isLazy || (owner.isClass && deriveAccessorsInClass(vd)) + def deriveAccessors(vd: ValDef) = (vd.mods.isLazy || owner.isTrait || (owner.isClass && deriveAccessorsInClass(vd))) private def deriveAccessorsInClass(vd: ValDef) = - !vd.mods.isPrivateLocal && // note, private[this] lazy vals do get accessors -- see outer disjunction of deriveAccessors + !vd.mods.isPrivateLocal && // note, private[this] lazy vals do get accessors -- see outer disjunction of deriveAccessors !(vd.name startsWith nme.OUTER) && // outer accessors are added later, in explicitouter !isEnumConstant(vd) // enums can only occur in classes, so only check here @@ -773,28 +774,31 @@ trait Namers extends MethodSynthesis { // this accomplishes anything, but performance is a non-consideration // on these flag checks so it can't hurt. def needsCycleCheck = sym.isNonClassType && !sym.isParameter && !sym.isExistential - logAndValidate(sym) { - val tp = typeSig(tree) - - findCyclicalLowerBound(tp) andAlso { sym => - if (needsCycleCheck) { - // neg/t1224: trait C[T] ; trait A { type T >: C[T] <: C[C[T]] } - // To avoid an infinite loop on the above, we cannot break all cycles - log(s"Reinitializing info of $sym to catch any genuine cycles") - sym reset sym.info - sym.initialize - } - } - sym setInfo { - if (sym.isJavaDefined) RestrictJavaArraysMap(tp) - else tp - } + + // logDefinition(sym) { + val tp = typeSig(tree) + + findCyclicalLowerBound(tp) andAlso { sym => if (needsCycleCheck) { - log(s"Needs cycle check: ${sym.debugLocationString}") - if (!typer.checkNonCyclic(tree.pos, tp)) - sym setInfo ErrorType + // neg/t1224: trait C[T] ; trait A { type T >: C[T] <: C[C[T]] } + // To avoid an infinite loop on the above, we cannot break all cycles + log(s"Reinitializing info of $sym to catch any genuine cycles") + sym reset sym.info + sym.initialize } } + sym setInfo { + if (sym.isJavaDefined) RestrictJavaArraysMap(tp) + else tp + } + if (needsCycleCheck) { + log(s"Needs cycle check: ${sym.debugLocationString}") + if (!typer.checkNonCyclic(tree.pos, tp)) + sym setInfo ErrorType + } + //} + + validate(sym) } def moduleClassTypeCompleter(tree: ModuleDef) = { @@ -807,15 +811,18 @@ trait Namers extends MethodSynthesis { /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */ def accessorTypeCompleter(tree: ValDef, isSetter: Boolean) = mkTypeCompleter(tree) { sym => - logAndValidate(sym) { - sym setInfo { - val tp = if (isSetter) MethodType(List(sym.newSyntheticValueParam(typeSig(tree))), UnitTpe) - else NullaryMethodType(typeSig(tree)) - pluginsTypeSigAccessor(tp, typer, tree, sym) - } - } + // typeSig calls valDefSig (because tree: ValDef) + // sym is an accessor, while tree is the field (which may have the same symbol as the getter, or maybe it's the field) + val sig = 
accessorSigFromFieldTp(sym, isSetter, typeSig(tree)) + + sym setInfo pluginsTypeSigAccessor(sig, typer, tree, sym) + + validate(sym) } + private def accessorSigFromFieldTp(sym: global.Symbol, isSetter: Boolean, tp: global.Type): global.Type with Product with Serializable = { + if (isSetter) MethodType(List(sym.newSyntheticValueParam(tp)), UnitTpe) else NullaryMethodType(tp) + } def selfTypeCompleter(tree: Tree) = mkTypeCompleter(tree) { sym => val selftpe = typer.typedType(tree).tpe sym setInfo { @@ -992,6 +999,19 @@ trait Namers extends MethodSynthesis { clazz.tpe_* } + + // make a java method type if meth.isJavaDefined + private def methodTypeFor(meth: Symbol, vparamSymss: List[List[Symbol]], restpe: Type) = { + def makeJavaMethodType(vparams: List[Symbol], restpe: Type) = { + vparams foreach (p => p setInfo objToAny(p.tpe)) + JavaMethodType(vparams, restpe) + } + if (vparamSymss.isEmpty) NullaryMethodType(restpe) + else if (meth.isJavaDefined) vparamSymss.foldRight(restpe)(makeJavaMethodType) + else vparamSymss.foldRight(restpe)(MethodType(_, _)) + } + + /** * The method type for `ddef`. * @@ -1009,166 +1029,140 @@ trait Namers extends MethodSynthesis { * to the non-skolems. */ private def methodSig(ddef: DefDef): Type = { - - // DEPMETTODO: do we need to skolemize value parameter symbols? - val DefDef(_, _, tparams, vparamss, tpt, _) = ddef val meth = owner val methOwner = meth.owner - val site = methOwner.thisType /* tparams already have symbols (created in enterDefDef/completerOf), namely the skolemized ones (created * by the PolyTypeCompleter constructor, and assigned to tparams). reenterTypeParams enters the type skolems * into scope and returns the non-skolems. */ val tparamSyms = typer.reenterTypeParams(tparams) - val tparamSkolems = tparams.map(_.symbol) - /* since the skolemized tparams are in scope, the TypeRefs in types of vparamSymss refer to the type skolems - * note that for parameters with missing types, `methodSig` reassigns types of these symbols (the parameter - * types from the overridden method). - */ - var vparamSymss = enterValueParams(vparamss) - /* * Creates a method type using tparamSyms and vparamsSymss as argument symbols and `respte` as result type. * All typeRefs to type skolems are replaced by references to the corresponding non-skolem type parameter, * so the resulting type is a valid external method type, it does not contain (references to) skolems. + * + * tparamSyms are deskolemized symbols -- TODO: check that their infos don't refer to method args? + * vparamss refer (if they do) to skolemized tparams */ - def thisMethodType(restpe: Type) = { - if (vparamSymss.lengthCompare(0) > 0) { // OPT fast path for methods of 0-1 parameter lists - val checkDependencies = new DependentTypeChecker(context)(this) - checkDependencies check vparamSymss - } - - val makeMethodType = (vparams: List[Symbol], restpe: Type) => { - // TODODEPMET: check that we actually don't need to do anything here - // new dependent method types: probably OK already, since 'enterValueParams' above - // enters them in scope, and all have a lazy type. so they may depend on other params. but: need to - // check that params only depend on ones in earlier sections, not the same. (done by checkDependencies, - // so re-use / adapt that) - if (meth.isJavaDefined) - // TODODEPMET necessary?? 
new dependent types: replace symbols in restpe with the ones in vparams - JavaMethodType(vparams map (p => p setInfo objToAny(p.tpe)), restpe) - else - MethodType(vparams, restpe) - } + def deskolemizedPolySig(vparamSymss: List[List[Symbol]], restpe: Type) = + GenPolyType(tparamSyms, methodTypeFor(meth, vparamSymss, restpe).substSym(tparamSkolems, tparamSyms)) - val res = GenPolyType( - tparamSyms, // deSkolemized symbols -- TODO: check that their infos don't refer to method args? - if (vparamSymss.isEmpty) NullaryMethodType(restpe) - // vparamss refer (if they do) to skolemized tparams - else (vparamSymss :\ restpe) (makeMethodType) - ) - res.substSym(tparamSkolems, tparamSyms) + if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) { + tpt defineType context.enclClass.owner.tpe_* + tpt setPos meth.pos.focus } + /* since the skolemized tparams are in scope, the TypeRefs in types of vparamSymss refer to the type skolems + * note that for parameters with missing types, `methodSig` reassigns types of these symbols (the parameter + * types from the overridden method). + */ + val vparamSymss: List[List[Symbol]] = enterValueParams(vparamss) + + val resTpGiven = + if (tpt.isEmpty) WildcardType + else typer.typedType(tpt).tpe + + + // ignore missing types unless we can look to overridden method to recover the missing information + val canOverride = methOwner.isClass && !meth.isConstructor + val inferResTp = canOverride && tpt.isEmpty + val inferArgTp = canOverride && settings.YmethodInfer && mexists(vparamss)(_.tpt.isEmpty) + + /* - * Creates a schematic method type which has WildcardTypes for non specified - * return or parameter types. For instance, in `def f[T](a: T, b) = ...`, the - * type schema is + * Find the overridden method that matches a schematic method type, + * which has WildcardTypes for unspecified return or parameter types. + * For instance, in `def f[T](a: T, b) = ...`, the type schema is * * PolyType(T, MethodType(List(a: T, b: WildcardType), WildcardType)) * * where T are non-skolems. + * + * NOTE: mutates info of symbol of vparamss that don't specify a type */ - def methodTypeSchema(resTp: Type) = { - // for all params without type set WildcaradType - mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType) - thisMethodType(resTp) - } - - def overriddenSymbol(resTp: Type) = { - lazy val schema: Type = methodTypeSchema(resTp) // OPT create once. Must be lazy to avoid cycles in neg/t5093.scala - intersectionType(methOwner.info.parents).nonPrivateMember(meth.name).filter { sym => - sym != NoSymbol && (site.memberType(sym) matches schema) + val methodSigApproxUnknownArgs: () => Type = + if (!inferArgTp) () => deskolemizedPolySig(vparamSymss, resTpGiven) + else () => { + // for all params without type set WildcardType + mforeach(vparamss)(v => if (v.tpt.isEmpty) v.symbol setInfo WildcardType) + // must wait to call deskolemizedPolySig until we've temporarily set the WildcardType info for the vparamSymss + // (Otherwise, valDefSig will complain about missing argument types.) 
+ deskolemizedPolySig(vparamSymss, resTpGiven) } - } - // TODO: see whether this or something similar would work instead: - // def overriddenSymbol = meth.nextOverriddenSymbol + // Must be lazy about the schema to avoid cycles in neg/t5093.scala + val overridden = + if (!canOverride) NoSymbol + else safeNextOverriddenSymbolLazySchema(meth, methodSigApproxUnknownArgs) /* - * If `meth` doesn't have an explicit return type, extracts the return type from the method - * overridden by `meth` (if there's an unique one). This type is lateron used as the expected + * If `meth` doesn't have an explicit return type, extract the return type from the method + * overridden by `meth` (if there's an unique one). This type is later used as the expected * type for computing the type of the rhs. The resulting type references type skolems for * type parameters (consistent with the result of `typer.typedType(tpt).tpe`). * - * As a first side effect, this method assigns a MethodType constructed using this - * return type to `meth`. This allows omitting the result type for recursive methods. + * If the result type is missing, assign a MethodType to `meth` that's constructed using this return type. + * This allows omitting the result type for recursive methods. * - * As another side effect, this method also assigns parameter types from the overridden - * method to parameters of `meth` that have missing types (the parser accepts missing - * parameter types under -Yinfer-argument-types). + * Missing parameter types are also recovered from the overridden method (by mutating the info of their symbols). + * (The parser accepts missing parameter types under -Yinfer-argument-types.) */ - def typesFromOverridden(methResTp: Type): Type = { - val overridden = overriddenSymbol(methResTp) - if (overridden == NoSymbol || overridden.isOverloaded) { - methResTp - } else { + val resTpFromOverride = + if (!(inferArgTp || inferResTp) || overridden == NoSymbol || overridden.isOverloaded) resTpGiven + else { overridden.cookJavaRawInfo() // #3404 xform java rawtypes into existentials - var overriddenTp = site.memberType(overridden) match { - case PolyType(tparams, rt) => rt.substSym(tparams, tparamSkolems) - case mt => mt + + val (overriddenTparams, overriddenTp) = + methOwner.thisType.memberType(overridden) match { + case PolyType(tparams, mt) => (tparams, mt.substSym(tparams, tparamSkolems)) + case mt => (Nil, mt) } - for (vparams <- vparamss) { - var overriddenParams = overriddenTp.params - for (vparam <- vparams) { + + // try to derive empty parameter types from the overridden method's argument types + if (inferArgTp) { + val overriddenSyms = overriddenTparams ++ overridden.paramss.flatten + val ourSyms = tparamSkolems ++ vparamSymss.flatten + foreach2(vparamss, overridden.paramss) { foreach2(_, _) { (vparam, overriddenParam) => + // println(s"infer ${vparam.symbol} from ${overriddenParam}? ${vparam.tpt}") if (vparam.tpt.isEmpty) { - val overriddenParamTp = overriddenParams.head.tpe + val overriddenParamTp = overriddenParam.tpe.substSym(overriddenSyms, ourSyms) + // println(s"inferred ${vparam.symbol} : $overriddenParamTp") // references to type parameters in overriddenParamTp link to the type skolems, so the // assigned type is consistent with the other / existing parameter types in vparamSymss. 
vparam.symbol setInfo overriddenParamTp vparam.tpt defineType overriddenParamTp setPos vparam.pos.focus } - overriddenParams = overriddenParams.tail - } - overriddenTp = overriddenTp.resultType + }} } - // SI-7668 Substitute parameters from the parent method with those of the overriding method. - overriddenTp = overriddenTp.substSym(overridden.paramss.flatten, vparamss.flatten.map(_.symbol)) + @tailrec @inline def applyFully(tp: Type, paramss: List[List[Symbol]]): Type = + if (paramss.isEmpty) tp match { + case NullaryMethodType(rtpe) => rtpe + case MethodType(Nil, rtpe) => rtpe + case tp => tp + } + else applyFully(tp.resultType(paramss.head.map(_.tpe)), paramss.tail) - overriddenTp match { - case NullaryMethodType(rtpe) => overriddenTp = rtpe - case MethodType(List(), rtpe) => overriddenTp = rtpe - case _ => - } + if (inferResTp) { + // SI-7668 Substitute parameters from the parent method with those of the overriding method. + val overriddenResTp = applyFully(overriddenTp, vparamSymss).substSym(overriddenTparams, tparamSkolems) - if (tpt.isEmpty) { // provisionally assign `meth` a method type with inherited result type // that way, we can leave out the result type even if method is recursive. - meth setInfo thisMethodType(overriddenTp) - overriddenTp - } else { - methResTp - } + meth setInfo deskolemizedPolySig(vparamSymss, overriddenResTp) + overriddenResTp + } else resTpGiven } - } - - if (tpt.isEmpty && meth.name == nme.CONSTRUCTOR) { - tpt defineType context.enclClass.owner.tpe_* - tpt setPos meth.pos.focus - } - - val methResTp = if (tpt.isEmpty) WildcardType else typer.typedType(tpt).tpe - val resTpFromOverride = if (methOwner.isClass && (tpt.isEmpty || mexists(vparamss)(_.tpt.isEmpty))) { - typesFromOverridden(methResTp) - } else { - methResTp - } - - // Add a () parameter section if this overrides some method with () parameters - if (methOwner.isClass && vparamss.isEmpty && - overriddenSymbol(methResTp).alternatives.exists(_.info.isInstanceOf[MethodType])) { - vparamSymss = ListOfNil - } // issue an error for missing parameter types + // (computing resTpFromOverride may have required inferring some, meanwhile) mforeach(vparamss) { vparam => if (vparam.tpt.isEmpty) { MissingParameterOrValTypeError(vparam) @@ -1176,13 +1170,9 @@ trait Namers extends MethodSynthesis { } } - val overridden = { - val isConstr = meth.isConstructor - if (isConstr || !methOwner.isClass) NoSymbol else overriddenSymbol(methResTp) - } - val hasDefaults = mexists(vparamss)(_.symbol.hasDefault) || mexists(overridden.paramss)(_.hasDefault) - if (hasDefaults) - addDefaultGetters(meth, ddef, vparamss, tparams, overridden) + // If we, or the overridden method has defaults, add getters for them + if (mexists(vparamss)(_.symbol.hasDefault) || mexists(overridden.paramss)(_.hasDefault)) + addDefaultGetters(meth, ddef, vparamss, tparams, overridden) // fast track macros, i.e. 
macros defined inside the compiler, are hardcoded // hence we make use of that and let them have whatever right-hand side they need @@ -1193,27 +1183,35 @@ trait Namers extends MethodSynthesis { // because @macroImpl annotation only gets assigned during typechecking // otherwise macro defs wouldn't be able to robustly coexist with their clients // because a client could be typechecked before a macro def that it uses - if (meth.isMacro) { - typer.computeMacroDefType(ddef, resTpFromOverride) + if (meth.isMacro) typer.computeMacroDefType(ddef, resTpFromOverride) // note: `pt` argument ignored in `computeMacroDefType` + + if (vparamSymss.lengthCompare(0) > 0) { // OPT fast path for methods of 0-1 parameter lists + val checkDependencies = new DependentTypeChecker(context)(this) + checkDependencies check vparamSymss } - val res = thisMethodType({ - val rt = ( - if (!tpt.isEmpty) { - methResTp - } else { - // return type is inferred, we don't just use resTpFromOverride. Here, C.f has type String: - // trait T { def f: Object }; class C <: T { def f = "" } - // using resTpFromOverride as expected type allows for the following (C.f has type A): - // trait T { def f: A }; class C <: T { implicit def b2a(t: B): A = ???; def f = new B } - assignTypeToTree(ddef, typer, resTpFromOverride) - }) + val resTp = { + // When return type is inferred, we don't just use resTpFromOverride -- it must be packed and widened. + // Here, C.f has type String: + // trait T { def f: Object }; class C extends T { def f = "" } + // using resTpFromOverride as expected type allows for the following (C.f has type A): + // trait T { def f: A }; class C extends T { implicit def b2a(t: B): A = ???; def f = new B } + val resTpComputedUnlessGiven = + if (tpt.isEmpty) assignTypeToTree(ddef, typer, resTpFromOverride) + else resTpGiven + // #2382: return type of default getters are always @uncheckedVariance - if (meth.hasDefault) - rt.withAnnotation(AnnotationInfo(uncheckedVarianceClass.tpe, List(), List())) - else rt - }) - pluginsTypeSig(res, typer, ddef, methResTp) + if (meth.hasDefault) resTpComputedUnlessGiven.withAnnotation(AnnotationInfo(uncheckedVarianceClass.tpe, List(), List())) + else resTpComputedUnlessGiven + } + + // Add a () parameter section if this overrides some method with () parameters + val vparamSymssOrEmptyParamsFromOverride = + if (overridden != NoSymbol && vparamSymss.isEmpty && overridden.alternatives.exists(_.info.isInstanceOf[MethodType])) ListOfNil // NOTEL must check `.info.isInstanceOf[MethodType]`, not `.isMethod`! 
+ else vparamSymss + + val methSig = deskolemizedPolySig(vparamSymssOrEmptyParamsFromOverride, resTp) + pluginsTypeSig(methSig, typer, ddef, resTpGiven) } /** @@ -1369,19 +1367,76 @@ trait Namers extends MethodSynthesis { private def valDefSig(vdef: ValDef) = { val ValDef(_, _, tpt, rhs) = vdef - val result = if (tpt.isEmpty) { - if (rhs.isEmpty) { - MissingParameterOrValTypeError(tpt) - ErrorType - } - else assignTypeToTree(vdef, typer, WildcardType) - } else { - typer.typedType(tpt).tpe - } + val result = + if (tpt.isEmpty) { + if (rhs.isEmpty) { + MissingParameterOrValTypeError(tpt) + ErrorType + } else { + // enterGetterSetter assigns the getter's symbol to a ValDef when there's no underlying field + // (a deferred val or most vals defined in a trait -- see Field.noFieldFor) + val isGetter = vdef.symbol hasFlag ACCESSOR + + val pt = { + val valOwner = owner.owner + // there's no overriding outside of classes, and we didn't use to do this in 2.11, so provide opt-out + if (valOwner.isClass && settings.isScala212) { + // normalize to getter so that we correctly consider a val overriding a def + // (a val's name ends in a " ", so can't compare to def) + val overridingSym = if (isGetter) vdef.symbol else vdef.symbol.getterIn(valOwner) + + // We're called from an accessorTypeCompleter, which is completing the info for the accessor's symbol, + // which may or may not be `vdef.symbol` (see isGetter above) + val overridden = safeNextOverriddenSymbol(overridingSym) + + if (overridden == NoSymbol || overridden.isOverloaded) WildcardType + else valOwner.thisType.memberType(overridden).resultType + } else WildcardType + } + + def patchSymInfo(tp: Type): Unit = + if (pt ne WildcardType) // no patching up to do if we didn't infer a prototype + vdef.symbol setInfo (if (isGetter) NullaryMethodType(tp) else tp) + + patchSymInfo(pt) + + // derives the val's result type from type checking its rhs under the expected type `pt` + // vdef.tpt is mutated, and `vdef.tpt.tpe` is `assignTypeToTree`'s result + val tptFromRhsUnderPt = assignTypeToTree(vdef, typer, pt) + + // need to re-align with assignTypeToTree, as the type we're returning from valDefSig (tptFromRhsUnderPt) + // may actually go to the accessor, not the valdef (and if assignTypeToTree returns a subtype of `pt`, + // we would be out of synch between field and its accessors), and thus the type completer won't + // fix the symbol's info for us -- we set it to tmpInfo above, which may need to be improved to tptFromRhsUnderPt + if (!isGetter) patchSymInfo(tptFromRhsUnderPt) + + tptFromRhsUnderPt + } + } else typer.typedType(tpt).tpe + +// println(s"val: $result / ${vdef.tpt.tpe} / ") + pluginsTypeSig(result, typer, vdef, if (tpt.isEmpty) WildcardType else result) + } + // Pretend we're an erroneous symbol, for now, so that we match while finding the overridden symbol, + // but are not considered during implicit search. + private def safeNextOverriddenSymbol(sym: Symbol, schema: Type = ErrorType): Symbol = { + val savedInfo = sym.rawInfo + val savedFlags = sym.rawflags + try { + sym setInfo schema + sym.nextOverriddenSymbol + } finally { + sym setInfo savedInfo // setInfo resets the LOCKED flag, so restore saved flags as well + sym.rawflags = savedFlags + } } + private def safeNextOverriddenSymbolLazySchema(sym: Symbol, schema: () => Type): Symbol = + safeNextOverriddenSymbol(sym, new LazyType { override def complete(sym: Symbol): Unit = sym setInfo schema() }) + + //@M! 
an abstract type definition (abstract type member/type parameter) // may take type parameters, which are in scope in its bounds private def typeDefSig(tdef: TypeDef) = { @@ -1560,10 +1615,6 @@ trait Namers extends MethodSynthesis { sym => "[define] >> " + sym.flagString + " " + sym.fullLocationString, sym => "[define] << " + sym ) - private def logAndValidate(sym: Symbol)(body: => Unit) { - logDefinition(sym)(body) - validate(sym) - } /** Convert Java generic array type T[] to (T with Object)[] * (this is necessary because such arrays have a representation which is incompatible diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index d1764ea4829e..0eae1ce41987 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -298,16 +298,29 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans def infoString(sym: Symbol) = infoString0(sym, sym.owner != clazz) def infoStringWithLocation(sym: Symbol) = infoString0(sym, true) - def infoString0(sym: Symbol, showLocation: Boolean) = { - val sym1 = analyzer.underlyingSymbol(sym) - sym1.toString() + + def infoString0(member: Symbol, showLocation: Boolean) = { + val underlying = // not using analyzer.underlyingSymbol(member) because we should get rid of it + if (!(member hasFlag ACCESSOR)) member + else member.accessed match { + case field if field.exists => field + case _ if member.isSetter => member.getterIn(member.owner) + case _ => member + } + + def memberInfo = + self.memberInfo(underlying) match { + case getterTp if underlying.isGetter => getterTp.resultType + case tp => tp + } + + underlying.toString() + (if (showLocation) - sym1.locationString + - (if (sym1.isAliasType) ", which equals "+self.memberInfo(sym1) - else if (sym1.isAbstractType) " with bounds"+self.memberInfo(sym1) - else if (sym1.isModule) "" - else if (sym1.isTerm) " of type "+self.memberInfo(sym1) - else "") + underlying.locationString + + (if (underlying.isAliasType) s", which equals $memberInfo" + else if (underlying.isAbstractType) s" with bounds$memberInfo" + else if (underlying.isModule) "" + else if (underlying.isTerm) s" of type $memberInfo" + else "") else "") } @@ -321,7 +334,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans def memberTp = lowType def otherTp = highType - debuglog("Checking validity of %s overriding %s".format(member.fullLocationString, other.fullLocationString)) +// debuglog(s"Checking validity of ${member.fullLocationString} overriding ${other.fullLocationString}") def noErrorType = !pair.isErroneous def isRootOrNone(sym: Symbol) = sym != null && sym.isRoot || sym == NoSymbol @@ -346,9 +359,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans analyzer.foundReqMsg(member.tpe, other.tpe) else "" - "overriding %s;\n %s %s%s".format( - infoStringWithLocation(other), infoString(member), msg, addendum - ) + s"overriding ${infoStringWithLocation(other)};\n ${infoString(member)} $msg$addendum" } def emitOverrideError(fullmsg: String) { if (member.owner == clazz) reporter.error(member.pos, fullmsg) @@ -439,9 +450,11 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } else if (other.isAbstractOverride && other.isIncompleteIn(clazz) && !member.isAbstractOverride) { overrideError("needs `abstract override' modifiers") } - else if (member.isAnyOverride && (other hasFlag ACCESSOR) && 
other.accessed.isVariable && !other.accessed.isLazy) { - // !?! this is not covered by the spec. We need to resolve this either by changing the spec or removing the test here. - // !!! is there a !?! convention? I'm !!!ing this to make sure it turns up on my searches. + else if (member.isAnyOverride && (other hasFlag ACCESSOR) && !(other hasFlag STABLE)) { + // The check above used to look at `field` == `other.accessed`, ensuring field.isVariable && !field.isLazy, + // which I think is identical to the more direct `!(other hasFlag STABLE)` (given that `other` is a method). + // Also, we're moving away from (looking at) underlying fields (vals in traits no longer have them, to begin with) + // TODO: this is not covered by the spec. We need to resolve this either by changing the spec or removing the test here. if (!settings.overrideVars) overrideError("cannot override a mutable variable") } @@ -456,7 +469,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } else if (member.isValue && member.isLazy && other.isValue && !other.isSourceMethod && !other.isDeferred && !other.isLazy) { overrideError("cannot override a concrete non-lazy value") - } else if (other.isValue && other.isLazy && !other.isSourceMethod && !other.isDeferred && + } else if (other.isValue && other.isLazy && !other.isSourceMethod && !other.isDeferred && // !(other.hasFlag(MODULE) && other.hasFlag(PACKAGE | JAVA)) && other.hasFlag(LAZY) && (!other.isMethod || other.hasFlag(STABLE)) && !other.hasFlag(DEFERRED) member.isValue && !member.isLazy) { overrideError("must be declared lazy to override a concrete lazy value") } else if (other.isDeferred && member.isTermMacro && member.extendedOverriddenSymbols.forall(_.isDeferred)) { // (1.9) @@ -547,7 +560,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } def checkOverrideDeprecated() { - if (other.hasDeprecatedOverridingAnnotation && !member.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation)) { + if (other.hasDeprecatedOverridingAnnotation && !(member.hasDeprecatedOverridingAnnotation || member.ownerChain.exists(x => x.isDeprecated || x.hasBridgeAnnotation))) { val version = other.deprecatedOverridingVersion.getOrElse("") val since = if (version.isEmpty) version else s" (since $version)" val message = other.deprecatedOverridingMessage map (msg => s": $msg") getOrElse "" @@ -651,7 +664,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans for (member <- missing) { def undefined(msg: String) = abstractClassError(false, infoString(member) + " is not defined" + msg) - val underlying = analyzer.underlyingSymbol(member) + val underlying = analyzer.underlyingSymbol(member) // TODO: don't use this method // Give a specific error message for abstract vars based on why it fails: // It could be unimplemented, have only one accessor, or be uninitialized. @@ -1133,22 +1146,16 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans case _ => } - // SI-6276 warn for `def foo = foo` or `val bar: X = bar`, which come up more frequently than you might think. - def checkInfiniteLoop(valOrDef: ValOrDefDef) { - def callsSelf = valOrDef.rhs match { - case t @ (Ident(_) | Select(This(_), _)) => - t hasSymbolWhich (_.accessedOrSelf == valOrDef.symbol) - case _ => false + // SI-6276 warn for trivial recursion, such as `def foo = foo` or `val bar: X = bar`, which come up more frequently than you might think. + // TODO: Move to abide rule. 
Also, this does not check that the def is final or not overridden, for example + def checkInfiniteLoop(sym: Symbol, rhs: Tree): Unit = + if (!sym.isValueParameter && sym.paramss.isEmpty) { + rhs match { + case t@(Ident(_) | Select(This(_), _)) if t hasSymbolWhich (_.accessedOrSelf == sym) => + reporter.warning(rhs.pos, s"${sym.fullLocationString} does nothing other than call itself recursively") + case _ => + } } - val trivialInfiniteLoop = ( - !valOrDef.isErroneous - && !valOrDef.symbol.isValueParameter - && valOrDef.symbol.paramss.isEmpty - && callsSelf - ) - if (trivialInfiniteLoop) - reporter.warning(valOrDef.rhs.pos, s"${valOrDef.symbol.fullLocationString} does nothing other than call itself recursively") - } // Transformation ------------------------------------------------------------ @@ -1659,16 +1666,19 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans // inside annotations. applyRefchecksToAnnotations(tree) var result: Tree = tree match { - case vod: ValOrDefDef => + // NOTE: a val in a trait is now a DefDef, with the RHS being moved to an Assign in Constructors + case tree: ValOrDefDef => checkDeprecatedOvers(tree) - checkInfiniteLoop(vod) + if (!tree.isErroneous) + checkInfiniteLoop(tree.symbol, tree.rhs) + if (settings.warnNullaryUnit) checkNullaryMethodReturnType(sym) if (settings.warnInaccessible) { if (!sym.isConstructor && !sym.isEffectivelyFinalOrNotOverridden && !sym.isSynthetic) checkAccessibilityOfReferencedTypes(tree) } - vod match { + tree match { case dd: DefDef => checkByNameRightAssociativeDef(dd) diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 5f2643cb2579..bee327c760d0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -133,12 +133,14 @@ trait TypeDiagnostics { alternatives(tree) map (x => " " + methodTypeErrorString(x)) mkString ("", " \n", "\n") /** The symbol which the given accessor represents (possibly in part). - * This is used for error messages, where we want to speak in terms - * of the actual declaration or definition, not in terms of the generated setters - * and getters. - */ + * This is used for error messages, where we want to speak in terms + * of the actual declaration or definition, not in terms of the generated setters + * and getters. + * + * TODO: is it wise to create new symbols simply to generate error message? is this safe in interactive/resident mode? 
+ */ def underlyingSymbol(member: Symbol): Symbol = - if (!member.hasAccessorFlag) member + if (!member.hasAccessorFlag || member.owner.isTrait) member else if (!member.isDeferred) member.accessed else { val getter = if (member.isSetter) member.getterIn(member.owner) else member @@ -532,8 +534,8 @@ trait TypeDiagnostics { val what = ( if (sym.isDefaultGetter) "default argument" else if (sym.isConstructor) "constructor" - else if (sym.isVar || sym.isGetter && sym.accessed.isVar) "var" - else if (sym.isVal || sym.isGetter && sym.accessed.isVal || sym.isLazy) "val" + else if (sym.isVar || sym.isGetter && (sym.accessed.isVar || (sym.owner.isTrait && !sym.hasFlag(STABLE)))) "var" + else if (sym.isVal || sym.isGetter && (sym.accessed.isVal || (sym.owner.isTrait && sym.hasFlag(STABLE))) || sym.isLazy) "val" else if (sym.isSetter) "setter" else if (sym.isMethod) "method" else if (sym.isModule) "object" diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index ba104fb7a6b1..2bbf8ed74e3a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1360,7 +1360,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper notAllowed(s"redefinition of $name method. See SIP-15, criterion 4.") else if (stat.symbol != null && stat.symbol.isParamAccessor) notAllowed("additional parameter") + // concrete accessor (getter) in trait corresponds to a field definition (neg/anytrait.scala) + // TODO: only reject accessors that actually give rise to field (e.g., a constant-type val is fine) + else if (!isValueClass && stat.symbol.isAccessor && !stat.symbol.isDeferred) + notAllowed("field definition") checkEphemeralDeep.traverse(rhs) + // for value class or "exotic" vals in traits + // (traits don't receive ValDefs for regular vals until fields phase -- well, except for early initialized/lazy vals) case _: ValDef => notAllowed("field definition") case _: ModuleDef => @@ -4219,7 +4225,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // if (varsym.isVariable || // // setter-rewrite has been done above, so rule out methods here, but, wait a minute, why are we assigning to non-variables after erasure?! // (phase.erasedTypes && varsym.isValue && !varsym.isMethod)) { - if (varsym.isVariable || varsym.isValue && phase.erasedTypes) { + if (varsym.isVariable || varsym.isValue && phase.assignsFields) { val rhs1 = typedByValueExpr(rhs, lhs1.tpe) treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitTpe } diff --git a/src/library/scala/deprecatedInheritance.scala b/src/library/scala/deprecatedInheritance.scala index bd5daf5de015..994eac9ed839 100644 --- a/src/library/scala/deprecatedInheritance.scala +++ b/src/library/scala/deprecatedInheritance.scala @@ -8,6 +8,8 @@ package scala +import scala.annotation.meta._ + /** An annotation that designates that inheriting from a class is deprecated. * * This is usually done to warn about a non-final class being made final in a future version. 
@@ -41,4 +43,5 @@ package scala * @see [[scala.deprecatedOverriding]] * @see [[scala.deprecatedName]] */ +@getter @setter @beanGetter @beanSetter class deprecatedInheritance(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/deprecatedOverriding.scala b/src/library/scala/deprecatedOverriding.scala index 46639986c02f..5be6830b2752 100644 --- a/src/library/scala/deprecatedOverriding.scala +++ b/src/library/scala/deprecatedOverriding.scala @@ -8,6 +8,8 @@ package scala +import scala.annotation.meta._ + /** An annotation that designates that overriding a member is deprecated. * * Overriding such a member in a sub-class then generates a warning. @@ -42,4 +44,5 @@ package scala * @see [[scala.deprecatedInheritance]] * @see [[scala.deprecatedName]] */ +@getter @setter @beanGetter @beanSetter class deprecatedOverriding(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala index fa19103d0c61..d58cabf3d74c 100644 --- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala +++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala @@ -305,10 +305,13 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => } /** The default kind of members to which this annotation is attached. - * For instance, for scala.deprecated defaultTargets = - * List(getter, setter, beanGetter, beanSetter). - */ - def defaultTargets = symbol.annotations map (_.symbol) filter isMetaAnnotation + * For instance, for scala.deprecated defaultTargets = + * List(getter, setter, beanGetter, beanSetter). + * + * NOTE: have to call symbol.initialize, since we won't get any annotations if the symbol hasn't yet been completed + */ + def defaultTargets = symbol.initialize.annotations map (_.symbol) filter isMetaAnnotation + // Test whether the typeSymbol of atp conforms to the given class. def matches(clazz: Symbol) = !symbol.isInstanceOf[StubSymbol] && (symbol isNonBottomSubClass clazz) // All subtrees of all args are considered. diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 3dec73da5801..35ec80901e66 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -674,8 +674,10 @@ trait Definitions extends api.StandardDefinitions { // Note that these call .dealiasWiden and not .normalize, the latter of which // tends to change the course of events by forcing types. 
def isFunctionType(tp: Type) = isFunctionTypeDirect(tp.dealiasWiden) + // the number of arguments expected by the function described by `tp` (a FunctionN or SAM type), // or `-1` if `tp` does not represent a function type or SAM + // for use during typers (after fields, samOf will be confused by abstract accessors for trait fields) def functionArityFromType(tp: Type) = { val dealiased = tp.dealiasWiden if (isFunctionTypeDirect(dealiased)) dealiased.typeArgs.length - 1 @@ -685,16 +687,6 @@ trait Definitions extends api.StandardDefinitions { } } - // the result type of a function or corresponding SAM type - def functionResultType(tp: Type): Type = { - val dealiased = tp.dealiasWiden - if (isFunctionTypeDirect(dealiased)) dealiased.typeArgs.last - else samOf(tp) match { - case samSym if samSym.exists => tp.memberInfo(samSym).resultType.deconst - case _ => NoType - } - } - // the SAM's parameters and the Function's formals must have the same length // (varargs etc don't come into play, as we're comparing signatures, not checking an application) def samMatchesFunctionBasedOnArity(sam: Symbol, formals: List[Any]): Boolean = diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index e06decea6d25..d088150db63f 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -169,6 +169,11 @@ class Flags extends ModifierFlags { final val SYNCHRONIZED = 1L << 45 // symbol is a method which should be marked ACC_SYNCHRONIZED + final val SYNTHESIZE_IMPL_IN_SUBCLASS = 1L << 50 // used in fields phase to indicate this accessor should receive an implementation in a subclass + + // flags used strictly internally in the Fields phase (info/tree transform): + final val NEEDS_TREES = 1L << 59 // this symbol needs a tree. (distinct from SYNTHESIZE_IMPL_IN_SUBCLASS) + // ------- shift definitions ------------------------------------------------------- // // Flags from 1L to (1L << 50) are normal flags. @@ -257,7 +262,8 @@ class Flags extends ModifierFlags { /** These modifiers appear in TreePrinter output. */ final val PrintableFlags = ExplicitFlags | BridgeFlags | LOCAL | SYNTHETIC | STABLE | CASEACCESSOR | MACRO | - ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | STATIC | SPECIALIZED | SYNCHRONIZED | ARTIFACT + ACCESSOR | SUPERACCESSOR | PARAMACCESSOR | STATIC | SPECIALIZED | SYNCHRONIZED | ARTIFACT | + SYNTHESIZE_IMPL_IN_SUBCLASS | NEEDS_TREES /** When a symbol for a field is created, only these flags survive * from Modifiers. 
Others which may be applied at creation time are: @@ -442,7 +448,7 @@ class Flags extends ModifierFlags { case JAVA_DEFAULTMETHOD => "" // (1L << 47) case JAVA_ENUM => "" // (1L << 48) case JAVA_ANNOTATION => "" // (1L << 49) - case 0x4000000000000L => "" // (1L << 50) + case SYNTHESIZE_IMPL_IN_SUBCLASS => "" // (1L << 50) case `lateDEFERRED` => "" // (1L << 51) case `lateFINAL` => "" // (1L << 52) case `lateMETHOD` => "" // (1L << 53) @@ -451,7 +457,7 @@ class Flags extends ModifierFlags { case `notPROTECTED` => "" // (1L << 56) case `notOVERRIDE` => "" // (1L << 57) case `notPRIVATE` => "" // (1L << 58) - case 0x800000000000000L => "" // (1L << 59) + case NEEDS_TREES => "" // (1L << 59) case 0x1000000000000000L => "" // (1L << 60) case 0x2000000000000000L => "" // (1L << 61) case 0x4000000000000000L => "" // (1L << 62) diff --git a/src/reflect/scala/reflect/internal/Phase.scala b/src/reflect/scala/reflect/internal/Phase.scala index a761f686e659..f56c41d71c3b 100644 --- a/src/reflect/scala/reflect/internal/Phase.scala +++ b/src/reflect/scala/reflect/internal/Phase.scala @@ -47,6 +47,8 @@ abstract class Phase(val prev: Phase) { final val specialized: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "specialize" || prev.specialized) final val refChecked: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "refchecks" || prev.refChecked) + // are we past the fields phase, so that we should allow writing to vals (as part of type checking trait setters) + final val assignsFields: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "fields" || prev.assignsFields) /** This is used only in unsafeTypeParams, and at this writing is * overridden to false in parser, namer, typer, and erasure. (And NoPhase.) diff --git a/src/reflect/scala/reflect/internal/ReificationSupport.scala b/src/reflect/scala/reflect/internal/ReificationSupport.scala index 30f2efd7e3d1..33ca78b43931 100644 --- a/src/reflect/scala/reflect/internal/ReificationSupport.scala +++ b/src/reflect/scala/reflect/internal/ReificationSupport.scala @@ -285,6 +285,7 @@ trait ReificationSupport { self: SymbolTable => val (gvdefs, etdefs) = rawEdefs.partition(treeInfo.isEarlyValDef) val (fieldDefs, UnCtor(ctorMods, ctorVparamss, lvdefs) :: body) = rest.splitAt(indexOfCtor(rest)) val evdefs = gvdefs.zip(lvdefs).map { + // TODO: in traits, early val defs are defdefs case (gvdef @ ValDef(_, _, tpt: TypeTree, _), ValDef(_, _, _, rhs)) => copyValDef(gvdef)(tpt = tpt.original, rhs = rhs) case (tr1, tr2) => diff --git a/src/reflect/scala/reflect/internal/SymbolPairs.scala b/src/reflect/scala/reflect/internal/SymbolPairs.scala index a52d2d851094..320c8146962d 100644 --- a/src/reflect/scala/reflect/internal/SymbolPairs.scala +++ b/src/reflect/scala/reflect/internal/SymbolPairs.scala @@ -30,27 +30,6 @@ abstract class SymbolPairs { val global: SymbolTable import global._ - /** Type operations relative to a prefix. All operations work on Symbols, - * and the types are the member types of those symbols in the prefix. 
- */ - class RelativeTo(val prefix: Type) { - def this(clazz: Symbol) = this(clazz.thisType) - import scala.language.implicitConversions // geez, it even has to hassle me when it's private - private implicit def symbolToType(sym: Symbol): Type = prefix memberType sym - - def erasureOf(sym: Symbol): Type = erasure.erasure(sym)(sym: Type) - def signature(sym: Symbol): String = sym defStringSeenAs (sym: Type) - def erasedSignature(sym: Symbol): String = sym defStringSeenAs erasureOf(sym) - - def isSameType(sym1: Symbol, sym2: Symbol): Boolean = sym1 =:= sym2 - def isSubType(sym1: Symbol, sym2: Symbol): Boolean = sym1 <:< sym2 - def isSuperType(sym1: Symbol, sym2: Symbol): Boolean = sym2 <:< sym1 - def isSameErasure(sym1: Symbol, sym2: Symbol): Boolean = erasureOf(sym1) =:= erasureOf(sym2) - def matches(sym1: Symbol, sym2: Symbol): Boolean = (sym1: Type) matches (sym2: Type) - - override def toString = s"RelativeTo($prefix)" - } - /** Are types tp1 and tp2 equivalent seen from the perspective * of `baseClass`? For instance List[Int] and Seq[Int] are =:= * when viewed from IterableClass. @@ -58,10 +37,11 @@ abstract class SymbolPairs { def sameInBaseClass(baseClass: Symbol)(tp1: Type, tp2: Type) = (tp1 baseType baseClass) =:= (tp2 baseType baseClass) - case class SymbolPair(base: Symbol, low: Symbol, high: Symbol) { + final case class SymbolPair(base: Symbol, low: Symbol, high: Symbol) { + private[this] val self = base.thisType + def pos = if (low.owner == base) low.pos else if (high.owner == base) high.pos else base.pos - def self: Type = base.thisType - def rootType: Type = base.thisType + def rootType: Type = self def lowType: Type = self memberType low def lowErased: Type = erasure.specialErasure(base)(low.tpe) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index ab52a875f841..af1cdafcdab7 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -96,8 +96,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => def isByNameParam: Boolean = this.isValueParameter && (this hasFlag BYNAMEPARAM) def isImplementationArtifact: Boolean = (this hasFlag BRIDGE) || (this hasFlag VBRIDGE) || (this hasFlag ARTIFACT) def isJava: Boolean = isJavaDefined - def isVal: Boolean = isTerm && !isModule && !isMethod && !isMutable - def isVar: Boolean = isTerm && !isModule && !isMethod && !isLazy && isMutable + + def isField: Boolean = isTerm && !isModule && (!isMethod || owner.isTrait && isAccessor) + def isMutableVal = if (owner.isTrait) !hasFlag(STABLE) else isMutable + def isVal: Boolean = isField && !isMutableVal + def isVar: Boolean = isField && !isLazy && isMutableVal + def isAbstract: Boolean = isAbstractClass || isDeferred || isAbstractType def isPrivateThis = (this hasFlag PRIVATE) && (this hasFlag LOCAL) def isProtectedThis = (this hasFlag PROTECTED) && (this hasFlag LOCAL) @@ -1532,7 +1536,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => def setInfo(info: Type): this.type = { info_=(info); this } /** Modifies this symbol's info in place. */ def modifyInfo(f: Type => Type): this.type = setInfo(f(info)) - /** Substitute second list of symbols for first in current info. */ + /** Substitute second list of symbols for first in current info. 
+ * + * NOTE: this discards the type history (uses setInfo) + */ def substInfo(syms0: List[Symbol], syms1: List[Symbol]): this.type = if (syms0.isEmpty) this else modifyInfo(_.substSym(syms0, syms1)) @@ -2048,7 +2055,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => assert(hasAccessorFlag, this) val localField = owner.info decl localName - if (localField == NoSymbol && this.hasFlag(MIXEDIN)) { + if (localField == NoSymbol && this.hasFlag(MIXEDIN)) { // TODO: fields phase does not (yet?) add MIXEDIN in setMixedinAccessorFlags // SI-8087: private[this] fields don't have a `localName`. When searching the accessed field // for a mixin accessor of such a field, we need to look for `name` instead. // The phase travel ensures that the field is found (`owner` is the trait class symbol, the @@ -2088,8 +2095,15 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** If this is a lazy value, the lazy accessor; otherwise this symbol. */ def lazyAccessorOrSelf: Symbol = if (isLazy) lazyAccessor else this - /** If this is an accessor, the accessed symbol. Otherwise, this symbol. */ - def accessedOrSelf: Symbol = if (hasAccessorFlag) accessed else this + /** `accessed`, if this is an accessor that should have an underlying field. Otherwise, `this`. + * Note that a "regular" accessor in a trait does not have a field, as an interface cannot define a field. + * "non-regular" vals are: early initialized or lazy vals. + * Eventually, we should delay introducing symbols for all val/vars until the fields (or lazyvals) phase, + * as they are an implementation detail that's irrelevant to type checking. + */ + def accessedOrSelf: Symbol = + if (hasAccessorFlag && (!owner.isTrait || hasFlag(PRESUPER | LAZY))) accessed + else this /** For an outer accessor: The class from which the outer originates. 
* For all other symbols: NoSymbol @@ -2532,30 +2546,34 @@ trait Symbols extends api.Symbols { self: SymbolTable => private def symbolKind: SymbolKind = { var kind = - if (isTermMacro) ("term macro", "macro method", "MACM") - else if (isInstanceOf[FreeTermSymbol]) ("free term", "free term", "FTE") - else if (isInstanceOf[FreeTypeSymbol]) ("free type", "free type", "FTY") - else if (isPackageClass) ("package class", "package", "PKC") - else if (hasPackageFlag) ("package", "package", "PK") - else if (isPackageObject) ("package object", "package", "PKO") - else if (isPackageObjectClass) ("package object class", "package", "PKOC") - else if (isAnonymousClass) ("anonymous class", "anonymous class", "AC") - else if (isRefinementClass) ("refinement class", "", "RC") - else if (isModule) ("module", "object", "MOD") - else if (isModuleClass) ("module class", "object", "MODC") - else if (isGetter) ("getter", if (isSourceMethod) "method" else "value", "GET") - else if (isSetter) ("setter", if (isSourceMethod) "method" else "value", "SET") - else if (isTerm && isLazy) ("lazy value", "lazy value", "LAZ") - else if (isVariable) ("field", "variable", "VAR") - else if (isTrait) ("trait", "trait", "TRT") - else if (isClass) ("class", "class", "CLS") - else if (isType) ("type", "type", "TPE") - else if (isClassConstructor && (owner.hasCompleteInfo && isPrimaryConstructor)) ("primary constructor", "constructor", "PCTOR") - else if (isClassConstructor) ("constructor", "constructor", "CTOR") - else if (isSourceMethod) ("method", "method", "METH") - else if (isTerm) ("value", "value", "VAL") - else ("", "", "???") + if (isTermMacro) ("term macro", "macro method", "MACM") + else if (isInstanceOf[FreeTermSymbol]) ("free term", "free term", "FTE") + else if (isInstanceOf[FreeTypeSymbol]) ("free type", "free type", "FTY") + else if (isPackageClass) ("package class", "package", "PKC") + else if (hasPackageFlag) ("package", "package", "PK") + else if (isPackageObject) ("package object", "package", "PKO") + else if (isPackageObjectClass) ("package object class", "package", "PKOC") + else if (isAnonymousClass) ("anonymous class", "anonymous class", "AC") + else if (isRefinementClass) ("refinement class", "", "RC") + else if (isModule) ("module", "object", "MOD") + else if (isModuleClass) ("module class", "object", "MODC") + else if (isAccessor && + !hasFlag(STABLE | LAZY)) ("setter", "variable", "SET") + else if (isAccessor && !hasFlag(LAZY)) ("getter", "value", "GET") + else if (isTerm && hasFlag(LAZY)) ("lazy value", "lazy value", "LAZ") + else if (isVariable) ("field", "variable", "VAR") + else if (isTrait) ("trait", "trait", "TRT") + else if (isClass) ("class", "class", "CLS") + else if (isType) ("type", "type", "TPE") + else if (isClassConstructor && (owner.hasCompleteInfo && + isPrimaryConstructor)) ("primary constructor", "constructor", "PCTOR") + else if (isClassConstructor) ("constructor", "constructor", "CTOR") + else if (isMethod) ("method", "method", "METH") + else if (isTerm) ("value", "value", "VAL") + else ("", "", "???") + if (isSkolem) kind = (kind._1, kind._2, kind._3 + "#SKO") + SymbolKind(kind._1, kind._2, kind._3) } @@ -2623,12 +2641,17 @@ trait Symbols extends api.Symbols { self: SymbolTable => * If hasMeaninglessName is true, uses the owner's name to disambiguate identity. 
*/ override def toString: String = { - if (isPackageObjectOrClass && !settings.debug) - s"package object ${owner.decodedName}" - else compose( - kindString, - if (hasMeaninglessName) owner.decodedName + idString else nameString - ) + val simplifyNames = !settings.debug + if (isPackageObjectOrClass && simplifyNames) s"package object ${owner.decodedName}" + else { + val kind = kindString + val _name: String = + if (hasMeaninglessName) owner.decodedName + idString + else if (simplifyNames && (kind == "variable" || kind == "value")) unexpandedName.getterName.decode.toString // TODO: make condition less gross? + else nameString + + compose(kind, _name) + } } /** String representation of location. @@ -2764,18 +2787,21 @@ trait Symbols extends api.Symbols { self: SymbolTable => ) ***/ override def isValueParameter = this hasFlag PARAM - override def isSetterParameter = isValueParameter && owner.isSetter - override def isAccessor = this hasFlag ACCESSOR - override def isGetter = isAccessor && !isSetter + override def isDefaultGetter = name containsName nme.DEFAULT_GETTER_STRING - override def isSetter = isAccessor && nme.isSetterName(name) // todo: make independent of name, as this can be forged. + + override def isAccessor = this hasFlag ACCESSOR + override def isGetter = isAccessor && !nme.isSetterName(name) // TODO: make independent of name, as this can be forged. + override def isSetter = isAccessor && nme.isSetterName(name) // TODO: make independent of name, as this can be forged. + override def isLocalDummy = nme.isLocalDummyName(name) + override def isClassConstructor = name == nme.CONSTRUCTOR override def isMixinConstructor = name == nme.MIXIN_CONSTRUCTOR - override def isConstructor = nme.isConstructorName(name) + override def isConstructor = isClassConstructor || isMixinConstructor - override def isPackageObject = isModule && (name == nme.PACKAGE) + override def isPackageObject = isModule && (name == nme.PACKAGE) // The name in comments is what it is being disambiguated from. // TODO - rescue CAPTURED from BYNAMEPARAM so we can see all the names. @@ -2900,10 +2926,15 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isVarargsMethod = this hasFlag VARARGS override def isLiftedMethod = this hasFlag LIFTED - // TODO - this seems a strange definition for "isSourceMethod", given that - // it does not make any specific effort to exclude synthetics. Figure out what - // this method is really for and what logic makes sense. - override def isSourceMethod = !(this hasFlag STABLE) // exclude all accessors + // TODO: this definition of isSourceMethod makes no sense -- inline it and re-evaluate at each call site. + // I'm guessing it meant "method written by user, and not generated by the compiler" + // (And then assuming those generated by the compiler don't require certain transformations?) + // Use SYNTHETIC/ARTIFACT instead as an indicator? I don't see how it makes sense to only exclude getters. + // Note also that trait vals are modelled as getters, and thus that user-supplied code appears in their rhs. + // Originally, it may have been an optimization to skip methods that were not user-defined (getters), + // but it doesn't even exclude setters, contrary to its original comment (// exclude all accessors) + override def isSourceMethod = !(this hasFlag STABLE) + // unfortunately having the CASEACCESSOR flag does not actually mean you // are a case accessor (you can also be a field.) 
override def isCaseAccessorMethod = isCaseAccessor diff --git a/test/files/neg/overloaded-unapply.check b/test/files/neg/overloaded-unapply.check index 68a826bac2ae..3951166de550 100644 --- a/test/files/neg/overloaded-unapply.check +++ b/test/files/neg/overloaded-unapply.check @@ -7,8 +7,8 @@ match argument types (List[a]) overloaded-unapply.scala:22: error: cannot resolve overloaded unapply case List(x, xs) => 7 ^ -overloaded-unapply.scala:12: error: method unapply is defined twice - conflicting symbols both originated in file 'overloaded-unapply.scala' +overloaded-unapply.scala:12: error: method unapply is defined twice; + the conflicting method unapply was defined at line 7:7 def unapply[a](xs: List[a]): Option[Null] = xs match { ^ three errors found diff --git a/test/files/neg/t1960.check b/test/files/neg/t1960.check index 5238141c4ee4..de0907b4a904 100644 --- a/test/files/neg/t1960.check +++ b/test/files/neg/t1960.check @@ -1,4 +1,4 @@ -t1960.scala:5: error: parameter 'p' requires field but conflicts with method p in trait TBase +t1960.scala:5: error: parameter 'p' requires field but conflicts with variable p in trait TBase class Aclass (p: Int) extends TBase { def g() { f(p) } } ^ one error found diff --git a/test/files/neg/t200.check b/test/files/neg/t200.check index b6b1a3226720..f0c5e777720d 100644 --- a/test/files/neg/t200.check +++ b/test/files/neg/t200.check @@ -1,5 +1,5 @@ -t200.scala:7: error: method foo is defined twice - conflicting symbols both originated in file 't200.scala' +t200.scala:7: error: method foo is defined twice; + the conflicting method foo was defined at line 6:7 def foo: Int; ^ one error found diff --git a/test/files/neg/t2779.check b/test/files/neg/t2779.check index 0ab4c50d0f0f..9881d5182c27 100644 --- a/test/files/neg/t2779.check +++ b/test/files/neg/t2779.check @@ -1,5 +1,5 @@ -t2779.scala:16: error: method f is defined twice - conflicting symbols both originated in file 't2779.scala' +t2779.scala:16: error: method f is defined twice; + the conflicting method f was defined at line 15:18 override def f = List(M1) ^ one error found diff --git a/test/files/neg/t278.check b/test/files/neg/t278.check index 405f7d225c17..940b8edcefb0 100644 --- a/test/files/neg/t278.check +++ b/test/files/neg/t278.check @@ -4,8 +4,8 @@ t278.scala:5: error: overloaded method value a with alternatives: does not take type parameters println(a[A]) ^ -t278.scala:4: error: method a is defined twice - conflicting symbols both originated in file 't278.scala' +t278.scala:4: error: method a is defined twice; + the conflicting method a was defined at line 3:7 def a = (p:A) => () ^ two errors found diff --git a/test/files/neg/t3871.check b/test/files/neg/t3871.check index b920357ee694..c9667abfb6be 100644 --- a/test/files/neg/t3871.check +++ b/test/files/neg/t3871.check @@ -1,5 +1,5 @@ t3871.scala:4: error: variable foo in class Sub2 cannot be accessed in Sub2 - Access to protected method foo not permitted because + Access to protected variable foo not permitted because enclosing class Base is not a subclass of class Sub2 where target is defined s.foo = true diff --git a/test/files/neg/t4541.check b/test/files/neg/t4541.check index 7bd8ff78f945..7ee0cc641488 100644 --- a/test/files/neg/t4541.check +++ b/test/files/neg/t4541.check @@ -1,5 +1,5 @@ t4541.scala:11: error: variable data in class Sparse cannot be accessed in Sparse[Int] - Access to protected method data not permitted because + Access to protected variable data not permitted because prefix type Sparse[Int] does not conform to 
class Sparse$mcI$sp where the access take place that.data diff --git a/test/files/neg/t4541b.check b/test/files/neg/t4541b.check index 8a52fd97f4d4..2aae95f6b99a 100644 --- a/test/files/neg/t4541b.check +++ b/test/files/neg/t4541b.check @@ -1,5 +1,5 @@ t4541b.scala:13: error: variable data in class SparseArray cannot be accessed in SparseArray[Int] - Access to protected method data not permitted because + Access to protected variable data not permitted because prefix type SparseArray[Int] does not conform to class SparseArray$mcI$sp where the access take place use(that.data.clone) diff --git a/test/files/neg/t5429.check b/test/files/neg/t5429.check index 4350696bc83a..fb2d9c2e476e 100644 --- a/test/files/neg/t5429.check +++ b/test/files/neg/t5429.check @@ -134,7 +134,7 @@ t5429.scala:87: error: overriding value value in class A0 of type Any; lazy value value cannot override a concrete non-lazy value override lazy val value = 0 // fail (strict over lazy) ^ -t5429.scala:91: error: value oneArg overrides nothing. +t5429.scala:91: error: lazy value oneArg overrides nothing. Note: the super classes of class F0 contain the following, non final members named oneArg: def oneArg(x: String): Any override lazy val oneArg = 15 // fail diff --git a/test/files/neg/t591.check b/test/files/neg/t591.check index d33f6d7a2fed..c0bade08146c 100644 --- a/test/files/neg/t591.check +++ b/test/files/neg/t591.check @@ -1,5 +1,5 @@ -t591.scala:38: error: method input_= is defined twice - conflicting symbols both originated in file 't591.scala' +t591.scala:40: error: method input_= is defined twice; + the conflicting variable input was defined at line 35:18 def input_=(in : Input) = {} ^ one error found diff --git a/test/files/neg/t591.scala b/test/files/neg/t591.scala index 0f0b02395c98..14fb256a69d7 100644 --- a/test/files/neg/t591.scala +++ b/test/files/neg/t591.scala @@ -35,7 +35,8 @@ trait BaseFlow extends BaseList { private var input : Input = _; private var output : Output = _; + // the error message is a bit confusing, as it points here, + // but the symbol it reports is `input`'s actual setter (the one we synthesized) def input_=(in : Input) = {} - } } diff --git a/test/files/neg/t6335.check b/test/files/neg/t6335.check index 1727a05eb208..d118440f75a9 100644 --- a/test/files/neg/t6335.check +++ b/test/files/neg/t6335.check @@ -1,9 +1,9 @@ -t6335.scala:6: error: method Z is defined twice - conflicting symbols both originated in file 't6335.scala' +t6335.scala:6: error: method Z is defined twice; + the conflicting method Z was defined at line 5:7 implicit class Z[A](val i: A) { def zz = i } ^ -t6335.scala:3: error: method X is defined twice - conflicting symbols both originated in file 't6335.scala' +t6335.scala:3: error: method X is defined twice; + the conflicting method X was defined at line 2:7 implicit class X(val x: Int) { def xx = x } ^ two errors found diff --git a/test/files/neg/t6446-additional.check b/test/files/neg/t6446-additional.check index e56a67b28b0b..45db63317ca0 100644 --- a/test/files/neg/t6446-additional.check +++ b/test/files/neg/t6446-additional.check @@ -10,18 +10,19 @@ superaccessors 6 add super accessors in traits and nested classes pickler 8 serialize symbol tables refchecks 9 reference/override checking, translate nested objects uncurry 10 uncurry, translate function values to anonymous classes - tailcalls 11 replace tail calls by jumps - specialize 12 @specialized-driven class and method specialization - explicitouter 13 this refs to outer pointers - erasure 14 erase types, add 
interfaces for traits - posterasure 15 clean up erased inline classes - lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs - lambdalift 17 move nested functions to top level - constructors 18 move field definitions into constructors - flatten 19 eliminate inner classes - mixin 20 mixin composition - cleanup 21 platform-specific cleanups, generate reflective calls - delambdafy 22 remove lambdas - jvm 23 generate JVM bytecode - ploogin 24 A sample phase that does so many things it's kind of hard... - terminal 25 the last phase during a compilation run + fields 11 synthesize accessors and fields + tailcalls 12 replace tail calls by jumps + specialize 13 @specialized-driven class and method specialization + explicitouter 14 this refs to outer pointers + erasure 15 erase types, add interfaces for traits + posterasure 16 clean up erased inline classes + lazyvals 17 allocate bitmaps, translate lazy vals into lazified defs + lambdalift 18 move nested functions to top level + constructors 19 move field definitions into constructors + flatten 20 eliminate inner classes + mixin 21 mixin composition + cleanup 22 platform-specific cleanups, generate reflective calls + delambdafy 23 remove lambdas + jvm 24 generate JVM bytecode + ploogin 25 A sample phase that does so many things it's kind of hard... + terminal 26 the last phase during a compilation run diff --git a/test/files/neg/t6446-missing.check b/test/files/neg/t6446-missing.check index 15f0ceb6e3b4..04523d18e672 100644 --- a/test/files/neg/t6446-missing.check +++ b/test/files/neg/t6446-missing.check @@ -11,17 +11,18 @@ superaccessors 6 add super accessors in traits and nested classes pickler 8 serialize symbol tables refchecks 9 reference/override checking, translate nested objects uncurry 10 uncurry, translate function values to anonymous classes - tailcalls 11 replace tail calls by jumps - specialize 12 @specialized-driven class and method specialization - explicitouter 13 this refs to outer pointers - erasure 14 erase types, add interfaces for traits - posterasure 15 clean up erased inline classes - lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs - lambdalift 17 move nested functions to top level - constructors 18 move field definitions into constructors - flatten 19 eliminate inner classes - mixin 20 mixin composition - cleanup 21 platform-specific cleanups, generate reflective calls - delambdafy 22 remove lambdas - jvm 23 generate JVM bytecode - terminal 24 the last phase during a compilation run + fields 11 synthesize accessors and fields + tailcalls 12 replace tail calls by jumps + specialize 13 @specialized-driven class and method specialization + explicitouter 14 this refs to outer pointers + erasure 15 erase types, add interfaces for traits + posterasure 16 clean up erased inline classes + lazyvals 17 allocate bitmaps, translate lazy vals into lazified defs + lambdalift 18 move nested functions to top level + constructors 19 move field definitions into constructors + flatten 20 eliminate inner classes + mixin 21 mixin composition + cleanup 22 platform-specific cleanups, generate reflective calls + delambdafy 23 remove lambdas + jvm 24 generate JVM bytecode + terminal 25 the last phase during a compilation run diff --git a/test/files/neg/t6446-show-phases.check b/test/files/neg/t6446-show-phases.check index 280a4f43d5b4..03f8273c178c 100644 --- a/test/files/neg/t6446-show-phases.check +++ b/test/files/neg/t6446-show-phases.check @@ -10,17 +10,18 @@ superaccessors 6 add super accessors in traits and 
nested classes pickler 8 serialize symbol tables refchecks 9 reference/override checking, translate nested objects uncurry 10 uncurry, translate function values to anonymous classes - tailcalls 11 replace tail calls by jumps - specialize 12 @specialized-driven class and method specialization - explicitouter 13 this refs to outer pointers - erasure 14 erase types, add interfaces for traits - posterasure 15 clean up erased inline classes - lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs - lambdalift 17 move nested functions to top level - constructors 18 move field definitions into constructors - flatten 19 eliminate inner classes - mixin 20 mixin composition - cleanup 21 platform-specific cleanups, generate reflective calls - delambdafy 22 remove lambdas - jvm 23 generate JVM bytecode - terminal 24 the last phase during a compilation run + fields 11 synthesize accessors and fields + tailcalls 12 replace tail calls by jumps + specialize 13 @specialized-driven class and method specialization + explicitouter 14 this refs to outer pointers + erasure 15 erase types, add interfaces for traits + posterasure 16 clean up erased inline classes + lazyvals 17 allocate bitmaps, translate lazy vals into lazified defs + lambdalift 18 move nested functions to top level + constructors 19 move field definitions into constructors + flatten 20 eliminate inner classes + mixin 21 mixin composition + cleanup 22 platform-specific cleanups, generate reflective calls + delambdafy 23 remove lambdas + jvm 24 generate JVM bytecode + terminal 25 the last phase during a compilation run diff --git a/test/files/neg/t6666.check b/test/files/neg/t6666.check index 43c825275384..090ef72770be 100644 --- a/test/files/neg/t6666.check +++ b/test/files/neg/t6666.check @@ -1,7 +1,7 @@ t6666.scala:23: error: Implementation restriction: access of method x$2 in object O1 from <$anon: Function0>, would require illegal premature access to object O1 F.byname(x) ^ -t6666.scala:30: error: Implementation restriction: access of value x$3 in object O2 from <$anon: Function0>, would require illegal premature access to object O2 +t6666.scala:30: error: Implementation restriction: access of method x$3 in object O2 from <$anon: Function0>, would require illegal premature access to object O2 F.byname(x) ^ t6666.scala:37: error: Implementation restriction: access of method x$4 in object O3 from <$anon: Function0>, would require illegal premature access to object O3 @@ -10,7 +10,7 @@ t6666.scala:37: error: Implementation restriction: access of method x$4 in objec t6666.scala:50: error: Implementation restriction: access of method x$6 in class C1 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C1 F.byname(x) ^ -t6666.scala:54: error: Implementation restriction: access of value x$7 in class C2 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C2 +t6666.scala:54: error: Implementation restriction: access of method x$7 in class C2 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C2 F.byname(x) ^ t6666.scala:58: error: Implementation restriction: access of method x$8 in class C3 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C3 diff --git a/test/files/neg/t7494-no-options.check b/test/files/neg/t7494-no-options.check index a4c4a1ad5bc9..bb143e8644f0 100644 --- a/test/files/neg/t7494-no-options.check +++ 
b/test/files/neg/t7494-no-options.check @@ -11,18 +11,19 @@ superaccessors 6 add super accessors in traits and nested classes pickler 8 serialize symbol tables refchecks 9 reference/override checking, translate nested objects uncurry 10 uncurry, translate function values to anonymous classes - tailcalls 11 replace tail calls by jumps - specialize 12 @specialized-driven class and method specialization - explicitouter 13 this refs to outer pointers - erasure 14 erase types, add interfaces for traits - posterasure 15 clean up erased inline classes - lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs - lambdalift 17 move nested functions to top level - constructors 18 move field definitions into constructors - flatten 19 eliminate inner classes - mixin 20 mixin composition - cleanup 21 platform-specific cleanups, generate reflective calls - delambdafy 22 remove lambdas - jvm 23 generate JVM bytecode - ploogin 24 A sample phase that does so many things it's kind of hard... - terminal 25 the last phase during a compilation run + fields 11 synthesize accessors and fields + tailcalls 12 replace tail calls by jumps + specialize 13 @specialized-driven class and method specialization + explicitouter 14 this refs to outer pointers + erasure 15 erase types, add interfaces for traits + posterasure 16 clean up erased inline classes + lazyvals 17 allocate bitmaps, translate lazy vals into lazified defs + lambdalift 18 move nested functions to top level + constructors 19 move field definitions into constructors + flatten 20 eliminate inner classes + mixin 21 mixin composition + cleanup 22 platform-specific cleanups, generate reflective calls + delambdafy 23 remove lambdas + jvm 24 generate JVM bytecode + ploogin 25 A sample phase that does so many things it's kind of hard... 
+ terminal 26 the last phase during a compilation run diff --git a/test/files/neg/t7602.check b/test/files/neg/t7602.check index 5bb1450d7d38..5ce3776790d8 100644 --- a/test/files/neg/t7602.check +++ b/test/files/neg/t7602.check @@ -1,5 +1,5 @@ -t7602.scala:16: error: method foo is defined twice - conflicting symbols both originated in file 't7602.scala' +t7602.scala:16: error: method foo is defined twice; + the conflicting method foo was defined at line 15:7 def foo : Device ^ one error found diff --git a/test/files/neg/t7622-cyclic-dependency.check b/test/files/neg/t7622-cyclic-dependency.check index 3546964f5f68..81e3ecc6a48c 100644 --- a/test/files/neg/t7622-cyclic-dependency.check +++ b/test/files/neg/t7622-cyclic-dependency.check @@ -1 +1 @@ -error: Cycle in phase dependencies detected at cyclicdependency1, created phase-cycle.dot +error: Cycle in phase dependencies detected at cyclicdependency2, created phase-cycle.dot diff --git a/test/files/neg/t800.check b/test/files/neg/t800.check index 8ba95fddde45..238b8dd27d01 100644 --- a/test/files/neg/t800.check +++ b/test/files/neg/t800.check @@ -1,16 +1,16 @@ t800.scala:4: error: qualification is already defined as value qualification val qualification = false; ^ -t800.scala:8: error: method qualification is defined twice - conflicting symbols both originated in file 't800.scala' +t800.scala:8: error: value qualification is defined twice; + the conflicting variable qualification was defined at line 7:7 val qualification = false; ^ -t800.scala:12: error: value qualification is defined twice - conflicting symbols both originated in file 't800.scala' +t800.scala:12: error: variable qualification is defined twice; + the conflicting value qualification was defined at line 11:7 var qualification = false; ^ -t800.scala:16: error: method qualification is defined twice - conflicting symbols both originated in file 't800.scala' +t800.scala:16: error: variable qualification is defined twice; + the conflicting variable qualification was defined at line 15:7 var qualification = false; ^ four errors found diff --git a/test/files/neg/t8849.check b/test/files/neg/t8849.check index 15b00aee8b7e..1d5b4164b205 100644 --- a/test/files/neg/t8849.check +++ b/test/files/neg/t8849.check @@ -1,5 +1,5 @@ t8849.scala:8: error: ambiguous implicit values: - both value global in object Implicits of type => scala.concurrent.ExecutionContext + both lazy value global in object Implicits of type => scala.concurrent.ExecutionContext and value dummy of type scala.concurrent.ExecutionContext match expected type scala.concurrent.ExecutionContext require(implicitly[ExecutionContext] eq dummy) diff --git a/test/files/neg/trait_fields_conflicts.check b/test/files/neg/trait_fields_conflicts.check new file mode 100644 index 000000000000..696d0284c103 --- /dev/null +++ b/test/files/neg/trait_fields_conflicts.check @@ -0,0 +1,273 @@ +trait_fields_conflicts.scala:5: error: overriding value x in trait Val of type Int; + value x needs `override' modifier +trait ValForVal extends Val { val x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:6: error: overriding value x in trait Val of type Int; + variable x needs `override' modifier +trait VarForVal extends Val { var x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:7: error: overriding value x in trait Val of type Int; + method x needs `override' modifier +trait DefForVal extends Val { def x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:8: error: overriding variable x in trait Var of type 
Int; + value x needs `override' modifier +trait ValForVar extends Var { val x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:9: error: overriding variable x in trait Var of type Int; + variable x needs `override' modifier +trait VarForVar extends Var { var x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:10: error: overriding variable x in trait Var of type Int; + method x needs `override' modifier +trait DefForVar extends Var { def x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:11: error: overriding lazy value x in trait Lazy of type Int; + value x needs `override' modifier +trait ValForLazy extends Lazy { val x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:12: error: overriding lazy value x in trait Lazy of type Int; + variable x needs `override' modifier +trait VarForLazy extends Lazy { var x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:13: error: overriding lazy value x in trait Lazy of type Int; + method x needs `override' modifier +trait DefForLazy extends Lazy { def x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:16: error: overriding value x in trait Val of type Int; + variable x needs to be a stable, immutable value +trait VarForValOvr extends Val { override var x: Int = 1 } // bad override + ^ +trait_fields_conflicts.scala:17: error: overriding value x in trait Val of type Int; + method x needs to be a stable, immutable value +trait DefForValOvr extends Val { override def x: Int = 1 } // bad override + ^ +trait_fields_conflicts.scala:18: error: overriding variable x in trait Var of type Int; + value x cannot override a mutable variable +trait ValForVarOvr extends Var { override val x: Int = 1 } // bad override -- unsound if used in path and var changes + ^ +trait_fields_conflicts.scala:19: error: overriding variable x in trait Var of type Int; + variable x cannot override a mutable variable +trait VarForVarOvr extends Var { override var x: Int = 1 } // bad override -- why? + ^ +trait_fields_conflicts.scala:20: error: overriding variable x in trait Var of type Int; + method x cannot override a mutable variable +trait DefForVarOvr extends Var { override def x: Int = 1 } // bad override -- why? + ^ +trait_fields_conflicts.scala:21: error: overriding lazy value x in trait Lazy of type Int; + value x must be declared lazy to override a concrete lazy value +trait ValForLazyOvr extends Lazy { override val x: Int = 1 } // bad override -- why? + ^ +trait_fields_conflicts.scala:22: error: overriding lazy value x in trait Lazy of type Int; + variable x needs to be a stable, immutable value +trait VarForLazyOvr extends Lazy { override var x: Int = 1 } // bad override -- why? + ^ +trait_fields_conflicts.scala:23: error: overriding lazy value x in trait Lazy of type Int; + method x needs to be a stable, immutable value +trait DefForLazyOvr extends Lazy { override def x: Int = 1 } // bad override -- why? 
+ ^ +trait_fields_conflicts.scala:25: error: overriding value x in trait Val of type Int; + value x needs `override' modifier +class CValForVal extends Val { val x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:26: error: overriding value x in trait Val of type Int; + variable x needs `override' modifier +class CVarForVal extends Val { var x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:27: error: overriding value x in trait Val of type Int; + method x needs `override' modifier +class CDefForVal extends Val { def x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:28: error: overriding variable x in trait Var of type Int; + value x needs `override' modifier +class CValForVar extends Var { val x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:29: error: overriding variable x in trait Var of type Int; + variable x needs `override' modifier +class CVarForVar extends Var { var x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:30: error: overriding variable x in trait Var of type Int; + method x needs `override' modifier +class CDefForVar extends Var { def x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:31: error: overriding lazy value x in trait Lazy of type Int; + value x needs `override' modifier +class CValForLazy extends Lazy { val x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:32: error: overriding lazy value x in trait Lazy of type Int; + variable x needs `override' modifier +class CVarForLazy extends Lazy { var x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:33: error: overriding lazy value x in trait Lazy of type Int; + method x needs `override' modifier +class CDefForLazy extends Lazy { def x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:36: error: overriding value x in trait Val of type Int; + variable x needs to be a stable, immutable value +class CVarForValOvr extends Val { override var x: Int = 1 } // bad override + ^ +trait_fields_conflicts.scala:37: error: overriding value x in trait Val of type Int; + method x needs to be a stable, immutable value +class CDefForValOvr extends Val { override def x: Int = 1 } // bad override + ^ +trait_fields_conflicts.scala:38: error: overriding variable x in trait Var of type Int; + value x cannot override a mutable variable +class CValForVarOvr extends Var { override val x: Int = 1 } // bad override -- unsound if used in path and var changes + ^ +trait_fields_conflicts.scala:39: error: overriding variable x in trait Var of type Int; + variable x cannot override a mutable variable +class CVarForVarOvr extends Var { override var x: Int = 1 } // bad override -- why? + ^ +trait_fields_conflicts.scala:40: error: overriding variable x in trait Var of type Int; + method x cannot override a mutable variable +class CDefForVarOvr extends Var { override def x: Int = 1 } // bad override -- why? + ^ +trait_fields_conflicts.scala:41: error: overriding lazy value x in trait Lazy of type Int; + value x must be declared lazy to override a concrete lazy value +class CValForLazyOvr extends Lazy { override val x: Int = 1 } // bad override -- why? + ^ +trait_fields_conflicts.scala:42: error: overriding lazy value x in trait Lazy of type Int; + variable x needs to be a stable, immutable value +class CVarForLazyOvr extends Lazy { override var x: Int = 1 } // bad override -- why? 
+ ^ +trait_fields_conflicts.scala:43: error: overriding lazy value x in trait Lazy of type Int; + method x needs to be a stable, immutable value +class CDefForLazyOvr extends Lazy { override def x: Int = 1 } // bad override -- why? + ^ +trait_fields_conflicts.scala:49: error: overriding value x in class CVal of type Int; + value x needs `override' modifier +trait ValForCVal extends CVal { val x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:50: error: overriding value x in class CVal of type Int; + variable x needs `override' modifier +trait VarForCVal extends CVal { var x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:51: error: overriding value x in class CVal of type Int; + method x needs `override' modifier +trait DefForCVal extends CVal { def x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:52: error: overriding variable x in class CVar of type Int; + value x needs `override' modifier +trait ValForCVar extends CVar { val x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:53: error: overriding variable x in class CVar of type Int; + variable x needs `override' modifier +trait VarForCVar extends CVar { var x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:54: error: overriding variable x in class CVar of type Int; + method x needs `override' modifier +trait DefForCVar extends CVar { def x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:55: error: overriding lazy value x in class CLazy of type Int; + value x needs `override' modifier +trait ValForCLazy extends CLazy { val x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:56: error: overriding lazy value x in class CLazy of type Int; + variable x needs `override' modifier +trait VarForCLazy extends CLazy { var x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:57: error: overriding lazy value x in class CLazy of type Int; + method x needs `override' modifier +trait DefForCLazy extends CLazy { def x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:60: error: overriding value x in class CVal of type Int; + variable x needs to be a stable, immutable value +trait VarForCValOvr extends CVal { override var x: Int = 1 } // bad override + ^ +trait_fields_conflicts.scala:61: error: overriding value x in class CVal of type Int; + method x needs to be a stable, immutable value +trait DefForCValOvr extends CVal { override def x: Int = 1 } // bad override + ^ +trait_fields_conflicts.scala:62: error: overriding variable x in class CVar of type Int; + value x cannot override a mutable variable +trait ValForCVarOvr extends CVar { override val x: Int = 1 } // bad override -- unsound if used in path and var changes + ^ +trait_fields_conflicts.scala:63: error: overriding variable x in class CVar of type Int; + variable x cannot override a mutable variable +trait VarForCVarOvr extends CVar { override var x: Int = 1 } // bad override -- why? + ^ +trait_fields_conflicts.scala:64: error: overriding variable x in class CVar of type Int; + method x cannot override a mutable variable +trait DefForCVarOvr extends CVar { override def x: Int = 1 } // bad override -- why? + ^ +trait_fields_conflicts.scala:65: error: overriding lazy value x in class CLazy of type Int; + value x must be declared lazy to override a concrete lazy value +trait ValForCLazyOvr extends CLazy { override val x: Int = 1 } // bad override -- why? 
+ ^ +trait_fields_conflicts.scala:66: error: overriding lazy value x in class CLazy of type Int; + variable x needs to be a stable, immutable value +trait VarForCLazyOvr extends CLazy { override var x: Int = 1 } // bad override -- why? + ^ +trait_fields_conflicts.scala:67: error: overriding lazy value x in class CLazy of type Int; + method x needs to be a stable, immutable value +trait DefForCLazyOvr extends CLazy { override def x: Int = 1 } // bad override -- why? + ^ +trait_fields_conflicts.scala:69: error: overriding value x in class CVal of type Int; + value x needs `override' modifier +class CValForCVal extends CVal { val x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:70: error: overriding value x in class CVal of type Int; + variable x needs `override' modifier +class CVarForCVal extends CVal { var x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:71: error: overriding value x in class CVal of type Int; + method x needs `override' modifier +class CDefForCVal extends CVal { def x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:72: error: overriding variable x in class CVar of type Int; + value x needs `override' modifier +class CValForCVar extends CVar { val x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:73: error: overriding variable x in class CVar of type Int; + variable x needs `override' modifier +class CVarForCVar extends CVar { var x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:74: error: overriding variable x in class CVar of type Int; + method x needs `override' modifier +class CDefForCVar extends CVar { def x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:75: error: overriding lazy value x in class CLazy of type Int; + value x needs `override' modifier +class CValForCLazy extends CLazy { val x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:76: error: overriding lazy value x in class CLazy of type Int; + variable x needs `override' modifier +class CVarForCLazy extends CLazy { var x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:77: error: overriding lazy value x in class CLazy of type Int; + method x needs `override' modifier +class CDefForCLazy extends CLazy { def x: Int = 1 } // needs override + ^ +trait_fields_conflicts.scala:80: error: overriding value x in class CVal of type Int; + variable x needs to be a stable, immutable value +class CVarForCValOvr extends CVal { override var x: Int = 1 } // bad override + ^ +trait_fields_conflicts.scala:81: error: overriding value x in class CVal of type Int; + method x needs to be a stable, immutable value +class CDefForCValOvr extends CVal { override def x: Int = 1 } // bad override + ^ +trait_fields_conflicts.scala:82: error: overriding variable x in class CVar of type Int; + value x cannot override a mutable variable +class CValForCVarOvr extends CVar { override val x: Int = 1 } // bad override -- unsound if used in path and var changes + ^ +trait_fields_conflicts.scala:83: error: overriding variable x in class CVar of type Int; + variable x cannot override a mutable variable +class CVarForCVarOvr extends CVar { override var x: Int = 1 } // bad override -- why? + ^ +trait_fields_conflicts.scala:84: error: overriding variable x in class CVar of type Int; + method x cannot override a mutable variable +class CDefForCVarOvr extends CVar { override def x: Int = 1 } // bad override -- why? 
+ ^ +trait_fields_conflicts.scala:85: error: overriding lazy value x in class CLazy of type Int; + value x must be declared lazy to override a concrete lazy value +class CValForCLazyOvr extends CLazy { override val x: Int = 1 } // bad override -- why? + ^ +trait_fields_conflicts.scala:86: error: overriding lazy value x in class CLazy of type Int; + variable x needs to be a stable, immutable value +class CVarForCLazyOvr extends CLazy { override var x: Int = 1 } // bad override -- why? + ^ +trait_fields_conflicts.scala:87: error: overriding lazy value x in class CLazy of type Int; + method x needs to be a stable, immutable value +class CDefForCLazyOvr extends CLazy { override def x: Int = 1 } // bad override -- why? + ^ +68 errors found diff --git a/test/files/neg/trait_fields_conflicts.scala b/test/files/neg/trait_fields_conflicts.scala new file mode 100644 index 000000000000..92fc106e44eb --- /dev/null +++ b/test/files/neg/trait_fields_conflicts.scala @@ -0,0 +1,87 @@ +trait Val { val x: Int = 123 } +trait Var { var x: Int = 123 } +trait Lazy { lazy val x: Int = 123 } + +trait ValForVal extends Val { val x: Int = 1 } // needs override +trait VarForVal extends Val { var x: Int = 1 } // needs override +trait DefForVal extends Val { def x: Int = 1 } // needs override +trait ValForVar extends Var { val x: Int = 1 } // needs override +trait VarForVar extends Var { var x: Int = 1 } // needs override +trait DefForVar extends Var { def x: Int = 1 } // needs override +trait ValForLazy extends Lazy { val x: Int = 1 } // needs override +trait VarForLazy extends Lazy { var x: Int = 1 } // needs override +trait DefForLazy extends Lazy { def x: Int = 1 } // needs override + +trait ValForValOvr extends Val { override val x: Int = 1 } // override ok +trait VarForValOvr extends Val { override var x: Int = 1 } // bad override +trait DefForValOvr extends Val { override def x: Int = 1 } // bad override +trait ValForVarOvr extends Var { override val x: Int = 1 } // bad override -- unsound if used in path and var changes +trait VarForVarOvr extends Var { override var x: Int = 1 } // bad override -- why? +trait DefForVarOvr extends Var { override def x: Int = 1 } // bad override -- why? +trait ValForLazyOvr extends Lazy { override val x: Int = 1 } // bad override -- why? +trait VarForLazyOvr extends Lazy { override var x: Int = 1 } // bad override -- why? +trait DefForLazyOvr extends Lazy { override def x: Int = 1 } // bad override -- why? + +class CValForVal extends Val { val x: Int = 1 } // needs override +class CVarForVal extends Val { var x: Int = 1 } // needs override +class CDefForVal extends Val { def x: Int = 1 } // needs override +class CValForVar extends Var { val x: Int = 1 } // needs override +class CVarForVar extends Var { var x: Int = 1 } // needs override +class CDefForVar extends Var { def x: Int = 1 } // needs override +class CValForLazy extends Lazy { val x: Int = 1 } // needs override +class CVarForLazy extends Lazy { var x: Int = 1 } // needs override +class CDefForLazy extends Lazy { def x: Int = 1 } // needs override + +class CValForValOvr extends Val { override val x: Int = 1 } // override ok +class CVarForValOvr extends Val { override var x: Int = 1 } // bad override +class CDefForValOvr extends Val { override def x: Int = 1 } // bad override +class CValForVarOvr extends Var { override val x: Int = 1 } // bad override -- unsound if used in path and var changes +class CVarForVarOvr extends Var { override var x: Int = 1 } // bad override -- why? 
+class CDefForVarOvr extends Var { override def x: Int = 1 } // bad override -- why? +class CValForLazyOvr extends Lazy { override val x: Int = 1 } // bad override -- why? +class CVarForLazyOvr extends Lazy { override var x: Int = 1 } // bad override -- why? +class CDefForLazyOvr extends Lazy { override def x: Int = 1 } // bad override -- why? + +class CVal { val x: Int = 123 } +class CVar { var x: Int = 123 } +class CLazy { lazy val x: Int = 123 } + +trait ValForCVal extends CVal { val x: Int = 1 } // needs override +trait VarForCVal extends CVal { var x: Int = 1 } // needs override +trait DefForCVal extends CVal { def x: Int = 1 } // needs override +trait ValForCVar extends CVar { val x: Int = 1 } // needs override +trait VarForCVar extends CVar { var x: Int = 1 } // needs override +trait DefForCVar extends CVar { def x: Int = 1 } // needs override +trait ValForCLazy extends CLazy { val x: Int = 1 } // needs override +trait VarForCLazy extends CLazy { var x: Int = 1 } // needs override +trait DefForCLazy extends CLazy { def x: Int = 1 } // needs override + +trait ValForCValOvr extends CVal { override val x: Int = 1 } // override ok +trait VarForCValOvr extends CVal { override var x: Int = 1 } // bad override +trait DefForCValOvr extends CVal { override def x: Int = 1 } // bad override +trait ValForCVarOvr extends CVar { override val x: Int = 1 } // bad override -- unsound if used in path and var changes +trait VarForCVarOvr extends CVar { override var x: Int = 1 } // bad override -- why? +trait DefForCVarOvr extends CVar { override def x: Int = 1 } // bad override -- why? +trait ValForCLazyOvr extends CLazy { override val x: Int = 1 } // bad override -- why? +trait VarForCLazyOvr extends CLazy { override var x: Int = 1 } // bad override -- why? +trait DefForCLazyOvr extends CLazy { override def x: Int = 1 } // bad override -- why? + +class CValForCVal extends CVal { val x: Int = 1 } // needs override +class CVarForCVal extends CVal { var x: Int = 1 } // needs override +class CDefForCVal extends CVal { def x: Int = 1 } // needs override +class CValForCVar extends CVar { val x: Int = 1 } // needs override +class CVarForCVar extends CVar { var x: Int = 1 } // needs override +class CDefForCVar extends CVar { def x: Int = 1 } // needs override +class CValForCLazy extends CLazy { val x: Int = 1 } // needs override +class CVarForCLazy extends CLazy { var x: Int = 1 } // needs override +class CDefForCLazy extends CLazy { def x: Int = 1 } // needs override + +class CValForCValOvr extends CVal { override val x: Int = 1 } // override ok +class CVarForCValOvr extends CVal { override var x: Int = 1 } // bad override +class CDefForCValOvr extends CVal { override def x: Int = 1 } // bad override +class CValForCVarOvr extends CVar { override val x: Int = 1 } // bad override -- unsound if used in path and var changes +class CVarForCVarOvr extends CVar { override var x: Int = 1 } // bad override -- why? +class CDefForCVarOvr extends CVar { override def x: Int = 1 } // bad override -- why? +class CValForCLazyOvr extends CLazy { override val x: Int = 1 } // bad override -- why? +class CVarForCLazyOvr extends CLazy { override var x: Int = 1 } // bad override -- why? +class CDefForCLazyOvr extends CLazy { override def x: Int = 1 } // bad override -- why? 
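Editorial note, not part of the patch: the 68 errors above cover only the rejected combinations. The lines marked `// override ok` in trait_fields_conflicts.scala (an explicit `override val` over a concrete `val`, e.g. `ValForValOvr` and `CValForValOvr`) produce no diagnostic. A minimal standalone sketch of the accepted shape follows; the type and member names here are illustrative, and the rejected variants are kept as comments quoting the error text from the check file above.

```scala
trait Val { val x: Int = 123 }

// Accepted: a stable, immutable value with an explicit `override` modifier,
// matching the `ValForValOvr` / `CValForValOvr` cases that carry no error above.
trait ValOverride extends Val { override val x: Int = 1 }
class CValOverride extends Val { override val x: Int = 1 }

// Rejected (kept as comments so this sketch compiles), per trait_fields_conflicts.check:
// trait VarOverride extends Val { override var x: Int = 1 } // "variable x needs to be a stable, immutable value"
// trait DefOverride extends Val { override def x: Int = 1 } // "method x needs to be a stable, immutable value"
```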
diff --git a/test/files/neg/trait_fields_deprecated_overriding.check b/test/files/neg/trait_fields_deprecated_overriding.check new file mode 100644 index 000000000000..89dfa5c295b7 --- /dev/null +++ b/test/files/neg/trait_fields_deprecated_overriding.check @@ -0,0 +1,6 @@ +trait_fields_deprecated_overriding.scala:8: warning: overriding value x in trait DeprecatedOverriding is deprecated + override val x = 2 + ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found +one error found diff --git a/test/files/neg/trait_fields_deprecated_overriding.flags b/test/files/neg/trait_fields_deprecated_overriding.flags new file mode 100644 index 000000000000..c6bfaf1f64a4 --- /dev/null +++ b/test/files/neg/trait_fields_deprecated_overriding.flags @@ -0,0 +1 @@ +-deprecation -Xfatal-warnings diff --git a/test/files/neg/trait_fields_deprecated_overriding.scala b/test/files/neg/trait_fields_deprecated_overriding.scala new file mode 100644 index 000000000000..e7d722c92f0b --- /dev/null +++ b/test/files/neg/trait_fields_deprecated_overriding.scala @@ -0,0 +1,11 @@ +package scala + +trait DeprecatedOverriding { + @deprecatedOverriding val x = 1 +} + +class COverride extends DeprecatedOverriding { + override val x = 2 +} + +class CSynthImpl extends DeprecatedOverriding \ No newline at end of file diff --git a/test/files/neg/val_infer.check b/test/files/neg/val_infer.check new file mode 100644 index 000000000000..711450add958 --- /dev/null +++ b/test/files/neg/val_infer.check @@ -0,0 +1,6 @@ +val_infer.scala:3: error: type mismatch; + found : String("") + required: Int + trait Sub extends Base { def foo = "" } + ^ +one error found diff --git a/test/files/neg/val_infer.scala b/test/files/neg/val_infer.scala new file mode 100644 index 000000000000..7fe839374991 --- /dev/null +++ b/test/files/neg/val_infer.scala @@ -0,0 +1,4 @@ +class Test { + trait Base { def foo: Int } + trait Sub extends Base { def foo = "" } +} \ No newline at end of file diff --git a/test/files/neg/val_sig_infer_match.check b/test/files/neg/val_sig_infer_match.check new file mode 100644 index 000000000000..704c99cf84ef --- /dev/null +++ b/test/files/neg/val_sig_infer_match.check @@ -0,0 +1,4 @@ +val_sig_infer_match.scala:21: error: value y is not a member of A + def m = f.y // doesn't compile anymore + ^ +one error found diff --git a/test/files/neg/val_sig_infer_match.scala b/test/files/neg/val_sig_infer_match.scala new file mode 100644 index 000000000000..fb8aa66d56d8 --- /dev/null +++ b/test/files/neg/val_sig_infer_match.scala @@ -0,0 +1,22 @@ +class A + +class B extends A { + def y: Int = 0 +} + +class B1 extends B +class B2 extends B + +class C { + def f: A = null +} + +class D extends C { + def s = "" + override final val f = s match { + case "" => new B1 + case _ => new B2 + } + + def m = f.y // doesn't compile anymore +} \ No newline at end of file diff --git a/test/files/neg/val_sig_infer_struct.check b/test/files/neg/val_sig_infer_struct.check new file mode 100644 index 000000000000..26efbbc3f499 --- /dev/null +++ b/test/files/neg/val_sig_infer_struct.check @@ -0,0 +1,4 @@ +val_sig_infer_struct.scala:7: error: value foo is not a member of Object + def bar = f.foo + ^ +one error found diff --git a/test/files/neg/val_sig_infer_struct.scala b/test/files/neg/val_sig_infer_struct.scala new file mode 100644 index 000000000000..e88340337cb3 --- /dev/null +++ b/test/files/neg/val_sig_infer_struct.scala @@ -0,0 +1,8 @@ +class C { + def f: Object = this +} + +class D extends C { + override val f = new Object { def foo 
= 1 } + def bar = f.foo +} \ No newline at end of file diff --git a/test/files/neg/warn-unused-privates.check b/test/files/neg/warn-unused-privates.check index 4876ed8fc24a..2e93f338bb3c 100644 --- a/test/files/neg/warn-unused-privates.check +++ b/test/files/neg/warn-unused-privates.check @@ -19,10 +19,7 @@ warn-unused-privates.scala:36: warning: private val in class Boppy is never used warn-unused-privates.scala:43: warning: private var in trait Accessors is never used private var v1: Int = 0 // warn ^ -warn-unused-privates.scala:43: warning: private setter in trait Accessors is never used - private var v1: Int = 0 // warn - ^ -warn-unused-privates.scala:44: warning: private setter in trait Accessors is never used +warn-unused-privates.scala:44: warning: private var in trait Accessors is never used private var v2: Int = 0 // warn, never set ^ warn-unused-privates.scala:45: warning: private var in trait Accessors is never used @@ -65,5 +62,5 @@ warn-unused-privates.scala:103: warning: local type OtherThing is never used type OtherThing = String // warn ^ error: No warnings can be incurred under -Xfatal-warnings. -22 warnings found +21 warnings found one error found diff --git a/test/files/presentation/t4287c.flags b/test/files/pos/infer_override_def_args.flags similarity index 100% rename from test/files/presentation/t4287c.flags rename to test/files/pos/infer_override_def_args.flags diff --git a/test/files/pos/infer_override_def_args.scala b/test/files/pos/infer_override_def_args.scala new file mode 100644 index 000000000000..ac10720c8128 --- /dev/null +++ b/test/files/pos/infer_override_def_args.scala @@ -0,0 +1,5 @@ +abstract class A { def foo(a: Int): A } +class B extends A { + implicit def spackle(x: Int): A = new B + def foo(a) = a +} \ No newline at end of file diff --git a/test/files/pos/trait_fields_dependent_conflict.scala b/test/files/pos/trait_fields_dependent_conflict.scala new file mode 100644 index 000000000000..afb6f4b0c5ee --- /dev/null +++ b/test/files/pos/trait_fields_dependent_conflict.scala @@ -0,0 +1,20 @@ +// derived from test/files/pos/S5.scala + +// compile with -uniqid to see a hint of the trouble +trait N { + // the symbol for self does not get rebound when synthesizing members in C + val self: N = ??? + val n: self.type = self +} + +abstract class M { + val self: N + val n: self.type +} + +abstract class MConflict extends N { + val self: N + val n: self.type +} + +class C extends M with N diff --git a/test/files/pos/trait_fields_dependent_rebind.scala b/test/files/pos/trait_fields_dependent_rebind.scala new file mode 100644 index 000000000000..e2cf4c43c3f1 --- /dev/null +++ b/test/files/pos/trait_fields_dependent_rebind.scala @@ -0,0 +1,15 @@ +// derived from test/files/pos/S5.scala + +// compile with -uniqid to see a hint of the trouble +trait N { + // the symbol for self does not get rebound when synthesizing members in C + val self: N = ??? 
+ val n: self.type = self +} + +abstract class M { + val self: N + val n: self.type +} + +class C extends M with N diff --git a/test/files/pos/trait_fields_inherit_double_def.scala b/test/files/pos/trait_fields_inherit_double_def.scala new file mode 100644 index 000000000000..8703d6312c1d --- /dev/null +++ b/test/files/pos/trait_fields_inherit_double_def.scala @@ -0,0 +1,20 @@ +// done +// test/files/trait-defaults/fields.scala:24: error: double definition: +// def signalDelegate_=(x$1: Signalling): Unit at line 24 and +// def signalDelegate_=(x$1: Signalling): Unit at line 24 +// have same type +// class SUB extends IterableSplitter +// ^ +// one error found + +trait Signalling + +trait DelegatedSignalling extends Signalling { + var signalDelegate: Signalling +} + +trait IterableSplitter extends DelegatedSignalling { + var signalDelegate: Signalling = ??? +} + +class SUB extends IterableSplitter \ No newline at end of file diff --git a/test/files/pos/trait_fields_lambdalift.scala b/test/files/pos/trait_fields_lambdalift.scala new file mode 100644 index 000000000000..62304a526854 --- /dev/null +++ b/test/files/pos/trait_fields_lambdalift.scala @@ -0,0 +1,22 @@ +class Lift { + def foo = { + // this will be captured by the MouseHandler trait, + // which gives rise to a new trait field during LambdaLift + var Clicked = "Clicked" + + def bar = Clicked + + trait MouseHandler { + def mouseClicked = Clicked + bar + } + + class CC extends MouseHandler + + // new C {} + (new CC).mouseClicked + } +} + +object O extends Lift with App { + println(foo) +} diff --git a/test/files/pos/trait_fields_nested_private_object.scala b/test/files/pos/trait_fields_nested_private_object.scala new file mode 100644 index 000000000000..8efc1cb3fa3f --- /dev/null +++ b/test/files/pos/trait_fields_nested_private_object.scala @@ -0,0 +1,8 @@ +trait NestedObj { + private object O { println("NO") } +} + + +class C extends NestedObj { + def O = ??? +} \ No newline at end of file diff --git a/test/files/pos/trait_fields_nested_public_object.scala b/test/files/pos/trait_fields_nested_public_object.scala new file mode 100644 index 000000000000..016487fb8ab3 --- /dev/null +++ b/test/files/pos/trait_fields_nested_public_object.scala @@ -0,0 +1,5 @@ +trait NestedObj { + object O { println("NO") } +} + +class C extends NestedObj \ No newline at end of file diff --git a/test/files/pos/trait_fields_owners.scala b/test/files/pos/trait_fields_owners.scala new file mode 100644 index 000000000000..6aa5572171d4 --- /dev/null +++ b/test/files/pos/trait_fields_owners.scala @@ -0,0 +1,19 @@ +trait V { + // ok + // error: java.lang.IllegalArgumentException: Could not find proxy for val f: Function1 in List(value f, value v, trait V, package , package ) (currentOwner= value ) + val v = { val f = (x: Int) => x + 1; f(2) } + + // ok + // assertion failed: + // Trying to access the this of another class: tree.symbol = trait V, class symbol = object V$class compilation unit: fields.scala + val developmentVersion = + for { + v <- scalaPropOrNone("maven.version.number") + if v endsWith "-SNAPSHOT" + ov <- scalaPropOrNone("version.number") + } yield ov + + def scalaPropOrNone(name: String): Option[String] = ??? 
+} + +object O extends V \ No newline at end of file diff --git a/test/files/pos/trait_fields_private_this.scala b/test/files/pos/trait_fields_private_this.scala new file mode 100644 index 000000000000..8065cc89e6a1 --- /dev/null +++ b/test/files/pos/trait_fields_private_this.scala @@ -0,0 +1,5 @@ +trait Chars { + private[this] val char2uescapeArray: String = ??? +} + +object Chars extends Chars \ No newline at end of file diff --git a/test/files/pos/trait_fields_static_fwd.scala b/test/files/pos/trait_fields_static_fwd.scala new file mode 100644 index 000000000000..af2cdad9ff4d --- /dev/null +++ b/test/files/pos/trait_fields_static_fwd.scala @@ -0,0 +1,10 @@ +trait T { + // Need to mark the synthesized member in the object's module class as notPROTECTED, + // since the trait member will receive this flag later. + // If we don't add notPROTECTED to the synthesized one, the member will not be seen as overriding the trait member. + // Therefore, addForwarders's call to membersBasedOnFlags would see the deferred member in the trait, + // instead of the concrete (desired) one in the class, and thus not create the static forwarder. + protected val propFilename: String = "/" +} + +object P extends T diff --git a/test/files/pos/val_infer.scala b/test/files/pos/val_infer.scala new file mode 100644 index 000000000000..5f82da83931f --- /dev/null +++ b/test/files/pos/val_infer.scala @@ -0,0 +1,5 @@ +class Test { + implicit def s2i(s: String): Int = s.length + trait Base { def foo: Int } + trait Sub extends Base { val foo = "" } +} diff --git a/test/files/presentation/doc/doc.scala b/test/files/presentation/doc/doc.scala index 8c60af557b96..08c6ebf059ca 100644 --- a/test/files/presentation/doc/doc.scala +++ b/test/files/presentation/doc/doc.scala @@ -37,7 +37,7 @@ object Test extends InteractiveTest { prepre + docComment(nTags) + prepost + post } - override lazy val compiler = { + override lazy val compiler: Global { def getComment(sym: Symbol, source: SourceFile, fragments: List[(Symbol,SourceFile)]): Option[Comment] } = { prepareSettings(settings) new Global(settings, compilerReporter) with MemberLookupBase with CommentFactoryBase with doc.ScaladocGlobalTrait { outer => diff --git a/test/files/presentation/scope-completion-3.check b/test/files/presentation/scope-completion-3.check index b70a7d5c6b73..f2510127fbfa 100644 --- a/test/files/presentation/scope-completion-3.check +++ b/test/files/presentation/scope-completion-3.check @@ -3,7 +3,7 @@ reload: Completions.scala askScopeCompletion at Completions.scala(75,2) ================================================================================ [response] askScopeCompletion at (75,2) -retrieved 37 members +retrieved 38 members abstract class Base1 extends AnyRef abstract trait Trait1 extends AnyRef class Cb1 extends AnyRef @@ -14,6 +14,8 @@ def (): test.Completion1 def fb1: Int def fc1: Int def ft1: Int +def rt1: Int +def rt1_=(x$1: Int): Unit object Completion2 object Ob1 object Oc1 @@ -30,23 +32,22 @@ private[this] val vb1: Int private[this] val vb3: Int private[this] val vc1: Int private[this] val vc2: Int -private[this] val vt1: Int private[this] val vt3: Int private[this] var rb1: Int private[this] var rb3: Int private[this] var rc1: Int private[this] var rc2: Int -private[this] var rt1: Int private[this] var rt3: Int type tb1 = Completion1.this.tb1 type tc1 = Completion1.this.tc1 type tt1 = Completion1.this.tt1 +val vt1: Int ================================================================================ askScopeCompletion at 
Completions.scala(104,2) ================================================================================ [response] askScopeCompletion at (104,2) -retrieved 37 members +retrieved 38 members abstract class Base1 extends AnyRef abstract trait Trait1 extends AnyRef class Cb1 extends AnyRef @@ -57,6 +58,8 @@ def (): test.Completion2.type def fb1: Int def fo1: Int def ft1: Int +def rt1: Int +def rt1_=(x$1: Int): Unit object Completion2 object Ob1 object Oo1 @@ -73,15 +76,14 @@ private[this] val vb1: Int private[this] val vb3: Int private[this] val vo1: Int private[this] val vo2: Int -private[this] val vt1: Int private[this] val vt3: Int private[this] var rb1: Int private[this] var rb3: Int private[this] var ro1: Int private[this] var ro2: Int -private[this] var rt1: Int private[this] var rt3: Int type tb1 = test.Completion2.tb1 type to1 = test.Completion2.to1 type tt1 = test.Completion2.tt1 +val vt1: Int ================================================================================ diff --git a/test/files/presentation/t4287c.check b/test/files/presentation/t4287c.check deleted file mode 100644 index 42fc30997df4..000000000000 --- a/test/files/presentation/t4287c.check +++ /dev/null @@ -1,11 +0,0 @@ -reload: Foo.scala - -askHyperlinkPos for `A` at (1,18) Foo.scala -================================================================================ -[response] found askHyperlinkPos for `A` at (3,8) Foo.scala -================================================================================ - -askHyperlinkPos for `a` at (1,25) Foo.scala -================================================================================ -[response] found askHyperlinkPos for `a` at (4,7) Foo.scala -================================================================================ diff --git a/test/files/presentation/t4287c/Test.scala b/test/files/presentation/t4287c/Test.scala deleted file mode 100644 index bec1131c4cc1..000000000000 --- a/test/files/presentation/t4287c/Test.scala +++ /dev/null @@ -1,3 +0,0 @@ -import scala.tools.nsc.interactive.tests.InteractiveTest - -object Test extends InteractiveTest \ No newline at end of file diff --git a/test/files/presentation/t4287c/src/Foo.scala b/test/files/presentation/t4287c/src/Foo.scala deleted file mode 100644 index 26870b502132..000000000000 --- a/test/files/presentation/t4287c/src/Foo.scala +++ /dev/null @@ -1,9 +0,0 @@ -class A(a: Int = A/*#*/.a/*#*/) - -object A { - val a = 2 -} - -class B extends A { - def this(a) = this() -} \ No newline at end of file diff --git a/test/files/run/SymbolsTest.scala b/test/files/run/SymbolsTest.scala index d5948ea168fa..7c185b0e0995 100644 --- a/test/files/run/SymbolsTest.scala +++ b/test/files/run/SymbolsTest.scala @@ -137,16 +137,16 @@ object Test { // } // val an2 = () => { // object nested { - // val m = 'mfsa + // val m = 'mfsa // } // nested.m // } // val an3 = () => { // object nested { - // val f = () => { - // 'layered - // } - // def gets = f() + // val f = () => { + // 'layered + // } + // def gets = f() // } // nested.gets // } @@ -204,8 +204,8 @@ object Test { val s1 = 's1 def s2 = 's2 object inner { - val s3 = 's3 - val s4 = 's4 + val s3 = 's3 + val s4 = 's4 } } @@ -223,8 +223,8 @@ object Test { val s5 = 's5 def s6 = 's6 object inner2 { - val s7 = 's7 - def s8 = 's8 + val s7 = 's7 + def s8 = 's8 } } assert(Local.s5 == 's5) diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check index 1bb7c6ceab14..ca0005ea4d14 100644 --- a/test/files/run/analyzerPlugins.check +++ 
b/test/files/run/analyzerPlugins.check @@ -21,7 +21,6 @@ lub(List(Int @testAnn, Int)) [1] pluginsPt(?, Trees$Annotated) [7] pluginsPt(?, Trees$Apply) [11] pluginsPt(?, Trees$ApplyImplicitView) [2] -pluginsPt(?, Trees$Assign) [7] pluginsPt(?, Trees$Block) [4] pluginsPt(?, Trees$ClassDef) [2] pluginsPt(?, Trees$DefDef) [14] @@ -31,9 +30,9 @@ pluginsPt(?, Trees$Literal) [16] pluginsPt(?, Trees$New) [5] pluginsPt(?, Trees$PackageDef) [1] pluginsPt(?, Trees$Return) [1] -pluginsPt(?, Trees$Select) [50] +pluginsPt(?, Trees$Select) [43] pluginsPt(?, Trees$Super) [2] -pluginsPt(?, Trees$This) [20] +pluginsPt(?, Trees$This) [13] pluginsPt(?, Trees$TypeApply) [3] pluginsPt(?, Trees$TypeBoundsTree) [2] pluginsPt(?, Trees$TypeDef) [1] @@ -47,23 +46,19 @@ pluginsPt(Boolean @testAnn, Trees$Literal) [1] pluginsPt(Boolean @testAnn, Trees$Select) [1] pluginsPt(Boolean, Trees$Apply) [1] pluginsPt(Boolean, Trees$Ident) [1] -pluginsPt(Boolean, Trees$Literal) [1] pluginsPt(Double, Trees$Select) [1] pluginsPt(Int @testAnn, Trees$Literal) [1] pluginsPt(Int, Trees$Apply) [1] -pluginsPt(Int, Trees$Ident) [2] -pluginsPt(Int, Trees$If) [1] -pluginsPt(Int, Trees$Literal) [5] +pluginsPt(Int, Trees$Ident) [1] +pluginsPt(Int, Trees$Literal) [4] pluginsPt(Int, Trees$Select) [3] -pluginsPt(List, Trees$Apply) [1] pluginsPt(List[Any], Trees$Select) [1] pluginsPt(String @testAnn, Trees$Select) [1] pluginsPt(String, Trees$Apply) [1] pluginsPt(String, Trees$Block) [2] -pluginsPt(String, Trees$Ident) [4] +pluginsPt(String, Trees$Ident) [3] pluginsPt(String, Trees$Literal) [1] pluginsPt(String, Trees$Select) [1] -pluginsPt(String, Trees$Typed) [1] pluginsPt(Unit, Trees$Assign) [1] pluginsPt(testAnn, Trees$Apply) [5] pluginsTypeSig(, Trees$Template) [2] @@ -119,7 +114,7 @@ pluginsTyped(=> Int, Trees$TypeApply) [1] pluginsTyped(=> String @testAnn, Trees$Select) [1] pluginsTyped(A, Trees$Apply) [1] pluginsTyped(A, Trees$Ident) [2] -pluginsTyped(A, Trees$This) [8] +pluginsTyped(A, Trees$This) [1] pluginsTyped(A, Trees$TypeTree) [4] pluginsTyped(A.super.type, Trees$Super) [1] pluginsTyped(A.this.type, Trees$This) [11] @@ -128,25 +123,23 @@ pluginsTyped(AnyRef, Trees$Select) [4] pluginsTyped(Array[Any], Trees$ArrayValue) [1] pluginsTyped(Boolean @testAnn, Trees$Select) [1] pluginsTyped(Boolean @testAnn, Trees$TypeTree) [4] -pluginsTyped(Boolean(false), Trees$Literal) [2] +pluginsTyped(Boolean(false), Trees$Literal) [1] pluginsTyped(Boolean, Trees$Apply) [1] -pluginsTyped(Boolean, Trees$Select) [4] +pluginsTyped(Boolean, Trees$Select) [3] pluginsTyped(Char('c'), Trees$Literal) [2] pluginsTyped(Double, Trees$Apply) [3] pluginsTyped(Double, Trees$Select) [6] pluginsTyped(Int @testAnn, Trees$TypeTree) [2] pluginsTyped(Int @testAnn, Trees$Typed) [2] -pluginsTyped(Int(0), Trees$Literal) [3] +pluginsTyped(Int(0), Trees$Literal) [2] pluginsTyped(Int(1) @testAnn, Trees$Typed) [1] pluginsTyped(Int(1), Trees$Literal) [8] pluginsTyped(Int(2), Trees$Literal) [1] pluginsTyped(Int, Trees$Apply) [1] -pluginsTyped(Int, Trees$Ident) [2] -pluginsTyped(Int, Trees$If) [2] -pluginsTyped(Int, Trees$Select) [15] +pluginsTyped(Int, Trees$Ident) [1] +pluginsTyped(Int, Trees$If) [1] +pluginsTyped(Int, Trees$Select) [12] pluginsTyped(Int, Trees$TypeTree) [13] -pluginsTyped(List, Trees$Apply) [1] -pluginsTyped(List, Trees$Select) [1] pluginsTyped(List[Any], Trees$Apply) [1] pluginsTyped(List[Any], Trees$Select) [1] pluginsTyped(List[Any], Trees$TypeTree) [3] @@ -159,15 +152,13 @@ pluginsTyped(String(""), Trees$Literal) [2] pluginsTyped(String("huhu"), 
Trees$Literal) [1] pluginsTyped(String("str") @testAnn, Trees$Typed) [1] pluginsTyped(String("str"), Trees$Literal) [1] -pluginsTyped(String("str"), Trees$Typed) [1] pluginsTyped(String("two"), Trees$Literal) [2] pluginsTyped(String, Trees$Apply) [2] pluginsTyped(String, Trees$Block) [2] -pluginsTyped(String, Trees$Ident) [1] -pluginsTyped(String, Trees$Select) [9] +pluginsTyped(String, Trees$Select) [7] pluginsTyped(String, Trees$TypeTree) [7] pluginsTyped(Unit, Trees$Apply) [2] -pluginsTyped(Unit, Trees$Assign) [8] +pluginsTyped(Unit, Trees$Assign) [1] pluginsTyped(Unit, Trees$Block) [4] pluginsTyped(Unit, Trees$If) [1] pluginsTyped(Unit, Trees$Literal) [5] diff --git a/test/files/run/programmatic-main.check b/test/files/run/programmatic-main.check index 280a4f43d5b4..03f8273c178c 100644 --- a/test/files/run/programmatic-main.check +++ b/test/files/run/programmatic-main.check @@ -10,17 +10,18 @@ superaccessors 6 add super accessors in traits and nested classes pickler 8 serialize symbol tables refchecks 9 reference/override checking, translate nested objects uncurry 10 uncurry, translate function values to anonymous classes - tailcalls 11 replace tail calls by jumps - specialize 12 @specialized-driven class and method specialization - explicitouter 13 this refs to outer pointers - erasure 14 erase types, add interfaces for traits - posterasure 15 clean up erased inline classes - lazyvals 16 allocate bitmaps, translate lazy vals into lazified defs - lambdalift 17 move nested functions to top level - constructors 18 move field definitions into constructors - flatten 19 eliminate inner classes - mixin 20 mixin composition - cleanup 21 platform-specific cleanups, generate reflective calls - delambdafy 22 remove lambdas - jvm 23 generate JVM bytecode - terminal 24 the last phase during a compilation run + fields 11 synthesize accessors and fields + tailcalls 12 replace tail calls by jumps + specialize 13 @specialized-driven class and method specialization + explicitouter 14 this refs to outer pointers + erasure 15 erase types, add interfaces for traits + posterasure 16 clean up erased inline classes + lazyvals 17 allocate bitmaps, translate lazy vals into lazified defs + lambdalift 18 move nested functions to top level + constructors 19 move field definitions into constructors + flatten 20 eliminate inner classes + mixin 21 mixin composition + cleanup 22 platform-specific cleanups, generate reflective calls + delambdafy 23 remove lambdas + jvm 24 generate JVM bytecode + terminal 25 the last phase during a compilation run diff --git a/test/files/run/reflection-fieldsymbol-navigation.check b/test/files/run/reflection-fieldsymbol-navigation.check index ae0597a04516..fd06c78a1874 100644 --- a/test/files/run/reflection-fieldsymbol-navigation.check +++ b/test/files/run/reflection-fieldsymbol-navigation.check @@ -1,6 +1,6 @@ -method x +variable x false variable x true -method x -method x_= +variable x +variable x diff --git a/test/files/run/repl-colon-type.check b/test/files/run/repl-colon-type.check index 21fbe34d96b4..1217e8d8c2d7 100644 --- a/test/files/run/repl-colon-type.check +++ b/test/files/run/repl-colon-type.check @@ -35,7 +35,7 @@ Int scala> :type protected lazy val f = 5 :5: error: lazy value f cannot be accessed in object $iw - Access to protected value f not permitted because + Access to protected lazy value f not permitted because enclosing object $eval in package $line13 is not a subclass of object $iw where target is defined lazy val $result = f diff --git 
a/test/files/run/showdecl.check b/test/files/run/showdecl.check index b8d7f94c57f7..d431c36f6d0d 100644 --- a/test/files/run/showdecl.check +++ b/test/files/run/showdecl.check @@ -8,7 +8,7 @@ initialized y: lazy val y: Int uninitialized z: def z: initialized z: def z: Int uninitialized t: def t: -initialized t: def t[T <: Int](x: D)(y: x.W): Int +initialized t: def t[T <: ](x: D)(y: x.W): Int uninitialized W: type W = String initialized W: type W = String uninitialized C: class C extends diff --git a/test/files/run/showdecl/Macros_1.scala b/test/files/run/showdecl/Macros_1.scala index c68dd275defd..89b8e8d3c20d 100644 --- a/test/files/run/showdecl/Macros_1.scala +++ b/test/files/run/showdecl/Macros_1.scala @@ -9,7 +9,7 @@ object Macros { import c.universe._ def test(sym: Symbol): Unit = { println(s"uninitialized ${sym.name}: ${showDecl(sym)}") - sym.info + sym.info // NOTE: not fullyInitializeSymbol, so some parts may still be LazyTypes println(s"initialized ${sym.name}: ${showDecl(sym)}") } diff --git a/test/files/run/showraw_mods.check b/test/files/run/showraw_mods.check index 4d3416042229..ff77d22adf01 100644 --- a/test/files/run/showraw_mods.check +++ b/test/files/run/showraw_mods.check @@ -1 +1 @@ -Block(List(ClassDef(Modifiers(ABSTRACT | DEFAULTPARAM/TRAIT), TypeName("C"), List(), Template(List(Ident(TypeName("AnyRef"))), noSelfType, List(DefDef(Modifiers(), TermName("$init$"), List(), List(List()), TypeTree(), Block(List(), Literal(Constant(())))), ValDef(Modifiers(PRIVATE | LOCAL), TermName("x"), TypeTree(), Literal(Constant(2))), ValDef(Modifiers(MUTABLE), TermName("y"), TypeTree(), Select(This(TypeName("C")), TermName("x"))), ValDef(Modifiers(LAZY), TermName("z"), TypeTree(), Select(This(TypeName("C")), TermName("y"))))))), Literal(Constant(()))) +Block(List(ClassDef(Modifiers(ABSTRACT | DEFAULTPARAM/TRAIT), TypeName("C"), List(), Template(List(Ident(TypeName("AnyRef"))), noSelfType, List(DefDef(Modifiers(), TermName("$init$"), List(), List(List()), TypeTree(), Block(List(), Literal(Constant(())))), DefDef(Modifiers(PRIVATE | METHOD | LOCAL | STABLE | ACCESSOR), TermName("x"), List(), List(), TypeTree(), Literal(Constant(2))), DefDef(Modifiers(METHOD | ACCESSOR), TermName("y"), List(), List(), TypeTree(), Select(This(TypeName("C")), TermName("x"))), DefDef(Modifiers(METHOD | ACCESSOR), TermName("y_$eq"), List(), List(List(ValDef(Modifiers(PARAM | SYNTHETIC), TermName("x$1"), TypeTree(), EmptyTree))), TypeTree(), EmptyTree), ValDef(Modifiers(LAZY), TermName("z"), TypeTree(), Select(This(TypeName("C")), TermName("y"))))))), Literal(Constant(()))) diff --git a/test/files/run/t4287inferredMethodTypes.check b/test/files/run/t4287inferredMethodTypes.check deleted file mode 100644 index 56e9c097cc12..000000000000 --- a/test/files/run/t4287inferredMethodTypes.check +++ /dev/null @@ -1,30 +0,0 @@ -[[syntax trees at end of typer]] // newSource1.scala -[0:92]package [0:0] { - [0:21]class A extends [7:21][23]scala.AnyRef { - [8:16] private[this] val a: [8]Int = _; - <8:20>def (<8:20>a: [11] = [17:20]A.a): [7]A = <8:20>{ - <8:20><8:20><8:20>A.super.(); - <8:20>() - } - }; - [23:47]object A extends [32:47][49]scala.AnyRef { - [49]def (): [32]A.type = [49]{ - [49][49][49]A.super.(); - [32]() - }; - [36:45]private[this] val a: [40]Int = [44:45]2; - [40] def a: [40]Int = [40][40]A.this.a; - [8] def $default$1: [8]Int = [19]A.a - }; - [49:92]class B extends [57:92][65:66]A { - [65]def (): [57]B = [65]{ - [65][65][65]B.super.([65]A.$default$1); - [57]() - }; - [70:90]def ([79:80]a: [79]Int): [74]B 
= [84:90]{ - [84:90][84:90][84]B.this.(); - [84]() - } - } -} - diff --git a/test/files/run/t4287inferredMethodTypes.scala b/test/files/run/t4287inferredMethodTypes.scala deleted file mode 100644 index f14e672da88a..000000000000 --- a/test/files/run/t4287inferredMethodTypes.scala +++ /dev/null @@ -1,25 +0,0 @@ -import scala.tools.partest.DirectTest - -object Test extends DirectTest { - - override def extraSettings: String = - s"-usejavacp -Yinfer-argument-types -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path}" - - override def code = """ -class A(a: Int = A.a) - -object A { - val a = 2 -} - -class B extends A { - def this(a) = this() -} - """.trim - - override def show(): Unit = { - Console.withErr(System.out) { - compile() - } - } -} \ No newline at end of file diff --git a/test/files/run/t6733.check b/test/files/run/t6733.check index aeb595fbfd60..7062301c565e 100644 --- a/test/files/run/t6733.check +++ b/test/files/run/t6733.check @@ -2,23 +2,22 @@ method $init$: isPrivateThis = false, isProtectedThis = false value pri1a: isPrivateThis = true, isProtectedThis = false method pri2a: isPrivateThis = true, isProtectedThis = false variable pri3a: isPrivateThis = true, isProtectedThis = false -value pri4a: isPrivateThis = true, isProtectedThis = false +variable pri3a: isPrivateThis = true, isProtectedThis = false +lazy value pri4a: isPrivateThis = true, isProtectedThis = false lazy value pri4a: isPrivateThis = true, isProtectedThis = false type Pri5a: isPrivateThis = true, isProtectedThis = false class Pri6: isPrivateThis = true, isProtectedThis = false trait Pri7: isPrivateThis = true, isProtectedThis = false object Pri8: isPrivateThis = true, isProtectedThis = false value pro1a: isPrivateThis = false, isProtectedThis = true -value pro1a: isPrivateThis = true, isProtectedThis = false value pro1b: isPrivateThis = false, isProtectedThis = true method pro2a: isPrivateThis = false, isProtectedThis = true method pro2b: isPrivateThis = false, isProtectedThis = true -method pro3a: isPrivateThis = false, isProtectedThis = true -method pro3a_=: isPrivateThis = false, isProtectedThis = true -variable pro3a: isPrivateThis = true, isProtectedThis = false -method pro3b: isPrivateThis = false, isProtectedThis = true -method pro3b_=: isPrivateThis = false, isProtectedThis = true -value pro4a: isPrivateThis = false, isProtectedThis = true +variable pro3a: isPrivateThis = false, isProtectedThis = true +variable pro3a: isPrivateThis = false, isProtectedThis = true +variable pro3b: isPrivateThis = false, isProtectedThis = true +variable pro3b: isPrivateThis = false, isProtectedThis = true +lazy value pro4a: isPrivateThis = false, isProtectedThis = true lazy value pro4a: isPrivateThis = true, isProtectedThis = false type Pro5a: isPrivateThis = false, isProtectedThis = true type Pro5b: isPrivateThis = false, isProtectedThis = true diff --git a/test/files/run/t7533.check b/test/files/run/t7533.check index fa5b3edc8ff0..61fd4657bd5c 100644 --- a/test/files/run/t7533.check +++ b/test/files/run/t7533.check @@ -1,30 +1,29 @@ Testing Symbol.isAbstract... 
=======class C======= -class C => true -constructor C => false -value x1 => true -value x2 => false -value x2 => false -method y1 => true -method y2 => false -type T1 => true -type T2 => false +class C => abstract +constructor C => concrete +value xAbs => abstract +value x => concrete +value x => concrete +method yAbs => abstract +method y => concrete +type TAbs => abstract +type T => concrete =======trait T======= -trait T => true -method $init$ => false -value z1 => true -value z2 => false -value z2 => false -method w1 => true -method w2 => false -type U1 => true -type U2 => false -=======class D======= -class D => false -constructor D => false -value x1 => false -value x1 => false -method y1 => false +trait T => abstract +method $init$ => concrete +value zAbs => abstract +value z => concrete +method wAbs => abstract +method w => concrete +type UAbs => abstract +type U => concrete +=======class AllConcrete======= +class AllConcrete => concrete +constructor AllConcrete => concrete +value xAbs => concrete +value xAbs => concrete +method yAbs => concrete =======object M======= -object M => false -constructor M => false +object M => concrete +constructor M => concrete diff --git a/test/files/run/t7533.scala b/test/files/run/t7533.scala index c7bd8e8d434c..65c5c26b428d 100644 --- a/test/files/run/t7533.scala +++ b/test/files/run/t7533.scala @@ -1,24 +1,24 @@ import scala.reflect.runtime.universe._ abstract class C { - val x1: Int - val x2: Int = 2 - def y1: Int - def y2: Int = 2 - type T1 <: Int - type T2 = Int + val xAbs: Int + val x: Int = 2 + def yAbs: Int + def y: Int = 2 + type TAbs <: Int + type T = Int } trait T { - val z1: Int - val z2: Int = 2 - def w1: Int - def w2: Int = 2 - type U1 <: Int - type U2 = Int + val zAbs: Int + val z: Int = 2 + def wAbs: Int + def w: Int = 2 + type UAbs <: Int + type U = Int } -class D extends C { - val x1 = 3 - def y1 = 3 +class AllConcrete extends C { + val xAbs = 3 + def yAbs = 3 } object M @@ -27,12 +27,12 @@ object Test extends App { def test[T: TypeTag] = { val sym = typeOf[T].typeSymbol println(s"=======$sym=======") - def printAbstract(sym: Symbol) = println(s"$sym => ${sym.isAbstract}") + def printAbstract(sym: Symbol) = println(s"$sym => ${if (sym.isAbstract) "abstract" else "concrete"}") printAbstract(sym) sym.info.decls.sorted.foreach(printAbstract) } test[C] test[T] - test[D] + test[AllConcrete] test[M.type] } \ No newline at end of file diff --git a/test/files/run/t8549.scala b/test/files/run/t8549.scala index da7a731459fd..7a38491231dd 100644 --- a/test/files/run/t8549.scala +++ b/test/files/run/t8549.scala @@ -79,7 +79,7 @@ object Test extends App { } } - // Generated on 20160715-08:27:53 with Scala version 2.12.0-20160715-012500-f5a80bd) + // Generated on 20160720-18:56:11 with Scala version 2.12.0-local-5815f9a) overwrite.foreach(updateComment) check(Some(1))("rO0ABXNyAApzY2FsYS5Tb21lESLyaV6hi3QCAAFMAAV2YWx1ZXQAEkxqYXZhL2xhbmcvT2JqZWN0O3hyAAxzY2FsYS5PcHRpb27+aTf92w5mdAIAAHhwc3IAEWphdmEubGFuZy5JbnRlZ2VyEuKgpPeBhzgCAAFJAAV2YWx1ZXhyABBqYXZhLmxhbmcuTnVtYmVyhqyVHQuU4IsCAAB4cAAAAAE=") @@ -174,7 +174,7 @@ object Test extends App { // check(mutable.ArraySeq(1, 2, 3))( "rO0ABXNyACFzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQXJyYXlTZXEVPD3SKEkOcwIAAkkABmxlbmd0aFsABWFycmF5dAATW0xqYXZhL2xhbmcvT2JqZWN0O3hwAAAAA3VyABNbTGphdmEubGFuZy5PYmplY3Q7kM5YnxBzKWwCAAB4cAAAAANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABQAAAAJzcQB+AAUAAAAD") check(mutable.AnyRefMap("a" -> "A"))( 
"rO0ABXNyACJzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQW55UmVmTWFwAAAAAAAAAAECAAdJAAVfc2l6ZUkAB192YWNhbnRJAARtYXNrTAAMZGVmYXVsdEVudHJ5dAARTHNjYWxhL0Z1bmN0aW9uMTtbACtzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkQW55UmVmTWFwJCRfaGFzaGVzdAACW0lbAClzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkQW55UmVmTWFwJCRfa2V5c3QAE1tMamF2YS9sYW5nL09iamVjdDtbACtzY2FsYSRjb2xsZWN0aW9uJG11dGFibGUkQW55UmVmTWFwJCRfdmFsdWVzcQB+AAN4cAAAAAEAAAAAAAAAB3NyADNzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQW55UmVmTWFwJEV4Y2VwdGlvbkRlZmF1bHQAAAAAAAAAAQIAAHhwdXIAAltJTbpgJnbqsqUCAAB4cAAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA+UkA2AAAAAHVyABNbTGphdmEubGFuZy5PYmplY3Q7kM5YnxBzKWwCAAB4cAAAAAhwcHBwcHB0AAFhcHVxAH4ACQAAAAhwcHBwcHB0AAFBcA==") check(mutable.ArrayStack(1, 2, 3))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuQXJyYXlTdGFja3bdxXbcnLBeAgACSQAqc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJEFycmF5U3RhY2skJGluZGV4WwAqc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJEFycmF5U3RhY2skJHRhYmxldAATW0xqYXZhL2xhbmcvT2JqZWN0O3hwAAAAA3VyABNbTGphdmEubGFuZy5PYmplY3Q7kM5YnxBzKWwCAAB4cAAAAANzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAA3NxAH4ABQAAAAJzcQB+AAUAAAAB") - check(mutable.DoubleLinkedList(1, 2, 3))( "rO0ABXNyAClzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuRG91YmxlTGlua2VkTGlzdI73LKsKRr1RAgADTAAEZWxlbXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wABG5leHR0AB5Mc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1NlcTtMAARwcmV2cQB+AAJ4cHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABc3EAfgAAc3EAfgAEAAAAAnNxAH4AAHNxAH4ABAAAAANzcQB+AABwcQB+AAtxAH4ACXEAfgAHcQB+AANw") + check(mutable.DoubleLinkedList(1, 2, 3))( "rO0ABXNyAClzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuRG91YmxlTGlua2VkTGlzdI73LKsKRr1RAgADTAAEZWxlbXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wABG5leHR0ACtMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL0RvdWJsZUxpbmtlZExpc3Q7TAAEcHJldnEAfgACeHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAHNxAH4ABAAAAAJzcQB+AABzcQB+AAQAAAADc3EAfgAAcHEAfgALcQB+AAlxAH4AB3EAfgADcA==") check(mutable.HashMap())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaE1hcAAAAAAAAAABAwAAeHB3DQAAAu4AAAAAAAAABAB4") check(mutable.HashMap(1 -> 1))( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGFzaE1hcAAAAAAAAAABAwAAeHB3DQAAAu4AAAABAAAABABzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXEAfgAEeA==") @@ -189,7 +189,7 @@ object Test extends App { // check(new mutable.History())( "rO0ABXNyACBzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuSGlzdG9yeUhuXxDIFJrsAgACSQAKbWF4SGlzdG9yeUwAA2xvZ3QAIExzY2FsYS9jb2xsZWN0aW9uL211dGFibGUvUXVldWU7eHAAAAPoc3IAHnNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5RdWV1ZbjMURVfOuHHAgAAeHIAJHNjYWxhLmNvbGxlY3Rpb24ubXV0YWJsZS5NdXRhYmxlTGlzdFJpnjJ+gFbAAgADSQADbGVuTAAGZmlyc3QwdAAlTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9MaW5rZWRMaXN0O0wABWxhc3QwcQB+AAV4cAAAAABzcgAjc2NhbGEuY29sbGVjdGlvbi5tdXRhYmxlLkxpbmtlZExpc3Sak+nGCZHaUQIAAkwABGVsZW10ABJMamF2YS9sYW5nL09iamVjdDtMAARuZXh0dAAeTHNjYWxhL2NvbGxlY3Rpb24vbXV0YWJsZS9TZXE7eHBwcQB+AApxAH4ACg==") check(mutable.LinkedHashMap(1 -> 2))( "rO0ABXNyACZzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlua2VkSGFzaE1hcAAAAAAAAAABAwAAeHB3DQAAAu4AAAABAAAABABzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJ4") check(mutable.LinkedHashSet(1, 2, 3))( "rO0ABXNyACZzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlua2VkSGFzaFNldAAAAAAAAAABAwAAeHB3DQAAAu4AAAADAAAABABzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAgAAAAJzcQB+AAIAAAADeA==") - check(mutable.LinkedList(1, 2, 3))( 
"rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlua2VkTGlzdJqT6cYJkdpRAgACTAAEZWxlbXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wABG5leHR0AB5Mc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL1NlcTt4cHNyABFqYXZhLmxhbmcuSW50ZWdlchLioKT3gYc4AgABSQAFdmFsdWV4cgAQamF2YS5sYW5nLk51bWJlcoaslR0LlOCLAgAAeHAAAAABc3EAfgAAc3EAfgAEAAAAAnNxAH4AAHNxAH4ABAAAAANzcQB+AABwcQB+AAs=") + check(mutable.LinkedList(1, 2, 3))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlua2VkTGlzdJqT6cYJkdpRAgACTAAEZWxlbXQAEkxqYXZhL2xhbmcvT2JqZWN0O0wABG5leHR0ACVMc2NhbGEvY29sbGVjdGlvbi9tdXRhYmxlL0xpbmtlZExpc3Q7eHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4AAHNxAH4ABAAAAAJzcQB+AABzcQB+AAQAAAADc3EAfgAAcHEAfgAL") // TODO SI-8576 unstable under -Xcheckinit // check(mutable.ListBuffer(1, 2, 3))( "rO0ABXNyACNzY2FsYS5jb2xsZWN0aW9uLm11dGFibGUuTGlzdEJ1ZmZlci9y9I7QyWzGAwAEWgAIZXhwb3J0ZWRJAANsZW5MAAVsYXN0MHQAKUxzY2FsYS9jb2xsZWN0aW9uL2ltbXV0YWJsZS8kY29sb24kY29sb247TAAqc2NhbGEkY29sbGVjdGlvbiRtdXRhYmxlJExpc3RCdWZmZXIkJHN0YXJ0dAAhTHNjYWxhL2NvbGxlY3Rpb24vaW1tdXRhYmxlL0xpc3Q7eHBzcgARamF2YS5sYW5nLkludGVnZXIS4qCk94GHOAIAAUkABXZhbHVleHIAEGphdmEubGFuZy5OdW1iZXKGrJUdC5TgiwIAAHhwAAAAAXNxAH4ABAAAAAJzcQB+AAQAAAADc3IALHNjYWxhLmNvbGxlY3Rpb24uaW1tdXRhYmxlLkxpc3RTZXJpYWxpemVFbmQkilxjW/dTC20CAAB4cHcFAAAAAAN4") diff --git a/test/files/run/trait_fields_bytecode.scala b/test/files/run/trait_fields_bytecode.scala new file mode 100644 index 000000000000..d87412f43ed2 --- /dev/null +++ b/test/files/run/trait_fields_bytecode.scala @@ -0,0 +1,23 @@ +trait TFinal { final val bla: Int = 123 } + +// bla should be final in C +class CFinal extends TFinal + + +trait TConst { final val C = "S" } +// there should be a C method in `T$class`! +class CConst extends TConst { } + + +object Test { + def main(args: Array[String]): Unit = { + val f1 = classOf[CFinal].getDeclaredMethod("bla") + import java.lang.reflect.Modifier._ + assert(isFinal(f1.getModifiers), f1) + + classOf[CConst].getMethod("C") + + import language.reflectiveCalls + assert(new CConst().asInstanceOf[{def C: String}].C == "S") + } +} diff --git a/test/files/run/trait_fields_final.scala b/test/files/run/trait_fields_final.scala new file mode 100644 index 000000000000..8b32e5b47d9b --- /dev/null +++ b/test/files/run/trait_fields_final.scala @@ -0,0 +1,21 @@ +// TODO: clarify meaning of final in traits +// In the new compiler, there's no final modifier after mixin for `meh`'s setter, +// whereas 2.12.0-M3 makes meh's trait setter final. 
+// NOTE: bytecode is identical, but the scalasignature is different +trait Foo { self: Meh => + def bar(x: String) = x == "a" + private final val meh = bar("a") +} + +abstract class Meh extends Foo + +object Test { + def main(args: Array[String]): Unit = { + val setter = classOf[Meh].getDeclaredMethod("Foo$_setter_$Foo$$meh_$eq", java.lang.Boolean.TYPE) + val getter = classOf[Meh].getDeclaredMethod("Foo$$meh") + import java.lang.reflect.Modifier._ + assert(isFinal(setter.getModifiers), setter) + assert(isFinal(getter.getModifiers), getter) + } + +} diff --git a/test/files/run/trait_fields_init.check b/test/files/run/trait_fields_init.check new file mode 100644 index 000000000000..84c1a2ead91a --- /dev/null +++ b/test/files/run/trait_fields_init.check @@ -0,0 +1,21 @@ +x +y +z +abstract +public +protected +abstract protected +private +private[this] +abstract +public +protected +abstract protected +private +private[this] +abstract +public +protected +abstract protected +private +private[this] diff --git a/test/files/run/trait_fields_init.scala b/test/files/run/trait_fields_init.scala new file mode 100644 index 000000000000..496911d5384a --- /dev/null +++ b/test/files/run/trait_fields_init.scala @@ -0,0 +1,55 @@ +trait T { + val abs: String + protected val protabs: String + val pub = "public" + protected val prot = "protected" + private val privvy = "private" + private[this] val privateThis = "private[this]" + // TODO: + // final val const = "const" + + trait Nested { println(abs + privateThis) } + + object NO { + println(abs) + println(pub) + println(prot) + println(protabs) + println(privvy) + println(privateThis) + } + + trait NT { + println(abs) + println(pub) + println(prot) + println(protabs) + println(privvy) + println(privateThis) + } + + class NC { + println(abs) + println(pub) + println(prot) + println(protabs) + println(privvy) + println(privateThis) + } +} + +class C extends AnyRef with T { + println("x") + val abs = "abstract" + println("y") + val protabs = "abstract protected" + final val const = "const" + println("z") +} + +object Test extends C { + def main(args: Array[String]): Unit = { + NO + new NT{} + new NC +}} \ No newline at end of file diff --git a/test/files/run/trait_fields_repl.check b/test/files/run/trait_fields_repl.check new file mode 100644 index 000000000000..d03a565c7b3e --- /dev/null +++ b/test/files/run/trait_fields_repl.check @@ -0,0 +1,11 @@ + +scala> trait B { val y = "a" } +defined trait B + +scala> trait T extends B { val x: y.type = y } +defined trait T + +scala> println((new T{}).x) +a + +scala> :quit diff --git a/test/files/run/trait_fields_repl.scala b/test/files/run/trait_fields_repl.scala new file mode 100644 index 000000000000..311477b7d2f1 --- /dev/null +++ b/test/files/run/trait_fields_repl.scala @@ -0,0 +1,10 @@ +// TODO: fix AME when this runs in REPL +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + def code = """ +trait B { val y = "a" } +trait T extends B { val x: y.type = y } +println((new T{}).x) +""" +} diff --git a/test/files/run/trait_fields_three_layer_overrides.check b/test/files/run/trait_fields_three_layer_overrides.check new file mode 100644 index 000000000000..8bb45803c512 --- /dev/null +++ b/test/files/run/trait_fields_three_layer_overrides.check @@ -0,0 +1,2 @@ +the real universe.TypeTag +1 diff --git a/test/files/run/trait_fields_three_layer_overrides.scala b/test/files/run/trait_fields_three_layer_overrides.scala new file mode 100644 index 000000000000..9d7aa94341e3 --- /dev/null +++ 
b/test/files/run/trait_fields_three_layer_overrides.scala @@ -0,0 +1,25 @@ +// interesting hierarchies/overrides distilled from reflect/compiler + +trait Aliases { + val TypeTag = "universe.TypeTag" +} +trait AliasesOverrides extends Aliases { // or self: Aliases => + override val TypeTag = "the real universe.TypeTag" +} +class Context extends Aliases with AliasesOverrides + + + +trait SymbolTable { + def currentRunId: Int = -1 +} +trait ReflectSetup extends SymbolTable { + override val currentRunId = 1 +} +class G extends SymbolTable with ReflectSetup + + +object Test extends App { + println((new Context).TypeTag) + println((new G).currentRunId) +} \ No newline at end of file diff --git a/test/files/run/trait_fields_volatile.scala b/test/files/run/trait_fields_volatile.scala new file mode 100644 index 000000000000..eedb6de1c26c --- /dev/null +++ b/test/files/run/trait_fields_volatile.scala @@ -0,0 +1,13 @@ +// bytecode should reflect volatile annotation +trait VolatileAbort { + @volatile private var abortflag = false +} +class DefaultSignalling extends VolatileAbort + +object Test { + def main(args: Array[String]): Unit = { + val field = classOf[DefaultSignalling].getDeclaredFields.find(_.getName.contains("abortflag")).get + assert(java.lang.reflect.Modifier.isVolatile(field.getModifiers), field) + } + +} diff --git a/test/junit/scala/reflect/internal/PrintersTest.scala b/test/junit/scala/reflect/internal/PrintersTest.scala index d581ca8cf432..6f9b711b348c 100644 --- a/test/junit/scala/reflect/internal/PrintersTest.scala +++ b/test/junit/scala/reflect/internal/PrintersTest.scala @@ -151,7 +151,7 @@ class BasePrintTest { |else | ((a.toString): String)""", typedCode=sm""" - |val a = 1; + |val a: Int = 1; |if (PrintersContext.this.a.>(1)) | ((PrintersContext.this.a): scala.Int) |else @@ -864,7 +864,7 @@ class TraitPrintTest { @Test def testTraitWithSelf2 = assertPrintedCode(sm""" |trait X { self: scala.Cloneable with scala.Serializable => - | val x: scala.Int = 1 + | val x: Int = 1 |}""") @Test def testTraitTypeParams = assertPrintedCode("trait X[A, B]") @@ -903,7 +903,7 @@ class TraitPrintTest { | type Foo; | type XString = scala.Predef.String |} with scala.Serializable { - | val z = 7 + | val z: Int = 7 |}""") @Test def testTraitWithSingletonTypeTree = assertPrintedCode(sm""" diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala index 54f4c805c1d1..d24b4e518b57 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala @@ -104,6 +104,7 @@ class ScalaInlineInfoTest extends BytecodeTesting { ("x4()I", MethodInlineInfo(false,false,false)), ("x4$(LT;)I", MethodInlineInfo(true ,false,false)), ("x5()I", MethodInlineInfo(true, false,false)), + ("x5$(LT;)I", MethodInlineInfo(true ,false,false)), ("L$lzycompute$1(Lscala/runtime/VolatileObjectRef;)LT$L$2$;", MethodInlineInfo(true, false,false)), ("L$1(Lscala/runtime/VolatileObjectRef;)LT$L$2$;", MethodInlineInfo(true ,false,false)), ("nest$1()I", MethodInlineInfo(true, false,false)), @@ -127,7 +128,7 @@ class ScalaInlineInfoTest extends BytecodeTesting { "x3_$eq(I)V" -> MethodInlineInfo(false,false,false), "x4$lzycompute()I" -> MethodInlineInfo(true ,false,false), "x4()I" -> MethodInlineInfo(false,false,false), - "x5()I" -> MethodInlineInfo(true ,false,false), +// "x5()I" -> MethodInlineInfo(true ,false,false), -- there is no x5 in the class as it's 
implemented fully in the interface "T$$super$toString()Ljava/lang/String;" -> MethodInlineInfo(true ,false,false), "()V" -> MethodInlineInfo(false,false,false)), None) diff --git a/test/pending/run/origins.check b/test/pending/run/origins.check index b12cb6e38fbc..af94b549d302 100644 --- a/test/pending/run/origins.check +++ b/test/pending/run/origins.check @@ -1,6 +1,4 @@ ->> Origins tag 'boop' logged 65 calls from 3 distinguished sources. +>> Origins tag 'boop' logged 65 calls from 1 distinguished sources. - 50 Test$$anonfun$f3$1.apply(origins.scala:16) - 10 Test$$anonfun$f2$1.apply(origins.scala:15) - 5 Test$$anonfun$f1$1.apply(origins.scala:14) + 65 null From 8fa63b7538e169a4b72b95d9dd8fa7a8939279d9 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 30 May 2016 20:30:16 +0200 Subject: [PATCH 0208/2793] Test EnclosingMethod attribute for classes in lazy vals Local and anonymous classes need to have an EnclosingMethod attribute denoting the enclosing class and method. In fact, the enclosing class must always be defined for local and anonymous classes, but the enclosing method may be null (for local / anonymous classes defined in field initializers or local blocks within a class body). The new test here ensures that classes declared within a lazy val initializer block indeed have the enclosing method set to null. --- .../jvm/innerClassAttribute/Classes_1.scala | 37 +++++++++++++++++++ test/files/jvm/innerClassAttribute/Test.scala | 17 ++++++++- 2 files changed, 53 insertions(+), 1 deletion(-) diff --git a/test/files/jvm/innerClassAttribute/Classes_1.scala b/test/files/jvm/innerClassAttribute/Classes_1.scala index bffc495b4f72..27f01a880a7f 100644 --- a/test/files/jvm/innerClassAttribute/Classes_1.scala +++ b/test/files/jvm/innerClassAttribute/Classes_1.scala @@ -303,3 +303,40 @@ object NestedInValueClass { def f = { class C; new C } // outer class A$, outer method f } } + +object LocalAndAnonymousInLazyInitializer { + abstract class A + class C { + lazy val a: A = new A { } + lazy val b: A = { + class AA extends A + new AA + } + lazy val c: A = { + object AA extends A + AA + } + } + object O { + lazy val a: A = new A { } + lazy val b: A = { + class AA extends A + new AA + } + lazy val c: A = { + object AA extends A + AA + } + } + trait T { + lazy val a: A = new A { } + lazy val b: A = { + class AA extends A + new AA + } + lazy val c: A = { + object AA extends A + AA + } + } +} \ No newline at end of file diff --git a/test/files/jvm/innerClassAttribute/Test.scala b/test/files/jvm/innerClassAttribute/Test.scala index 1b78773d4222..5c666a615fa1 100644 --- a/test/files/jvm/innerClassAttribute/Test.scala +++ b/test/files/jvm/innerClassAttribute/Test.scala @@ -416,7 +416,7 @@ object Test extends BytecodeTest { def testAnonymousClassesMayBeNestedInSpecialized() { assertEnclosingMethod("AnonymousClassesMayBeNestedInSpecialized$C$$anon$17", "AnonymousClassesMayBeNestedInSpecialized$C", "foo", "(Ljava/lang/Object;)LAnonymousClassesMayBeNestedInSpecialized$A;") - assertEnclosingMethod("AnonymousClassesMayBeNestedInSpecialized$C$mcI$sp$$anon$18", "AnonymousClassesMayBeNestedInSpecialized$C$mcI$sp", "foo$mcI$sp", "(I)LAnonymousClassesMayBeNestedInSpecialized$A;") + assertEnclosingMethod("AnonymousClassesMayBeNestedInSpecialized$C$mcI$sp$$anon$21", "AnonymousClassesMayBeNestedInSpecialized$C$mcI$sp", "foo$mcI$sp", "(I)LAnonymousClassesMayBeNestedInSpecialized$A;") } def testNestedInValueClass() { @@ -444,6 +444,20 @@ object Test extends BytecodeTest { testInner("NestedInValueClass$A$", a, am, b, c, 
methodHandlesLookup) } + def testLocalAndAnonymousInLazyInitializer(): Unit = { + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$C$$anon$18", "LocalAndAnonymousInLazyInitializer$C", null, null) + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$C$AA$4", "LocalAndAnonymousInLazyInitializer$C", null, null) + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$C$AA$5$", "LocalAndAnonymousInLazyInitializer$C", null, null) + + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$O$$anon$19", "LocalAndAnonymousInLazyInitializer$O$", null, null) + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$O$AA$6", "LocalAndAnonymousInLazyInitializer$O$", null, null) + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$O$AA$7$", "LocalAndAnonymousInLazyInitializer$O$", null, null) + + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$T$$anon$20", "LocalAndAnonymousInLazyInitializer$T", null, null) + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$T$AA$8", "LocalAndAnonymousInLazyInitializer$T", null, null) + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$T$AA$9$", "LocalAndAnonymousInLazyInitializer$T", null, null) + } + def show(): Unit = { testA1() testA2() @@ -473,5 +487,6 @@ object Test extends BytecodeTest { testSpecializedClassesTopLevel() testAnonymousClassesMayBeNestedInSpecialized() testNestedInValueClass() + testLocalAndAnonymousInLazyInitializer() } } From fcfe7050a50d2c71094a9ac212330be87c4d0781 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 26 May 2016 20:09:28 -0700 Subject: [PATCH 0209/2793] Fields phase synthesizes modules For now, keep the info transform in refchecks. Ultimately, refchecks should only check, not transform trees/infos. Fixes https://github.com/scala/scala-dev/issues/126: the accessor for a module in a trait is correctly marked non-final (it's deferred). --- .../scala/tools/nsc/transform/Fields.scala | 308 ++++++++++++------ .../scala/tools/nsc/transform/Mixin.scala | 2 +- .../tools/nsc/typechecker/RefChecks.scala | 78 +---- .../scala/reflect/internal/Symbols.scala | 13 +- .../scala/reflect/internal/Variances.scala | 4 +- test/files/neg/t0764.check | 2 +- .../backend/jvm/opt/ScalaInlineInfoTest.scala | 2 +- 7 files changed, 221 insertions(+), 188 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Fields.scala b/src/compiler/scala/tools/nsc/transform/Fields.scala index 0dd7b1fee025..f5f0b229e430 100644 --- a/src/compiler/scala/tools/nsc/transform/Fields.scala +++ b/src/compiler/scala/tools/nsc/transform/Fields.scala @@ -14,18 +14,24 @@ import symtab.Flags._ * * For traits: * - * - Namers translates a definition `val x = rhs` into a getter `def x = rhs` -- no underlying field is created. - * - This phase synthesizes accessors and fields for any vals mixed into a non-trait class. - * - Constructors will move the rhs to an assignment in the template body. - * and those statements then move to the template into the constructor, - * which means it will initialize the fields defined in this template (and execute the corresponding side effects). - * We need to maintain the connection between getter and rhs until after specialization so that it can duplicate vals. + * - Namers translates a definition `val x = rhs` into a getter `def x = rhs` -- no underlying field is created. + * - This phase synthesizes accessors and fields for any vals mixed into a non-trait class. + * - Constructors will move the rhs to an assignment in the template body. 
+ * Those statements then move to the template into the constructor, + * which means it will initialize the fields defined in this template (and execute the corresponding side effects). + * We need to maintain the connection between getter and rhs until after specialization so that it can duplicate vals. + * - A ModuleDef is desugared to a ClassDef, an accessor (which reuses the module's term symbol) + * and a module var (unless the module is static and does not implement a member of a supertype, or we're in a trait). + * For subclasses of traits that define modules, a module var is mixed in, as well as the required module accessors. * + * Runs after uncurry to deal with classes that implement SAM traits with ValDefs. * Runs before erasure (to get bridges), and thus before lambdalift/flatten, so that nested functions/definitions must be considered. + * * We run after uncurry because it can introduce subclasses of traits with fields (SAMs with vals). * Lambdalift also introduces new fields (paramaccessors for captured vals), but runs too late in the pipeline * (mixins still synthesizes implementations for accessors that need to be mixed into subclasses of local traits that capture). * + * * In the future, would like to get closer to dotty, which lifts a val's RHS (a similar thing is done for template-level statements) * to a method `$_initialize_$1$x` instead of a block, which is used in the constructor to initialize the val. * This makes for a nice unification of strict and lazy vals, in that the RHS is lifted to a method for both, @@ -35,8 +41,8 @@ import symtab.Flags._ * if we encode the name (and place in initialisation order) of the field * in the name of its initializing method, to allow separate compilation. * (The name mangling must include ordering, and thus complicate incremental compilation: - * ideally, we'd avoid renumbering unchanged methods, but that would result in - * different bytecode between clean recompiles and incremental ones). + * ideally, we'd avoid renumbering unchanged methods, but that would result in + * different bytecode between clean recompiles and incremental ones). * * In the even longer term (Scala 3?), I agree with @DarkDimius that it would make sense * to hide the difference between strict and lazy vals. All vals are lazy, @@ -68,12 +74,14 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor // TODO: reuse MIXEDIN for NEEDS_TREES? 
override def phaseNewFlags: Long = NEEDS_TREES | OVERRIDDEN_TRAIT_SETTER + // informs the tree traversal of the shape of the tree to emit + // (it's an *overridden* trait setter) private final val OVERRIDDEN_TRAIT_SETTER = TRANS_FLAG final val TRAIT_SETTER_FLAGS = NEEDS_TREES | DEFERRED | ProtectedLocal private def accessorImplementedInSubclass(accessor: Symbol) = - (accessor hasFlag SYNTHESIZE_IMPL_IN_SUBCLASS) && (accessor hasFlag (ACCESSOR)) + (accessor hasFlag SYNTHESIZE_IMPL_IN_SUBCLASS) && (accessor hasFlag (ACCESSOR | MODULE)) private def concreteOrSynthImpl(sym: Symbol): Boolean = !(sym hasFlag DEFERRED) || (sym hasFlag SYNTHESIZE_IMPL_IN_SUBCLASS) @@ -98,7 +106,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor ) - def checkAndClearOverridden(setter: Symbol) = checkAndClear(OVERRIDDEN_TRAIT_SETTER)(setter) + def checkAndClearOverriddenTraitSetter(setter: Symbol) = checkAndClear(OVERRIDDEN_TRAIT_SETTER)(setter) def checkAndClearNeedsTrees(setter: Symbol) = checkAndClear(NEEDS_TREES)(setter) def checkAndClear(flag: Long)(sym: Symbol) = sym.hasFlag(flag) match { @@ -162,6 +170,25 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor sym setAnnotations (sym.annotations filter AnnotationInfo.mkFilter(GetterTargetClass, defaultRetention = false)) } + + // can't use the referenced field since it already tracks the module's moduleClass + private[this] val moduleVarOf = perRunCaches.newMap[Symbol, Symbol] + + private def newModuleVarSymbol(site: Symbol, module: Symbol, tp: Type, extraFlags: Long): TermSymbol = { +// println(s"new module var in $site for $module of type $tp") + val moduleVar = site.newVariable(nme.moduleVarName(module.name.toTermName), module.pos.focus, MODULEVAR | extraFlags) setInfo tp addAnnotation VolatileAttr + moduleVarOf(module) = moduleVar + + moduleVar + } + + private def moduleInit(module: Symbol) = { +// println(s"moduleInit for $module in ${module.ownerChain} --> ${moduleVarOf.get(module)}") + val moduleVar = moduleVarOf(module) + gen.mkAssignAndReturn(moduleVar, gen.newModule(module, moduleVar.info)) + } + + private object synthFieldsAndAccessors extends TypeMap { private def newTraitSetter(getter: Symbol, clazz: Symbol) = { // Add setter for an immutable, memoizing getter @@ -178,6 +205,32 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor setter } + private def newModuleAccessor(module: Symbol, site: Symbol, moduleVar: Symbol) = { + val accessor = site.newMethod(module.name.toTermName, site.pos, STABLE | MODULE | NEEDS_TREES) + + moduleVarOf(accessor) = moduleVar + + // we're in the same prefix as module, so no need for site.thisType.memberType(module) + accessor setInfo MethodType(Nil, moduleVar.info) + accessor.setModuleClass(module.moduleClass) + + if (module.isPrivate) accessor.expandName(module.owner) + + accessor + } + + + // needed for the following scenario (T could be trait or class) + // trait T { def f: Object }; object O extends T { object f }. Need to generate method f in O. 
+ // marking it as an ACCESSOR so that it will get to `getterBody` when synthesizing trees below + // it should not be considered a MODULE + def newMatchingModuleAccessor(clazz: Symbol, module: Symbol): MethodSymbol = { + val acc = clazz.newMethod(module.name.toTermName, module.pos, (module.flags & ~MODULE) | STABLE | NEEDS_TREES | ACCESSOR) + acc.referenced = module + acc setInfo MethodType(Nil, module.moduleClass.tpe) + } + + def apply(tp0: Type): Type = tp0 match { // TODO: make less destructive (name changes, decl additions, flag setting -- // none of this is actually undone when travelling back in time using atPhase) @@ -214,6 +267,11 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor if (member hasFlag STABLE) // TODO: check isGetter? newDecls += newTraitSetter(member, clazz) } + } else if (member hasFlag MODULE) { + nonStaticModuleToMethod(member) + + member setFlag NEEDS_TREES + synthesizeImplInSubclasses(member) } } @@ -228,27 +286,48 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor case tp@ClassInfoType(parents, oldDecls, clazz) if !clazz.isPackageClass => val site = clazz.thisType - // TODO (1): improve logic below, which is used to avoid mixing in anything that would result in an error in refchecks - // (a reason to run after refchecks? we should run before pickler, though, I think, so that the synthesized stats are pickled) - - val membersNeedingSynthesis = clazz.mixinClasses.flatMap { mixin => - // afterOwnPhase, so traits receive trait setters for vals - afterOwnPhase {mixin.info}.decls.toList.filter(accessorImplementedInSubclass) - } - -// println(s"mixing in for $clazz: $membersNeedingSynthesis from ${clazz.mixinClasses}") // TODO: setter conflicts? def accessorConflictsExistingVal(accessor: Symbol): Boolean = { val existingGetter = oldDecls.lookup(accessor.name.getterName) - // println(s"$existingGetter from $accessor to ${accessor.name.getterName}") +// println(s"$existingGetter from $accessor to ${accessor.name.getterName}") val tp = fieldTypeOfAccessorIn(accessor, site) (existingGetter ne NoSymbol) && (tp matches (site memberInfo existingGetter).resultType) // !existingGetter.isDeferred && -- see (3) } + def newModuleVar(member: Symbol): TermSymbol = + newModuleVarSymbol(clazz, member, site.memberType(member).resultType, PrivateLocal | SYNTHETIC | NEEDS_TREES) + + // a module does not need treatment here if it's static, unless it has a matching member in a superclass + // a non-static method needs a module var + val modulesNeedingExpansion = + oldDecls.toList.filter(m => m.isModule && (!m.isStatic || m.isOverridingSymbol)) + + // expand module def in class/object (if they need it -- see modulesNeedingExpansion above) + val expandedModules = + modulesNeedingExpansion map { module => + // expanding module def (top-level or nested in static module) + if (module.isStatic) { // implies m.isOverridingSymbol as per above filter + // Need a module accessor, to implement/override a matching member in a superclass. + // Never a need for a module var if the module is static. 
+ newMatchingModuleAccessor(clazz, module) + } else { + nonStaticModuleToMethod(module) + // must reuse symbol instead of creating an accessor + module setFlag NEEDS_TREES + newModuleVar(module) + } + } + +// println(s"expanded modules for $clazz: $expandedModules") + + // afterOwnPhase, so traits receive trait setters for vals (needs to be at finest grain to avoid looping) + val synthInSubclass = + clazz.mixinClasses.flatMap(mixin => afterOwnPhase{mixin.info}.decls.toList.filter(accessorImplementedInSubclass)) + // mixin field accessors -- // invariant: (accessorsMaybeNeedingImpl, mixedInAccessorAndFields).zipped.forall(case (acc, clone :: _) => `clone` is clone of `acc` case _ => true) - val synthAccessorAndFields = membersNeedingSynthesis map { member => + val mixedInAccessorAndFields = synthInSubclass.map{ member => def cloneAccessor() = { val clonedAccessor = (member cloneSymbol clazz) setPos clazz.pos setMixedinAccessorFlags(member, clonedAccessor) @@ -258,13 +337,17 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor // if we don't cloneInfo, method argument symbols are shared between trait and subclasses --> lambalift proxy crash // TODO: use derive symbol variant? - // println(s"cloning accessor $accessor to $clazz / $clonedInfo -> $relativeInfo") +// println(s"cloning accessor $member to $clazz") clonedAccessor setInfo ((clazz.thisType memberType member) cloneInfo clonedAccessor) // accessor.info.cloneInfo(clonedAccessor).asSeenFrom(clazz.thisType, accessor.owner) } + if (member hasFlag MODULE) { + val moduleVar = newModuleVar(member) + List(moduleVar, newModuleAccessor(member, clazz, moduleVar)) + } // when considering whether to mix in the trait setter, forget about conflicts -- they will be reported for the getter // a trait setter for an overridden val will receive a unit body in the tree transform - if (nme.isTraitSetterName(member.name)) { + else if (nme.isTraitSetterName(member.name)) { val getter = member.getterIn(member.owner) val clone = cloneAccessor() @@ -290,13 +373,13 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor } else List(cloneAccessor()) } - // println(s"new decls for $clazz: $mixedInAccessorAndFields") +// println(s"mixedInAccessorAndFields for $clazz: $mixedInAccessorAndFields") // omit fields that are not memoized, retain all other members def omittableField(sym: Symbol) = sym.isValue && !sym.isMethod && !fieldMemoizationIn(sym, clazz).stored val newDecls = - if (synthAccessorAndFields.isEmpty) oldDecls.filterNot(omittableField) + if (expandedModules.isEmpty && mixedInAccessorAndFields.isEmpty) oldDecls.filterNot(omittableField) else { // must not alter `decls` directly val newDecls = newScope @@ -304,12 +387,13 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor val enterAll = (_: List[Symbol]) foreach enter oldDecls foreach { d => if (!omittableField(d)) enter(d) } - synthAccessorAndFields foreach enterAll + expandedModules foreach enter + mixedInAccessorAndFields foreach enterAll newDecls } - // println(s"new decls: $newDecls") +// println(s"new decls for $clazz: $expandedModules ++ $mixedInAccessorAndFields") if (newDecls eq oldDecls) tp else ClassInfoType(parents, newDecls, clazz) @@ -319,6 +403,11 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor } + // done by uncurry's info transformer + // instead of forcing every member's info to run said transformer, duplicate the flag update logic... 
+ def nonStaticModuleToMethod(module: Symbol): Unit = { + if (!module.isStatic) module setFlag METHOD | STABLE + } class FieldsTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { def mkTypedUnit(pos: Position) = localTyper.typedPos(pos)(CODE.UNIT) @@ -330,57 +419,64 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor // synth trees for accessors/fields and trait setters when they are mixed into a class - def fieldsAndAccessors(exprOwner: Symbol): List[ValOrDefDef] = { - if (exprOwner.isLocalDummy) { - val clazz = exprOwner.owner - def fieldAccess(accessor: Symbol): Option[Tree] = { - val fieldName = accessor.localName - val field = clazz.info.decl(fieldName) - // The `None` result denotes an error, but we defer to refchecks to report it. - // This is the result of overriding a val with a def, so that no field is found in the subclass. - if (field.exists) Some(Select(This(clazz), field)) - else None - } + def fieldsAndAccessors(clazz: Symbol): List[ValOrDefDef] = { + def fieldAccess(accessor: Symbol): Option[Tree] = { + val fieldName = accessor.localName + val field = clazz.info.decl(fieldName) + // The `None` result denotes an error, but we defer to refchecks to report it. + // This is the result of overriding a val with a def, so that no field is found in the subclass. + if (field.exists) Some(Select(This(clazz), field)) + else None + } - def getterBody(getter: Symbol): Option[Tree] = { + def getterBody(getter: Symbol): Option[Tree] = { + // accessor created by newMatchingModuleAccessor for a static module that does need an accessor + // (because there's a matching member in a super class) + if (getter.asTerm.referenced.isModule) { + Some(gen.mkAttributedRef(clazz.thisType, getter.asTerm.referenced)) + } else { val fieldMemoization = fieldMemoizationIn(getter, clazz) if (fieldMemoization.pureConstant) Some(gen.mkAttributedQualifier(fieldMemoization.tp)) // TODO: drop when we no longer care about producing identical bytecode else fieldAccess(getter) } + } - // println(s"accessorsAndFieldsNeedingTrees for $templateSym: $accessorsAndFieldsNeedingTrees") - def setterBody(setter: Symbol): Option[Tree] = { - // trait setter in trait - if (clazz.isTrait) Some(EmptyTree) - // trait setter for overridden val in class - else if (checkAndClearOverridden(setter)) Some(mkTypedUnit(setter.pos)) - // trait val/var setter mixed into class - else fieldAccess(setter) map (fieldSel => Assign(fieldSel, Ident(setter.firstParam))) - } + // println(s"accessorsAndFieldsNeedingTrees for $templateSym: $accessorsAndFieldsNeedingTrees") + def setterBody(setter: Symbol): Option[Tree] = { + // trait setter in trait + if (clazz.isTrait) Some(EmptyTree) + // trait setter for overridden val in class + else if (checkAndClearOverriddenTraitSetter(setter)) Some(mkTypedUnit(setter.pos)) + // trait val/var setter mixed into class + else fieldAccess(setter) map (fieldSel => Assign(fieldSel, Ident(setter.firstParam))) + } + def moduleAccessorBody(module: Symbol): Some[Tree] = Some( + // added during synthFieldsAndAccessors using newModuleAccessor + // a module defined in a trait by definition can't be static (it's a member of the trait and thus gets a new instance for every outer instance) + if (clazz.isTrait) EmptyTree + // symbol created by newModuleAccessor for a (non-trait) class + else moduleInit(module) + ) - clazz.info.decls.toList.filter(checkAndClearNeedsTrees) flatMap { - case setter if setter.isSetter => setterBody(setter) map mkAccessor(setter) - case getter if 
getter.isAccessor => getterBody(getter) map mkAccessor(getter) - case field if !(field hasFlag METHOD) => Some(mkField(field)) // vals/vars and module vars (cannot have flags PACKAGE | JAVA since those never receive NEEDS_TREES) - case _ => None - } - } else { -// println(s"$exprOwner : ${exprOwner.info} --> ${exprOwner.info.decls}") - Nil + clazz.info.decls.toList.filter(checkAndClearNeedsTrees) flatMap { + case module if module hasAllFlags (MODULE | METHOD) => moduleAccessorBody(module) map mkAccessor(module) + case setter if setter.isSetter => setterBody(setter) map mkAccessor(setter) + case getter if getter.hasFlag(ACCESSOR) => getterBody(getter) map mkAccessor(getter) + case field if !(field hasFlag METHOD) => Some(mkField(field)) // vals/vars and module vars (cannot have flags PACKAGE | JAVA since those never receive NEEDS_TREES) + case _ => None } } def rhsAtOwner(stat: ValOrDefDef, newOwner: Symbol): Tree = atOwner(newOwner)(super.transform(stat.rhs.changeOwner(stat.symbol -> newOwner))) - private def transformStat(exprOwner: Symbol)(stat: Tree): List[Tree] = { + + private def Thicket(trees: List[Tree]) = Block(trees, EmptyTree) + override def transform(stat: Tree): Tree = { val clazz = currentOwner val statSym = stat.symbol - // println(s"transformStat $statSym in ${exprOwner.ownerChain}") - // currentRun.trackerFactory.snapshot() - /* For traits, the getter has the val's RHS, which is already constant-folded. There is no valdef. For classes, we still have the classic scheme of private[this] valdef + getter & setter that read/assign to the field. @@ -396,54 +492,58 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor */ stat match { // TODO: consolidate with ValDef case - case stat@DefDef(_, _, _, _, _, rhs) if (statSym hasFlag ACCESSOR) && !excludedAccessorOrFieldByFlags(statSym) => - /* TODO: defer replacing ConstantTyped tree by the corresponding constant until erasure - (until then, trees should not be constant-folded -- only their type tracks the resulting constant) - TODO: also remove ACCESSOR flag since there won't be an underlying field to access? - */ - def statInlinedConstantRhs = - if (clazz.isTrait) stat // we've already done this for traits.. the asymmetry will be solved by the above todo - else deriveDefDef(stat)(_ => gen.mkAttributedQualifier(rhs.tpe)) - - if (rhs ne EmptyTree) { - val fieldMemoization = fieldMemoizationIn(statSym, clazz) - - // if we decide to have non-stored fields with initialization effects, the stat's RHS should be replaced by unit - // if (!fieldMemoization.stored) deriveUnitDef(stat) else stat - - if (fieldMemoization.pureConstant) statInlinedConstantRhs :: Nil - else super.transform(stat) :: Nil - } else { - stat :: Nil + // TODO: defer replacing ConstantTyped tree by the corresponding constant until erasure + // (until then, trees should not be constant-folded -- only their type tracks the resulting constant) + // also remove ACCESSOR flag since there won't be an underlying field to access? + case DefDef(_, _, _, _, _, rhs) if (statSym hasFlag ACCESSOR) + && (rhs ne EmptyTree) && !excludedAccessorOrFieldByFlags(statSym) + && !clazz.isTrait // we've already done this for traits.. the asymmetry will be solved by the above todo + && fieldMemoizationIn(statSym, clazz).pureConstant => + deriveDefDef(stat)(_ => gen.mkAttributedQualifier(rhs.tpe)) // TODO: recurse? 
+ + // drop the val for (a) constant (pure & not-stored) and (b) not-stored (but still effectful) fields + case ValDef(mods, _, _, rhs) if (rhs ne EmptyTree) && !excludedAccessorOrFieldByFlags(statSym) + && fieldMemoizationIn(statSym, clazz).pureConstant => + EmptyTree + + case ModuleDef(_, _, impl) => + // ??? The typer doesn't take kindly to seeing this ClassDef; we have to set NoType so it will be ignored. + val cd = super.transform(ClassDef(statSym.moduleClass, impl) setType NoType) + if (clazz.isClass) cd + else { // local module -- symbols cannot be generated by info transformer, so do it all here + val moduleVar = newModuleVarSymbol(currentOwner, statSym, statSym.info.resultType, 0) + Thicket(cd :: mkField(moduleVar) :: mkAccessor(statSym)(moduleInit(statSym)) :: Nil) } - case stat@ValDef(mods, _, _, rhs) if !excludedAccessorOrFieldByFlags(statSym) => - if (rhs ne EmptyTree) { - val fieldMemoization = fieldMemoizationIn(statSym, clazz) - - // drop the val for (a) constant (pure & not-stored) and (b) not-stored (but still effectful) fields - if (fieldMemoization.pureConstant) Nil // (a) - else super.transform(stat) :: Nil // if (fieldMemoization.stored) - // else rhsAtOwner(transformStat, exprOwner) :: Nil // (b) -- not used currently - } else { - stat :: Nil - } + case tree => + super.transform(tree) - - case tree => List( - if (exprOwner != currentOwner && tree.isTerm) atOwner(exprOwner)(super.transform(tree)) - else super.transform(tree) - ) } } - // TODO flatMapConserve or something like it - // TODO use thicket encoding of multi-tree transformStat? - // if (!currentOwner.isClass || currentOwner.isPackageClass || currentOwner.isInterface) stats flatMap transformStat(exprOwner) // for the ModuleDef case, the only top-level case in that method - // else - override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = - afterOwnPhase { - fieldsAndAccessors(exprOwner) ++ (stats flatMap transformStat(exprOwner)) - } + def transformTermsAtExprOwner(exprOwner: Symbol)(stat: Tree) = + if (stat.isTerm) atOwner(exprOwner)(transform(stat)) + else transform(stat) + + override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = { + val addedStats = + if (exprOwner.isLocalDummy) afterOwnPhase { fieldsAndAccessors(exprOwner.owner) } + else Nil + + val newStats = + stats mapConserve (if (exprOwner != currentOwner) transformTermsAtExprOwner(exprOwner) else transform) + + addedStats ::: (if (newStats eq stats) stats else { + // check whether we need to flatten thickets and drop empty ones + if (newStats exists { case EmptyTree => true case Block(_, EmptyTree) => true case _ => false }) + newStats flatMap { + case EmptyTree => Nil + case Block(thicket, EmptyTree) => thicket + case stat => stat :: Nil + } + else newStats + }) + } + } } diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index d98daf0ffb31..0033736dbef5 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -319,7 +319,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { } else if (mixinMember.hasAllFlags(METHOD | MODULE) && mixinMember.hasNoFlags(LIFTED | BRIDGE)) { // mixin objects: todo what happens with abstract objects? 
- addMember(clazz, mixinMember.cloneSymbol(clazz, mixinMember.flags & ~(DEFERRED | lateDEFERRED)) setPos clazz.pos) + // addMember(clazz, mixinMember.cloneSymbol(clazz, mixinMember.flags & ~(DEFERRED | lateDEFERRED)) setPos clazz.pos) } else if (mixinMember.hasFlag(ACCESSOR) && notDeferredOrLate(mixinMember) && (mixinMember hasFlag (LAZY | PARAMACCESSOR)) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 0eae1ce41987..46ad4b35a17d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -93,8 +93,13 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans rtp1 <:< rtp2 case (NullaryMethodType(rtp1), MethodType(List(), rtp2)) => rtp1 <:< rtp2 - case (TypeRef(_, sym, _), _) if sym.isModuleClass => + + // all this module business would be so much simpler if we moduled^w modelled a module as a class and an accessor, like we do for fields + case (TypeRef(_, sym, _), _) if sym.isModuleClass => overridesTypeInPrefix(NullaryMethodType(tp1), tp2, prefix, isModuleOverride) + case (_, TypeRef(_, sym, _)) if sym.isModuleClass => + overridesTypeInPrefix(tp1, NullaryMethodType(tp2), prefix, isModuleOverride) + case _ => def classBoundAsSeen(tp: Type) = tp.typeSymbol.classBound.asSeenFrom(prefix, tp.typeSymbol.owner) (tp1 <:< tp2) || isModuleOverride && ( @@ -1182,69 +1187,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans finally popLevel() } - /** Eliminate ModuleDefs. In all cases the ModuleDef (carrying a module symbol) is - * replaced with a ClassDef (carrying the corresponding module class symbol) with additional - * trees created as follows: - * - * 1) A statically reachable object (either top-level or nested only in objects) receives - * no additional trees. - * 2) An inner object which matches an existing member (e.g. implements an interface) - * receives an accessor DefDef to implement the interface. - * 3) An inner object otherwise receives a private ValDef which declares a module var - * (the field which holds the module class - it has a name like Foo$module) and an - * accessor for that field. The instance is created lazily, on first access. - */ - private def eliminateModuleDefs(moduleDef: Tree): List[Tree] = exitingRefchecks { - val ModuleDef(_, _, impl) = moduleDef - val module = moduleDef.symbol - val site = module.owner - val moduleName = module.name.toTermName - // The typer doesn't take kindly to seeing this ClassDef; we have to - // set NoType so it will be ignored. - val cdef = ClassDef(module.moduleClass, impl) setType NoType - - def matchingInnerObject() = { - val newFlags = (module.flags | STABLE) & ~MODULE - val newInfo = NullaryMethodType(module.moduleClass.tpe) - val accessor = site.newMethod(moduleName, module.pos, newFlags) setInfoAndEnter newInfo - - DefDef(accessor, Select(This(site), module)) :: Nil - } - val newTrees = cdef :: ( - if (module.isStatic) - // trait T { def f: Object }; object O extends T { object f }. Need to generate method f in O. 
- if (module.isOverridingSymbol) matchingInnerObject() else Nil - else - newInnerObject(site, module) - ) - transformTrees(newTrees map localTyper.typedPos(moduleDef.pos)) - } - def newInnerObject(site: Symbol, module: Symbol): List[Tree] = { - if (site.isTrait) - DefDef(module, EmptyTree) :: Nil - else { - val moduleVar = site newModuleVarSymbol module - // used for the mixin case: need a new symbol owned by the subclass for the accessor, rather than repurposing the module symbol - def mkAccessorSymbol = - site.newMethod(module.name.toTermName, site.pos, STABLE | MODULE | MIXEDIN) - .setInfo(moduleVar.tpe) - .andAlso(self => if (module.isPrivate) self.expandName(module.owner)) - - val accessor = if (module.owner == site) module else mkAccessorSymbol - val accessorDef = DefDef(accessor, gen.mkAssignAndReturn(moduleVar, gen.newModule(module, moduleVar.tpe)).changeOwner(moduleVar -> accessor)) - - ValDef(moduleVar) :: accessorDef :: Nil - } - } - def mixinModuleDefs(clazz: Symbol): List[Tree] = { - val res = for { - mixinClass <- clazz.mixinClasses.iterator - module <- mixinClass.info.decls.iterator.filter(_.isModule) - newMember <- newInnerObject(clazz, module) - } yield transform(localTyper.typedPos(clazz.pos)(newMember)) - res.toList - } def transformStat(tree: Tree, index: Int): List[Tree] = tree match { case t if treeInfo.isSelfConstrCall(t) => @@ -1255,7 +1198,6 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans debuglog("refsym = " + currentLevel.refsym) reporter.error(currentLevel.refpos, "forward reference not allowed from self constructor invocation") } - case ModuleDef(_, _, _) => eliminateModuleDefs(tree) case ValDef(_, _, _, _) => val tree1 = transform(tree) // important to do before forward reference check if (tree1.symbol.isLazy) tree1 :: Nil @@ -1702,13 +1644,12 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans checkOverloadedRestrictions(currentOwner, currentOwner) // SI-7870 default getters for constructors live in the companion module checkOverloadedRestrictions(currentOwner, currentOwner.companionModule) - val bridges = addVarargBridges(currentOwner) - val moduleDesugared = if (currentOwner.isTrait) Nil else mixinModuleDefs(currentOwner) + val bridges = addVarargBridges(currentOwner) // TODO: do this during uncurry? checkAllOverrides(currentOwner) checkAnyValSubclass(currentOwner) if (currentOwner.isDerivedValueClass) currentOwner.primaryConstructor makeNotPrivate NoSymbol // SI-6601, must be done *after* pickler! 
- if (bridges.nonEmpty || moduleDesugared.nonEmpty) deriveTemplate(tree)(_ ::: bridges ::: moduleDesugared) else tree + if (bridges.nonEmpty) deriveTemplate(tree)(_ ::: bridges) else tree case dc@TypeTreeWithDeferredRefCheck() => abort("adapt should have turned dc: TypeTreeWithDeferredRefCheck into tpt: TypeTree, with tpt.original == dc") case tpt@TypeTree() => @@ -1821,7 +1762,8 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans } result match { case ClassDef(_, _, _, _) - | TypeDef(_, _, _, _) => + | TypeDef(_, _, _, _) + | ModuleDef(_, _, _) => if (result.symbol.isLocalToBlock || result.symbol.isTopLevel) varianceValidator.traverse(result) case tt @ TypeTree() if tt.original != null => diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index af1cdafcdab7..1456022b1005 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -324,17 +324,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def newImport(pos: Position): TermSymbol = newTermSymbol(nme.IMPORT, pos) - def newModuleVarSymbol(accessor: Symbol): TermSymbol = { - val newName = nme.moduleVarName(accessor.name.toTermName) - val newFlags = MODULEVAR | ( if (this.isClass) PrivateLocal | SYNTHETIC else 0 ) - val newInfo = thisType.memberType(accessor).finalResultType - val mval = newVariable(newName, accessor.pos.focus, newFlags.toLong) addAnnotation VolatileAttr - - if (this.isClass) - mval setInfoAndEnter newInfo - else - mval setInfo newInfo - } final def newModuleSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol = newTermSymbol(name, pos, newFlags).asInstanceOf[ModuleSymbol] @@ -3262,7 +3251,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * returned, otherwise, `NoSymbol` is returned. 
*/ protected final def companionModule0: Symbol = - flatOwnerInfo.decl(name.toTermName).suchThat(sym => sym.isModuleNotMethod && (sym isCoDefinedWith this)) + flatOwnerInfo.decl(name.toTermName).suchThat(sym => sym.isModule && (sym isCoDefinedWith this)) override def companionModule = companionModule0 override def companionSymbol = companionModule0 diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala index 69bade55f1a5..bc8a5de119cd 100644 --- a/src/reflect/scala/reflect/internal/Variances.scala +++ b/src/reflect/scala/reflect/internal/Variances.scala @@ -167,7 +167,9 @@ trait Variances { case ClassDef(_, _, _, _) | TypeDef(_, _, _, _) => validateVariance(sym) super.traverse(tree) - // ModuleDefs need not be considered because they have been eliminated already + case ModuleDef(_, _, _) => + validateVariance(sym.moduleClass) + super.traverse(tree) case ValDef(_, _, _, _) => validateVariance(sym) case DefDef(_, _, tparams, vparamss, _, _) => diff --git a/test/files/neg/t0764.check b/test/files/neg/t0764.check index 0c7cff1e1e65..830278e71565 100644 --- a/test/files/neg/t0764.check +++ b/test/files/neg/t0764.check @@ -1,5 +1,5 @@ t0764.scala:13: error: type mismatch; - found : Node{type T = _1.type} where val _1: Node{type T = NextType} + found : Node{type T = _2.type} where val _2: Node{type T = NextType} required: Node{type T = Main.this.AType} (which expands to) Node{type T = Node{type T = NextType}} new Main[AType]( (value: AType).prepend ) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala index d24b4e518b57..9217183c7424 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala @@ -83,7 +83,7 @@ class ScalaInlineInfoTest extends BytecodeTesting { false, // final class None, // not a sam Map( - ("O()LT$O$;", MethodInlineInfo(true ,false,false)), // the accessor is abstract in bytecode, but still effectivelyFinal because there's no (late)DEFERRED flag, https://github.com/scala/scala-dev/issues/126 + ("O()LT$O$;", MethodInlineInfo(false,false,false)), ("T$$super$toString()Ljava/lang/String;", MethodInlineInfo(true ,false,false)), ("T$_setter_$x1_$eq(I)V", MethodInlineInfo(false,false,false)), ("f1()I", MethodInlineInfo(false,false,false)), From 1f6f7f8aa94c622665a35343de8108ea66a787b7 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 26 May 2016 20:11:26 -0700 Subject: [PATCH 0210/2793] Don't cache `MethodSymbol`'s `memberType`. Correct caching is impossible because `sym.tpeHK.asSeenFrom(pre, sym.owner)` may have different results even for reference-identical `sym.tpeHK` and `pre` (even in the same period). For example, `pre` could be a `ThisType`. For such a type, `tpThen eq tpNow` does not imply `tpThen` and `tpNow` mean the same thing, because `tpThen.typeSymbol.info` could have been different from what it is now, and the cache won't know simply by looking at `pre`. Somehow this distinction never caused trouble, but when starting to desugar module definitions during the fields phase, it causes several test failures. I tried keying the cache on the current period to no avail. 
--- .../scala/reflect/internal/Symbols.scala | 20 ----------------- .../scala/reflect/internal/Types.scala | 22 +++++++++---------- .../reflect/runtime/SynchronizedSymbols.scala | 7 +----- 3 files changed, 11 insertions(+), 38 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 1456022b1005..e4388561603b 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -2906,11 +2906,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** A class for method symbols */ class MethodSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName) extends TermSymbol(initOwner, initPos, initName) with MethodSymbolApi { - private[this] var mtpePeriod = NoPeriod - private[this] var mtpePre: Type = _ - private[this] var mtpeResult: Type = _ - private[this] var mtpeInfo: Type = _ - override def isLabel = this hasFlag LABEL override def isVarargsMethod = this hasFlag VARARGS override def isLiftedMethod = this hasFlag LIFTED @@ -2928,21 +2923,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => // are a case accessor (you can also be a field.) override def isCaseAccessorMethod = isCaseAccessor - def typeAsMemberOf(pre: Type): Type = { - if (mtpePeriod == currentPeriod) { - if ((mtpePre eq pre) && (mtpeInfo eq info)) return mtpeResult - } else if (isValid(mtpePeriod)) { - mtpePeriod = currentPeriod - if ((mtpePre eq pre) && (mtpeInfo eq info)) return mtpeResult - } - val res = pre.computeMemberType(this) - mtpePeriod = currentPeriod - mtpePre = pre - mtpeInfo = info - mtpeResult = res - res - } - override def isVarargs: Boolean = definitions.isVarArgsList(paramss.flatten) override def returnType: Type = { diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 895bb60a081f..fb78aa5009c5 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -686,23 +686,21 @@ trait Types * }}} */ def memberInfo(sym: Symbol): Type = { - require(sym ne NoSymbol, this) +// assert(sym ne NoSymbol, this) sym.info.asSeenFrom(this, sym.owner) } /** The type of `sym`, seen as a member of this type. */ - def memberType(sym: Symbol): Type = sym match { - case meth: MethodSymbol => - meth.typeAsMemberOf(this) - case _ => - computeMemberType(sym) - } - - def computeMemberType(sym: Symbol): Type = sym.tpeHK match { //@M don't prematurely instantiate higher-kinded types, they will be instantiated by transform, typedTypeApply, etc. when really necessary - case OverloadedType(_, alts) => - OverloadedType(this, alts) + def memberType(sym: Symbol): Type = sym.tpeHK match { + case OverloadedType(_, alts) => OverloadedType(this, alts) case tp => - if (sym eq NoSymbol) NoType else tp.asSeenFrom(this, sym.owner) + // Correct caching is nearly impossible because `sym.tpeHK.asSeenFrom(pre, sym.owner)` + // may have different results even for reference-identical `sym.tpeHK` and `pre` (even in the same period). + // For example, `pre` could be a `ThisType`. For such a type, `tpThen eq tpNow` does not imply + // `tpThen` and `tpNow` mean the same thing, because `tpThen.typeSymbol.info` could have been different + // from what it is now, and the cache won't know simply by looking at `pre`. 
+ if (sym eq NoSymbol) NoType + else tp.asSeenFrom(this, sym.owner) } /** Substitute types `to` for occurrences of references to diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala index 313ec89311c0..237afa082b19 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala @@ -199,12 +199,7 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb trait SynchronizedTermSymbol extends SynchronizedSymbol - trait SynchronizedMethodSymbol extends MethodSymbol with SynchronizedTermSymbol { - // we can keep this lock fine-grained, because it's just a cache over asSeenFrom, which makes deadlocks impossible - // unfortunately we cannot elide this lock, because the cache depends on `pre` - private lazy val typeAsMemberOfLock = new Object - override def typeAsMemberOf(pre: Type): Type = gilSynchronizedIfNotThreadsafe { typeAsMemberOfLock.synchronized { super.typeAsMemberOf(pre) } } - } + trait SynchronizedMethodSymbol extends MethodSymbol with SynchronizedTermSymbol trait SynchronizedModuleSymbol extends ModuleSymbol with SynchronizedTermSymbol From e26b4f49d80caa8f71a1986f604cca7f4714e3c3 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 26 May 2016 20:10:47 -0700 Subject: [PATCH 0211/2793] Uncurry's info transform: non-static module --> method We do this during uncurry so we can insert the necessary applications to the empty argument list. Fields is too late. Refchecks is no longer an info transform. --- src/compiler/scala/tools/nsc/Global.scala | 2 +- .../scala/tools/nsc/transform/Erasure.scala | 2 +- .../scala/tools/nsc/transform/UnCurry.scala | 7 +++-- .../tools/nsc/typechecker/RefChecks.scala | 26 ++----------------- .../scala/tools/reflect/ReflectGlobal.scala | 3 +-- .../scala/reflect/internal/Phase.scala | 4 ++- .../scala/reflect/internal/Types.scala | 6 ++--- .../internal/transform/RefChecks.scala | 14 ---------- .../internal/transform/Transforms.scala | 5 +--- .../reflect/internal/transform/UnCurry.scala | 7 ++++- test/files/run/t6240-universe-code-gen.scala | 2 +- 11 files changed, 24 insertions(+), 54 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index c2d92ce7f975..af866e1a6fe0 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -456,7 +456,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } with Pickler // phaseName = "refchecks" - override object refChecks extends { + object refChecks extends { val global: Global.this.type = Global.this val runsAfter = List("pickler") val runsRightAfter = None diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 289ac0cc023c..dc62b40578a6 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -508,7 +508,7 @@ abstract class Erasure extends AddInterfaces // If `member` is a ModuleSymbol, the bridge should not also be a ModuleSymbol. Otherwise we // end up with two module symbols with the same name in the same scope, which is surprising // when implementing later phases. 
- if (member.isModule) newFlags = (newFlags | METHOD) & ~(MODULE | lateMETHOD | STABLE) + if (member.isModule) newFlags = (newFlags | METHOD) & ~(MODULE | STABLE) val bridge = other.cloneSymbolImpl(root, newFlags) setPos root.pos debuglog("generating bridge from %s (%s): %s to %s: %s".format( diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 374e8430d819..a337ab7359c7 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -510,8 +510,11 @@ abstract class UnCurry extends InfoTransform case MethodType(_, _) => tree case tp => tree setType MethodType(Nil, tp.resultType) } - if (tree.symbol.isMethod && !tree.tpe.isInstanceOf[PolyType]) - gen.mkApplyIfNeeded(removeNullary()) + val sym = tree.symbol + // our info transformer may not have run yet, so duplicate flag logic instead of forcing it to run + val isMethodExitingUncurry = (sym hasFlag METHOD) || (sym hasFlag MODULE) && !sym.isStatic + if (isMethodExitingUncurry && !tree.tpe.isInstanceOf[PolyType]) + gen.mkApplyIfNeeded(removeNullary()) // apply () if tree.tpe has zero-arg MethodType else if (tree.isType) TypeTree(tree.tpe) setPos tree.pos else diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 46ad4b35a17d..7021e12f1ad5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -14,7 +14,7 @@ import scala.tools.nsc.settings.ScalaVersion import scala.tools.nsc.settings.NoScalaVersion import symtab.Flags._ -import transform.InfoTransform +import transform.Transform /**

@@ -43,7 +43,7 @@ import transform.InfoTransform * * @todo Check whether we always check type parameter bounds. */ -abstract class RefChecks extends InfoTransform with scala.reflect.internal.transform.RefChecks { +abstract class RefChecks extends Transform { val global: Global // need to repeat here because otherwise last mixin defines global as // SymbolTable. If we had DOT this would not be an issue @@ -54,31 +54,9 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans /** the following two members override abstract members in Transform */ val phaseName: String = "refchecks" - override def phaseNewFlags: Long = lateMETHOD def newTransformer(unit: CompilationUnit): RefCheckTransformer = new RefCheckTransformer(unit) - override def changesBaseClasses = false - - override def transformInfo(sym: Symbol, tp: Type): Type = { - // !!! This is a sketchy way to do things. - // It would be better to replace the module symbol with a method symbol - // rather than creating this module/method hybrid which must be special - // cased all over the place. Look for the call sites which use(d) some - // variation of "isMethod && !isModule", which to an observer looks like - // a nonsensical condition. (It is now "isModuleNotMethod".) - if (sym.isModule && !sym.isStatic) { - sym setFlag lateMETHOD | STABLE - // Note that this as far as we can see it works equally well - // to set the METHOD flag here and dump lateMETHOD, but it does - // mean that under separate compilation the typer will see - // modules as methods (albeit stable ones with singleton types.) - // So for now lateMETHOD lives while we try to convince ourselves - // we can live without it or deliver that info some other way. - log(s"Stabilizing module method for ${sym.fullLocationString}") - } - super.transformInfo(sym, tp) - } val toJavaRepeatedParam = new SubstSymMap(RepeatedParamClass -> JavaRepeatedParamClass) val toScalaRepeatedParam = new SubstSymMap(JavaRepeatedParamClass -> RepeatedParamClass) diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala index e30d1ed7cd94..b80524df2b37 100644 --- a/src/compiler/scala/tools/reflect/ReflectGlobal.scala +++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala @@ -30,8 +30,7 @@ class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val override def transformedType(sym: Symbol) = postErasure.transformInfo(sym, erasure.transformInfo(sym, - uncurry.transformInfo(sym, - refChecks.transformInfo(sym, sym.info)))) + uncurry.transformInfo(sym, sym.info))) override def isCompilerUniverse = true diff --git a/src/reflect/scala/reflect/internal/Phase.scala b/src/reflect/scala/reflect/internal/Phase.scala index f56c41d71c3b..eb193adbf2b9 100644 --- a/src/reflect/scala/reflect/internal/Phase.scala +++ b/src/reflect/scala/reflect/internal/Phase.scala @@ -47,7 +47,9 @@ abstract class Phase(val prev: Phase) { final val specialized: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "specialize" || prev.specialized) final val refChecked: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "refchecks" || prev.refChecked) - // are we past the fields phase, so that we should allow writing to vals (as part of type checking trait setters) + // are we past the fields phase, so that: + // - we should allow writing to vals (as part of type checking trait setters) + // - modules have module accessors final val assignsFields: Boolean = ((prev ne null) && (prev ne NoPhase)) && 
(prev.name == "fields" || prev.assignsFields) /** This is used only in unsafeTypeParams, and at this writing is diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index fb78aa5009c5..7dda80537858 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -3469,10 +3469,10 @@ trait Types if (!sym.isOverridableMember || sym.owner == pre.typeSymbol) sym else pre.nonPrivateMember(sym.name).suchThat { sym => // SI-7928 `isModuleNotMethod` is here to avoid crashing with spuriously "overloaded" module accessor and module symbols. - // These appear after refchecks eliminates ModuleDefs that implement an interface. + // These appear after the fields phase eliminates ModuleDefs that implement an interface. // Here, we exclude the module symbol, which allows us to bind to the accessor. - // SI-8054 We must only do this after refchecks, otherwise we exclude the module symbol which does not yet have an accessor! - val isModuleWithAccessor = phase.refChecked && sym.isModuleNotMethod + // SI-8054 We must only do this after fields, otherwise we exclude the module symbol which does not yet have an accessor! + val isModuleWithAccessor = phase.assignsFields && sym.isModuleNotMethod sym.isType || (!isModuleWithAccessor && sym.isStable && !sym.hasVolatileType) } orElse sym } diff --git a/src/reflect/scala/reflect/internal/transform/RefChecks.scala b/src/reflect/scala/reflect/internal/transform/RefChecks.scala index 4ca114e78140..e69de29bb2d1 100644 --- a/src/reflect/scala/reflect/internal/transform/RefChecks.scala +++ b/src/reflect/scala/reflect/internal/transform/RefChecks.scala @@ -1,14 +0,0 @@ -package scala -package reflect -package internal -package transform - -trait RefChecks { - - val global: SymbolTable - import global._ - - def transformInfo(sym: Symbol, tp: Type): Type = - if (sym.isModule && !sym.isStatic) NullaryMethodType(tp) - else tp -} diff --git a/src/reflect/scala/reflect/internal/transform/Transforms.scala b/src/reflect/scala/reflect/internal/transform/Transforms.scala index 0d2f355aa5f5..de5bfbd39aa8 100644 --- a/src/reflect/scala/reflect/internal/transform/Transforms.scala +++ b/src/reflect/scala/reflect/internal/transform/Transforms.scala @@ -23,12 +23,10 @@ trait Transforms { self: SymbolTable => } } - private val refChecksLazy = new Lazy(new { val global: Transforms.this.type = self } with RefChecks) private val uncurryLazy = new Lazy(new { val global: Transforms.this.type = self } with UnCurry) private val erasureLazy = new Lazy(new { val global: Transforms.this.type = self } with Erasure) private val postErasureLazy = new Lazy(new { val global: Transforms.this.type = self } with PostErasure) - def refChecks = refChecksLazy.force def uncurry = uncurryLazy.force def erasure = erasureLazy.force def postErasure = postErasureLazy.force @@ -36,8 +34,7 @@ trait Transforms { self: SymbolTable => def transformedType(sym: Symbol) = postErasure.transformInfo(sym, erasure.transformInfo(sym, - uncurry.transformInfo(sym, - refChecks.transformInfo(sym, sym.info)))) + uncurry.transformInfo(sym, sym.info))) def transformedType(tpe: Type) = postErasure.elimErasedValueType(erasure.scalaErasure(uncurry.uncurry(tpe))) diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala index 85e3ac60e8d4..a50084f40d0d 100644 --- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala +++ 
b/src/reflect/scala/reflect/internal/transform/UnCurry.scala @@ -83,5 +83,10 @@ trait UnCurry { * @MAT: starting with this phase, the info of every symbol will be normalized */ def transformInfo(sym: Symbol, tp: Type): Type = - if (sym.isType) uncurryType(tp) else uncurry(tp) + if (sym.isType) uncurryType(tp) + else if ((sym hasFlag MODULE) && !sym.isStatic) { // see Fields::nonStaticModuleToMethod + sym setFlag METHOD | STABLE + MethodType(Nil, uncurry(tp)) + } + else uncurry(tp) } diff --git a/test/files/run/t6240-universe-code-gen.scala b/test/files/run/t6240-universe-code-gen.scala index 60e1f76b54e1..80b60bab7e3c 100644 --- a/test/files/run/t6240-universe-code-gen.scala +++ b/test/files/run/t6240-universe-code-gen.scala @@ -54,7 +54,7 @@ object Test extends App { | |${forceCode("this", JavaUniverseTpe)} |${forceCode("definitions", DefinitionsModule.info)} - |${forceCode("refChecks", typeOf[scala.reflect.internal.transform.RefChecks])} + | |${forceCode("uncurry", typeOf[scala.reflect.internal.transform.UnCurry])} |${forceCode("erasure", typeOf[scala.reflect.internal.transform.Erasure])} | } From 6858134fb01315c13df05fbef1b310443f3dac95 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 3 Jun 2016 10:45:27 -0700 Subject: [PATCH 0212/2793] Address lrytz's review feedback Remove obsolete hack for BeanSetter's RHS Use currentOwner.isClass instead of exprOwner.isLocalDummy Refactor: shortest branches first in if/else Fix comments from when the prototype ran before refchecks Also, store `isScala212` as a `val` in `Namer` since the `def` on `settings` parses the version each time... --- .../scala/tools/nsc/transform/Fields.scala | 16 ++++++++-------- .../tools/nsc/typechecker/MethodSynthesis.scala | 13 ++++++++++--- .../scala/tools/nsc/typechecker/Namers.scala | 11 +++++++++-- 3 files changed, 27 insertions(+), 13 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Fields.scala b/src/compiler/scala/tools/nsc/transform/Fields.scala index f5f0b229e430..105bf0410dc0 100644 --- a/src/compiler/scala/tools/nsc/transform/Fields.scala +++ b/src/compiler/scala/tools/nsc/transform/Fields.scala @@ -287,7 +287,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor case tp@ClassInfoType(parents, oldDecls, clazz) if !clazz.isPackageClass => val site = clazz.thisType - // TODO: setter conflicts? 
+ // setter conflicts cannot arise independently from a getter conflict, since a setter without a getter does not a val definition make def accessorConflictsExistingVal(accessor: Symbol): Boolean = { val existingGetter = oldDecls.lookup(accessor.name.getterName) // println(s"$existingGetter from $accessor to ${accessor.name.getterName}") @@ -345,7 +345,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor val moduleVar = newModuleVar(member) List(moduleVar, newModuleAccessor(member, clazz, moduleVar)) } - // when considering whether to mix in the trait setter, forget about conflicts -- they will be reported for the getter + // when considering whether to mix in the trait setter, forget about conflicts -- they are reported for the getter // a trait setter for an overridden val will receive a unit body in the tree transform else if (nme.isTraitSetterName(member.name)) { val getter = member.getterIn(member.owner) @@ -356,8 +356,8 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor List(clone) } - // avoid creating early errors in case of conflicts (wait until refchecks); - // also, skip overridden accessors contributed by supertraits (only act on the last overriding one) + // don't cause conflicts, skip overridden accessors contributed by supertraits (only act on the last overriding one) + // see pos/trait_fields_dependent_conflict.scala and neg/t1960.scala else if (accessorConflictsExistingVal(member) || isOverriddenAccessor(member, clazz)) Nil else if (member.isGetter && fieldMemoizationIn(member, clazz).stored) { // add field if needed @@ -370,7 +370,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor field setAnnotations (member.annotations filter AnnotationInfo.mkFilter(FieldTargetClass, defaultRetention = true)) List(cloneAccessor(), field) - } else List(cloneAccessor()) + } else List(cloneAccessor()) // no field needed (constant-typed getter has constant as its RHS) } // println(s"mixedInAccessorAndFields for $clazz: $mixedInAccessorAndFields") @@ -423,7 +423,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor def fieldAccess(accessor: Symbol): Option[Tree] = { val fieldName = accessor.localName val field = clazz.info.decl(fieldName) - // The `None` result denotes an error, but we defer to refchecks to report it. + // The `None` result denotes an error, but it's refchecks' job to report it (this fallback is for robustness). // This is the result of overriding a val with a def, so that no field is found in the subclass. 
if (field.exists) Some(Select(This(clazz), field)) else None @@ -527,8 +527,8 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor override def transformStats(stats: List[Tree], exprOwner: Symbol): List[Tree] = { val addedStats = - if (exprOwner.isLocalDummy) afterOwnPhase { fieldsAndAccessors(exprOwner.owner) } - else Nil + if (!currentOwner.isClass) Nil + else afterOwnPhase { fieldsAndAccessors(currentOwner) } val newStats = stats mapConserve (if (exprOwner != currentOwner) transformTermsAtExprOwner(exprOwner) else transform) diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 408b457d5b77..0f79bb60ed34 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -536,6 +536,8 @@ trait MethodSynthesis { super.validate() } } + + // This trait is mixed into BooleanBeanGetter and BeanGetter by beanAccessorsFromNames, but not by beanAccessors trait NoSymbolBeanGetter extends AnyBeanGetter { // Derives a tree without attempting to use the original tree's symbol. override def derivedTree = { @@ -547,10 +549,15 @@ trait MethodSynthesis { } override def createAndEnterSymbol(): MethodSymbol = enterSyntheticSym(derivedTree).asInstanceOf[MethodSymbol] } - case class BooleanBeanGetter(tree: ValDef) extends BeanAccessor("is") with AnyBeanGetter { } - case class BeanGetter(tree: ValDef) extends BeanAccessor("get") with AnyBeanGetter { } + + // NoSymbolBeanGetter synthesizes the getter's RHS (which defers to the regular setter) + // (not sure why, but there is one use site of the BeanGetters where NoSymbolBeanGetter is not mixed in) + // TODO: clean this up... + case class BooleanBeanGetter(tree: ValDef) extends BeanAccessor("is") with AnyBeanGetter + case class BeanGetter(tree: ValDef) extends BeanAccessor("get") with AnyBeanGetter + + // the bean setter's RHS delegates to the setter case class BeanSetter(tree: ValDef) extends BeanAccessor("set") with DerivedSetter { - // TODO: document, motivate override protected def setterRhs = Apply(Ident(tree.name.setterName), List(Ident(setterParam))) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 784b43ab8431..98dca1089c45 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -61,6 +61,11 @@ trait Namers extends MethodSynthesis { private lazy val innerNamer = if (isTemplateContext(context)) createInnerNamer() else this + // Cached as a val because `settings.isScala212` parses the Scala version each time... + // Not in Namers because then we need to go to outer first to check this. + // I do think it's ok to check every time we create a Namer instance (so, not a lazy val). 
+ private[this] val isScala212 = settings.isScala212 + def createNamer(tree: Tree): Namer = { val sym = tree match { case ModuleDef(_, _, _) => tree.symbol.moduleClass @@ -1380,7 +1385,9 @@ trait Namers extends MethodSynthesis { val pt = { val valOwner = owner.owner // there's no overriding outside of classes, and we didn't use to do this in 2.11, so provide opt-out - if (valOwner.isClass && settings.isScala212) { + + if (!isScala212 || !valOwner.isClass) WildcardType + else { // normalize to getter so that we correctly consider a val overriding a def // (a val's name ends in a " ", so can't compare to def) val overridingSym = if (isGetter) vdef.symbol else vdef.symbol.getterIn(valOwner) @@ -1391,7 +1398,7 @@ trait Namers extends MethodSynthesis { if (overridden == NoSymbol || overridden.isOverloaded) WildcardType else valOwner.thisType.memberType(overridden).resultType - } else WildcardType + } } def patchSymInfo(tp: Type): Unit = From 6f0bb49c17ea1a46283777e39ed5ce016aa048a5 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Wed, 4 May 2016 18:22:34 -0700 Subject: [PATCH 0213/2793] Reduce flag fiddling There isn't much point to the late* flags in a world where we're mutating flags left and right in tree and info transformers... So, lets get rid of the indirection until we can include flags in a symbol's type history, like we do for its info. This retires lateDEFERRED (redundant with SYNTHESIZE_IMPL_IN_SUBCLASS). Since it's introduced so late, it makes little sense to have these synthetic members go back to DEFERRED. Instead, just set DEFERRED directly. Also remove unused late* and not* flags. notPRIVATE subsumes lateFINAL for effective finality (scala/scala-dev#126) --- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 1 + .../nsc/backend/jvm/BTypesFromSymbols.scala | 11 ++--- .../tools/nsc/transform/AddInterfaces.scala | 4 -- .../scala/tools/nsc/transform/Erasure.scala | 2 +- .../scala/tools/nsc/transform/Fields.scala | 8 ++-- .../scala/tools/nsc/transform/Flatten.scala | 2 +- .../scala/tools/nsc/transform/Mixin.scala | 32 +++++++-------- .../tools/nsc/transform/SpecializeTypes.scala | 2 +- .../scala/reflect/internal/Flags.scala | 40 +++++++++---------- .../scala/reflect/internal/Symbols.scala | 24 ++++------- .../scala/tools/nsc/symtab/FlagsTest.scala | 5 --- 11 files changed, 52 insertions(+), 79 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 27a4cbd1346f..e1decaba3ec1 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -239,6 +239,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { sym.isErroneous } + /* * must-single-thread */ diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 836893a98b17..b2a575d7d1dc 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -12,6 +12,7 @@ import scala.tools.nsc.backend.jvm.opt._ import scala.tools.nsc.backend.jvm.BTypes._ import BackendReporting._ import scala.tools.nsc.settings.ScalaSettings +import scala.reflect.internal.Flags.{DEFERRED, SYNTHESIZE_IMPL_IN_SUBCLASS} /** * This class mainly contains the method classBTypeFromSymbol, which extracts the necessary @@ -580,7 +581,7 @@ class BTypesFromSymbols[G <: Global](val global: G) extends 
BTypes { // // However, due to https://github.com/scala/scala-dev/issues/126, this currently does not // work, the abstract accessor for O will be marked effectivelyFinal. - val effectivelyFinal = methodSym.isEffectivelyFinalOrNotOverridden && !methodSym.isDeferred + val effectivelyFinal = methodSym.isEffectivelyFinalOrNotOverridden && !(methodSym hasFlag DEFERRED | SYNTHESIZE_IMPL_IN_SUBCLASS) val info = MethodInlineInfo( effectivelyFinal = effectivelyFinal, @@ -716,15 +717,9 @@ class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { // scala compiler. The word final is heavily overloaded unfortunately; // for us it means "not overridable". At present you can't override // vars regardless; this may change. - // - // The logic does not check .isFinal (which checks flags for the FINAL flag, - // and includes symbols marked lateFINAL) instead inspecting rawflags so - // we can exclude lateFINAL. Such symbols are eligible for inlining, but to - // avoid breaking proxy software which depends on subclassing, we do not - // emit ACC_FINAL. val finalFlag = ( - (((sym.rawflags & symtab.Flags.FINAL) != 0) || isTopLevelModuleClass(sym)) + (sym.isFinal || isTopLevelModuleClass(sym)) && !sym.enclClass.isTrait && !sym.isClassConstructor && (!sym.isMutable || nme.isTraitSetterName(sym.name)) // lazy vals and vars and their setters cannot be final, but trait setters are diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index 104e2e8c937f..406832c262dd 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -13,10 +13,6 @@ abstract class AddInterfaces extends InfoTransform { self: Erasure => import global._ // the global environment import definitions._ // standard classes and methods - /** lateDEFERRED for formerly concrete methods in such traits. - */ - override def phaseNewFlags: Long = lateDEFERRED - def transformMixinInfo(tp: Type): Type = tp match { case ClassInfoType(parents, decls, clazz) if clazz.isPackageClass || !clazz.isJavaDefined => diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index dc62b40578a6..f3fd7c5f673d 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -504,7 +504,7 @@ abstract class Erasure extends AddInterfaces if (!bridgeNeeded) return - var newFlags = (member.flags | BRIDGE | ARTIFACT) & ~(ACCESSOR | DEFERRED | LAZY | lateDEFERRED) + var newFlags = (member.flags | BRIDGE | ARTIFACT) & ~(ACCESSOR | DEFERRED | LAZY) // If `member` is a ModuleSymbol, the bridge should not also be a ModuleSymbol. Otherwise we // end up with two module symbols with the same name in the same scope, which is surprising // when implementing later phases. 
diff --git a/src/compiler/scala/tools/nsc/transform/Fields.scala b/src/compiler/scala/tools/nsc/transform/Fields.scala index 105bf0410dc0..a9ed8e3aca09 100644 --- a/src/compiler/scala/tools/nsc/transform/Fields.scala +++ b/src/compiler/scala/tools/nsc/transform/Fields.scala @@ -83,10 +83,10 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor private def accessorImplementedInSubclass(accessor: Symbol) = (accessor hasFlag SYNTHESIZE_IMPL_IN_SUBCLASS) && (accessor hasFlag (ACCESSOR | MODULE)) - private def concreteOrSynthImpl(sym: Symbol): Boolean = !(sym hasFlag DEFERRED) || (sym hasFlag SYNTHESIZE_IMPL_IN_SUBCLASS) + @inline final def notDeferredOrSynthImpl(sym: Symbol): Boolean = !(sym hasFlag DEFERRED) || (sym hasFlag SYNTHESIZE_IMPL_IN_SUBCLASS) private def synthesizeImplInSubclasses(accessor: Symbol): Unit = - accessor setFlag lateDEFERRED | SYNTHESIZE_IMPL_IN_SUBCLASS + accessor setFlag SYNTHESIZE_IMPL_IN_SUBCLASS private def setClonedTraitSetterFlags(clazz: Symbol, correspondingGetter: Symbol, cloneInSubclass: Symbol): Unit = { val overridden = isOverriddenAccessor(correspondingGetter, clazz) @@ -96,7 +96,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor // TODO: add MIXEDIN (see e.g., `accessed` on `Symbol`) private def setMixedinAccessorFlags(orig: Symbol, cloneInSubclass: Symbol): Unit = - cloneInSubclass setFlag OVERRIDE | NEEDS_TREES resetFlag DEFERRED | lateDEFERRED | SYNTHESIZE_IMPL_IN_SUBCLASS + cloneInSubclass setFlag OVERRIDE | NEEDS_TREES resetFlag DEFERRED | SYNTHESIZE_IMPL_IN_SUBCLASS private def setFieldFlags(accessor: Symbol, fieldInSubclass: TermSymbol): Unit = fieldInSubclass setFlag (NEEDS_TREES | @@ -128,7 +128,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor def matchingAccessor(pre: Type, member: Symbol, clazz: Symbol) = { - val res = member.matchingSymbol(clazz, pre) filter (sym => (sym hasFlag ACCESSOR) && concreteOrSynthImpl(sym)) + val res = member.matchingSymbol(clazz, pre) filter (sym => (sym hasFlag ACCESSOR) && notDeferredOrSynthImpl(sym)) // if (res != NoSymbol) println(s"matching accessor for $member in $clazz = $res (under $pre)") // else println(s"no matching accessor for $member in $clazz (under $pre) among ${clazz.info.decls}") res diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index 0db9f195971b..29ba21cba740 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -76,7 +76,7 @@ abstract class Flatten extends InfoTransform { decls1 enter sym if (sym.isModule) { // In theory, we could assert(sym.isMethod), because nested, non-static modules are - // transformed to methods (lateMETHOD flag added in RefChecks). But this requires + // transformed to methods (METHOD flag added in UnCurry). But this requires // forcing sym.info (see comment on isModuleNotMethod), which forces stub symbols // too eagerly (SI-8907). 
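As a user-level reminder of the "nested, non-static module" case mentioned in the comment above (the names `Outer`, `Inner` and `Demo` are made up for illustration): the nested object is owned by an instance of the enclosing class, so after uncurry selecting it goes through a stable, nullary module accessor rather than a static field.

```scala
class Outer {
  object Inner {          // owned by an Outer *instance*: a non-static module
    val x = 1
  }
}

object Demo extends App {
  val o = new Outer
  println(o.Inner.x)      // `o.Inner` is an application of the module accessor method; prints 1
}
```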
diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 0033736dbef5..441ae625d025 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -19,8 +19,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { /** The name of the phase: */ val phaseName: String = "mixin" - /** The phase might set the following new flags: */ - override def phaseNewFlags: Long = lateMODULE | notOVERRIDE /** This map contains a binding (class -> info) if * the class with this info at phase mixinPhase has been treated for mixin composition @@ -46,7 +44,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { */ private def isImplementedStatically(sym: Symbol) = ( sym.isMethod - && notDeferredOrLate(sym) + && notDeferred(sym) && sym.owner.isTrait && (!sym.isModule || sym.hasFlag(PRIVATE | LIFTED)) && (!(sym hasFlag (ACCESSOR | SUPERACCESSOR)) || sym.isLazy) @@ -109,7 +107,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { // --------- type transformation ----------------------------------------------- - private def notDeferredOrLate(sym: Symbol) = !sym.hasFlag(DEFERRED) || sym.hasFlag(lateDEFERRED) + @inline final def notDeferred(sym: Symbol) = fields.notDeferredOrSynthImpl(sym) /** Is member overridden (either directly or via a bridge) in base class sequence `bcs`? */ def isOverriddenAccessor(member: Symbol, bcs: List[Symbol]): Boolean = beforeOwnPhase { @@ -118,7 +116,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { sym => sym.hasFlag(ACCESSOR) && !sym.hasFlag(MIXEDIN) && - notDeferredOrLate(sym) && + notDeferred(sym) && matchesType(sym.tpe, member.tpe, alwaysMatchSimple = true)) } ( bcs.head != member.owner @@ -178,11 +176,14 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { treatedClassInfos(clazz) = clazz.info assert(phase == currentRun.mixinPhase, phase) - /* Create a new getter. Getters are never private or local. They are - * always accessors and deferred. */ + /* Create a new getter. Getters are never private or local. + * They are always accessors and deferred. + * + * TODO: I guess newGetter and newSetter are needed for fields added after the fields phase (lambdalift) -- can we fix that? + */ def newGetter(field: Symbol): Symbol = { - // println("creating new getter for "+ field +" : "+ field.info +" at "+ field.locationString+(field hasFlag MUTABLE)) - val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED | ( if (field.isMutable) 0 else STABLE ) + //println(s"creating new getter for $field : ${field.info} at ${field.locationString} // mutable: ${field hasFlag MUTABLE}") + val newFlags = field.flags & ~PrivateLocal | ACCESSOR | ( if (field.isMutable) 0 else STABLE ) | SYNTHESIZE_IMPL_IN_SUBCLASS // TODO: do we need SYNTHESIZE_IMPL_IN_SUBCLASS to indicate that `notDeferred(setter)` should hold // TODO preserve pre-erasure info? clazz.newMethod(field.getterName, field.pos, newFlags) setInfo MethodType(Nil, field.info) } @@ -190,9 +191,9 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { /* Create a new setter. Setters are never private or local. They are * always accessors and deferred. 
*/ def newSetter(field: Symbol): Symbol = { - //println("creating new setter for "+field+field.locationString+(field hasFlag MUTABLE)) + //println(s"creating new setter for $field ${field.locationString} // mutable: ${field hasFlag MUTABLE}") val setterName = field.setterName - val newFlags = field.flags & ~PrivateLocal | ACCESSOR | lateDEFERRED + val newFlags = field.flags & ~PrivateLocal | ACCESSOR | SYNTHESIZE_IMPL_IN_SUBCLASS // TODO: do we need SYNTHESIZE_IMPL_IN_SUBCLASS to indicate that `notDeferred(setter)` should hold val setter = clazz.newMethod(setterName, field.pos, newFlags) // TODO preserve pre-erasure info? setter setInfo MethodType(setter.newSyntheticValueParams(List(field.info)), UnitTpe) @@ -240,7 +241,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { def cloneAndAddMixinMember(mixinClass: Symbol, mixinMember: Symbol): Symbol = ( cloneAndAddMember(mixinClass, mixinMember, clazz) setPos clazz.pos - resetFlag DEFERRED | lateDEFERRED + resetFlag DEFERRED ) /* Mix in members of implementation class mixinClass into class clazz */ @@ -319,9 +320,9 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { } else if (mixinMember.hasAllFlags(METHOD | MODULE) && mixinMember.hasNoFlags(LIFTED | BRIDGE)) { // mixin objects: todo what happens with abstract objects? - // addMember(clazz, mixinMember.cloneSymbol(clazz, mixinMember.flags & ~(DEFERRED | lateDEFERRED)) setPos clazz.pos) + // addMember(clazz, mixinMember.cloneSymbol(clazz, mixinMember.flags & ~DEFERRED) setPos clazz.pos) } - else if (mixinMember.hasFlag(ACCESSOR) && notDeferredOrLate(mixinMember) + else if (mixinMember.hasFlag(ACCESSOR) && notDeferred(mixinMember) && (mixinMember hasFlag (LAZY | PARAMACCESSOR)) && !isOverriddenAccessor(mixinMember, clazz.info.baseClasses)) { // pick up where `fields` left off -- it already mixed in fields and accessors for regular vals. @@ -925,9 +926,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { addDefDef(sym) } else { // if class is not a trait add accessor definitions - // used to include `sym` with `sym hasFlag lateDEFERRED` as not deferred, - // but I don't think MIXEDIN members ever get this flag - assert(!sym.hasFlag(lateDEFERRED), s"mixedin $sym from $clazz has lateDEFERRED flag?!") if (sym.hasFlag(ACCESSOR) && !sym.hasFlag(DEFERRED)) { assert(sym hasFlag (LAZY | PARAMACCESSOR), s"mixed in $sym from $clazz is not lazy/param?!?") diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 40ab8c0cf896..87c14eb3a119 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -59,7 +59,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val phaseName: String = "specialize" /** The following flags may be set by this phase: */ - override def phaseNewFlags: Long = notPRIVATE | lateFINAL + override def phaseNewFlags: Long = notPRIVATE /** This phase changes base classes. */ override def changesBaseClasses = true diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index d088150db63f..a146f9aea5dc 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -178,13 +178,14 @@ class Flags extends ModifierFlags { // // Flags from 1L to (1L << 50) are normal flags. 
// - // The flags DEFERRED (1L << 4) to MODULE (1L << 8) have a `late` counterpart. Late flags change - // their counterpart from 0 to 1 after a specific phase (see below). The first late flag - // (lateDEFERRED) is at (1L << 51), i.e., late flags are shifted by 47. The last one is (1L << 55). + // The "late" counterpart to flags DEFERRED (1L << 4) to MODULE (1L << 8) + // show up in `sym.flags` as their regular counterpart once the phase mask admits them (see below). + // The first late flag (lateDEFERRED) is at (1L << 51), i.e., late flags are shifted by 47. The last one is (1L << 55). + // Think of it as a poor man's flag history akin to the type history for a symbol's info. // - // The flags PROTECTED (1L) to PRIVATE (1L << 2) have a `not` counterpart. Negated flags change - // their counterpart from 1 to 0 after a specific phase (see below). They are shifted by 56, i.e., - // the first negated flag (notPROTECTED) is at (1L << 56), the last at (1L << 58). + // The "not" counterpart to flags PROTECTED (1L) to PRIVATE (1L << 2) + // are negated flags that suppress their counterpart after a specific phase (see below). + // They are shifted by 56, i.e., the first negated flag (notPROTECTED) is at (1L << 56), the last at (1L << 58). // // Late and negative flags are only enabled after certain phases, implemented by the phaseNewFlags // method of the SubComponent, so they implement a bit of a flag history. @@ -216,20 +217,15 @@ class Flags extends ModifierFlags { // erasure 15 [START] // mixin 20 [START] // - // lateMETHOD set in RefChecks#transformInfo. - // lateFINAL set in Symbols#makeNotPrivate. // notPRIVATE set in Symbols#makeNotPrivate, IExplicitOuter#transform, Inliners. // notPROTECTED set in ExplicitOuter#transform. - // lateDEFERRED set in AddInterfaces, Mixin, etc. - // lateMODULE set in Mixin#transformInfo. - // notOVERRIDE set in Mixin#preTransform. - final val lateDEFERRED = (DEFERRED: Long) << LateShift - final val lateFINAL = (FINAL: Long) << LateShift - final val lateMETHOD = (METHOD: Long) << LateShift - final val lateMODULE = (MODULE: Long) << LateShift +// final val lateDEFERRED = (DEFERRED: Long) << LateShift // unused +// final val lateFINAL = (FINAL: Long) << LateShift // only used for inliner -- could be subsumed by notPRIVATE? 
+// final val lateMETHOD = (METHOD: Long) << LateShift // unused +// final val lateMODULE = (MODULE: Long) << LateShift // unused - final val notOVERRIDE = (OVERRIDE: Long) << AntiShift +// final val notOVERRIDE = (OVERRIDE: Long) << AntiShift // unused final val notPRIVATE = (PRIVATE: Long) << AntiShift final val notPROTECTED = (PROTECTED: Long) << AntiShift @@ -449,13 +445,13 @@ class Flags extends ModifierFlags { case JAVA_ENUM => "" // (1L << 48) case JAVA_ANNOTATION => "" // (1L << 49) case SYNTHESIZE_IMPL_IN_SUBCLASS => "" // (1L << 50) - case `lateDEFERRED` => "" // (1L << 51) - case `lateFINAL` => "" // (1L << 52) - case `lateMETHOD` => "" // (1L << 53) - case 0x80000000000000L => "" // (1L << 54) - case `lateMODULE` => "" // (1L << 55) + case 0x08000000000000L => "" // (1L << 51) + case 0x10000000000000L => "" // (1L << 52) + case 0x20000000000000L => "" // (1L << 53) + case 0x40000000000000L => "" // (1L << 54) + case 0x80000000000000L => "" // (1L << 55) case `notPROTECTED` => "" // (1L << 56) - case `notOVERRIDE` => "" // (1L << 57) + case 0x200000000000000L => "" // (1L << 57) case `notPRIVATE` => "" // (1L << 58) case NEEDS_TREES => "" // (1L << 59) case 0x1000000000000000L => "" // (1L << 60) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index e4388561603b..487aadf5e544 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -746,10 +746,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def hasGetter = isTerm && nme.isLocalName(name) /** - * Nested modules which have no static owner when ModuleDefs are eliminated (refchecks) are - * given the lateMETHOD flag, which makes them appear as methods after refchecks. + * Nested modules with a non-static owner receive the METHOD flag during UnCurry's info transform. + * (They are replaced by a ClassDef and DefDef for the module accessor during the fields phase.) * - * Note: the lateMETHOD flag is added lazily in the info transformer of the RefChecks phase. + * Note: the METHOD flag is added lazily in the info transformer of the UnCurry phase. * This means that forcing the `sym.info` may change the value of `sym.isMethod`. Forcing the * info is in the responsibility of the caller. Doing it eagerly here was tried (0ccdb151f) but * has proven to lead to bugs (SI-8907). @@ -985,10 +985,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isEffectivelyFinal: Boolean = ( (this hasFlag FINAL | PACKAGE) || isModuleOrModuleClass && (isTopLevel || !settings.overrideObjects) - || isTerm && ( - isPrivate - || isLocalToBlock - ) + || isTerm && (isPrivate || isLocalToBlock || (hasAllFlags(notPRIVATE | METHOD) && !hasFlag(DEFERRED))) || isClass && originalOwner.isTerm && children.isEmpty // we track known subclasses of term-owned classes, use that infer finality ) /** Is this symbol effectively final or a concrete term member of sealed class whose children do not override it */ @@ -2450,14 +2447,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ final def makeNotPrivate(base: Symbol) { if (this.isPrivate) { - setFlag(notPRIVATE) - // Marking these methods final causes problems for proxies which use subclassing. If people - // write their code with no usage of final, we probably shouldn't introduce it ourselves - // unless we know it is safe. ... Unfortunately if they aren't marked final the inliner - // thinks it can't inline them. 
So once again marking lateFINAL, and in genjvm we no longer - // generate ACC_FINAL on "final" methods which are actually lateFINAL. - if (isMethod && !isDeferred) - setFlag(lateFINAL) + setFlag(notPRIVATE) // this makes it effectively final (isEffectivelyFinal) + // don't set FINAL -- methods not marked final by user should not end up final in bytecode + // inliner will know it's effectively final (notPRIVATE non-deferred method) if (!isStaticModule && !isClassConstructor) { expandName(base) if (isModule) moduleClass.makeNotPrivate(base) @@ -2886,7 +2878,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def owner = { if (Statistics.hotEnabled) Statistics.incCounter(ownerCount) - // a module symbol may have the lateMETHOD flag after refchecks, see isModuleNotMethod + // a non-static module symbol gets the METHOD flag in uncurry's info transform -- see isModuleNotMethod if (!isMethod && needsFlatClasses) rawowner.owner else rawowner } diff --git a/test/junit/scala/tools/nsc/symtab/FlagsTest.scala b/test/junit/scala/tools/nsc/symtab/FlagsTest.scala index 96eae38011f8..e88b3f9e966d 100644 --- a/test/junit/scala/tools/nsc/symtab/FlagsTest.scala +++ b/test/junit/scala/tools/nsc/symtab/FlagsTest.scala @@ -31,12 +31,7 @@ class FlagsTest { @Test def testTimedFlags(): Unit = { - testLate(lateDEFERRED, _.isDeferred) - testLate(lateFINAL, _.isFinal) - testLate(lateMETHOD, _.isMethod) - testLate(lateMODULE, _.isModule) testNot(PROTECTED | notPROTECTED, _.isProtected) - testNot(OVERRIDE | notOVERRIDE, _.isOverride) testNot(PRIVATE | notPRIVATE, _.isPrivate) assertFalse(withFlagMask(AllFlags)(sym.setFlag(PRIVATE | notPRIVATE).isPrivate)) From 8f792280630721bdc1e6ee9199eb0cf8cb035fce Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Sun, 29 May 2016 21:45:08 -0700 Subject: [PATCH 0214/2793] Simplify erasure + mixin Remove some old, obsolete & untested hacks from ExplicitOuter. Added a test for one of them to show this is now fine. There are a lot of `makeNotPrivate` invocations sprinkled around the codebase. Lets see if we can centralize the ones dealing with trait methods that need implementations in the phase that emits them. For example Fields (accessors for fields/modules) or SuperAccessors. 
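The diffstat below adds `test/files/run/t2946/*`; the actual contents of those files are not shown in this excerpt. A minimal scenario in the spirit of SI-2946 (a trait-local field referenced from an inner class, with the trait mixed in by a separately compiled file) could look roughly like this; only the file names follow the test's naming convention, the rest is illustrative:

```scala
// ResponseCommon_1.scala (compiled in the first run)
trait ResponseCommon {
  private[this] val prefix = "response: "   // trait-local field
  class Renderer {                          // inner class reaching the trait-local field
    def render(tag: String): String = prefix + tag
  }
}

// MyResponseCommon_2.scala (compiled in a second run, mixing in the trait)
class MyResponseCommon extends ResponseCommon

object Test extends App {
  val r = new MyResponseCommon
  println(new r.Renderer().render("ok"))    // must not depend on usage-driven name mangling
}
```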
--- .../tools/nsc/transform/AddInterfaces.scala | 94 ------------ .../scala/tools/nsc/transform/Erasure.scala | 59 ++++++-- .../tools/nsc/transform/ExplicitOuter.scala | 105 +++++-------- .../scala/tools/nsc/transform/Fields.scala | 3 + .../scala/tools/nsc/transform/Mixin.scala | 142 +++++++----------- .../scala/reflect/internal/Definitions.scala | 6 +- .../reflect/internal/transform/Erasure.scala | 14 +- test/files/run/t2946/MyResponseCommon_2.scala | 7 + test/files/run/t2946/ResponseCommon_1.scala | 13 ++ .../backend/jvm/opt/ScalaInlineInfoTest.scala | 2 +- 10 files changed, 177 insertions(+), 268 deletions(-) create mode 100644 test/files/run/t2946/MyResponseCommon_2.scala create mode 100644 test/files/run/t2946/ResponseCommon_1.scala diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index 406832c262dd..e69de29bb2d1 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -1,94 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky - */ - -package scala.tools.nsc -package transform - -import symtab._ -import Flags._ - -abstract class AddInterfaces extends InfoTransform { self: Erasure => - import global._ // the global environment - import definitions._ // standard classes and methods - - def transformMixinInfo(tp: Type): Type = tp match { - case ClassInfoType(parents, decls, clazz) if clazz.isPackageClass || !clazz.isJavaDefined => - - val parents1 = parents match { - case Nil => Nil - case hd :: tl => - assert(!hd.typeSymbol.isTrait, clazz) - if (clazz.isTrait) ObjectTpe :: tl - else parents - } - if (clazz.isTrait) { - decls foreach { sym => - if (!sym.isType) sym.info // initialize to set lateMETHOD flag if necessary - } - } - if (parents1 eq parents) tp - else ClassInfoType(parents1, decls, clazz) - case _ => - tp - } - -// Tree transformation -------------------------------------------------------------- - private class ChangeOwnerAndReturnTraverser(oldowner: Symbol, newowner: Symbol) - extends ChangeOwnerTraverser(oldowner, newowner) { - override def traverse(tree: Tree) { - tree match { - case _: Return => change(tree.symbol) - case _ => - } - super.traverse(tree) - } - } - - /** Add calls to supermixin constructors - * `super[mix].$init$()` - * to tree, which is assumed to be the body of a constructor of class clazz. - */ - private def addMixinConstructorCalls(tree: Tree, clazz: Symbol): Tree = { - def mixinConstructorCall(mc: Symbol): Tree = atPos(tree.pos) { - Apply(SuperSelect(clazz, mc.primaryConstructor), Nil) - } - val mixinConstructorCalls: List[Tree] = { - for (mc <- clazz.mixinClasses.reverse - if mc.isTrait && mc.primaryConstructor != NoSymbol) - yield mixinConstructorCall(mc) - } - tree match { - - case Block(Nil, expr) => - // AnyVal constructor - have to provide a real body so the - // jvm doesn't throw a VerifyError. But we can't add the - // body until now, because the typer knows that Any has no - // constructor and won't accept a call to super.init. 
- assert((clazz isSubClass AnyValClass) || clazz.info.parents.isEmpty, clazz) - Block(List(Apply(gen.mkSuperInitCall, Nil)), expr) - - case Block(stats, expr) => - // needs `hasSymbolField` check because `supercall` could be a block (named / default args) - val (presuper, supercall :: rest) = stats span (t => t.hasSymbolWhich(_ hasFlag PRESUPER)) - treeCopy.Block(tree, presuper ::: (supercall :: mixinConstructorCalls ::: rest), expr) - } - } - - protected val mixinTransformer = new Transformer { - override def transform(tree: Tree): Tree = { - val sym = tree.symbol - val tree1 = tree match { - case DefDef(_,_,_,_,_,_) if sym.isClassConstructor && sym.isPrimaryConstructor && sym.owner != ArrayClass => - deriveDefDef(tree)(addMixinConstructorCalls(_, sym.owner)) // (3) - case Template(parents, self, body) => - val parents1 = sym.owner.info.parents map (t => TypeTree(t) setPos tree.pos) - treeCopy.Template(tree, parents1, noSelfType, body) - case _ => - tree - } - super.transform(tree1) - } - } -} diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index f3fd7c5f673d..d190802f66f6 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -12,7 +12,7 @@ import symtab._ import Flags._ import scala.reflect.internal.Mode._ -abstract class Erasure extends AddInterfaces +abstract class Erasure extends InfoTransform with scala.reflect.internal.transform.Erasure with typechecker.Analyzer with TypingTransformers @@ -373,16 +373,53 @@ abstract class Erasure extends AddInterfaces class UnknownSig extends Exception - /** The symbol's erased info. This is the type's erasure, except for the following symbols: - * - * - For $asInstanceOf : [T]T - * - For $isInstanceOf : [T]scala#Boolean - * - For class Array : [T]C where C is the erased classinfo of the Array class. - * - For Array[T]. : {scala#Int)Array[T] - * - For a type parameter : A type bounds type consisting of the erasures of its bounds. - */ - override def transformInfo(sym: Symbol, tp: Type): Type = - transformMixinInfo(super.transformInfo(sym, tp)) + // TODO: move to constructors? + object mixinTransformer extends Transformer { + /** Add calls to supermixin constructors + * `super[mix].$init$()` + * to tree, which is assumed to be the body of a constructor of class clazz. + */ + private def addMixinConstructorCalls(tree: Tree, clazz: Symbol): Tree = { + def mixinConstructorCall(mc: Symbol): Tree = atPos(tree.pos) { + Apply(SuperSelect(clazz, mc.primaryConstructor), Nil) + } + val mixinConstructorCalls: List[Tree] = { + for (mc <- clazz.mixinClasses.reverse + if mc.isTrait && mc.primaryConstructor != NoSymbol) + yield mixinConstructorCall(mc) + } + tree match { + + case Block(Nil, expr) => + // AnyVal constructor - have to provide a real body so the + // jvm doesn't throw a VerifyError. But we can't add the + // body until now, because the typer knows that Any has no + // constructor and won't accept a call to super.init. 
+ assert((clazz isSubClass AnyValClass) || clazz.info.parents.isEmpty, clazz) + Block(List(Apply(gen.mkSuperInitCall, Nil)), expr) + + case Block(stats, expr) => + // needs `hasSymbolField` check because `supercall` could be a block (named / default args) + val (presuper, supercall :: rest) = stats span (t => t.hasSymbolWhich(_ hasFlag PRESUPER)) + treeCopy.Block(tree, presuper ::: (supercall :: mixinConstructorCalls ::: rest), expr) + } + } + + override def transform(tree: Tree): Tree = { + val sym = tree.symbol + val tree1 = tree match { + case DefDef(_,_,_,_,_,_) if sym.isClassConstructor && sym.isPrimaryConstructor && sym.owner != ArrayClass => + deriveDefDef(tree)(addMixinConstructorCalls(_, sym.owner)) // (3) + case Template(parents, self, body) => + val parents1 = sym.owner.info.parents map (t => TypeTree(t) setPos tree.pos) + treeCopy.Template(tree, parents1, noSelfType, body) + case _ => + tree + } + super.transform(tree1) + } + } + val deconstMap = new TypeMap { // For some reason classOf[Foo] creates ConstantType(Constant(tpe)) with an actual Type for tpe, diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 411ff6b9bec2..f3d5ceb0f0f2 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -158,13 +158,6 @@ abstract class ExplicitOuter extends InfoTransform case MethodType(params, resTp) => val resTpTransformed = transformInfo(sym, resTp) - // juggle flags (and mangle names) after transforming info - if (sym.owner.isTrait) { - // TODO: I don't believe any private accessors remain after the fields phase - if ((sym hasFlag (ACCESSOR | SUPERACCESSOR)) || sym.isModule) sym.makeNotPrivate(sym.owner) // 5 - if (sym.isProtected) sym setFlag notPROTECTED // 6 - } - val paramsWithOuter = if (sym.isClassConstructor && isInner(sym.owner)) // 1 sym.newValueParameter(nme.OUTER_ARG, sym.pos).setInfo(sym.owner.outerClass.thisType) :: params @@ -202,14 +195,6 @@ abstract class ExplicitOuter extends InfoTransform if (restp eq restp1) tp else PolyType(tparams, restp1) case _ => - // Local fields of traits need to be unconditionally unprivatized. - // Reason: Those fields might need to be unprivatized if referenced by an inner class. - // On the other hand, mixing in the trait into a separately compiled - // class needs to have a common naming scheme, independently of whether - // the field was accessed from an inner class or not. See #2946 - if (sym.owner.isTrait && sym.isLocalToThis && - (sym.getterIn(sym.owner) == NoSymbol)) - sym.makeNotPrivate(sym.owner) tp } @@ -300,61 +285,41 @@ abstract class ExplicitOuter extends InfoTransform } } - /**

- * The phase performs the following transformations on terms:
- *
- *  1. An class which is not an interface and is not static gets an outer
- *     accessor (@see outerDefs).
- *     1a. A class which is not a trait gets an outer field.
- *  2. A constructor of a non-trait inner class gets an outer parameter.
- *  3. A reference C.this where C refers to an outer class is replaced by a selection
- *     this.$outer$$C1 ... .$outer$$Cn (@see outerPath)
- *  4. A call to a constructor Q.<init>(args) or Q.$init$(args) where Q != this and
- *     the constructor belongs to a non-static class is augmented by an outer argument.
- *     E.g. Q.<init>(OUTER, args) where OUTER is the qualifier corresponding to the singleton type Q.
- *  5. A call to a constructor this.<init>(args) in a secondary constructor is augmented
- *     to this.<init>(OUTER, args) where OUTER is the last parameter of the secondary constructor.
- *  6. Remove private modifier from class members M that are accessed from an inner class.
- *  7. Remove protected modifier from class members M that are accessed without a super
- *     qualifier accessed from an inner class or trait.
- *  8. Remove private and protected modifiers from type symbols.
- *  9. Remove private modifiers from members of traits.
- *
- * Note: The whole transform is run in phase explicitOuter.next.
- */ + /** The phase performs the following transformations (more or less...): + * + * (1) An class which is not an interface and is not static gets an outer accessor (@see outerDefs). + * (1a) A class which is not a trait gets an outer field. + * + * (4) A constructor of a non-trait inner class gets an outer parameter. + * + * (5) A reference C.this where C refers to an outer class is replaced by a selection + * `this.$outer$$C1 ... .$outer$$Cn` (@see outerPath) + * + * (7) A call to a constructor Q.(args) or Q.$init$(args) where Q != this and + * the constructor belongs to a non-static class is augmented by an outer argument. + * E.g. Q.(OUTER, args) where OUTER + * is the qualifier corresponding to the singleton type Q. + * + * (8) A call to a constructor this.(args) in a + * secondary constructor is augmented to this.(OUTER, args) + * where OUTER is the last parameter of the secondary constructor. + * + * (9) Remove private modifier from class members M that are accessed from an inner class. + * + * (10) Remove protected modifier from class members M that are accessed + * without a super qualifier accessed from an inner class or trait. + * + * (11) Remove private and protected modifiers from type symbols + * + * Note: The whole transform is run in phase explicitOuter.next. + * + * TODO: Make this doc reflect what's actually going on. + * Some of the deviations are motivated by separate compilation + * (name mangling based on usage is inherently unstable). + * Now that traits are compiled 1:1 to interfaces, they can have private members, + * so there's also less need to make trait members non-private + * (they still may need to be implemented in subclasses, though we could make those protected...). + */ class ExplicitOuterTransformer(unit: CompilationUnit) extends OuterPathTransformer(unit) { transformer => diff --git a/src/compiler/scala/tools/nsc/transform/Fields.scala b/src/compiler/scala/tools/nsc/transform/Fields.scala index a9ed8e3aca09..1900fcdc16e8 100644 --- a/src/compiler/scala/tools/nsc/transform/Fields.scala +++ b/src/compiler/scala/tools/nsc/transform/Fields.scala @@ -53,6 +53,8 @@ import symtab.Flags._ * An overridden val's side-effect is still performed. * The only change due to overriding is that its value is never written to the field * (the overridden val's value is, of course, stored in the field in addition to its side-effect being performed). + * + * TODO: check init support (or drop the -Xcheck-init flag??) */ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransformers { @@ -247,6 +249,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor val accessorUnderConsideration = !(member hasFlag (DEFERRED | LAZY)) // destructively mangle accessor's name (which may cause rehashing of decls), also sets flags + // TODO: technically, only necessary for stored fields if (member hasFlag PRIVATE) member makeNotPrivate clazz // Need to mark as notPROTECTED, so that it's carried over to the synthesized member in subclasses, diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 441ae625d025..a1441fe7b36e 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -19,6 +19,22 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { /** The name of the phase: */ val phaseName: String = "mixin" + /** Some trait methods need to be implemented in subclasses, so they cannot be private. 
+ * + * They may be protected, now that traits are compiled 1:1 to interfaces. + * + * TODO: interfaces can also have private members, so there's also less need to make trait members non-private + * can we leave more methods private? + * (they still may need to be implemented in subclasses, though we could make those protected...). + */ + def publicizeTraitMethod(sym: Symbol): Unit = { + if ((sym hasFlag PRIVATE) && + ( (sym hasFlag SUPERACCESSOR) // super accessors by definition must be implemented in a subclass, so can't have the private (TODO: why are they ever private in a trait to begin with!?!?) + || (sym hasFlag ACCESSOR | MODULE))) // an accessor / module *may* need to be implemented in a subclass, and thus cannot be private + sym.makeNotPrivate(sym.owner) + + if (sym hasFlag PROTECTED) sym setFlag notPROTECTED + } /** This map contains a binding (class -> info) if * the class with this info at phase mixinPhase has been treated for mixin composition @@ -43,7 +59,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { * methods in the impl class (because they can have arbitrary initializers) */ private def isImplementedStatically(sym: Symbol) = ( - sym.isMethod + (sym.isMethod || ((sym hasFlag MODULE) && !sym.isStatic)) && notDeferred(sym) && sym.owner.isTrait && (!sym.isModule || sym.hasFlag(PRIVATE | LIFTED)) @@ -221,6 +237,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { } clazz.info.decls.unlink(member) } + else if (member.isMethod) publicizeTraitMethod(member) } debuglog("new defs of " + clazz + " = " + clazz.info.decls) } @@ -318,10 +335,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { superAccessor.asInstanceOf[TermSymbol] setAlias alias1 } } - else if (mixinMember.hasAllFlags(METHOD | MODULE) && mixinMember.hasNoFlags(LIFTED | BRIDGE)) { - // mixin objects: todo what happens with abstract objects? - // addMember(clazz, mixinMember.cloneSymbol(clazz, mixinMember.flags & ~DEFERRED) setPos clazz.pos) - } else if (mixinMember.hasFlag(ACCESSOR) && notDeferred(mixinMember) && (mixinMember hasFlag (LAZY | PARAMACCESSOR)) && !isOverriddenAccessor(mixinMember, clazz.info.baseClasses)) { @@ -866,109 +879,70 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { def getterBody(getter: Symbol) = { assert(getter.isGetter) - val readValue = getter.tpe match { - // A field "final val f = const" in a trait generates a getter with a ConstantType. - case MethodType(Nil, ConstantType(c)) => - Literal(c) - case _ => - // if it is a mixed-in lazy value, complete the accessor - if (getter.isLazy) { - val isUnit = isUnitGetter(getter) - val initCall = Apply(SuperSelect(clazz, initializer(getter)), Nil) - val selection = fieldAccess(getter) - val init = if (isUnit) initCall else atPos(getter.pos)(Assign(selection, initCall)) - val returns = if (isUnit) UNIT else selection - mkLazyDef(clazz, getter, List(init), returns, fieldOffset(getter)) + val readValue = + if (getter.isLazy) { + getter.tpe.resultType match { + case ConstantType(c) => Literal(c) + case _ => + val initCall = Apply(SuperSelect(clazz, initializer(getter)), Nil) + val offset = fieldOffset(getter) + if (isUnitGetter(getter)) mkLazyDef(clazz, getter, List(initCall), UNIT, offset) + else mkLazyDef(clazz, getter, List(atPos(getter.pos)(Assign(fieldAccess(getter), initCall))), fieldAccess(getter), offset) } - // For a field of type Unit in a trait, no actual field is generated when being mixed in. 
- else if (isUnitGetter(getter)) UNIT - else fieldAccess(getter) - } + } else { + assert(getter.hasFlag(PARAMACCESSOR)) + fieldAccess(getter) + } + if (!needsInitFlag(getter)) readValue else mkCheckedAccessor(clazz, readValue, fieldOffset(getter), getter.pos, getter) } def setterBody(setter: Symbol) = { val getter = setter.getterIn(clazz) - - // A trait with a field of type Unit creates a trait setter (invoked by the - // implementation class constructor), like for any other trait field. - // However, no actual field is created in the class that mixes in the trait. - // Therefore the setter does nothing (except setting the -Xcheckinit flag). + assert(getter.hasFlag(PARAMACCESSOR), s"missing implementation for non-paramaccessor $setter in $clazz") val setInitFlag = if (!needsInitFlag(getter)) Nil else List(mkSetFlag(clazz, fieldOffset(getter), getter, bitmapKind(getter))) - val fieldInitializer = - if (isUnitGetter(getter)) Nil - else List(Assign(fieldAccess(setter), Ident(setter.firstParam))) - - (fieldInitializer ::: setInitFlag) match { - case Nil => UNIT - // If there's only one statement, the Block factory does not actually create a Block. - case stats => Block(stats: _*) - } + Block(Assign(fieldAccess(setter), Ident(setter.firstParam)) :: setInitFlag : _*) } def fieldAccess(accessor: Symbol) = Select(This(clazz), accessor.accessed) - def isOverriddenSetter(sym: Symbol) = - nme.isTraitSetterName(sym.name) && { - val other = sym.nextOverriddenSymbol - isOverriddenAccessor(other.getterIn(other.owner), clazz.info.baseClasses) - } - // for all symbols `sym` in the class definition, which are mixed in: + // for all symbols `sym` in the class definition, which are mixed in by mixinTraitMembers for (sym <- clazz.info.decls ; if sym hasFlag MIXEDIN) { // if current class is a trait, add an abstract method for accessor `sym` - if (clazz.isTrait) { - addDefDef(sym) - } else { - // if class is not a trait add accessor definitions - if (sym.hasFlag(ACCESSOR) && !sym.hasFlag(DEFERRED)) { - assert(sym hasFlag (LAZY | PARAMACCESSOR), s"mixed in $sym from $clazz is not lazy/param?!?") - - // add accessor definitions - addDefDef(sym, { - if (sym.isSetter) { - // If this is a setter of a mixed-in field which is overridden by another mixin, - // the trait setter of the overridden one does not need to do anything - the - // trait setter of the overriding field will initialize the field. 
- if (isOverriddenSetter(sym)) UNIT - else setterBody(sym) - } - else getterBody(sym) - }) - } - else if (sym.isModule && !(sym hasFlag LIFTED | BRIDGE)) { - // Moved to Refchecks - } - else if (!sym.isMethod) { - // add fields - addValDef(sym) - } - else if (sym.isSuperAccessor) { - // add superaccessors - addDefDef(sym) - } - else { - // add forwarders - assert(sym.alias != NoSymbol, (sym, sym.debugFlagString, clazz)) - // debuglog("New forwarder: " + sym.defString + " => " + sym.alias.defString) - if (!sym.isMacro) addDefDef(sym, Apply(SuperSelect(clazz, sym.alias), sym.paramss.head.map(Ident(_)))) - } + // ditto for a super accessor (will get an RHS in completeSuperAccessor) + if (clazz.isTrait || sym.isSuperAccessor) addDefDef(sym) + // implement methods mixed in from a supertrait (the symbols were created by mixinTraitMembers) + else if (sym.hasFlag(ACCESSOR) && !sym.hasFlag(DEFERRED)) { + assert(sym hasFlag (LAZY | PARAMACCESSOR), s"mixed in $sym from $clazz is not lazy/param?!?") + + // add accessor definitions + addDefDef(sym, if (sym.isSetter) setterBody(sym) else getterBody(sym)) + } + else if (!sym.isMethod) addValDef(sym) // field + else if (!sym.isMacro) { // forwarder + assert(sym.alias != NoSymbol, (sym, sym.debugFlagString, clazz)) + // debuglog("New forwarder: " + sym.defString + " => " + sym.alias.defString) + addDefDef(sym, Apply(SuperSelect(clazz, sym.alias), sym.paramss.head.map(Ident(_)))) } } + stats1 = add(stats1, newDefs.toList) - if (clazz.isTrait) stats1 = - stats1.filter { + + if (clazz.isTrait) stats1 = stats1.filter { case vd: ValDef => - // TODO do we get here? + assert(vd.symbol.hasFlag(PRESUPER | PARAMACCESSOR | LAZY), s"unexpected valdef $vd in trait $clazz") false case _ => true } + if (!clazz.isTrait) stats1 = stats1 map completeSuperAccessor + stats1 } @@ -989,8 +963,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { /** The transform that gets applied to a tree after it has been completely * traversed and possible modified by a preTransform. * This step will - * - change every node type that refers to an implementation class to its - * corresponding interface, unless the node's symbol is an implementation class. * - change parents of templates to conform to parents in the symbol info * - add all new definitions to a class or interface * - remove widening casts @@ -998,8 +970,6 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { * to static calls of methods in implementation modules (@see staticCall) * - change super calls to methods in implementation classes to static calls * (@see staticCall) - * - change `this` in implementation modules to references to the self parameter - * - refer to fields in some implementation class via an abstract method in the interface. */ private def postTransform(tree: Tree): Tree = { val sym = tree.symbol @@ -1020,6 +990,8 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { treeCopy.Template(tree, parents1, self, statsWithNewDefs) case Select(qual, name) if sym.owner.isTrait && !sym.isMethod => + assert(sym.hasFlag(PARAMACCESSOR | PRESUPER), s"!!! Unexpected reference to field $sym in trait $currentOwner") + // refer to fields in some trait an abstract getter in the interface. 
val ifaceGetter = sym getterIn sym.owner diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 35ec80901e66..eca1bbea5ac6 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1040,11 +1040,7 @@ trait Definitions extends api.StandardDefinitions { } } - /** Remove references to class Object (other than the head) in a list of parents */ - def removeLaterObjects(tps: List[Type]): List[Type] = tps match { - case Nil => Nil - case x :: xs => x :: xs.filterNot(_.typeSymbol == ObjectClass) - } + /** Remove all but one reference to class Object from a list of parents. */ def removeRedundantObjects(tps: List[Type]): List[Type] = tps match { case Nil => Nil diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index 412c49f571a2..62ca50d035a2 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -148,9 +148,19 @@ trait Erasure { apply(atp) case ClassInfoType(parents, decls, clazz) => ClassInfoType( - if (clazz == ObjectClass || isPrimitiveValueClass(clazz)) Nil + if (clazz == ObjectClass || isPrimitiveValueClass(clazz) || parents.isEmpty) Nil else if (clazz == ArrayClass) ObjectTpe :: Nil - else removeLaterObjects(parents map this), + else { + val erasedParents = parents map this + + // drop first parent for traits -- it has been normalized to a class by now, + // but we should drop that in bytecode + val firstParent = + if (clazz.hasFlag(Flags.TRAIT) && !clazz.hasFlag(Flags.JAVA)) ObjectTpe + else erasedParents.head + + firstParent :: erasedParents.tail.filter(_.typeSymbol != ObjectClass) + }, decls, clazz) case _ => mapOver(tp) diff --git a/test/files/run/t2946/MyResponseCommon_2.scala b/test/files/run/t2946/MyResponseCommon_2.scala new file mode 100644 index 000000000000..4f8f924f2cae --- /dev/null +++ b/test/files/run/t2946/MyResponseCommon_2.scala @@ -0,0 +1,7 @@ +class MyResponseCommon extends Parser with ResponseCommon + +object Test { + def main(args: Array[String]) { + new MyResponseCommon + } +} diff --git a/test/files/run/t2946/ResponseCommon_1.scala b/test/files/run/t2946/ResponseCommon_1.scala new file mode 100644 index 000000000000..bb921e7027b9 --- /dev/null +++ b/test/files/run/t2946/ResponseCommon_1.scala @@ -0,0 +1,13 @@ +class Parser { + def parse(t: Any): Unit = {} +} + +trait ResponseCommon extends Parser { + private[this] var paramsParser: Parser = null + def withParamsParser(parser: Parser) = {paramsParser = parser; this} + + override abstract def parse(t: Any): Unit = t match { + case ("params", value: List[_]) => value.foreach {paramsParser.parse(_)} + case _ => super.parse(t) + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala index 9217183c7424..e03b703dc9d2 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala @@ -106,7 +106,7 @@ class ScalaInlineInfoTest extends BytecodeTesting { ("x5()I", MethodInlineInfo(true, false,false)), ("x5$(LT;)I", MethodInlineInfo(true ,false,false)), ("L$lzycompute$1(Lscala/runtime/VolatileObjectRef;)LT$L$2$;", MethodInlineInfo(true, false,false)), - ("L$1(Lscala/runtime/VolatileObjectRef;)LT$L$2$;", MethodInlineInfo(true 
,false,false)), + ("T$$L$1(Lscala/runtime/VolatileObjectRef;)LT$L$2$;", MethodInlineInfo(true ,false,false)), ("nest$1()I", MethodInlineInfo(true, false,false)), ("$init$(LT;)V", MethodInlineInfo(true,false,false))), None // warning From b79c0d124e839e9e7ae5db883488c0134642472b Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Mon, 30 May 2016 23:48:28 -0700 Subject: [PATCH 0215/2793] LambdaLift emits paramaccessor syms and defdefs ... instead of emitting ValDefs and field symbols, which are then promptly unlinked and transformed by the "late trait methods" logic in mixins... Mixins still synthesizes implementations for these accessors in subclasses. A paramaccessor in a trait is a method without an underlying field. --- .../tools/nsc/transform/Constructors.scala | 20 ++++-- .../tools/nsc/transform/LambdaLift.scala | 35 +++++++++-- .../scala/tools/nsc/transform/LazyVals.scala | 2 +- .../scala/tools/nsc/transform/Mixin.scala | 61 +++---------------- .../nsc/typechecker/SuperAccessors.scala | 11 ++-- 5 files changed, 60 insertions(+), 69 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index ec8dc6883447..0a87e358b441 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -506,7 +506,8 @@ abstract class Constructors extends Statics with Transform with TypingTransforme ) /* - * whether `sym` denotes a param-accessor (ie a field) that fulfills all of: + * whether `sym` denotes a param-accessor (ie in a class a PARAMACCESSOR field, or in a trait a method with same flag) + * that fulfills all of: * (a) has stationary value, ie the same value provided via the corresponding ctor-arg; and * (b) isn't subject to specialization. We might be processing statements for: * (b.1) the constructor in the generic (super-)class; or @@ -519,10 +520,11 @@ abstract class Constructors extends Statics with Transform with TypingTransforme case Apply(Select(This(_), _), List()) => // references to parameter accessor methods of own class become references to parameters // outer accessors become references to $outer parameter - if (clazz.isTrait) + // println(s"to param ref in $clazz for ${tree.symbol} ${tree.symbol.debugFlagString} / ${tree.symbol.outerSource} / ${canBeSupplanted(tree.symbol)}") + if (clazz.isTrait && !(tree.symbol hasAllFlags (ACCESSOR | PARAMACCESSOR))) super.transform(tree) else if (canBeSupplanted(tree.symbol)) - gen.mkAttributedIdent(parameter(tree.symbol.accessed)) setPos tree.pos + gen.mkAttributedIdent(parameter(tree.symbol)) setPos tree.pos else if (tree.symbol.outerSource == clazz) gen.mkAttributedIdent(parameterNamed(nme.OUTER)) setPos tree.pos else @@ -700,7 +702,9 @@ abstract class Constructors extends Statics with Transform with TypingTransforme def omittableStat(stat: Tree) = omittableSym(stat.symbol) // The parameter accessor fields which are members of the class - val paramAccessors = clazz.constrParamAccessors + val paramAccessors = + if (clazz.isTrait) clazz.info.decls.toList.filter(sym => sym.hasAllFlags(STABLE | PARAMACCESSOR)) // since a trait does not have constructor parameters (yet), these can only come from lambdalift -- right? + else clazz.constrParamAccessors // Initialize all parameters fields that must be kept. 
val paramInits = paramAccessors filterNot omittableSym map { acc => @@ -708,11 +712,15 @@ abstract class Constructors extends Statics with Transform with TypingTransforme // It would be better to mangle the constructor parameter name since // it can only be used internally, but I think we need more robust name // mangling before we introduce more of it. - val conflict = clazz.info.nonPrivateMember(acc.name) filter (s => s.isGetter && !s.isOuterField && s.enclClass.isTrait) + val conflict = clazz.info.nonPrivateMember(acc.name) filter (s => (s ne acc) && s.isGetter && !s.isOuterField && s.enclClass.isTrait) if (conflict ne NoSymbol) reporter.error(acc.pos, "parameter '%s' requires field but conflicts with %s".format(acc.name, conflict.fullLocationString)) - copyParam(acc, parameter(acc)) + val accSetter = + if (clazz.isTrait) acc.setterIn(clazz, hasExpandedName = true) + else acc + + copyParam(accSetter, parameter(acc)) } // Return a pair consisting of (all statements up to and including superclass and trait constr calls, rest) diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala index 074acc1332e8..2ccc44f234a5 100644 --- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala +++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala @@ -254,15 +254,26 @@ abstract class LambdaLift extends InfoTransform { afterOwnPhase { for ((owner, freeValues) <- free.toList) { - val newFlags = SYNTHETIC | ( - if (owner.isClass) PARAMACCESSOR | PrivateLocal - else PARAM) + val newFlags = SYNTHETIC | (if (owner.isClass) PARAMACCESSOR else PARAM) proxies(owner) = for (fv <- freeValues.toList) yield { val proxyName = proxyNames.getOrElse(fv, fv.name) debuglog(s"new proxy ${proxyName} in ${owner.fullLocationString}") - val proxy = owner.newValue(proxyName.toTermName, owner.pos, newFlags.toLong) setInfo fv.info + val proxy = + if (owner.isTrait) { + // TODO preserve pre-erasure info for the accessors? 
+ // TODO: do we need SYNTHESIZE_IMPL_IN_SUBCLASS to indicate that `notDeferred(setter)` should hold + val accessorFlags = newFlags.toLong | ACCESSOR | SYNTHESIZE_IMPL_IN_SUBCLASS + val setter = owner.newMethod(nme.expandedSetterName(proxyName.setterName, owner), fv.pos, accessorFlags) + setter setInfo MethodType(setter.newSyntheticValueParams(List(fv.info)), UnitTpe) + owner.info.decls enter setter + + val getter = owner.newMethod(proxyName.getterName, fv.pos, accessorFlags | STABLE) + getter setInfo MethodType(Nil, fv.info) + } else + owner.newValue(proxyName.toTermName, owner.pos, newFlags.toLong | PrivateLocal) setInfo fv.info + if (owner.isClass) owner.info.decls enter proxy proxy } @@ -320,7 +331,12 @@ abstract class LambdaLift extends InfoTransform { private def proxyRef(sym: Symbol) = { val psym = proxy(sym) - if (psym.isLocalToBlock) gen.mkAttributedIdent(psym) else memberRef(psym) + if (psym.isLocalToBlock) gen.mkAttributedIdent(psym) + else { + val ref = memberRef(psym) + if (psym.isMethod) Apply(ref, Nil) setType ref.tpe.resultType + else ref + } } def freeArgsOrNil(sym: Symbol) = free.getOrElse(sym, Nil).toList @@ -354,7 +370,14 @@ abstract class LambdaLift extends InfoTransform { } case ClassDef(_, _, _, _) => - val freeParamDefs = freeParams(sym) map (p => ValDef(p) setPos tree.pos setType NoType) + val freeParamSyms = freeParams(sym) + val freeParamDefs = + if (tree.symbol.isTrait) { + freeParamSyms flatMap { getter => + val setter = getter.setterIn(tree.symbol, hasExpandedName = true) + List(DefDef(getter, EmptyTree) setPos tree.pos setType NoType, DefDef(setter, EmptyTree) setPos tree.pos setType NoType) + } + } else freeParamSyms map (p => ValDef(p) setPos tree.pos setType NoType) if (freeParamDefs.isEmpty) tree else deriveClassDef(tree)(impl => deriveTemplate(impl)(_ ::: freeParamDefs)) diff --git a/src/compiler/scala/tools/nsc/transform/LazyVals.scala b/src/compiler/scala/tools/nsc/transform/LazyVals.scala index bc9f70679c2a..fc7999bf3b01 100644 --- a/src/compiler/scala/tools/nsc/transform/LazyVals.scala +++ b/src/compiler/scala/tools/nsc/transform/LazyVals.scala @@ -68,7 +68,7 @@ abstract class LazyVals extends Transform with TypingTransformers with ast.TreeD * ``` * // typer * class C { object x } - * // refchecks + * // fields * class C { var x$module; def x() = { x$module = new x; x$module } * // lazyvals * class C { diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index a1441fe7b36e..e6ffe328daf5 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -181,63 +181,22 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { newSym } - /** Add getters and setters for all non-module fields of an implementation - * class to its interface unless they are already present. This is done - * only once per class. The mixedin flag is used to remember whether late - * members have been added to an interface. - * - lazy fields don't get a setter. - */ - def addLateInterfaceMembers(clazz: Symbol) { + def publicizeTraitMethods(clazz: Symbol) { if (treatedClassInfos(clazz) != clazz.info) { treatedClassInfos(clazz) = clazz.info assert(phase == currentRun.mixinPhase, phase) - /* Create a new getter. Getters are never private or local. - * They are always accessors and deferred. - * - * TODO: I guess newGetter and newSetter are needed for fields added after the fields phase (lambdalift) -- can we fix that? 
- */ - def newGetter(field: Symbol): Symbol = { - //println(s"creating new getter for $field : ${field.info} at ${field.locationString} // mutable: ${field hasFlag MUTABLE}") - val newFlags = field.flags & ~PrivateLocal | ACCESSOR | ( if (field.isMutable) 0 else STABLE ) | SYNTHESIZE_IMPL_IN_SUBCLASS // TODO: do we need SYNTHESIZE_IMPL_IN_SUBCLASS to indicate that `notDeferred(setter)` should hold - // TODO preserve pre-erasure info? - clazz.newMethod(field.getterName, field.pos, newFlags) setInfo MethodType(Nil, field.info) - } - - /* Create a new setter. Setters are never private or local. They are - * always accessors and deferred. */ - def newSetter(field: Symbol): Symbol = { - //println(s"creating new setter for $field ${field.locationString} // mutable: ${field hasFlag MUTABLE}") - val setterName = field.setterName - val newFlags = field.flags & ~PrivateLocal | ACCESSOR | SYNTHESIZE_IMPL_IN_SUBCLASS // TODO: do we need SYNTHESIZE_IMPL_IN_SUBCLASS to indicate that `notDeferred(setter)` should hold - val setter = clazz.newMethod(setterName, field.pos, newFlags) - // TODO preserve pre-erasure info? - setter setInfo MethodType(setter.newSyntheticValueParams(List(field.info)), UnitTpe) - if (field.needsExpandedSetterName) - setter.name = nme.expandedSetterName(setter.name, clazz) - - setter - } - - clazz.info // make sure info is up to date, so that implClass is set. - - // TODO: is this needed? can there be fields in a class that don't have accessors yet but need them??? - // can we narrow this down to just getters for lazy vals? param accessors? for (member <- clazz.info.decls) { - if (!member.isMethod && !member.isModule && !member.isModuleVar) { + if (member.isMethod) publicizeTraitMethod(member) + else { assert(member.isTerm && !member.isDeferred, member) - if (member.getterIn(clazz).isPrivate) { - member.makeNotPrivate(clazz) // this will also make getter&setter not private - } - val getter = member.getterIn(clazz) - if (getter == NoSymbol) addMember(clazz, newGetter(member)) - if (!member.tpe.isInstanceOf[ConstantType] && !member.isLazy) { - val setter = member.setterIn(clazz) - if (setter == NoSymbol) addMember(clazz, newSetter(member)) - } + // disable assert to support compiling against code compiled by an older compiler (until we re-starr) + // assert(member hasFlag LAZY | PRESUPER, s"unexpected $member in $clazz ${member.debugFlagString}") + // lazy vals still leave field symbols lying around in traits -- TODO: never emit them to begin with + // ditto for early init vals clazz.info.decls.unlink(member) } - else if (member.isMethod) publicizeTraitMethod(member) + } debuglog("new defs of " + clazz + " = " + clazz.info.decls) } @@ -393,7 +352,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { for (mc <- clazz.mixinClasses ; if mc.isTrait) { // @SEAN: adding trait tracking so we don't have to recompile transitive closures unit.depends += mc - addLateInterfaceMembers(mc) + publicizeTraitMethods(mc) mixinTraitMembers(mc) mixinTraitForwarders(mc) } @@ -493,7 +452,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { if (!currentOwner.isTrait && !isPrimitiveValueClass(currentOwner)) addMixedinMembers(currentOwner, unit) else if (currentOwner.isTrait) - addLateInterfaceMembers(currentOwner) + publicizeTraitMethods(currentOwner) tree diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index a1bec13999b6..49d892e04f7c 100644 --- 
a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -293,11 +293,12 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT /* * A trait which extends a class and accesses a protected member * of that class cannot implement the necessary accessor method - * because its implementation is in an implementation class (e.g. - * Foo$class) which inherits nothing, and jvm access restrictions - * require the call site to be in an actual subclass. So non-trait - * classes inspect their ancestors for any such situations and - * generate the accessors. See SI-2296. + * because jvm access restrictions require the call site to be + * in an actual subclass, and an interface cannot extenda class. + * So, non-trait classes inspect their ancestors for any such situations + * and generate the accessors. See SI-2296. + * + * TODO: anything we can improve here now that a trait compiles 1:1 to an interface? */ // FIXME - this should be unified with needsProtectedAccessor, but some // subtlety which presently eludes me is foiling my attempts. From 16e7cf335bf9fbd7f68b043b9d670336c6897809 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 7 Jul 2016 16:55:36 -0700 Subject: [PATCH 0216/2793] Align double definition check with spec Remove weird special cases for private-local fields and parameter accessor (fields). One change with the new trait val encoding: ``` scala> trait T { private[this] var x: String = "1" ; def x(): Int = 1 } :11: error: method x is defined twice; the conflicting variable x was defined at line 11:37 trait T { private[this] var x: String = "1" ; def x(): Int = 1 } ^ ``` Whereas: ``` scala> class T { private[this] var x: String = "1" ; def x(): Int = 1 } defined class T ``` Before, both the `class` and `trait` definition were accepted. (Because there is no accessor for a private[this] val/var, and a MethodType does not match the type of a value.) (Dotty accepts neither the class or the trait definition.) --- .../scala/tools/nsc/typechecker/Typers.scala | 61 +++++++++++++------ 1 file changed, 41 insertions(+), 20 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 2bbf8ed74e3a..efb0830204f4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2208,6 +2208,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def typedDefDef(ddef: DefDef): DefDef = { + // an accessor's type completer may mutate a type inside `ddef` (`== context.unit.synthetics(ddef.symbol)`) + // concretely: it sets the setter's parameter type or the getter's return type (when derived from a valdef with empty tpt) val meth = ddef.symbol.initialize reenterTypeParams(ddef.tparams) @@ -3038,13 +3040,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper result } - /* 'accessor' and 'accessed' are so similar it becomes very difficult to - * follow the logic, so I renamed one to something distinct. - */ + // TODO: adapt to new trait field encoding, figure out why this exaemption is made + // 'accessor' and 'accessed' are so similar it becomes very difficult to + //follow the logic, so I renamed one to something distinct. 
def accesses(looker: Symbol, accessed: Symbol) = accessed.isLocalToThis && ( - (accessed.isParamAccessor) - || (looker.hasAccessorFlag && !accessed.hasAccessorFlag && accessed.isPrivate) - ) + (accessed.isParamAccessor) + || (looker.hasAccessorFlag && !accessed.hasAccessorFlag && accessed.isPrivate) + ) def checkNoDoubleDefs: Unit = { val scope = if (inBlock) context.scope else context.owner.info.decls @@ -3052,20 +3054,39 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper while ((e ne null) && e.owner == scope) { var e1 = scope.lookupNextEntry(e) while ((e1 ne null) && e1.owner == scope) { - if (!accesses(e.sym, e1.sym) && !accesses(e1.sym, e.sym) && - (e.sym.isType || inBlock || (e.sym.tpe matches e1.sym.tpe))) - // default getters are defined twice when multiple overloads have defaults. an - // error for this is issued in RefChecks.checkDefaultsInOverloaded - if (!e.sym.isErroneous && !e1.sym.isErroneous && !e.sym.hasDefault && - !e.sym.hasAnnotation(BridgeClass) && !e1.sym.hasAnnotation(BridgeClass)) { - log("Double definition detected:\n " + - ((e.sym.getClass, e.sym.info, e.sym.ownerChain)) + "\n " + - ((e1.sym.getClass, e1.sym.info, e1.sym.ownerChain))) - - DefDefinedTwiceError(e.sym, e1.sym) - scope.unlink(e1) // need to unlink to avoid later problems with lub; see #2779 - } - e1 = scope.lookupNextEntry(e1) + val sym = e.sym + val sym1 = e1.sym + + /** From the spec (refchecks checks other conditions regarding erasing to the same type and default arguments): + * + * A block expression [... its] statement sequence may not contain two definitions or + * declarations that bind the same name --> `inBlock` + * + * It is an error if a template directly defines two matching members. + * + * A member definition $M$ _matches_ a member definition $M'$, if $M$ and $M'$ bind the same name, + * and one of following holds: + * 1. Neither $M$ nor $M'$ is a method definition. + * 2. $M$ and $M'$ define both monomorphic methods with equivalent argument types. + * 3. $M$ defines a parameterless method and $M'$ defines a method with an empty parameter list `()` or _vice versa_. + * 4. $M$ and $M'$ define both polymorphic methods with equal number of argument types $\overline T$, $\overline T'$ + * and equal numbers of type parameters $\overline t$, $\overline t'$, say, + * and $\overline T' = [\overline t'/\overline t]\overline T$. + */ + if (!(accesses(sym, sym1) || accesses(sym1, sym)) // TODO: does this purely defer errors until later? + && (inBlock || !(sym.isMethod || sym1.isMethod) || (sym.tpe matches sym1.tpe)) + // default getters are defined twice when multiple overloads have defaults. + // The error for this is deferred until RefChecks.checkDefaultsInOverloaded + && (!sym.isErroneous && !sym1.isErroneous && !sym.hasDefault && + !sym.hasAnnotation(BridgeClass) && !sym1.hasAnnotation(BridgeClass))) { + log("Double definition detected:\n " + + ((sym.getClass, sym.info, sym.ownerChain)) + "\n " + + ((sym1.getClass, sym1.info, sym1.ownerChain))) + + DefDefinedTwiceError(sym, sym1) + scope.unlink(e1) // need to unlink to avoid later problems with lub; see #2779 + } + e1 = scope.lookupNextEntry(e1) } e = e.next } From 9b59f5f9530d54c917479c6bf44aa3007ba0a2df Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Wed, 6 Jul 2016 16:11:32 -0700 Subject: [PATCH 0217/2793] Allow 'overriding' deferred var Discovered by scala-js's test suite. 
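To make the behavioral change concrete, here is a minimal sketch mirroring the test files added below
(the second trait is renamed here only so both cases fit in one snippet): overriding a concrete trait
var is still rejected, while a deferred var may now be implemented by an overriding constructor parameter.

```scala
// Rejected (neg/trait_fields_var_override.scala): `end` is a concrete, initialized var,
// so the class parameter may not override it.
trait SizeChangeEvent { protected var end: Int = 1 }
// class BackedUpListIterator[E](override protected var end: Int) extends SizeChangeEvent // error

// Accepted after this change (pos/trait_fields_var_override_deferred.scala):
// the trait var is deferred, and the parameter accessor provides its implementation.
trait DeferredSizeChangeEvent { protected var end: Int }
class BackedUpListIterator2[E](override protected var end: Int) extends DeferredSizeChangeEvent
```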
--- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 2 +- test/files/neg/trait_fields_var_override.check | 5 +++++ test/files/neg/trait_fields_var_override.scala | 2 ++ test/files/pos/trait_fields_var_override_deferred.scala | 2 ++ 4 files changed, 10 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/trait_fields_var_override.check create mode 100644 test/files/neg/trait_fields_var_override.scala create mode 100644 test/files/pos/trait_fields_var_override_deferred.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 7021e12f1ad5..674e0051b473 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -433,7 +433,7 @@ abstract class RefChecks extends Transform { } else if (other.isAbstractOverride && other.isIncompleteIn(clazz) && !member.isAbstractOverride) { overrideError("needs `abstract override' modifiers") } - else if (member.isAnyOverride && (other hasFlag ACCESSOR) && !(other hasFlag STABLE)) { + else if (member.isAnyOverride && (other hasFlag ACCESSOR) && !(other hasFlag STABLE | DEFERRED)) { // The check above used to look at `field` == `other.accessed`, ensuring field.isVariable && !field.isLazy, // which I think is identical to the more direct `!(other hasFlag STABLE)` (given that `other` is a method). // Also, we're moving away from (looking at) underlying fields (vals in traits no longer have them, to begin with) diff --git a/test/files/neg/trait_fields_var_override.check b/test/files/neg/trait_fields_var_override.check new file mode 100644 index 000000000000..7245c78b0913 --- /dev/null +++ b/test/files/neg/trait_fields_var_override.check @@ -0,0 +1,5 @@ +trait_fields_var_override.scala:2: error: overriding variable end in trait SizeChangeEvent of type Int; + variable end cannot override a mutable variable +class BackedUpListIterator[E](override protected var end: Int) extends SizeChangeEvent + ^ +one error found diff --git a/test/files/neg/trait_fields_var_override.scala b/test/files/neg/trait_fields_var_override.scala new file mode 100644 index 000000000000..f61ba09eec24 --- /dev/null +++ b/test/files/neg/trait_fields_var_override.scala @@ -0,0 +1,2 @@ +trait SizeChangeEvent { protected var end: Int = 1 } +class BackedUpListIterator[E](override protected var end: Int) extends SizeChangeEvent diff --git a/test/files/pos/trait_fields_var_override_deferred.scala b/test/files/pos/trait_fields_var_override_deferred.scala new file mode 100644 index 000000000000..0205326506e9 --- /dev/null +++ b/test/files/pos/trait_fields_var_override_deferred.scala @@ -0,0 +1,2 @@ +trait SizeChangeEvent { protected var end: Int } +class BackedUpListIterator[E](override protected var end: Int) extends SizeChangeEvent From 4555681544ec40c5b787b9708266fcbdcc2ff62b Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Wed, 20 Jul 2016 13:47:43 -0700 Subject: [PATCH 0218/2793] Mixed in getter needs NullaryMethodType too Clone at uncurry to preserve it in its info history. Discovered by the scala-js test suite. 
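As a rough illustration of the shape involved (a sketch, not the scala-js reproduction): the getter
generated for a trait val is a parameterless method whose info is a NullaryMethodType (`=> Int`) up to
uncurry and a MethodType (`()Int`) afterwards. Cloning the accessor's info for the mixing-in class at
the current (post-uncurry) phase would drop the earlier shape from its info history, which is why the
clone is now performed `enteringUncurry`.

```scala
trait HasX {
  val x: Int = 42  // accessor `x`: NullaryMethodType (=> Int) before uncurry, ()Int after
}

class C extends HasX  // the accessor mixed into C should carry the same info history
```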
--- src/compiler/scala/tools/nsc/transform/Fields.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/transform/Fields.scala b/src/compiler/scala/tools/nsc/transform/Fields.scala index 1900fcdc16e8..6e064e2a24a5 100644 --- a/src/compiler/scala/tools/nsc/transform/Fields.scala +++ b/src/compiler/scala/tools/nsc/transform/Fields.scala @@ -341,7 +341,9 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor // if we don't cloneInfo, method argument symbols are shared between trait and subclasses --> lambalift proxy crash // TODO: use derive symbol variant? // println(s"cloning accessor $member to $clazz") - clonedAccessor setInfo ((clazz.thisType memberType member) cloneInfo clonedAccessor) // accessor.info.cloneInfo(clonedAccessor).asSeenFrom(clazz.thisType, accessor.owner) + // start at uncurry so that we preserve that part of the history where an accessor has a NullaryMethodType + enteringUncurry { clonedAccessor setInfo ((clazz.thisType memberType member) cloneInfo clonedAccessor) } + clonedAccessor } if (member hasFlag MODULE) { From 3540ffc1fb81eef75aeff41a0ba9142b1cce8a53 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Sun, 17 Jul 2016 22:04:46 -0700 Subject: [PATCH 0219/2793] Admit @volatile on accessor in trait There's no other place to squirrel away the annotation until we create a field in a subclass. The test documents the idea, but does not capture the regression seen in the wild, as explained in a comment. --- .../scala/tools/nsc/typechecker/Typers.scala | 4 +++- test/files/pos/trait_fields_volatile.scala | 13 +++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/trait_fields_volatile.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index efb0830204f4..1fe8438b5674 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2008,7 +2008,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val tpt1 = checkNoEscaping.privates(sym, typedType(vdef.tpt)) checkNonCyclic(vdef, tpt1) - if (sym.hasAnnotation(definitions.VolatileAttr) && !sym.isMutable) + // allow trait accessors: it's the only vehicle we have to hang on to annotations that must be passed down to + // the field that's mixed into a subclass + if (sym.hasAnnotation(definitions.VolatileAttr) && !((sym hasFlag MUTABLE) || (sym hasFlag ACCESSOR) && sym.owner.isTrait)) VolatileValueError(vdef) val rhs1 = diff --git a/test/files/pos/trait_fields_volatile.scala b/test/files/pos/trait_fields_volatile.scala new file mode 100644 index 000000000000..030b24f187fe --- /dev/null +++ b/test/files/pos/trait_fields_volatile.scala @@ -0,0 +1,13 @@ +// This test illustrates the intent of what should work (but didn't for a while during the fields refactoring), +// but it does not actually defend against the regression seen in twitter-util's Scheduler, which I cannot reproduce +// outside the project. The whole project consistently fails to build before, and compiles after the commit +// that includes this test, but this single test file (as well as Scheduler.scala with external dependencies removed) +// compiles both before and after.... 
+// (https://github.com/twitter/util/blob/6398a56923/util-core/src/main/scala/com/twitter/concurrent/Scheduler.scala#L260-L265) +// There's also a run test that checks that the field in C is actually volatile. +trait Vola { + @volatile private[this] var _vola = "tile" + @volatile var vola = "tile" +} + +class C extends Vola From fdc94676928cd9177acfcca8eb7d669e1f4eac48 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 8 Jul 2016 17:24:23 -0700 Subject: [PATCH 0220/2793] Drive accessor synthesis from info transformer Derive/filter/propagate annotations in info transformer, don't rely on having type checked the derived trees in order to see the annotations. Use synthetics mechanism for bean accessors -- the others will soon follow. Propagate inferred tpt from valdef to accessors by setting type in right spot of synthetic tree during the info completer. No need to add trees in derivedTrees, and get rid of some overfactoring in method synthesis, now that we have joined symbol and tree creation. Preserve symbol order because tests are sensitive to it. Drop warning on potentially discarded annotations, I don't think this warrants a warning. Motivated by breaking the scala-js compiler, which relied on annotations appearing when trees are type checked. Now that ordering constraint is gone in the new encoding, we may as well finally fix annotation assignment. --- .../nsc/typechecker/MethodSynthesis.scala | 541 ++++++------------ .../scala/tools/nsc/typechecker/Namers.scala | 259 ++++++--- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- .../scala/reflect/NameTransformer.scala | 1 + .../reflect/internal/AnnotationInfos.scala | 9 - .../scala/reflect/internal/StdNames.scala | 3 +- .../quasiquotes/TypecheckedProps.scala | 2 +- .../scala/reflect/internal/PrintersTest.scala | 6 +- test/{files => pending}/neg/t6375.check | 0 test/{files => pending}/neg/t6375.flags | 0 test/{files => pending}/neg/t6375.scala | 0 11 files changed, 358 insertions(+), 465 deletions(-) rename test/{files => pending}/neg/t6375.check (100%) rename test/{files => pending}/neg/t6375.flags (100%) rename test/{files => pending}/neg/t6375.scala (100%) diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 0f79bb60ed34..c036a2a9b830 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -116,78 +116,99 @@ trait MethodSynthesis { import NamerErrorGen._ - def enterImplicitWrapper(tree: ClassDef) { - ImplicitClassWrapper(tree).createAndEnterSymbol() + def enterImplicitWrapper(tree: ClassDef): Unit = { + enterSyntheticSym(ImplicitClassWrapper(tree).derivedTree) } - // TODO: see if we can link symbol creation & tree derivation by sharing the Field/Getter/Setter factories - // maybe we can at least reuse some variant of standardAccessors? + // trees are later created by addDerivedTrees (common logic is encapsulated in field/standardAccessors/beanAccessors) def enterGetterSetter(tree: ValDef): Unit = { - tree.symbol = - if (tree.mods.isLazy) { - val lazyValGetter = LazyValGetter(tree).createAndEnterSymbol() - enterLazyVal(tree, lazyValGetter) - } else { - val getter = Getter(tree) - val getterSym = getter.createAndEnterSymbol() - - // Create the setter if necessary. 
- if (getter.needsSetter) Setter(tree).createAndEnterSymbol() - - // If the getter's abstract, the tree gets the getter's symbol, - // otherwise, create a field (we have to assume the getter requires storage for now). - // NOTE: we cannot look at symbol info, since we're in the process of deriving them - // (luckily, they only matter for lazy vals, which we've ruled out in this else branch, - // and `doNotDeriveField` will skip them if `!mods.isLazy`) - if (Field.noFieldFor(tree)) getterSym setPos tree.pos // TODO: why do setPos? `createAndEnterSymbol` already gave `getterSym` the position `tree.pos.focus` - else enterStrictVal(tree) - } + val getter = Getter(tree) + val getterSym = getter.createSym + val setterSym = if (getter.needsSetter) Setter(tree).createSym else NoSymbol - enterBeans(tree) - } + // a lazy field is linked to its lazy accessor (TODO: can we do the same for field -> getter -> setter) + val fieldSym = if (Field.noFieldFor(tree)) NoSymbol else Field(tree).createSym(getterSym) - import AnnotationInfo.{mkFilter => annotationFilter} + // only one symbol can have `tree.pos`, the others must focus their position + // normally the field gets the range position, but if there is none, give it to the getter + tree.symbol = fieldSym orElse (getterSym setPos tree.pos) + + val namer = if (fieldSym != NoSymbol) namerOf(fieldSym) else namerOf(getterSym) + + // There's no reliable way to detect all kinds of setters from flags or name!!! + // A BeanSetter's name does not end in `_=` -- it does begin with "set", but so could the getter + // for a regular Scala field... TODO: can we add a flag to distinguish getter/setter accessors? + val getterCompleter = namer.accessorTypeCompleter(tree, isSetter = false) + val setterCompleter = namer.accessorTypeCompleter(tree, isSetter = true) - /** This is called for those ValDefs which addDerivedTrees ignores, but - * which might have a warnable annotation situation. - */ - private def warnForDroppedAnnotations(tree: Tree) { - val annotations = tree.symbol.initialize.annotations - val targetClass = defaultAnnotationTarget(tree) - val retained = annotations filter annotationFilter(targetClass, defaultRetention = true) + getterSym setInfo getterCompleter + setterSym andAlso (_ setInfo setterCompleter) + fieldSym andAlso (_ setInfo namer.valTypeCompleter(tree)) - annotations filterNot (retained contains _) foreach (ann => issueAnnotationWarning(tree, ann, targetClass)) + enterInScope(getterSym) + setterSym andAlso (enterInScope(_)) + fieldSym andAlso (enterInScope(_)) + + deriveBeanAccessors(tree, namer) } - private def issueAnnotationWarning(tree: Tree, ann: AnnotationInfo, defaultTarget: Symbol) { - global.reporter.warning(ann.pos, - s"no valid targets for annotation on ${tree.symbol} - it is discarded unused. " + - s"You may specify targets with meta-annotations, e.g. @($ann @${defaultTarget.name})") + + private def deriveBeanAccessors(tree: ValDef, namer: Namer): Unit = { + // TODO: can we look at the annotations symbols? 
(name-based introduced in 8cc477f8b6, see neg/t3403) + val hasBeanProperty = tree.mods hasAnnotationNamed tpnme.BeanPropertyAnnot + val hasBoolBP = tree.mods hasAnnotationNamed tpnme.BooleanBeanPropertyAnnot + + if (hasBeanProperty || hasBoolBP) { + if (!tree.name.charAt(0).isLetter) BeanPropertyAnnotationFieldWithoutLetterError(tree) + // avoids name clashes with private fields in traits + else if (tree.mods.isPrivate) BeanPropertyAnnotationPrivateFieldError(tree) + + val derivedPos = tree.pos.focus + val missingTpt = tree.tpt.isEmpty + + def deriveBeanAccessor(prefix: String): Symbol = { + val isSetter = prefix == "set" + val name = newTermName(prefix + tree.name.toString.capitalize) + val setterParam = nme.syntheticParamName(1) + + // note: tree.tpt may be EmptyTree, which will be a problem when use as the tpt of a parameter + // the completer will patch this up (we can't do this now without completing the field) + val tptToPatch = if (missingTpt) TypeTree() else tree.tpt.duplicate + + val (vparams, tpt) = + if (isSetter) (List(ValDef(Modifiers(PARAM | SYNTHETIC), setterParam, tptToPatch, EmptyTree)), TypeTree(UnitTpe)) + else (Nil, tptToPatch) + + val rhs = + if (tree.mods.isDeferred) EmptyTree + else if (isSetter) Apply(Ident(tree.name.setterName), List(Ident(setterParam))) + else Select(This(owner), tree.name) + + val sym = createMethod(tree, name, derivedPos, tree.mods.flags & BeanPropertyFlags) + context.unit.synthetics(sym) = newDefDef(sym, rhs)(tparams = Nil, vparamss = List(vparams), tpt = tpt) + sym + } + + val getterCompleter = namer.beanAccessorTypeCompleter(tree, missingTpt, isSetter = false) + enterInScope(deriveBeanAccessor(if (hasBeanProperty) "get" else "is") setInfo getterCompleter) + + if (tree.mods.isMutable) { + val setterCompleter = namer.beanAccessorTypeCompleter(tree, missingTpt, isSetter = true) + enterInScope(deriveBeanAccessor("set") setInfo setterCompleter) + } + } } + + import AnnotationInfo.{mkFilter => annotationFilter} def addDerivedTrees(typer: Typer, stat: Tree): List[Tree] = stat match { case vd @ ValDef(mods, name, tpt, rhs) if deriveAccessors(vd) && !vd.symbol.isModuleVar => - // If we don't save the annotations, they seem to wander off. - val annotations = stat.symbol.initialize.annotations - val trees = ( - (field(vd) ::: standardAccessors(vd) ::: beanAccessors(vd)) - map (acc => atPos(vd.pos.focus)(acc derive annotations)) - filterNot (_ eq EmptyTree) - ) - // Verify each annotation landed safely somewhere, else warn. - // Filtering when isParamAccessor is a necessary simplification - // because there's a bunch of unwritten annotation code involving - // the propagation of annotations - constructor parameter annotations - // may need to make their way to parameters of the constructor as - // well as fields of the class, etc. - if (!mods.isParamAccessor) annotations foreach (ann => - if (!trees.exists(_.symbol hasAnnotation ann.symbol)) - issueAnnotationWarning(vd, ann, GetterTargetClass) - ) - - trees - case vd: ValDef => - warnForDroppedAnnotations(vd) - vd :: Nil + stat.symbol.initialize // needed! + + val getter = Getter(vd) + getter.validate() + val accessors = getter :: (if (getter.needsSetter) Setter(vd) :: Nil else Nil) + (Field(vd) :: accessors).map(_.derivedTree).filter(_ ne EmptyTree) + case cd @ ClassDef(mods, _, _, _) if mods.isImplicit => val annotations = stat.symbol.initialize.annotations // TODO: need to shuffle annotations between wrapper and class. 
@@ -209,194 +230,86 @@ trait MethodSynthesis { stat :: Nil } - def standardAccessors(vd: ValDef): List[DerivedFromValDef] = - if (vd.mods.isLazy) List(LazyValGetter(vd)) - else { - val getter = Getter(vd) - if (getter.needsSetter) List(getter, Setter(vd)) - else List(getter) - } - - def beanAccessors(vd: ValDef): List[DerivedFromValDef] = { - val setter = if (vd.mods.isMutable) List(BeanSetter(vd)) else Nil - if (vd.symbol hasAnnotation BeanPropertyAttr) - BeanGetter(vd) :: setter - else if (vd.symbol hasAnnotation BooleanBeanPropertyAttr) - BooleanBeanGetter(vd) :: setter - else Nil - } - def field(vd: ValDef): List[Field] = if (Field.noFieldFor(vd)) Nil else List(Field(vd)) - - /** This trait assembles what's needed for synthesizing derived methods. - * Important: Typically, instances of this trait are created TWICE for each derived - * symbol; once form Namers in an enter method, and once from Typers in addDerivedTrees. - * So it's important that creating an instance of Derived does not have a side effect, - * or if it has a side effect, control that it is done only once. - */ sealed trait Derived { - - /** The tree from which we are deriving a synthetic member. Typically, that's - * given as an argument of the instance. */ - def tree: Tree - - /** The name of the method */ - def name: TermName - - /** The flags that are retained from the original symbol */ - def flagsMask: Long - - /** The flags that the derived symbol has in addition to those retained from - * the original symbol*/ - def flagsExtra: Long - - /** type completer for the synthetic member. - */ - def completer(sym: Symbol): Type - /** The derived symbol. It is assumed that this symbol already exists and has been - * entered in the parent scope when derivedSym is called */ + * entered in the parent scope when derivedSym is called + */ def derivedSym: Symbol /** The definition tree of the derived symbol. */ def derivedTree: Tree } - sealed trait DerivedFromMemberDef extends Derived { - def tree: MemberDef - def enclClass: Symbol - - // Final methods to make the rest easier to reason about. - final def mods = tree.mods - final def basisSym = tree.symbol - final def derivedMods = mods & flagsMask | flagsExtra - } - - sealed trait DerivedFromClassDef extends DerivedFromMemberDef { - def tree: ClassDef - final def enclClass = basisSym.owner.enclClass - } - - sealed trait DerivedFromValDef extends DerivedFromMemberDef { - def tree: ValDef - final def enclClass = basisSym.enclClass - - - // There's no reliable way to detect all kinds of setters from flags or name!!! - // A BeanSetter's name does not end in `_=` -- it does begin with "set", but so could the getter - // for a regular Scala field... TODO: can we add a flag to distinguish getter/setter accessors? 
- final def completer(sym: Symbol) = namerOf(sym).accessorTypeCompleter(tree, this.isInstanceOf[DerivedSetter]) - final def fieldSelection = Select(This(enclClass), basisSym) - - def derivedSym: Symbol = tree.symbol - def derivedTree: Tree = EmptyTree - - def isDeferred = mods.isDeferred - def validate() { } - def createAndEnterSymbol(): MethodSymbol = { - val sym = owner.newMethod(name, tree.pos.focus, derivedMods.flags) - setPrivateWithin(tree, sym) - enterInScope(sym) - sym setInfo completer(sym) - } - private def logDerived(result: Tree): Tree = { - debuglog("[+derived] " + ojoin(mods.flagString, basisSym.accurateKindString, basisSym.getterName.decode) - + " (" + derivedSym + ")\n " + result) + /** A synthetic method which performs the implicit conversion implied by + * the declaration of an implicit class. + */ + case class ImplicitClassWrapper(tree: ClassDef) extends Derived { + def derivedSym = { + val enclClass = tree.symbol.owner.enclClass + // Only methods will do! Don't want to pick up any stray + // companion objects of the same name. + val result = enclClass.info decl derivedName filter (x => x.isMethod && x.isSynthetic) + if (result == NoSymbol || result.isOverloaded) + context.error(tree.pos, s"Internal error: Unable to find the synthetic factory method corresponding to implicit class $derivedName in $enclClass / ${enclClass.info.decls}") result } - final def derive(initial: List[AnnotationInfo]): Tree = { - validate() - - // see scala.annotation.meta's package class for more info - // Annotations on ValDefs can be targeted towards the following: field, getter, setter, beanGetter, beanSetter, param. - // The defaults are: - // - (`val`-, `var`- or plain) constructor parameter annotations end up on the parameter, not on any other entity. - // - val/var member annotations solely end up on the underlying field, except in traits (@since 2.12), - // where there is no field, and the getter thus holds annotations targetting both getter & field. - // As soon as there is a field/getter (in subclasses mixing in the trait), we triage the annotations. - // - // TODO: these defaults can be surprising for annotations not meant for accessors/fields -- should we revisit? - // (In order to have `@foo val X` result in the X getter being annotated with `@foo`, foo needs to be meta-annotated with @getter) - val annotFilter: AnnotationInfo => Boolean = this match { - case _: Param => annotationFilter(ParamTargetClass, defaultRetention = true) - // By default annotations go to the field, except if the field is generated for a class parameter (PARAMACCESSOR). - case _: Field => annotationFilter(FieldTargetClass, defaultRetention = !mods.isParamAccessor) - case _: BaseGetter if owner.isTrait => annotationFilter(List(FieldTargetClass, GetterTargetClass), defaultRetention = true) - case _: BaseGetter => annotationFilter(GetterTargetClass, defaultRetention = false) - case _: Setter => annotationFilter(SetterTargetClass, defaultRetention = false) - case _: BeanSetter => annotationFilter(BeanSetterTargetClass, defaultRetention = false) - // TODO do bean getters need special treatment to collect field-targeting annotations in traits? - case _: AnyBeanGetter => annotationFilter(BeanGetterTargetClass, defaultRetention = false) - } - - // The annotations amongst those found on the original symbol which - // should be propagated to this kind of accessor. 
- derivedSym setAnnotations (initial filter annotFilter) + def derivedTree = factoryMeth(derivedMods, derivedName, tree) - if (derivedSym.isSetter && owner.isTrait && !isDeferred) - derivedSym addAnnotation TraitSetterAnnotationClass - - logDerived(derivedTree) - } + def derivedName = tree.name.toTermName + def derivedMods = tree.mods & AccessFlags | METHOD | IMPLICIT | SYNTHETIC } - sealed trait DerivedGetter extends DerivedFromValDef { - def needsSetter = mods.isMutable + trait DerivedAccessor extends Derived { + def tree: ValDef + def derivedName: TermName + def derivedFlags: Long + + def derivedPos = tree.pos.focus + def createSym = createMethod(tree, derivedName, derivedPos, derivedFlags) } - sealed trait DerivedSetter extends DerivedFromValDef { - protected def setterParam = derivedSym.paramss match { - case (p :: Nil) :: _ => p - case _ => NoSymbol - } - protected def setterRhs = { - assert(!derivedSym.isOverloaded, s"Unexpected overloaded setter $derivedSym for $basisSym in $enclClass") - if (Field.noFieldFor(tree) || derivedSym.isOverloaded) EmptyTree - else Assign(fieldSelection, Ident(setterParam)) - } + case class Getter(tree: ValDef) extends DerivedAccessor { + def derivedName = tree.name - private def setterDef = DefDef(derivedSym, setterRhs) - override def derivedTree: Tree = if (setterParam == NoSymbol) EmptyTree else setterDef - } + def derivedSym = + if (tree.mods.isLazy) tree.symbol.lazyAccessor + else if (Field.noFieldFor(tree)) tree.symbol + else tree.symbol.getterIn(tree.symbol.enclClass) - /** A synthetic method which performs the implicit conversion implied by - * the declaration of an implicit class. - */ - case class ImplicitClassWrapper(tree: ClassDef) extends DerivedFromClassDef { - def completer(sym: Symbol): Type = ??? // not needed - def createAndEnterSymbol(): Symbol = enterSyntheticSym(derivedTree) - def derivedSym: Symbol = { - // Only methods will do! Don't want to pick up any stray - // companion objects of the same name. 
- val result = enclClass.info decl name filter (x => x.isMethod && x.isSynthetic) - if (result == NoSymbol || result.isOverloaded) - context.error(tree.pos, s"Internal error: Unable to find the synthetic factory method corresponding to implicit class $name in $enclClass / ${enclClass.info.decls}") - result - } - def derivedTree: DefDef = factoryMeth(derivedMods, name, tree) - def flagsExtra: Long = METHOD | IMPLICIT | SYNTHETIC - def flagsMask: Long = AccessFlags - def name: TermName = tree.name.toTermName - } + def derivedFlags = tree.mods.flags & GetterFlags | ACCESSOR.toLong | ( if (needsSetter) 0 else STABLE ) - sealed abstract class BaseGetter(tree: ValDef) extends DerivedGetter { - def name = tree.name - def flagsMask = GetterFlags - def flagsExtra = ACCESSOR.toLong | ( if (tree.mods.isMutable) 0 else STABLE ) + def needsSetter = tree.mods.isMutable // implies !lazy - override def validate() { - assert(derivedSym != NoSymbol, tree) - if (derivedSym.isOverloaded) - GetterDefinedTwiceError(derivedSym) + override def derivedTree = + if (tree.mods.isLazy) deriveLazyAccessor + else newDefDef(derivedSym, if (Field.noFieldFor(tree)) tree.rhs else Select(This(tree.symbol.enclClass), tree.symbol))(tpt = derivedTpt) + + /** Implements lazy value accessors: + * - for lazy values of type Unit and all lazy fields inside traits, + * the rhs is the initializer itself, because we'll just "compute" the result on every access + * ("computing" unit / constant type is free -- the side-effect is still only run once, using the init bitmap) + * - for all other lazy values z the accessor is a block of this form: + * { z = ; z } where z can be an identifier or a field. + */ + private def deriveLazyAccessor: DefDef = { + val ValDef(_, _, tpt0, rhs0) = tree + val rhs1 = context.unit.transformed.getOrElse(rhs0, rhs0) + val body = + if (tree.symbol.owner.isTrait || Field.noFieldFor(tree)) rhs1 // TODO move tree.symbol.owner.isTrait into noFieldFor + else gen.mkAssignAndReturn(tree.symbol, rhs1) - super.validate() + derivedSym setPos tree.pos // cannot set it at createAndEnterSymbol because basisSym can possibly still have NoPosition + val ddefRes = DefDef(derivedSym, new ChangeOwnerTraverser(tree.symbol, derivedSym)(body)) + // ValDef will have its position focused whereas DefDef will have original correct rangepos + // ideally positions would be correct at the creation time but lazy vals are really a special case + // here so for the sake of keeping api clean we fix positions manually in LazyValGetter + ddefRes.tpt.setPos(tpt0.pos) + tpt0.setPos(tpt0.pos.focus) + ddefRes } - } - case class Getter(tree: ValDef) extends BaseGetter(tree) { - override def derivedSym = if (Field.noFieldFor(tree)) basisSym else basisSym.getterIn(enclClass) - private def derivedRhs = if (Field.noFieldFor(tree)) tree.rhs else fieldSelection // TODO: more principled approach -- this is a bit bizarre private def derivedTpt = { @@ -413,61 +326,35 @@ trait MethodSynthesis { // Range position errors ensue if we don't duplicate this in some // circumstances (at least: concrete vals with existential types.) 
case _: ExistentialType => TypeTree() setOriginal (tree.tpt.duplicate setPos tree.tpt.pos.focus) - case _ if isDeferred => TypeTree() setOriginal tree.tpt // keep type tree of original abstract field + case _ if tree.mods.isDeferred => TypeTree() setOriginal tree.tpt // keep type tree of original abstract field case _ => TypeTree(getterTp) } tpt setPos tree.tpt.pos.focus } - override def derivedTree: DefDef = newDefDef(derivedSym, derivedRhs)(tpt = derivedTpt) - } - /** Implements lazy value accessors: - * - for lazy values of type Unit and all lazy fields inside traits, - * the rhs is the initializer itself, because we'll just "compute" the result on every access - * ("computing" unit / constant type is free -- the side-effect is still only run once, using the init bitmap) - * - for all other lazy values z the accessor is a block of this form: - * { z = ; z } where z can be an identifier or a field. - */ - case class LazyValGetter(tree: ValDef) extends BaseGetter(tree) { - class ChangeOwnerAndModuleClassTraverser(oldowner: Symbol, newowner: Symbol) - extends ChangeOwnerTraverser(oldowner, newowner) { - - override def traverse(tree: Tree) { - tree match { - case _: DefTree => change(tree.symbol.moduleClass) - case _ => - } - super.traverse(tree) - } + def validate() = { + assert(derivedSym != NoSymbol, tree) + if (derivedSym.isOverloaded) + GetterDefinedTwiceError(derivedSym) } - // todo: in future this should be enabled but now other phases still depend on the flag for various reasons - //override def flagsMask = (super.flagsMask & ~LAZY) - override def derivedSym = basisSym.lazyAccessor - override def derivedTree: DefDef = { - val ValDef(_, _, tpt0, rhs0) = tree - val rhs1 = context.unit.transformed.getOrElse(rhs0, rhs0) - val body = - if (tree.symbol.owner.isTrait || Field.noFieldFor(tree)) rhs1 // TODO move tree.symbol.owner.isTrait into noFieldFor - else gen.mkAssignAndReturn(basisSym, rhs1) - - derivedSym setPos tree.pos // cannot set it at createAndEnterSymbol because basisSym can possibly still have NoPosition - val ddefRes = DefDef(derivedSym, new ChangeOwnerAndModuleClassTraverser(basisSym, derivedSym)(body)) - // ValDef will have its position focused whereas DefDef will have original correct rangepos - // ideally positions would be correct at the creation time but lazy vals are really a special case - // here so for the sake of keeping api clean we fix positions manually in LazyValGetter - ddefRes.tpt.setPos(tpt0.pos) - tpt0.setPos(tpt0.pos.focus) - ddefRes - } } - case class Setter(tree: ValDef) extends DerivedSetter { - def name = tree.setterName - def flagsMask = SetterFlags - def flagsExtra = ACCESSOR - // TODO: double check logic behind need for name expansion in context of new fields phase - override def derivedSym = basisSym.setterIn(enclClass) + case class Setter(tree: ValDef) extends DerivedAccessor { + def derivedName = tree.setterName + def derivedSym = tree.symbol.setterIn(tree.symbol.enclClass) + def derivedFlags = tree.mods.flags & SetterFlags | ACCESSOR + def derivedTree = + derivedSym.paramss match { + case (setterParam :: Nil) :: _ => + // assert(!derivedSym.isOverloaded, s"Unexpected overloaded setter $derivedSym for ${tree.symbol} in ${tree.symbol.enclClass}") + val rhs = + if (Field.noFieldFor(tree) || derivedSym.isOverloaded) EmptyTree + else Assign(Select(This(tree.symbol.enclClass), tree.symbol), Ident(setterParam)) + + DefDef(derivedSym, rhs) + case _ => EmptyTree + } } object Field { @@ -495,102 +382,42 @@ trait MethodSynthesis { private def 
traitFieldFor(vd: ValDef): Boolean = vd.mods.hasFlag(PRESUPER | LAZY) } - case class Field(tree: ValDef) extends DerivedFromValDef { - def name = tree.localName - def flagsMask = FieldFlags - def flagsExtra = PrivateLocal + case class Field(tree: ValDef) extends Derived { + private val isLazy = tree.mods.isLazy - // TODO: override def createAndEnterSymbol (currently never called on Field) - // and do `enterStrictVal(tree)`, so that enterGetterSetter and addDerivedTrees can share some logic... + // If the owner is not a class, this is a lazy val from a method, + // with no associated field. It has an accessor with $lzy appended to its name and + // its flags are set differently. The implicit flag is reset because otherwise + // a local implicit "lazy val x" will create an ambiguity with itself + // via "x$lzy" as can be seen in test #3927. + private val localLazyVal = isLazy && !owner.isClass + private val nameSuffix = + if (!localLazyVal) reflect.NameTransformer.LOCAL_SUFFIX_STRING + else reflect.NameTransformer.LAZY_LOCAL_SUFFIX_STRING - // handle lazy val first for now (we emit a Field even though we probably shouldn't...) - override def derivedTree = - if (mods.isLazy) copyValDef(tree)(mods = mods | flagsExtra, name = this.name, rhs = EmptyTree).setPos(tree.pos.focus) - else if (Field.noFieldFor(tree)) EmptyTree - else copyValDef(tree)(mods = mods | flagsExtra, name = this.name) + def derivedName = tree.name.append(nameSuffix) - } - case class Param(tree: ValDef) extends DerivedFromValDef { - def name = tree.name - def flagsMask = -1L - def flagsExtra = 0L - override def derivedTree = EmptyTree - } - def validateParam(tree: ValDef) { - Param(tree).derive(tree.symbol.annotations) - } - - sealed abstract class BeanAccessor(bean: String) extends DerivedFromValDef { - val name = newTermName(bean + tree.name.toString.capitalize) - def flagsMask = BeanPropertyFlags - def flagsExtra = 0 - override def derivedSym = enclClass.info decl name - } - sealed trait AnyBeanGetter extends BeanAccessor with DerivedGetter { - override def validate() { - if (derivedSym == NoSymbol) { - // the namer decides whether to generate these symbols or not. at that point, we don't - // have symbolic information yet, so we only look for annotations named "BeanProperty". - BeanPropertyAnnotationLimitationError(tree) - } - super.validate() + def createSym(getter: MethodSymbol) = { + val sym = owner.newValue(derivedName, tree.pos, derivedMods.flags) + if (isLazy) sym setLazyAccessor getter + sym } - } - // This trait is mixed into BooleanBeanGetter and BeanGetter by beanAccessorsFromNames, but not by beanAccessors - trait NoSymbolBeanGetter extends AnyBeanGetter { - // Derives a tree without attempting to use the original tree's symbol. - override def derivedTree = { - atPos(tree.pos.focus) { - DefDef(derivedMods mapAnnotations (_ => Nil), name, Nil, ListOfNil, tree.tpt.duplicate, - if (isDeferred) EmptyTree else Select(This(owner), tree.name) - ) - } - } - override def createAndEnterSymbol(): MethodSymbol = enterSyntheticSym(derivedTree).asInstanceOf[MethodSymbol] - } + def derivedSym = tree.symbol - // NoSymbolBeanGetter synthesizes the getter's RHS (which defers to the regular setter) - // (not sure why, but there is one use site of the BeanGetters where NoSymbolBeanGetter is not mixed in) - // TODO: clean this up... 
- case class BooleanBeanGetter(tree: ValDef) extends BeanAccessor("is") with AnyBeanGetter - case class BeanGetter(tree: ValDef) extends BeanAccessor("get") with AnyBeanGetter + def derivedMods = + if (!localLazyVal) tree.mods & FieldFlags | PrivateLocal | (if (isLazy) MUTABLE else 0) + else (tree.mods | ARTIFACT | MUTABLE) & ~IMPLICIT - // the bean setter's RHS delegates to the setter - case class BeanSetter(tree: ValDef) extends BeanAccessor("set") with DerivedSetter { - override protected def setterRhs = Apply(Ident(tree.name.setterName), List(Ident(setterParam))) - } + // TODO: why is this different from the symbol!? + private def derivedModsForTree = tree.mods | PrivateLocal - // No Symbols available. - private def beanAccessorsFromNames(tree: ValDef) = { - val ValDef(mods, _, _, _) = tree - val hasBP = mods hasAnnotationNamed tpnme.BeanPropertyAnnot - val hasBoolBP = mods hasAnnotationNamed tpnme.BooleanBeanPropertyAnnot - - if (hasBP || hasBoolBP) { - val getter = ( - if (hasBP) new BeanGetter(tree) with NoSymbolBeanGetter - else new BooleanBeanGetter(tree) with NoSymbolBeanGetter - ) - getter :: { - if (mods.isMutable) List(BeanSetter(tree)) else Nil - } - } - else Nil - } + def derivedTree = + if (Field.noFieldFor(tree)) EmptyTree + else if (isLazy) copyValDef(tree)(mods = derivedModsForTree, name = derivedName, rhs = EmptyTree).setPos(tree.pos.focus) + else copyValDef(tree)(mods = derivedModsForTree, name = derivedName) - protected def enterBeans(tree: ValDef) { - val ValDef(mods, name, _, _) = tree - val beans = beanAccessorsFromNames(tree) - if (beans.nonEmpty) { - if (!name.charAt(0).isLetter) - BeanPropertyAnnotationFieldWithoutLetterError(tree) - else if (mods.isPrivate) // avoids name clashes with private fields in traits - BeanPropertyAnnotationPrivateFieldError(tree) - - // Create and enter the symbols here, add the trees in finishGetterSetter. 
- beans foreach (_.createAndEnterSymbol()) - } } + } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 98dca1089c45..99c1b6991e75 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -300,7 +300,7 @@ trait Namers extends MethodSynthesis { def assignSymbol(tree: Tree): Symbol = logAssignSymbol(tree, tree match { case PackageDef(pid, _) => createPackageSymbol(tree.pos, pid) - case Import(_, _) => createImportSymbol(tree) + case imp: Import => createImportSymbol(imp) case mdef: MemberDef => createMemberSymbol(mdef, mdef.name, -1L) case _ => abort("Unexpected tree: " + tree) }) @@ -319,6 +319,12 @@ trait Namers extends MethodSynthesis { sym } + def createMethod(accessQual: MemberDef, name: TermName, pos: Position, flags: Long): MethodSymbol = { + val sym = owner.newMethod(name, pos, flags) + setPrivateWithin(accessQual, sym) + sym + } + private def logAssignSymbol(tree: Tree, sym: Symbol): Symbol = { if (isPastTyper) sym.name.toTermName match { case nme.IMPORT | nme.OUTER | nme.ANON_CLASS_NAME | nme.ANON_FUN_NAME | nme.CONSTRUCTOR => () @@ -355,11 +361,9 @@ trait Namers extends MethodSynthesis { else owner.newValue(name.toTermName, pos, flags) } } - def createFieldSymbol(tree: ValDef): TermSymbol = - owner.newValue(tree.localName, tree.pos, tree.mods.flags & FieldFlags | PrivateLocal) - def createImportSymbol(tree: Tree) = - NoSymbol.newImport(tree.pos) setInfo completerOf(tree) + def createImportSymbol(tree: Import) = + NoSymbol.newImport(tree.pos) setInfo (namerOf(tree.symbol) importTypeCompleter tree) /** All PackageClassInfoTypes come from here. */ def createPackageSymbol(pos: Position, pid: RefTree): Symbol = { @@ -632,7 +636,7 @@ trait Namers extends MethodSynthesis { } } - def completerOf(tree: Tree): TypeCompleter = { + def completerOf(tree: MemberDef): TypeCompleter = { val mono = namerOf(tree.symbol) monoTypeCompleter tree val tparams = treeInfo.typeParameters(tree) if (tparams.isEmpty) mono @@ -666,25 +670,6 @@ trait Namers extends MethodSynthesis { } } - def enterLazyVal(tree: ValDef, lazyAccessor: Symbol): TermSymbol = { - // If the owner is not a class, this is a lazy val from a method, - // with no associated field. It has an accessor with $lzy appended to its name and - // its flags are set differently. The implicit flag is reset because otherwise - // a local implicit "lazy val x" will create an ambiguity with itself - // via "x$lzy" as can be seen in test #3927. 
- val sym = ( - if (owner.isClass) createFieldSymbol(tree) - else owner.newValue(tree.name append nme.LAZY_LOCAL, tree.pos, (tree.mods.flags | ARTIFACT) & ~IMPLICIT) - ) - enterValSymbol(tree, sym setFlag MUTABLE setLazyAccessor lazyAccessor) - } - def enterStrictVal(tree: ValDef): TermSymbol = { - enterValSymbol(tree, createFieldSymbol(tree)) - } - def enterValSymbol(tree: ValDef, sym: TermSymbol): TermSymbol = { - enterInScope(sym) - sym setInfo namerOf(sym).monoTypeCompleter(tree) - } def enterPackage(tree: PackageDef) { val sym = assignSymbol(tree) newNamer(context.make(tree, sym.moduleClass, sym.info.decls)) enterSyms tree.stats @@ -771,7 +756,7 @@ trait Namers extends MethodSynthesis { NoSymbol } - def monoTypeCompleter(tree: Tree) = mkTypeCompleter(tree) { sym => + def monoTypeCompleter(tree: MemberDef) = mkTypeCompleter(tree) { sym => // this early test is there to avoid infinite baseTypes when // adding setters and getters --> bug798 // It is a def in an attempt to provide some insulation against @@ -780,8 +765,9 @@ trait Namers extends MethodSynthesis { // on these flag checks so it can't hurt. def needsCycleCheck = sym.isNonClassType && !sym.isParameter && !sym.isExistential - // logDefinition(sym) { - val tp = typeSig(tree) + val annotations = annotSig(tree.mods.annotations) + + val tp = typeSig(tree, annotations) findCyclicalLowerBound(tp) andAlso { sym => if (needsCycleCheck) { @@ -792,42 +778,140 @@ trait Namers extends MethodSynthesis { sym.initialize } } - sym setInfo { - if (sym.isJavaDefined) RestrictJavaArraysMap(tp) - else tp - } + + sym.setInfo(if (!sym.isJavaDefined) tp else RestrictJavaArraysMap(tp)) + if (needsCycleCheck) { log(s"Needs cycle check: ${sym.debugLocationString}") if (!typer.checkNonCyclic(tree.pos, tp)) sym setInfo ErrorType } - //} validate(sym) } - def moduleClassTypeCompleter(tree: ModuleDef) = { - mkTypeCompleter(tree) { sym => - val moduleSymbol = tree.symbol - assert(moduleSymbol.moduleClass == sym, moduleSymbol.moduleClass) - moduleSymbol.info // sets moduleClass info as a side effect. - } + def moduleClassTypeCompleter(tree: ModuleDef) = mkTypeCompleter(tree) { sym => + val moduleSymbol = tree.symbol + assert(moduleSymbol.moduleClass == sym, moduleSymbol.moduleClass) + moduleSymbol.info // sets moduleClass info as a side effect. + } + + + def importTypeCompleter(imp: Import) = mkTypeCompleter(imp) { sym => + sym setInfo importSig(imp) + } + + import AnnotationInfo.{mkFilter => annotationFilter} + + def valTypeCompleter(tree: ValDef) = mkTypeCompleter(tree) { sym => + val annots = + if (tree.mods.annotations.isEmpty) Nil + else annotSig(tree.mods.annotations) filter annotationFilter(FieldTargetClass, !tree.mods.isParamAccessor) + + sym setInfo typeSig(tree, annots) + + validate(sym) } /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */ def accessorTypeCompleter(tree: ValDef, isSetter: Boolean) = mkTypeCompleter(tree) { sym => + // println(s"triaging for ${sym.debugFlagString} $sym from $valAnnots to $annots") + // typeSig calls valDefSig (because tree: ValDef) // sym is an accessor, while tree is the field (which may have the same symbol as the getter, or maybe it's the field) - val sig = accessorSigFromFieldTp(sym, isSetter, typeSig(tree)) + // TODO: can we make this work? 
typeSig is called on same tree (valdef) to complete info for field and all its accessors + // reuse work done in valTypeCompleter if we already computed the type signature of the val + // (assuming the field and accessor symbols are distinct -- i.e., we're not in a trait) +// val valSig = +// if ((sym ne tree.symbol) && tree.symbol.isInitialized) tree.symbol.info +// else typeSig(tree, Nil) // don't set annotations for the valdef -- we just want to compute the type sig + + val valSig = typeSig(tree, Nil) // don't set annotations for the valdef -- we just want to compute the type sig + + val sig = accessorSigFromFieldTp(sym, isSetter, valSig) + + val mods = tree.mods + if (mods.annotations.nonEmpty) { + val annotSigs = annotSig(mods.annotations) + + // neg/t3403: check that we didn't get a sneaky type alias/renamed import that we couldn't detect because we only look at names during synthesis + // (TODO: can we look at symbols earlier?) + if (!((mods hasAnnotationNamed tpnme.BeanPropertyAnnot) || (mods hasAnnotationNamed tpnme.BooleanBeanPropertyAnnot)) + && annotSigs.exists(ann => (ann.matches(BeanPropertyAttr)) || ann.matches(BooleanBeanPropertyAttr))) + BeanPropertyAnnotationLimitationError(tree) + + sym setAnnotations (annotSigs filter filterAccessorAnnotations(isSetter)) + } sym setInfo pluginsTypeSigAccessor(sig, typer, tree, sym) validate(sym) } - private def accessorSigFromFieldTp(sym: global.Symbol, isSetter: Boolean, tp: global.Type): global.Type with Product with Serializable = { - if (isSetter) MethodType(List(sym.newSyntheticValueParam(tp)), UnitTpe) else NullaryMethodType(tp) + /* Explicit isSetter required for bean setters (beanSetterSym.isSetter is false) */ + def beanAccessorTypeCompleter(tree: ValDef, missingTpt: Boolean, isSetter: Boolean) = mkTypeCompleter(tree) { sym => + context.unit.synthetics get sym match { + case Some(ddef: DefDef) => + // sym is an accessor, while tree is the field (for traits it's actually the getter, and we're completing the setter) + // reuse work done in valTypeCompleter if we already computed the type signature of the val + // (assuming the field and accessor symbols are distinct -- i.e., we're not in a trait) + val valSig = + if ((sym ne tree.symbol) && tree.symbol.isInitialized) tree.symbol.info + else typeSig(tree, Nil) // don't set annotations for the valdef -- we just want to compute the type sig + + // patch up the accessor's tree if the valdef's tpt was not known back when the tree was synthesized + if (missingTpt) { // can't look at tree.tpt here because it may have been completed by now + if (!isSetter) ddef.tpt setType valSig + else if (ddef.vparamss.nonEmpty && ddef.vparamss.head.nonEmpty) ddef.vparamss.head.head.tpt setType valSig + else throw new TypeError(tree.pos, s"Internal error: could not complete parameter/return type for $ddef from $sym") + } + + val annots = + if (tree.mods.annotations.isEmpty) Nil + else annotSig(tree.mods.annotations) filter filterBeanAccessorAnnotations(isSetter) + + val sig = typeSig(ddef, annots) + + sym setInfo pluginsTypeSigAccessor(sig, typer, tree, sym) + + validate(sym) + + case _ => + throw new TypeError(tree.pos, s"Internal error: no synthetic tree found for bean accessor $sym") + } + } + + + // see scala.annotation.meta's package class for more info + // Annotations on ValDefs can be targeted towards the following: field, getter, setter, beanGetter, beanSetter, param. 
+ // The defaults are: + // - (`val`-, `var`- or plain) constructor parameter annotations end up on the parameter, not on any other entity. + // - val/var member annotations solely end up on the underlying field, except in traits (@since 2.12), + // where there is no field, and the getter thus holds annotations targeting both getter & field. + // As soon as there is a field/getter (in subclasses mixing in the trait), we triage the annotations. + // + // TODO: these defaults can be surprising for annotations not meant for accessors/fields -- should we revisit? + // (In order to have `@foo val X` result in the X getter being annotated with `@foo`, foo needs to be meta-annotated with @getter) + private def filterAccessorAnnotations(isSetter: Boolean): AnnotationInfo => Boolean = + if (isSetter || !owner.isTrait) + annotationFilter(if (isSetter) SetterTargetClass else GetterTargetClass, defaultRetention = false) + else (ann => + annotationFilter(FieldTargetClass, defaultRetention = true)(ann) || + annotationFilter(GetterTargetClass, defaultRetention = true)(ann)) + + private def filterBeanAccessorAnnotations(isSetter: Boolean): AnnotationInfo => Boolean = + if (isSetter || !owner.isTrait) + annotationFilter(if (isSetter) BeanSetterTargetClass else BeanGetterTargetClass, defaultRetention = false) + else (ann => + annotationFilter(FieldTargetClass, defaultRetention = true)(ann) || + annotationFilter(BeanGetterTargetClass, defaultRetention = true)(ann)) + + + private def accessorSigFromFieldTp(sym: Symbol, isSetter: Boolean, tp: Type): Type = + if (isSetter) MethodType(List(sym.newSyntheticValueParam(tp)), UnitTpe) + else NullaryMethodType(tp) + def selfTypeCompleter(tree: Tree) = mkTypeCompleter(tree) { sym => val selftpe = typer.typedType(tree).tpe sym setInfo { @@ -1539,67 +1623,52 @@ trait Namers extends MethodSynthesis { * is then assigned to the corresponding symbol (typeSig itself does not need to assign * the type to the symbol, but it can if necessary). */ - def typeSig(tree: Tree): Type = { - // log("typeSig " + tree) - /* For definitions, transform Annotation trees to AnnotationInfos, assign - * them to the sym's annotations. Type annotations: see Typer.typedAnnotated - * We have to parse definition annotations here (not in the typer when traversing - * the MemberDef tree): the typer looks at annotations of certain symbols; if - * they were added only in typer, depending on the compilation order, they may - * or may not be visible. - */ - def annotate(annotated: Symbol) = { - // typeSig might be called multiple times, e.g. on a ValDef: val, getter, setter - // parse the annotations only once. - if (!annotated.isInitialized) tree match { - case defn: MemberDef => - val ainfos = defn.mods.annotations filterNot (_ eq null) map { ann => - val ctx = typer.context - val annCtx = ctx.makeNonSilent(ann) - // need to be lazy, #1782. beforeTyper to allow inferView in annotation args, SI-5892. 
- AnnotationInfo lazily { - enteringTyper(newTyper(annCtx) typedAnnotation ann) - } - } - if (ainfos.nonEmpty) { - annotated setAnnotations ainfos - if (annotated.isTypeSkolem) - annotated.deSkolemize setAnnotations ainfos - } - case _ => + def typeSig(tree: Tree, annotSigs: List[AnnotationInfo]): Type = { + if (annotSigs.nonEmpty) annotate(tree.symbol, annotSigs) + + try tree match { + case member: MemberDef => createNamer(tree).memberSig(member) + case imp: Import => importSig(imp) + } catch typeErrorHandler(tree, ErrorType) + } + + /* For definitions, transform Annotation trees to AnnotationInfos, assign + * them to the sym's annotations. Type annotations: see Typer.typedAnnotated + * We have to parse definition annotations here (not in the typer when traversing + * the MemberDef tree): the typer looks at annotations of certain symbols; if + * they were added only in typer, depending on the compilation order, they may + * or may not be visible. + */ + def annotSig(annotations: List[Tree]): List[AnnotationInfo] = + annotations filterNot (_ eq null) map { ann => + val ctx = typer.context + // need to be lazy, #1782. enteringTyper to allow inferView in annotation args, SI-5892. + AnnotationInfo lazily { + enteringTyper { + newTyper(ctx.makeNonSilent(ann)) typedAnnotation ann + } } } - val sym: Symbol = tree.symbol + private def annotate(sym: Symbol, annotSigs: List[AnnotationInfo]): Unit = { + sym setAnnotations annotSigs // TODO: meta-annotations to indicate where module annotations should go (module vs moduleClass) - annotate(sym) - if (sym.isModule) annotate(sym.moduleClass) - - def getSig = tree match { - case cdef: ClassDef => - createNamer(tree).classSig(cdef) - - case mdef: ModuleDef => - createNamer(tree).moduleSig(mdef) - - case ddef: DefDef => - createNamer(tree).methodSig(ddef) - - case vdef: ValDef => - createNamer(tree).valDefSig(vdef) - - case tdef: TypeDef => - createNamer(tree).typeDefSig(tdef) //@M! + if (sym.isModule) sym.moduleClass setAnnotations annotSigs + else if (sym.isTypeSkolem) sym.deSkolemize setAnnotations annotSigs + } - case imp: Import => - importSig(imp) + // TODO OPT: move to method on MemberDef? 
+ private def memberSig(member: MemberDef) = + member match { + case ddef: DefDef => methodSig(ddef) + case vdef: ValDef => valDefSig(vdef) + case tdef: TypeDef => typeDefSig(tdef) + case cdef: ClassDef => classSig(cdef) + case mdef: ModuleDef => moduleSig(mdef) + // skip PackageDef } - try getSig - catch typeErrorHandler(tree, ErrorType) - } - def includeParent(tpe: Type, parent: Symbol): Type = tpe match { case PolyType(tparams, restpe) => PolyType(tparams, includeParent(restpe, parent)) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 1fe8438b5674..f42f5bf75d73 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2221,7 +2221,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!isPastTyper && meth.isPrimaryConstructor) { for (vparams <- ddef.vparamss; vd <- vparams) { if (vd.mods.isParamAccessor) { - namer.validateParam(vd) + vd.symbol setAnnotations (vd.symbol.annotations filter AnnotationInfo.mkFilter(ParamTargetClass, defaultRetention = true)) } } } diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala index a8430548f5cb..ae36f5edc2e6 100644 --- a/src/library/scala/reflect/NameTransformer.scala +++ b/src/library/scala/reflect/NameTransformer.scala @@ -19,6 +19,7 @@ object NameTransformer { val NAME_JOIN_STRING = sys.props.getOrElse("SCALA_NAME_JOIN_STRING", "$") val MODULE_INSTANCE_NAME = "MODULE$" val LOCAL_SUFFIX_STRING = " " + val LAZY_LOCAL_SUFFIX_STRING = "$lzy" val SETTER_SUFFIX_STRING = "_$eq" val TRAIT_SETTER_SEPARATOR_STRING = "$_setter_$" diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala index d58cabf3d74c..cfde16475499 100644 --- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala +++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala @@ -175,15 +175,6 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => case (Nil, defaults) => defaults contains category case (metas, _) => metas exists (_ matches category) } - - def mkFilter(categories: List[Symbol], defaultRetention: Boolean)(ann: AnnotationInfo) = - (ann.metaAnnotations, ann.defaultTargets) match { - case (Nil, Nil) => defaultRetention - case (Nil, defaults) => categories exists defaults.contains - case (metas, _) => - val metaSyms = metas collect { case ann if !ann.symbol.isInstanceOf[StubSymbol] => ann.symbol } - categories exists (category => metaSyms exists (_ isNonBottomSubClass category)) - } } class CompleteAnnotationInfo( diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 11b5db979371..925018d3a647 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -95,6 +95,8 @@ trait StdNames { val NAME_JOIN_STRING: String = NameTransformer.NAME_JOIN_STRING val MODULE_SUFFIX_STRING: String = NameTransformer.MODULE_SUFFIX_STRING val LOCAL_SUFFIX_STRING: String = NameTransformer.LOCAL_SUFFIX_STRING + val LAZY_LOCAL_SUFFIX_STRING: String = NameTransformer.LAZY_LOCAL_SUFFIX_STRING + val TRAIT_SETTER_SEPARATOR_STRING: String = NameTransformer.TRAIT_SETTER_SEPARATOR_STRING val SINGLETON_SUFFIX: String = ".type" @@ -337,7 +339,6 @@ trait StdNames { val DEFAULT_CASE: NameType = "defaultCase$" val EQEQ_LOCAL_VAR: NameType = "eqEqTemp$" val 
FAKE_LOCAL_THIS: NameType = "this$" - val LAZY_LOCAL: NameType = "$lzy" val LAZY_SLOW_SUFFIX: NameType = "$lzycompute" val UNIVERSE_BUILD_PREFIX: NameType = "$u.internal.reificationSupport." val UNIVERSE_PREFIX: NameType = "$u." diff --git a/test/files/scalacheck/quasiquotes/TypecheckedProps.scala b/test/files/scalacheck/quasiquotes/TypecheckedProps.scala index 2c4d81f333e8..fe07893a366c 100644 --- a/test/files/scalacheck/quasiquotes/TypecheckedProps.scala +++ b/test/files/scalacheck/quasiquotes/TypecheckedProps.scala @@ -103,7 +103,7 @@ object TypecheckedProps extends QuasiquoteProperties("typechecked") val lazyName = TermName("x") val lazyRhsVal = 42 val lazyRhs = Literal(Constant(lazyRhsVal)) - val q"{lazy val $pname = $rhs}" = typecheck(q"{lazy val $lazyName = $lazyRhsVal}") + val q"{ $_ ; $mods val $pname: $_ = { $_ = $rhs ; $_ } }" = typecheck(q"{lazy val $lazyName = $lazyRhsVal}") assert(pname == lazyName) assert(rhs ≈ lazyRhs) diff --git a/test/junit/scala/reflect/internal/PrintersTest.scala b/test/junit/scala/reflect/internal/PrintersTest.scala index 6f9b711b348c..234f22e9fba9 100644 --- a/test/junit/scala/reflect/internal/PrintersTest.scala +++ b/test/junit/scala/reflect/internal/PrintersTest.scala @@ -1018,7 +1018,11 @@ class ValAndDefPrintTest { """, typedCode = sm""" |def a = { - | lazy val test: scala.Int = 42; + | lazy val test$$lzy: scala.Int = _; + | lazy val test: Int = { + | test$$lzy = 42; + | test$$lzy + | }; | () |}""") diff --git a/test/files/neg/t6375.check b/test/pending/neg/t6375.check similarity index 100% rename from test/files/neg/t6375.check rename to test/pending/neg/t6375.check diff --git a/test/files/neg/t6375.flags b/test/pending/neg/t6375.flags similarity index 100% rename from test/files/neg/t6375.flags rename to test/pending/neg/t6375.flags diff --git a/test/files/neg/t6375.scala b/test/pending/neg/t6375.scala similarity index 100% rename from test/files/neg/t6375.scala rename to test/pending/neg/t6375.scala From f1cbe8aee820e166283c0948edede2c6b6624d01 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 3 Jun 2016 17:35:14 -0700 Subject: [PATCH 0221/2793] Make fewer trait methods not-{private, protected} No longer making trait methods not-protected. (The backend only does public/private because of the poor mapping between visibility from Scala to the JVM). Note that protected trait members will not receive static forwarders in module classes (when mixed into objects). Historic note: we used to `makeNotPrivate` during explicitouter, now we do it later, which means more private methods must be excluded (e.g., lambdaLIFTED ones). 
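For illustration only (this example is not part of the patch): under the 1:1 trait-to-interface encoding, the storage for a field declared in a trait is emitted in the class that mixes the trait in, so the accessors for a stored field must be implementable there and cannot stay private.

```
// Hypothetical sketch: the storage for `count` lives in C, not in T, so T's
// accessors for `count` have to be implementable (hence not private) in C.
trait T {
  private var count: Int = 0             // stored field => accessors synthesized in subclasses
  def bump(): Int = { count += 1; count }
}
class C extends T                         // C receives the field and the (no longer private) accessors
```
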
--- .../scala/tools/nsc/transform/Fields.scala | 18 ++++++------- .../scala/tools/nsc/transform/Mixin.scala | 26 ++++++++++++++----- .../backend/jvm/opt/ScalaInlineInfoTest.scala | 2 +- 3 files changed, 28 insertions(+), 18 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Fields.scala b/src/compiler/scala/tools/nsc/transform/Fields.scala index 6e064e2a24a5..26e517743adf 100644 --- a/src/compiler/scala/tools/nsc/transform/Fields.scala +++ b/src/compiler/scala/tools/nsc/transform/Fields.scala @@ -249,16 +249,14 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor val accessorUnderConsideration = !(member hasFlag (DEFERRED | LAZY)) // destructively mangle accessor's name (which may cause rehashing of decls), also sets flags - // TODO: technically, only necessary for stored fields - if (member hasFlag PRIVATE) member makeNotPrivate clazz - - // Need to mark as notPROTECTED, so that it's carried over to the synthesized member in subclasses, - // since the trait member will receive this flag later in ExplicitOuter, but the synthetic subclass member will not. - // If we don't add notPROTECTED to the synthesized one, the member will not be seen as overriding the trait member. - // Therefore, addForwarders's call to membersBasedOnFlags would see the deferred member in the trait, - // instead of the concrete (desired) one in the class - // TODO: encapsulate as makeNotProtected, similar to makeNotPrivate (also do moduleClass, e.g.) - if (member hasFlag PROTECTED) member setFlag notPROTECTED + // this accessor has to be implemented in a subclass -- can't be private + if ((member hasFlag PRIVATE) && fieldMemoization.stored) member makeNotPrivate clazz + + // This must remain in synch with publicizeTraitMethod in Mixins, so that the + // synthesized member in a subclass and the trait member remain in synch regarding access. + // Otherwise, the member will not be seen as overriding the trait member, and `addForwarders`'s call to + // `membersBasedOnFlags` would see the deferred member in the trait, instead of the concrete (desired) one in the class + // not doing: if (member hasFlag PROTECTED) member setFlag notPROTECTED // must not reset LOCAL, as we must maintain protected[this]ness to allow that variance hole // (not sure why this only problem only arose when we started setting the notPROTECTED flag) diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index e6ffe328daf5..dae0deeccd46 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -20,20 +20,32 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { val phaseName: String = "mixin" /** Some trait methods need to be implemented in subclasses, so they cannot be private. + * + * We used to publicize during explicitouter (for some reason), so the condition is a bit more involved now it's done here + * (need to exclude lambdaLIFTED methods, as they do no exist during explicitouter and thus did not need to be excluded...) * * They may be protected, now that traits are compiled 1:1 to interfaces. + * (The same disclaimers about mapping Scala's notion of visibility to Java's apply.) + * * - * TODO: interfaces can also have private members, so there's also less need to make trait members non-private - * can we leave more methods private? - * (they still may need to be implemented in subclasses, though we could make those protected...). 
+ * TODO: can we just set the right flags from the start?? + * could we use the final flag to indicate a private method is really-really-private? */ def publicizeTraitMethod(sym: Symbol): Unit = { - if ((sym hasFlag PRIVATE) && - ( (sym hasFlag SUPERACCESSOR) // super accessors by definition must be implemented in a subclass, so can't have the private (TODO: why are they ever private in a trait to begin with!?!?) - || (sym hasFlag ACCESSOR | MODULE))) // an accessor / module *may* need to be implemented in a subclass, and thus cannot be private + if ((sym hasFlag PRIVATE) && !(sym hasFlag LIFTED) && ( // lambdalifted methods can remain private + // super accessors by definition must be implemented in a subclass, so can't be private + // TODO: why are they ever private in a trait to begin with!?!? (could just name mangle them to begin with) + // TODO: can we add the SYNTHESIZE_IMPL_IN_SUBCLASS flag to super accessors symbols? + (sym hasFlag SUPERACCESSOR) + // an accessor / module *may* need to be implemented in a subclass, and thus cannot be private + // TODO: document how we get here (lambdalift? fields has already made accessors not-private) + || (sym hasFlag ACCESSOR | MODULE) && (sym hasFlag SYNTHESIZE_IMPL_IN_SUBCLASS))) sym.makeNotPrivate(sym.owner) - if (sym hasFlag PROTECTED) sym setFlag notPROTECTED + // no need to make trait methods not-protected + // (we used to have to move them to another class when interfaces could not have concrete methods) + // see note in `synthFieldsAndAccessors` in Fields.scala + // if (sym hasFlag PROTECTED) sym setFlag notPROTECTED } /** This map contains a binding (class -> info) if diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala index e03b703dc9d2..56da0e24938c 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala @@ -106,7 +106,7 @@ class ScalaInlineInfoTest extends BytecodeTesting { ("x5()I", MethodInlineInfo(true, false,false)), ("x5$(LT;)I", MethodInlineInfo(true ,false,false)), ("L$lzycompute$1(Lscala/runtime/VolatileObjectRef;)LT$L$2$;", MethodInlineInfo(true, false,false)), - ("T$$L$1(Lscala/runtime/VolatileObjectRef;)LT$L$2$;", MethodInlineInfo(true ,false,false)), + ("L$1(Lscala/runtime/VolatileObjectRef;)LT$L$2$;", MethodInlineInfo(true, false,false)), ("nest$1()I", MethodInlineInfo(true, false,false)), ("$init$(LT;)V", MethodInlineInfo(true,false,false))), None // warning From af02e291b6baf4f673a79f4c32e6da67d31bdd75 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 11 Aug 2016 10:29:47 -0700 Subject: [PATCH 0222/2793] Review feedback from Jason & Lukas --- .../scala/tools/nsc/transform/LambdaLift.scala | 14 +++++++------- src/compiler/scala/tools/nsc/transform/Mixin.scala | 6 +++++- .../tools/nsc/typechecker/MethodSynthesis.scala | 2 +- 3 files changed, 13 insertions(+), 9 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala index 2ccc44f234a5..74e6c5838833 100644 --- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala +++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala @@ -262,17 +262,17 @@ abstract class LambdaLift extends InfoTransform { debuglog(s"new proxy ${proxyName} in ${owner.fullLocationString}") val proxy = if (owner.isTrait) { - // TODO preserve pre-erasure info for the accessors? 
- // TODO: do we need SYNTHESIZE_IMPL_IN_SUBCLASS to indicate that `notDeferred(setter)` should hold val accessorFlags = newFlags.toLong | ACCESSOR | SYNTHESIZE_IMPL_IN_SUBCLASS + + // TODO do we need to preserve pre-erasure info for the accessors (and a NullaryMethodType for the getter)? + // can't have a field in the trait, so add a setter val setter = owner.newMethod(nme.expandedSetterName(proxyName.setterName, owner), fv.pos, accessorFlags) - setter setInfo MethodType(setter.newSyntheticValueParams(List(fv.info)), UnitTpe) - owner.info.decls enter setter + setter setInfoAndEnter MethodType(setter.newSyntheticValueParams(List(fv.info)), UnitTpe) - val getter = owner.newMethod(proxyName.getterName, fv.pos, accessorFlags | STABLE) - getter setInfo MethodType(Nil, fv.info) + // the getter serves as the proxy -- entered below + owner.newMethod(proxyName.getterName, fv.pos, accessorFlags | STABLE) setInfo MethodType(Nil, fv.info) } else - owner.newValue(proxyName.toTermName, owner.pos, newFlags.toLong | PrivateLocal) setInfo fv.info + owner.newValue(proxyName.toTermName, fv.pos, newFlags.toLong | PrivateLocal) setInfo fv.info if (owner.isClass) owner.info.decls enter proxy proxy diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index dae0deeccd46..6c8904f5d00b 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -25,8 +25,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { * (need to exclude lambdaLIFTED methods, as they do no exist during explicitouter and thus did not need to be excluded...) * * They may be protected, now that traits are compiled 1:1 to interfaces. - * (The same disclaimers about mapping Scala's notion of visibility to Java's apply.) + * The same disclaimers about mapping Scala's notion of visibility to Java's apply: + * we cannot emit PROTECTED methods in interfaces on the JVM, + * but knowing that these trait methods are protected means we won't emit static forwarders. * + * JVMLS: "Methods of interfaces may have any of the flags in Table 4.6-A set + * except ACC_PROTECTED, ACC_FINAL, ACC_SYNCHRONIZED, and ACC_NATIVE (JLS §9.4)." * * TODO: can we just set the right flags from the start?? * could we use the final flag to indicate a private method is really-really-private? diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index c036a2a9b830..ea323d0fba30 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -301,7 +301,7 @@ trait MethodSynthesis { if (tree.symbol.owner.isTrait || Field.noFieldFor(tree)) rhs1 // TODO move tree.symbol.owner.isTrait into noFieldFor else gen.mkAssignAndReturn(tree.symbol, rhs1) - derivedSym setPos tree.pos // cannot set it at createAndEnterSymbol because basisSym can possibly still have NoPosition + derivedSym setPos tree.pos // TODO: can we propagate `tree.pos` to `derivedSym` when the symbol is created? 
val ddefRes = DefDef(derivedSym, new ChangeOwnerTraverser(tree.symbol, derivedSym)(body)) // ValDef will have its position focused whereas DefDef will have original correct rangepos // ideally positions would be correct at the creation time but lazy vals are really a special case From 636af2e98c9af707b944724e439d6e39bb3b074e Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 28 Jun 2016 11:55:58 -0700 Subject: [PATCH 0223/2793] SI-8339 drop deprecated fallback `withFilter` -> `filter` You must implement the `withFilter` method to use `if`-guards in a `for`-comprehension. (Drop pos/t7239.scala because it relied on this rewrite.) --- .../tools/nsc/typechecker/RefChecks.scala | 2 +- .../scala/tools/nsc/typechecker/Typers.scala | 15 +------- .../reflect/internal/ReificationSupport.scala | 1 + test/files/neg/t6455.flags | 1 - test/files/neg/t6455.scala | 4 +- test/files/pos/t7239.scala | 38 ------------------- 6 files changed, 5 insertions(+), 56 deletions(-) delete mode 100644 test/files/neg/t6455.flags delete mode 100644 test/files/pos/t7239.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index d1764ea4829e..5e38be3a9e70 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1552,7 +1552,7 @@ abstract class RefChecks extends InfoTransform with scala.reflect.internal.trans private def transformApply(tree: Apply): Tree = tree match { case Apply( - Select(qual, nme.filter | nme.withFilter), + Select(qual, nme.withFilter), List(Function( List(ValDef(_, pname, tpt, _)), Match(_, CaseDef(pat1, _, _) :: _)))) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index ba104fb7a6b1..c4e44e875789 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4795,16 +4795,6 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } - // temporarily use `filter` as an alternative for `withFilter` - def tryWithFilterAndFilter(tree: Select, qual: Tree): Tree = { - def warn(sym: Symbol) = context.deprecationWarning(tree.pos, sym, s"`withFilter' method does not yet exist on ${qual.tpe.widen}, using `filter' method instead", "2.11.0") - silent(_ => typedSelect(tree, qual, nme.withFilter)) orElse { _ => - silent(_ => typed1(Select(qual, nme.filter) setPos tree.pos, mode, pt)) match { - case SilentResultValue(res) => warn(res.symbol) ; res - case SilentTypeError(err) => WithFilterError(tree, err) - } - } - } def typedSelectOrSuperCall(tree: Select) = tree match { case Select(qual @ Super(_, _), nme.CONSTRUCTOR) => // the qualifier type of a supercall constructor is its first parent class @@ -4818,10 +4808,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else UnstableTreeError(qualTyped) ) - val tree1 = name match { - case nme.withFilter if !settings.future => tryWithFilterAndFilter(tree, qualStableOrError) - case _ => typedSelect(tree, qualStableOrError, name) - } + val tree1 = typedSelect(tree, qualStableOrError, name) def sym = tree1.symbol if (tree.isInstanceOf[PostfixSelect]) checkFeature(tree.pos, PostfixOpsFeature, name.decode) diff --git a/src/reflect/scala/reflect/internal/ReificationSupport.scala b/src/reflect/scala/reflect/internal/ReificationSupport.scala index 30f2efd7e3d1..f9d9e196f4f1 100644 --- 
a/src/reflect/scala/reflect/internal/ReificationSupport.scala +++ b/src/reflect/scala/reflect/internal/ReificationSupport.scala @@ -725,6 +725,7 @@ trait ReificationSupport { self: SymbolTable => } // match call to either withFilter or filter + // TODO: now that we no longer rewrite `filter` to `withFilter`, maybe this extractor should only look for `withFilter`? protected object FilterCall { def unapply(tree: Tree): Option[(Tree,Tree)] = tree match { case Apply(Select(obj, nme.withFilter | nme.filter), arg :: Nil) => diff --git a/test/files/neg/t6455.flags b/test/files/neg/t6455.flags deleted file mode 100644 index 112fc720a057..000000000000 --- a/test/files/neg/t6455.flags +++ /dev/null @@ -1 +0,0 @@ --Xfuture \ No newline at end of file diff --git a/test/files/neg/t6455.scala b/test/files/neg/t6455.scala index ebbb37f1cdb6..22e4c30fdd75 100644 --- a/test/files/neg/t6455.scala +++ b/test/files/neg/t6455.scala @@ -1,6 +1,6 @@ object O { def filter(p: Int => Boolean): O.type = this } class Test { - // should not compile because we no longer rewrite withFilter => filter under -Xfuture + // should not compile because we no longer rewrite withFilter => filter O.withFilter(f => true) -} \ No newline at end of file +} diff --git a/test/files/pos/t7239.scala b/test/files/pos/t7239.scala deleted file mode 100644 index 16e9d00f1733..000000000000 --- a/test/files/pos/t7239.scala +++ /dev/null @@ -1,38 +0,0 @@ -object Test { - def BrokenMethod(): HasFilter[(Int, String)] = ??? - - trait HasFilter[B] { - def filter(p: B => Boolean) = ??? - } - - trait HasWithFilter { - def withFilter = ??? - } - - object addWithFilter { - trait NoImplicit - implicit def enrich(v: Any) - (implicit F0: NoImplicit): HasWithFilter = ??? - } - - BrokenMethod().withFilter(_ => true) // okay - BrokenMethod().filter(_ => true) // okay - - locally { - import addWithFilter._ - BrokenMethod().withFilter((_: (Int, String)) => true) // okay - } - - locally { - import addWithFilter._ - // adaptToMemberWithArgs sets the type of the tree `x` - // to ErrorType (while in silent mode, so the error is not - // reported. Later, when the fallback from `withFilter` - // to `filter` is attempted, the closure is taken to have - // have the type ` => Boolean`, which conforms to - // `(B => Boolean)`. Only later during pickling does the - // defensive check for erroneous types in the tree pick up - // the problem. - BrokenMethod().withFilter(x => true) // erroneous or inaccessible type. - } -} From d4c2a091b2b6bdc02c0491d23db6aa933418672f Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 11 Aug 2016 14:28:55 -0700 Subject: [PATCH 0224/2793] Remove doc-version was too long warning It would trigger 3091 times per test run. Use CSS to handle overflow instead. --- .../scala/tools/nsc/doc/html/page/Entity.scala | 14 +------------- .../tools/nsc/doc/html/resource/lib/index.css | 9 +++++++++ 2 files changed, 10 insertions(+), 13 deletions(-) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala index 9dd2c2184d54..54bf42bbd5c5 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala @@ -154,19 +154,7 @@ trait EntityPage extends HtmlPage { def search =
- } + } val version: NodeSeq = orEmpty(comment.version) { From 7b66341cf4ef68aab0e4c4626ed70d6bbe8d4600 Mon Sep 17 00:00:00 2001 From: Edmund Noble Date: Mon, 31 Jul 2017 09:02:01 -0400 Subject: [PATCH 1046/2793] Fix Queue reversed append --- .../scala/collection/immutable/Queue.scala | 4 +++- test/files/run/t10298.scala | 20 ++++++++++--------- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala index b85e1a673390..876066bb2d76 100644 --- a/src/library/scala/collection/immutable/Queue.scala +++ b/src/library/scala/collection/immutable/Queue.scala @@ -115,7 +115,9 @@ sealed class Queue[+A] protected(protected val in: List[A], protected val out: L val thatQueue: Queue[B] = that.asInstanceOf[Queue[B]] thatQueue.in ++ (thatQueue.out reverse_::: this.in) } else { - (new ListBuffer[B] ++= that.seq).prependToList(this.in) + val lb = new ListBuffer[B] + that.seq.foreach(_ +=: lb) + lb.prependToList(this.in) } new Queue[B](newIn, this.out).asInstanceOf[That] } else { diff --git a/test/files/run/t10298.scala b/test/files/run/t10298.scala index 5d3d1e5bf72c..00b83cd1fe5d 100644 --- a/test/files/run/t10298.scala +++ b/test/files/run/t10298.scala @@ -1,17 +1,19 @@ -import collection.immutable._ +import scala.collection.immutable._ object Test { def main(args: Array[String]): Unit = { - assert((Queue(1) ++ Vector(2)) == Queue(1, 2)) + val inputs: List[(Queue[Int], Vector[Int])] = List( + Queue.empty -> Vector(0, 1, 2), + (Queue.empty :+ 0) -> Vector(1, 2), + (0 +: Queue.empty) -> Vector(1, 2), + (0 +: (Queue.empty :+ 1)) -> Vector(2), + ((0 +: Queue.empty) :+ 1) -> Vector(2), + (0 +: 1 +: Queue.empty) -> Vector(2), + (Queue.empty :+ 0 :+ 1) -> Vector(2) + ) - assert(((Queue(1).++(Vector(2))(collection.breakOut)): Vector[Int]) == Vector(1, 2)) - - assert(((Queue(1) :+ 2) ++ Vector(3)) == Queue(1, 2, 3)) - - assert(((1 +: Queue(2)) ++ Vector(3)) == Queue(1, 2, 3)) - - assert(((1 +: Queue(2)) ++ (3 +: Queue(4))) == Queue(1, 2, 3, 4)) + inputs.foreach { case (q, v) => assert(q ++ v == Queue(0, 1, 2)) } } } From b81bc778822de33e73fda59d5014baa1292856d4 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 25 Sep 2017 14:00:06 +1000 Subject: [PATCH 1047/2793] Fix runtime refletion of empty package members under Java 9. We used to rely on `cls.getPackage == null` for `cls` defined in the empty package. Under Java 9, we actually get the empty package back from that call. This commit ensures we use the one true empty package symbol on either Java 8 or 9. --- src/reflect/scala/reflect/runtime/JavaMirrors.scala | 2 +- test/files/run/sd304.check | 1 + test/files/run/sd304/ReflectTest.scala | 8 ++++++++ test/files/run/sd304/Test.java | 5 +++++ 4 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 test/files/run/sd304.check create mode 100644 test/files/run/sd304/ReflectTest.scala create mode 100644 test/files/run/sd304/Test.java diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 8f2aaf5094e5..635be86233ee 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -943,7 +943,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive * The Scala package with given fully qualified name. Unlike `packageNameToScala`, * this one bypasses the cache. 
*/ - private[JavaMirrors] def makeScalaPackage(fullname: String): ModuleSymbol = gilSynchronized { + private[JavaMirrors] def makeScalaPackage(fullname: String): ModuleSymbol = if (fullname == "") EmptyPackage else gilSynchronized { val split = fullname lastIndexOf '.' val ownerModule: ModuleSymbol = if (split > 0) packageNameToScala(fullname take split) else this.RootPackage diff --git a/test/files/run/sd304.check b/test/files/run/sd304.check new file mode 100644 index 000000000000..be7795442a7a --- /dev/null +++ b/test/files/run/sd304.check @@ -0,0 +1 @@ +class Test diff --git a/test/files/run/sd304/ReflectTest.scala b/test/files/run/sd304/ReflectTest.scala new file mode 100644 index 000000000000..7685227b7de6 --- /dev/null +++ b/test/files/run/sd304/ReflectTest.scala @@ -0,0 +1,8 @@ +package p1 + +class ReflectTest { + def test(a: AnyRef): Unit = { + val mirror = reflect.runtime.universe.runtimeMirror(a.getClass.getClassLoader) + println(mirror.reflect(a).symbol) + } +} diff --git a/test/files/run/sd304/Test.java b/test/files/run/sd304/Test.java new file mode 100644 index 000000000000..97d523f8fb4b --- /dev/null +++ b/test/files/run/sd304/Test.java @@ -0,0 +1,5 @@ +public class Test { + public static void main(String[] args) { + new p1.ReflectTest().test(new Test()); + } +} From a8a1d6d03788485c5d6f625ec25ab74ba110a2cf Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 25 Sep 2017 14:49:30 +1000 Subject: [PATCH 1048/2793] Adapt to change in Java 9 classloader heirarchy Prior to Java 9, using `null` as the parent of an URLClassLoader would designate the entire boot classpath. This behaviour has changed, and now it only designates the classes encompassed by the `java.base` module. This commit uses reflection to call the newly added method, `ClassLoader.getPlatformClassloader` on Java 9, and uses this as the parent. 
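As a minimal sketch of the behaviour difference (assuming a Java 9 JVM; `javax.tools.ToolProvider` lives outside `java.base`, so the bare bootstrap loader no longer resolves it):

```
// With a `null` parent, the URLClassLoader delegates to the bootstrap loader only.
// On Java 8 this still finds boot-classpath classes such as javax.tools.ToolProvider;
// on Java 9 it throws ClassNotFoundException, which is what this change avoids.
val loader = new java.net.URLClassLoader(Array.empty[java.net.URL], null: ClassLoader)
loader.loadClass("javax.tools.ToolProvider")
```
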
Tested manually with: ``` for V in 1.8 9; do (java_use $V; java -version; qscalac $(f 'package p1; object Test extends App { println(Class.forName("javax.tools.ToolProvider")) }') && qscala -nobootcp p1.Test); done java version "1.8.0_144" Java(TM) SE Runtime Environment (build 1.8.0_144-b01) Java HotSpot(TM) 64-Bit Server VM (build 25.144-b01, mixed mode) class javax.tools.ToolProvider java version "9" Java(TM) SE Runtime Environment (build 9+181) Java HotSpot(TM) 64-Bit Server VM (build 9+181, mixed mode) class javax.tools.ToolProvider ``` Prior to this change, we ran into: ``` java.lang.ClassNotFoundException: javax.tools.ToolProvider at java.base/java.net.URLClassLoader.findClass(URLClassLoader.java:466) at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:563) ``` --- .../internal/util/ScalaClassLoader.scala | 22 ++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala index 22906622b3ec..c18a54e014aa 100644 --- a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala +++ b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala @@ -6,8 +6,9 @@ package scala package reflect.internal.util -import scala.language.implicitConversions +import java.lang.invoke.{MethodHandles, MethodType} +import scala.language.implicitConversions import java.lang.{ClassLoader => JClassLoader} import java.lang.reflect.Modifier import java.net.{URLClassLoader => JURLClassLoader} @@ -140,8 +141,9 @@ object ScalaClassLoader { } } - def fromURLs(urls: Seq[URL], parent: ClassLoader = null): URLClassLoader = - new URLClassLoader(urls, parent) + def fromURLs(urls: Seq[URL], parent: ClassLoader = null): URLClassLoader = { + new URLClassLoader(urls, if (parent == null) bootClassLoader else parent) + } /** True if supplied class exists in supplied path */ def classExists(urls: Seq[URL], name: String): Boolean = @@ -150,4 +152,18 @@ object ScalaClassLoader { /** Finding what jar a clazz or instance came from */ def originOfClass(x: Class[_]): Option[URL] = Option(x.getProtectionDomain.getCodeSource) flatMap (x => Option(x.getLocation)) + + private[this] val bootClassLoader: ClassLoader = { + if (!util.Properties.isJavaAtLeast("9")) null + else { + try { + MethodHandles.lookup().findStatic(classOf[ClassLoader], "getPlatformClassLoader", MethodType.methodType(classOf[ClassLoader])).invoke() + } catch { + case _: Throwable => + null + } + } + + + } } From 2e9a5853e9886fd76f7a5c78a9df0b16a7d5f74e Mon Sep 17 00:00:00 2001 From: jvican Date: Sat, 29 Jul 2017 13:11:27 +0200 Subject: [PATCH 1049/2793] Add original tree attachment Adds an original tree attachment that allows external tools (compiler plugins like scalameta & zinc) to keep track of the previous, unadapted tree. The reason why this is required is explained in SD-340: https://github.com/scala/scala-dev/issues/340. This change enables the incremental compiler to detect changes in `final val`s: https://github.com/sbt/zinc/issues/227. It also enables a fix for scala/bug#10426 by allowing the scaladoc compiler to let the compiler adapt literals without losing the tree that will be shown in the Scaladoc UI. To maintainers: I was thinking of the best way to test this, but couldn't come up with an elegant one. Do you suggest a way I could write a test for this? Is there a precedent in testing information carried in the trees? 
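As a usage sketch (not part of the patch), a downstream tool could recover the unadapted tree roughly as follows, assuming the analyzer's attachments are in scope in a compiler plugin component:

```
// Sketch of consuming the new attachment; `global` is the usual Global instance
// handed to a plugin, and OriginalTreeAttachment comes from the analyzer cake.
import global._, analyzer._

def originalOf(tree: Tree): Tree =
  tree.attachments.get[OriginalTreeAttachment].map(_.original).getOrElse(tree)
```
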
I think @lrytz is the right person to review this, since he suggested this fix in the first place. Fixes scala/bug#7173. --- .../tools/nsc/typechecker/StdAttachments.scala | 17 +++++++++++++++++ .../scala/tools/nsc/typechecker/Typers.scala | 4 ++-- 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala index 81c84633906b..731ce83c160f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala +++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala @@ -165,4 +165,21 @@ trait StdAttachments { def markDynamicRewrite(tree: Tree): Tree = tree.updateAttachment(DynamicRewriteAttachment) def unmarkDynamicRewrite(tree: Tree): Tree = tree.removeAttachment[DynamicRewriteAttachment.type] def isDynamicRewrite(tree: Tree): Boolean = tree.attachments.get[DynamicRewriteAttachment.type].isDefined + + /** + * Marks a tree that has been adapted by typer and sets the original tree that was in place before. + * + * Keeping track of the original trees were is an important feature for some compiler plugins (like + * Scalameta) and the incremental compiler (Zinc). In both cases, adapting trees loses information + * in some sense and do not allow external tools to capture some information stored in user-defined + * trees that are optimized away by early phases (mostly, typer). + * + * See how the absence of this attachment blocks Zinc: https://github.com/sbt/zinc/issues/227. + * Related: https://github.com/scala/scala-dev/issues/340. + * + * This attachment is, at the moment, only used to keep track of constant-folded constants. It + * has a generic wording in the hope that in the future can be reused in the same context to keep + * track of other adapted trees. + */ + case class OriginalTreeAttachment(original: Tree) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index bffc36a9b654..f0d0cd5c54d4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -995,8 +995,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val sym = tree.symbol if (sym != null && sym.isDeprecated) context.deprecationWarning(tree.pos, sym) - - treeCopy.Literal(tree, value) + // Keep the original tree in an annotation to avoid losing tree information for plugins + treeCopy.Literal(tree, value).updateAttachment(OriginalTreeAttachment(original)) } // Ignore type errors raised in later phases that are due to mismatching types with existential skolems From 0aff5cccd021c83e11a85258305ba88e0d74889f Mon Sep 17 00:00:00 2001 From: jvican Date: Mon, 17 Jul 2017 18:51:01 +0200 Subject: [PATCH 1050/2793] Use `ArrayBuffer` for `completingStack` Replace the use of `List` by the use of an already allocated array buffer. I'm curious if this will make a difference (most probably not), but since it's in a such a delicate part of the compiler it's better to rest assured. 
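For illustration (not part of the patch): the two stack idioms being swapped, shown self-contained with `String` standing in for `Symbol`.

```
import scala.collection.mutable

// List as a stack: every push allocates a fresh cons cell.
var listStack = List.empty[String]
listStack = "sym" :: listStack                 // push (completeInternal)
val topA = listStack.headOption.getOrElse("")  // peek (the lazy completing symbol)
listStack = listStack.tail                     // pop (finally block)

// Pre-sized ArrayBuffer as a stack: pushes reuse one backing array until it grows.
val bufStack = new mutable.ArrayBuffer[String](128)
bufStack += "sym"                                                        // push
val topB = if (bufStack.isEmpty) "" else bufStack(bufStack.length - 1)   // peek
bufStack.remove(bufStack.length - 1)                                     // pop
```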
--- .../scala/reflect/internal/pickling/UnPickler.scala | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index e438ffe9ce6a..f05dc8a39d08 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -246,7 +246,9 @@ abstract class UnPickler { adjust(mirrorThatLoaded(owner).missingHook(owner, name)) orElse { // (4) Create a stub symbol to defer hard failure a little longer. val advice = moduleAdvice(s"${owner.fullName}.$name") - val lazyCompletingSymbol = completingStack.headOption.getOrElse(NoSymbol) + val lazyCompletingSymbol = + if (completingStack.isEmpty) NoSymbol + else completingStack.apply(completingStack.length - 1) val missingMessage = s"""|Symbol '${name.nameKind} ${owner.fullName}.$name' is missing from the classpath. |This symbol is required by '${lazyCompletingSymbol.kindString} ${lazyCompletingSymbol.fullName}'. @@ -701,14 +703,14 @@ abstract class UnPickler { * * Useful for reporting on stub errors and cyclic errors. */ - private var completingStack = List.empty[Symbol] + private var completingStack = new mutable.ArrayBuffer[Symbol](128) /** A lazy type which when completed returns type at index `i`. */ private class LazyTypeRef(i: Int) extends LazyType with FlagAgnosticCompleter { private val definedAtRunId = currentRunId private val p = phase protected def completeInternal(sym: Symbol) : Unit = try { - completingStack = sym :: completingStack + completingStack += sym val tp = at(i, () => readType(sym.isTerm)) // after NMT_TRANSITION, revert `() => readType(sym.isTerm)` to `readType` // This is a temporary fix allowing to read classes generated by an older, buggy pickler. @@ -732,7 +734,7 @@ abstract class UnPickler { catch { case e: MissingRequirementError => throw toTypeError(e) } finally { - completingStack = completingStack.tail + completingStack.remove(completingStack.length - 1) } override def complete(sym: Symbol) : Unit = { From 9d51bdfc6ef1ef189bfdcc4c7623e45a8be11915 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 24 Sep 2017 14:50:01 -0700 Subject: [PATCH 1051/2793] No warn unused import on macro expansion Creating an import context registers the import for the unused warning. However, if the use site is already typechecked, then on re-typechecking a macro expansion, the use won't be registered. As a quick fix, if there are open macros, don't register an import for unused checking. 
Fixes scala/bug#10270 --- .../scala/tools/nsc/typechecker/Contexts.scala | 4 ++-- .../scala/tools/nsc/typechecker/Typers.scala | 6 ++---- test/files/neg/t10270.check | 6 ++++++ test/files/neg/t10270.flags | 1 + test/files/neg/t10270/Macros_1.scala | 16 ++++++++++++++++ test/files/neg/t10270/Main_2.scala | 16 ++++++++++++++++ test/files/neg/warn-unused-imports.check | 5 +---- .../warn-unused-imports_2.scala | 2 +- test/files/pos/t10270.flags | 1 + test/files/pos/t10270/Macros_1.scala | 16 ++++++++++++++++ test/files/pos/t10270/Main_2.scala | 15 +++++++++++++++ 11 files changed, 77 insertions(+), 11 deletions(-) create mode 100644 test/files/neg/t10270.check create mode 100644 test/files/neg/t10270.flags create mode 100644 test/files/neg/t10270/Macros_1.scala create mode 100644 test/files/neg/t10270/Main_2.scala create mode 100644 test/files/pos/t10270.flags create mode 100644 test/files/pos/t10270/Macros_1.scala create mode 100644 test/files/pos/t10270/Main_2.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 544b3d182f44..9c93ad2a1ed0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1245,14 +1245,14 @@ trait Contexts { self: Analyzer => trait ImportContext extends Context { private val impInfo: ImportInfo = { val info = new ImportInfo(tree.asInstanceOf[Import], outerDepth) - if (settings.warnUnusedImport && !isRootImport) // excludes java.lang/scala/Predef imports + if (settings.warnUnusedImport && openMacros.isEmpty && !isRootImport) // excludes java.lang/scala/Predef imports allImportInfos(unit) ::= info info } override final def imports = impInfo :: super.imports override final def firstImport = Some(impInfo) override final def isRootImport = !tree.pos.isDefined - override final def toString = super.toString + " with " + s"ImportContext { $impInfo; outer.owner = ${outer.owner} }" + override final def toString = s"${super.toString} with ImportContext { $impInfo; outer.owner = ${outer.owner} }" } /** A reporter for use during type checking. It has multiple modes for handling errors. 
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index bffc36a9b654..c98bebee185d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -45,9 +45,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper final val shortenImports = false // allows override of the behavior of the resetTyper method w.r.t comments - def resetDocComments() = { - clearDocComments() - } + def resetDocComments() = clearDocComments() def resetTyper() { //println("resetTyper called") @@ -3054,7 +3052,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedImport(imp : Import) : Import = (transformed remove imp) match { case Some(imp1: Import) => imp1 - case _ => log("unhandled import: "+imp+" in "+unit); imp + case _ => log(s"unhandled import: $imp in $unit"); imp } def typedStats(stats: List[Tree], exprOwner: Symbol, warnPure: Boolean = true): List[Tree] = { diff --git a/test/files/neg/t10270.check b/test/files/neg/t10270.check new file mode 100644 index 000000000000..be7da7475698 --- /dev/null +++ b/test/files/neg/t10270.check @@ -0,0 +1,6 @@ +Main_2.scala:5: warning: Unused import + import Implicits._ + ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found +one error found diff --git a/test/files/neg/t10270.flags b/test/files/neg/t10270.flags new file mode 100644 index 000000000000..c4e11e7fe70c --- /dev/null +++ b/test/files/neg/t10270.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Ywarn-unused:imports diff --git a/test/files/neg/t10270/Macros_1.scala b/test/files/neg/t10270/Macros_1.scala new file mode 100644 index 000000000000..056995d2497a --- /dev/null +++ b/test/files/neg/t10270/Macros_1.scala @@ -0,0 +1,16 @@ +import language.experimental.macros +import scala.reflect.macros.blackbox.Context + +// wraps a new Block so typer sees a local import on second typecheck +// +object Macro { + def apply(a: Any): Any = macro impl + + def impl(c: Context)(a: c.Tree): c.Tree = { + import c.universe._ + a match { + case Block(stmts, res) => Block(stmts, res) + case expr => Block(Nil, expr) + } + } +} diff --git a/test/files/neg/t10270/Main_2.scala b/test/files/neg/t10270/Main_2.scala new file mode 100644 index 000000000000..d43392701a20 --- /dev/null +++ b/test/files/neg/t10270/Main_2.scala @@ -0,0 +1,16 @@ + +object Main extends App { + + def f(): Any = Macro { + import Implicits._ + //"world".greeting + "world" + } + +} + +object Implicits { + implicit class `strung out`(val s: String) { + def greeting = s"hello, $s" + } +} diff --git a/test/files/neg/warn-unused-imports.check b/test/files/neg/warn-unused-imports.check index 29d73a6264ba..0a53d7a9cd24 100644 --- a/test/files/neg/warn-unused-imports.check +++ b/test/files/neg/warn-unused-imports.check @@ -51,8 +51,5 @@ warn-unused-imports_2.scala:149: warning: Unused import warn-unused-imports_2.scala:150: warning: Unused import import p1.A // warn ^ -warn-unused-imports_2.scala:158: warning: Unused import - def x = Macro.f // warn, not crash - ^ -17 warnings found +16 warnings found one error found diff --git a/test/files/neg/warn-unused-imports/warn-unused-imports_2.scala b/test/files/neg/warn-unused-imports/warn-unused-imports_2.scala index 58fe0131d967..47db5f5ecab9 100644 --- a/test/files/neg/warn-unused-imports/warn-unused-imports_2.scala +++ b/test/files/neg/warn-unused-imports/warn-unused-imports_2.scala @@ -155,5 +155,5 @@ trait 
Outsiders { } class MacroClient { - def x = Macro.f // warn, not crash + def x = Macro.f // don't crash; but also don't warn on expansion, see scala/bug#10270 and [pos|neg]/t10270 } diff --git a/test/files/pos/t10270.flags b/test/files/pos/t10270.flags new file mode 100644 index 000000000000..c4e11e7fe70c --- /dev/null +++ b/test/files/pos/t10270.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Ywarn-unused:imports diff --git a/test/files/pos/t10270/Macros_1.scala b/test/files/pos/t10270/Macros_1.scala new file mode 100644 index 000000000000..056995d2497a --- /dev/null +++ b/test/files/pos/t10270/Macros_1.scala @@ -0,0 +1,16 @@ +import language.experimental.macros +import scala.reflect.macros.blackbox.Context + +// wraps a new Block so typer sees a local import on second typecheck +// +object Macro { + def apply(a: Any): Any = macro impl + + def impl(c: Context)(a: c.Tree): c.Tree = { + import c.universe._ + a match { + case Block(stmts, res) => Block(stmts, res) + case expr => Block(Nil, expr) + } + } +} diff --git a/test/files/pos/t10270/Main_2.scala b/test/files/pos/t10270/Main_2.scala new file mode 100644 index 000000000000..33d34b8f7e6b --- /dev/null +++ b/test/files/pos/t10270/Main_2.scala @@ -0,0 +1,15 @@ + +object Main extends App { + + def f(): Any = Macro { + import Implicits._ + "world".greeting + } + +} + +object Implicits { + implicit class `strung out`(val s: String) { + def greeting = s"hello, $s" + } +} From 0a2fef775f19f892ceda7009733902dca33e3e45 Mon Sep 17 00:00:00 2001 From: Eyal Farago Date: Tue, 26 Sep 2017 09:14:09 +0300 Subject: [PATCH 1052/2793] ticket: remove commented out code, move class variables into main (according to review comments) and properly indent the test code. --- test/files/run/t10513.scala | 27 ++++++++++---------- test/junit/scala/concurrent/FutureTest.scala | 12 --------- 2 files changed, 14 insertions(+), 25 deletions(-) diff --git a/test/files/run/t10513.scala b/test/files/run/t10513.scala index fb8aec9d75cf..c9932879aa61 100644 --- a/test/files/run/t10513.scala +++ b/test/files/run/t10513.scala @@ -6,27 +6,28 @@ import scala.util.{Random, Try} import ExecutionContext.Implicits.global /** This test uses recursive calls to Future.flatMap to create arrays whose - * combined size is slightly greater than the JVM heap size. A previous - * implementation of Future.flatMap would retain references to each array, - * resulting in a speedy OutOfMemoryError. Now, each array should be freed soon - * after it is created and the test should complete without problems. - */ + * combined size is slightly greater than the JVM heap size. A previous + * implementation of Future.flatMap would retain references to each array, + * resulting in a speedy OutOfMemoryError. Now, each array should be freed soon + * after it is created and the test should complete without problems. 
+ */ object Test { - val arrSz = 50 * 10000 - val numFutures = 4000 - - val rng = new Random() - def main(args: Array[String]) { + val arrSz = 50 * 10000 + val numFutures = 4000 + + val rng = new Random() val longStandingPromise = Promise[Nothing] - val futures = List.tabulate(numFutures){ i => + val futures = List.tabulate(numFutures) { i => val arr = Array.tabulate(arrSz)(identity) val idx = rng.nextInt(arrSz) - val f1 = Future{ arr } + val f1 = Future { + arr + } val f2 = Future.firstCompletedOf(List(longStandingPromise.future, f1)) - f2.map( arr => arr(idx)) + f2.map(arr => arr(idx)) } val fSeq = Future.sequence(futures) val finalF = fSeq.map(_.sum) diff --git a/test/junit/scala/concurrent/FutureTest.scala b/test/junit/scala/concurrent/FutureTest.scala index ac34dfae51f0..cd687479e3ee 100644 --- a/test/junit/scala/concurrent/FutureTest.scala +++ b/test/junit/scala/concurrent/FutureTest.scala @@ -23,17 +23,5 @@ class FutureTest { assertNotReachable(result, unfulfilled) { quick.complete(Try(result)) } - - /* The test has this structure: - val p = Promise[String] - val q = Promise[String] - val res = Promise[String] - val s = "hi" - p.future.onComplete(t => res.complete(t)) - q.future.onComplete(t => res.complete(t)) - assertNotReachable(s, q) { - p.complete(Try(s)) - } - */ } } From 493ab52bf5553194ddf1bfc4fd8385ae54f57ef0 Mon Sep 17 00:00:00 2001 From: jvican Date: Mon, 25 Sep 2017 14:44:31 +0200 Subject: [PATCH 1053/2793] Clean the original tree attachment in erasure The original tree attachment is useful for compiler plugins and macros. However, this commit constraints compiler plugins to run before erasure if they want to inspect the original trees so that we free up memory as soon as possible. This commit removes the attachment in pre erasure, taking advantage that we modified literals before. 
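For context (not part of the patch): given this cleanup, a plugin that wants to see `OriginalTreeAttachment` has to schedule its phase no later than erasure. A minimal sketch of such a component, with illustrative names:

```
import scala.tools.nsc.{Global, Phase}
import scala.tools.nsc.plugins.{Plugin, PluginComponent}

class OriginalTreesPlugin(val global: Global) extends Plugin {
  val name = "original-trees"
  val description = "inspects original trees recorded by typer"
  val components = List(component)

  object component extends PluginComponent {
    val global: OriginalTreesPlugin.this.global.type = OriginalTreesPlugin.this.global
    val phaseName = "original-trees"
    val runsAfter = List("typer")
    override val runsBefore = List("erasure") // after (pre-)erasure the attachment is gone
    def newPhase(prev: Phase) = new StdPhase(prev) {
      def apply(unit: global.CompilationUnit): Unit = {
        // walk unit.body and read tree.attachments here
      }
    }
  }
}
```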
--- .../scala/tools/nsc/transform/Erasure.scala | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 34f94f3fa631..edcbb3cd1d85 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -1226,13 +1226,17 @@ abstract class Erasure extends InfoTransform case Match(selector, cases) => Match(Typed(selector, TypeTree(selector.tpe)), cases) - case Literal(ct) if ct.tag == ClazzTag - && ct.typeValue.typeSymbol != definitions.UnitClass => - val erased = ct.typeValue.dealiasWiden match { - case tr @ TypeRef(_, clazz, _) if clazz.isDerivedValueClass => scalaErasure.eraseNormalClassRef(tr) - case tpe => specialScalaErasure(tpe) - } - treeCopy.Literal(tree, Constant(erased)) + case Literal(ct) => + // We remove the original tree attachments in pre-easure to free up memory + val cleanLiteral = tree.removeAttachment[OriginalTreeAttachment] + + if (ct.tag == ClazzTag && ct.typeValue.typeSymbol != definitions.UnitClass) { + val erased = ct.typeValue.dealiasWiden match { + case tr @ TypeRef(_, clazz, _) if clazz.isDerivedValueClass => scalaErasure.eraseNormalClassRef(tr) + case tpe => specialScalaErasure(tpe) + } + treeCopy.Literal(cleanLiteral, Constant(erased)) + } else cleanLiteral case ClassDef(_,_,_,_) => debuglog("defs of " + tree.symbol + " = " + tree.symbol.info.decls) From 623fad3aa4d4e1234e5f16f9d7507a9cd3ef82d1 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 26 Sep 2017 16:26:26 -0700 Subject: [PATCH 1054/2793] Address lrytz's review feedback --- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index e46d5dbee19f..e75be0575f92 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -674,7 +674,6 @@ abstract class RefChecks extends Transform { // If there is a concrete method whose name matches the unimplemented // abstract method, and a cursory examination of the difference reveals // something obvious to us, let's make it more obvious to them. - val abstractParams = underlying.tpe.paramTypes val abstractParamLists = underlying.paramLists val matchingName = clazz.tpe.nonPrivateMembersAdmitting(VBRIDGE) val matchingArity = matchingName filter { m => @@ -682,7 +681,6 @@ abstract class RefChecks extends Transform { (m.name == underlying.name) && (m.paramLists.length == abstractParamLists.length) && (m.paramLists.map(_.length).sum == abstractParamLists.map(_.length).sum) && - (m.tpe.paramTypes.size == underlying.tpe.paramTypes.size) && (m.tpe.typeParams.size == underlying.tpe.typeParams.size) } From 5197256b44f34fa50ed468954d0bfb3bc469477d Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Sat, 2 Sep 2017 21:40:44 -0700 Subject: [PATCH 1055/2793] Simplify pattern expansion logic The goal is to fix some symbol hygiene issues with extractor patterns that involve an unapply method whose result type depends on the unapply argument. That's coming in a follow-up commit. First need to deal with code smells in the area. Also clean up typedUnapply a bit. 
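For illustration (not from the patch): the kind of extractor the first sentence refers to, where the `unapply` result type depends on its argument, so the subpattern types have to be expressed in terms of the actual scrutinee rather than the internal unapply-selector dummy. The `Box`/`Unbox` names are made up.

```
trait Box { type Elem; def value: Elem }

object Unbox {
  // The result type Option[b.Elem] depends on the argument `b`.
  def unapply(b: Box): Option[b.Elem] = Some(b.value)
}

def describe(b: Box): String = b match {
  case Unbox(v) => s"got $v" // v is typed in terms of this particular b
  case _        => "empty"
}
```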
--- .../nsc/transform/patmat/MatchCodeGen.scala | 2 +- .../transform/patmat/MatchTranslation.scala | 60 ++--- .../transform/patmat/PatternExpander.scala | 171 ------------- .../transform/patmat/PatternExpansion.scala | 237 ++++++++++++++++++ .../transform/patmat/PatternMatching.scala | 2 +- .../patmat/ScalacPatternExpanders.scala | 163 ------------ .../tools/nsc/typechecker/Checkable.scala | 8 +- .../tools/nsc/typechecker/ContextErrors.scala | 6 +- .../tools/nsc/typechecker/PatternTypers.scala | 131 +++++----- .../scala/reflect/internal/Definitions.scala | 42 +--- test/files/neg/t4425b.check | 8 +- 11 files changed, 344 insertions(+), 486 deletions(-) delete mode 100644 src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala create mode 100644 src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala delete mode 100644 src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala index 04648621ad0d..7b8a5fd31a50 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala @@ -81,7 +81,7 @@ trait MatchCodeGen extends Interface { def drop(tgt: Tree)(n: Int): Tree = { def callDirect = fn(tgt, nme.drop, LIT(n)) def callRuntime = Apply(REF(currentRun.runDefinitions.traversableDropMethod), tgt :: LIT(n) :: Nil) - def needsRuntime = (tgt.tpe ne null) && (typeOfMemberNamedDrop(tgt.tpe) == NoType) + def needsRuntime = (tgt.tpe ne null) && (elementTypeFromDrop(tgt.tpe) == NoType) if (needsRuntime) callRuntime else callDirect } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 6e19a73d6b40..d7fa5a6e1592 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -114,7 +114,7 @@ trait MatchTranslation { // paramType = the type expected by the unapply // TODO: paramType may contain unbound type params (run/t2800, run/t3530) val makers = { - val paramType = extractor.aligner.wholeType + val paramType = extractor.expectedExtractedType // Statically conforms to paramType if (tpe <:< paramType) treeMaker(binder, false, pos) :: Nil else { @@ -373,19 +373,13 @@ trait MatchTranslation { object ExtractorCall { // TODO: check unargs == args def apply(tree: Tree): ExtractorCall = tree match { - case UnApply(unfun, args) => new ExtractorCallRegular(alignPatterns(context, tree), unfun, args) // extractor - case Apply(fun, args) => new ExtractorCallProd(alignPatterns(context, tree), fun, args) // case class + case UnApply(unfun@Unapplied(fun), args) => new ExtractorCallRegular(fun, args)(unfun) // extractor + case Apply(fun, args) => new ExtractorCallProd(fun, args) // case class } } - abstract class ExtractorCall(val aligner: PatternAligned) { - import aligner._ - def fun: Tree - def args: List[Tree] - - // don't go looking for selectors if we only expect one pattern - def rawSubPatTypes = aligner.extractedTypes - def resultInMonad = if (isBool) UnitTpe else typeOfMemberNamedGet(resultType) + abstract class ExtractorCall(fun: Tree, args: List[Tree]) extends ExtractorAlignment(fun, args)(context) { + def resultInMonad = if (isBool) UnitTpe else elementTypeFromGet(resultType) def resultType = fun.tpe.finalResultType /** Create the TreeMaker that 
embodies this extractor call @@ -407,15 +401,10 @@ trait MatchTranslation { // never store these in local variables (for PreserveSubPatBinders) lazy val ignoredSubPatBinders: Set[Symbol] = subPatBinders zip args collect { case (b, PatternBoundToUnderscore()) => b } toSet - // do repeated-parameter expansion to match up with the expected number of arguments (in casu, subpatterns) - private def nonStarSubPatTypes = aligner.typedNonStarPatterns map (_.tpe) - - def subPatTypes: List[Type] = typedPatterns map (_.tpe) - // there are `productArity` non-seq elements in the tuple. protected def firstIndexingBinder = productArity protected def expectedLength = elementArity - protected def lastIndexingBinder = totalArity - starArity - 1 + protected def lastIndexingBinder = nonStarArity - 1 private def productElemsToN(binder: Symbol, n: Int): List[Tree] = 1 to n map tupleSel(binder) toList private def genTake(binder: Symbol, n: Int): List[Tree] = (0 until n).toList map (codegen index seqTree(binder)) @@ -429,7 +418,7 @@ trait MatchTranslation { // referenced by `binder` protected def subPatRefsSeq(binder: Symbol): List[Tree] = { def lastTrees: List[Tree] = ( - if (!aligner.isStar) Nil + if (!isStar) Nil else if (expectedLength == 0) seqTree(binder) :: Nil else genDrop(binder, expectedLength) ) @@ -462,7 +451,7 @@ trait MatchTranslation { // `binder.lengthCompare(expectedLength)` // ...if binder has a lengthCompare method, otherwise // `scala.math.signum(binder.length - expectedLength)` - def checkExpectedLength = sequenceType member nme.lengthCompare match { + def checkExpectedLength = lengthCompareSym match { case NoSymbol => compareInts(Select(seqTree(binder), nme.length), LIT(expectedLength)) case lencmp => (seqTree(binder) DOT lencmp)(LIT(expectedLength)) } @@ -471,7 +460,7 @@ trait MatchTranslation { // when the last subpattern is a wildcard-star the expectedLength is but a lower bound // (otherwise equality is required) def compareOp: (Tree, Tree) => Tree = - if (aligner.isStar) _ INT_>= _ + if (isStar) _ INT_>= _ else _ INT_== _ // `if (binder != null && $checkExpectedLength [== | >=] 0) then else zero` @@ -487,7 +476,7 @@ trait MatchTranslation { // TODO: to be called when there's a def unapplyProd(x: T): U // U must have N members _1,..., _N -- the _i are type checked, call their type Ti, // for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it) - class ExtractorCallProd(aligner: PatternAligned, val fun: Tree, val args: List[Tree]) extends ExtractorCall(aligner) { + class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(fun, args) { /** Create the TreeMaker that embodies this extractor call * * `binder` has been casted to `paramType` if necessary @@ -495,16 +484,12 @@ trait MatchTranslation { * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder */ def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = { - val paramAccessors = aligner.wholeType.typeSymbol.constrParamAccessors + val paramAccessors = expectedExtractedType.typeSymbol.constrParamAccessors val numParams = paramAccessors.length def paramAccessorAt(subPatIndex: Int) = paramAccessors(math.min(subPatIndex, numParams - 1)) // binders corresponding to mutable fields should be stored (scala/bug#5158, scala/bug#6070) // make an exception for classes under the scala package as they should be well-behaved, // to optimize matching on List - val hasRepeated = 
paramAccessors.lastOption match { - case Some(x) => definitions.isRepeated(x) - case _ => false - } val mutableBinders = ( if (!binder.info.typeSymbol.hasTransOwner(ScalaPackageClass) && (paramAccessors exists (x => x.isMutable || definitions.isRepeated(x)))) { @@ -512,7 +497,7 @@ trait MatchTranslation { subPatBinders.zipWithIndex.flatMap { case (binder, idx) => val param = paramAccessorAt(idx) - if (param.isMutable || (definitions.isRepeated(param) && !aligner.isStar)) binder :: Nil + if (param.isMutable || (definitions.isRepeated(param) && !isStar)) binder :: Nil else Nil } } else Nil @@ -524,15 +509,19 @@ trait MatchTranslation { // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component override protected def tupleSel(binder: Symbol)(i: Int): Tree = { - val accessors = aligner.wholeType.typeSymbol.caseFieldAccessors + val accessors = expectedExtractedType.typeSymbol.caseFieldAccessors if (accessors isDefinedAt (i-1)) gen.mkAttributedStableRef(binder) DOT accessors(i-1) else codegen.tupleSel(binder)(i) // this won't type check for case classes, as they do not inherit ProductN } } - class ExtractorCallRegular(aligner: PatternAligned, extractorCallIncludingDummy: Tree, val args: List[Tree]) extends ExtractorCall(aligner) { - val Unapplied(fun) = extractorCallIncludingDummy - + /** + * + * @param fun reference to the unapply method + * @param args the subpatterns + * @param funAppliedToUnapplySelector an application of the unapply method to the (dummy) unapply selector + */ + class ExtractorCallRegular(fun: Tree, args: List[Tree])(funAppliedToUnapplySelector: Tree) extends ExtractorCall(fun, args) { /** Create the TreeMaker that embodies this extractor call * * `binder` has been casted to `paramType` if necessary @@ -552,7 +541,7 @@ trait MatchTranslation { // directly from the extractor's result type val binder = freshSym(pos, pureType(resultInMonad)) val potentiallyMutableBinders: Set[Symbol] = - if (extractorApply.tpe.typeSymbol.isNonBottomSubClass(OptionClass) && !aligner.isSeq) + if (extractorApply.tpe.typeSymbol.isNonBottomSubClass(OptionClass) && !isSeq) Set.empty else // Ensures we capture unstable bound variables eagerly. These can arise under name based patmat or by indexing into mutable Seqs. 
See run t9003.scala @@ -562,7 +551,7 @@ trait MatchTranslation { subPatBinders, subPatRefs(binder), potentiallyMutableBinders, - aligner.isBool, + isBool, checkedLength, patBinderOrCasted, ignoredSubPatBinders @@ -576,7 +565,7 @@ trait MatchTranslation { // the trees that select the subpatterns on the extractor's result, referenced by `binder` // require (totalArity > 0 && (!lastIsStar || isSeq)) override protected def subPatRefs(binder: Symbol): List[Tree] = - if (aligner.isSingle) REF(binder) :: Nil // special case for extractors + if (isSingle) REF(binder) :: Nil // special case for extractors else super.subPatRefs(binder) protected def spliceApply(binder: Symbol): Tree = { @@ -594,10 +583,9 @@ trait MatchTranslation { super.transform(t) } } - splice transform extractorCallIncludingDummy + splice transform funAppliedToUnapplySelector } - override def rawSubPatTypes = aligner.extractor.varargsTypes } } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala deleted file mode 100644 index 1916050dd8a3..000000000000 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpander.scala +++ /dev/null @@ -1,171 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala -package tools -package nsc -package transform -package patmat - -/** An extractor returns: F1, F2, ..., Fi, opt[Seq[E] or E*] - * A case matches: P1, P2, ..., Pj, opt[Seq[E]] - * Put together: P1/F1, P2/F2, ... Pi/Fi, Pi+1/E, Pi+2/E, ... Pj/E, opt[Seq[E]] - * - * Here Pm/Fi is the last pattern to match the fixed arity section. - * - * productArity: the value of i, i.e. the number of non-sequence types in the extractor - * nonStarArity: the value of j, i.e. the number of non-star patterns in the case definition - * elementArity: j - i, i.e. the number of non-star patterns which must match sequence elements - * starArity: 1 or 0 based on whether there is a star (sequence-absorbing) pattern - * totalArity: nonStarArity + starArity, i.e. the number of patterns in the case definition - * - * Note that productArity is a function only of the extractor, and - * nonStar/star/totalArity are all functions of the patterns. The key - * value for aligning and typing the patterns is elementArity, as it - * is derived from both sets of information. - */ -trait PatternExpander[Pattern, Type] { - /** You'll note we're not inside the cake. "Pattern" and "Type" are - * arbitrary types here, and NoPattern and NoType arbitrary values. - */ - def NoPattern: Pattern - def NoType: Type - - /** It's not optimal that we're carrying both sequence and repeated - * type here, but the implementation requires more unraveling before - * it can be avoided. - * - * sequenceType is Seq[T], elementType is T, repeatedType is T*. 
- */ - sealed case class Repeated(sequenceType: Type, elementType: Type, repeatedType: Type) { - def exists = elementType != NoType - - def elementList = if (exists) elementType :: Nil else Nil - def sequenceList = if (exists) sequenceType :: Nil else Nil - def repeatedList = if (exists) repeatedType :: Nil else Nil - - override def toString = s"${elementType}*" - } - object NoRepeated extends Repeated(NoType, NoType, NoType) { - override def toString = "" - } - - final case class Patterns(fixed: List[Pattern], star: Pattern) { - def hasStar = star != NoPattern - def starArity = if (hasStar) 1 else 0 - def nonStarArity = fixed.length - def totalArity = nonStarArity + starArity - def starPatterns = if (hasStar) star :: Nil else Nil - def all = fixed ::: starPatterns - - override def toString = all mkString ", " - } - - /** An 'extractor' can be a case class or an unapply or unapplySeq method. - * Decoding what it is that they extract takes place before we arrive here, - * so that this class can concentrate only on the relationship between - * patterns and types. - * - * In a case class, the class is the unextracted type and the fixed and - * repeated types are derived from its constructor parameters. - * - * In an unapply, this is reversed: the parameter to the unapply is the - * unextracted type, and the other types are derived based on the return - * type of the unapply method. - * - * In other words, this case class and unapply are encoded the same: - * - * case class Foo(x: Int, y: Int, zs: Char*) - * def unapplySeq(x: Foo): Option[(Int, Int, Seq[Char])] - * - * Both are Extractor(Foo, Int :: Int :: Nil, Repeated(Seq[Char], Char, Char*)) - * - * @param whole The type in its unextracted form - * @param fixed The non-sequence types which are extracted - * @param repeated The sequence type which is extracted - */ - final case class Extractor(whole: Type, fixed: List[Type], repeated: Repeated, typeOfSinglePattern: Type) { - require(whole != NoType, s"expandTypes($whole, $fixed, $repeated)") - - /** A pattern with arity-1 that doesn't match the arity of the Product-like result of the `get` method, - * will match that result in its entirety. Example: - * - * {{{ - * warning: there was one deprecation warning; re-run with -deprecation for details - * scala> object Extractor { def unapply(a: Any): Option[(Int, String)] = Some((1, "2")) } - * defined object Extractor - * - * scala> "" match { case Extractor(x: Int, y: String) => } - * - * scala> "" match { case Extractor(xy : (Int, String)) => } - * warning: there was one deprecation warning; re-run with -deprecation for details - * }}} - * */ - def asSinglePattern: Extractor = copy(fixed = List(typeOfSinglePattern)) - - def productArity = fixed.length - def hasSeq = repeated.exists - def elementType = repeated.elementType - def sequenceType = repeated.sequenceType - def allTypes = fixed ::: repeated.sequenceList - def varargsTypes = fixed ::: repeated.repeatedList - def isErroneous = allTypes contains NoType - - private def typeStrings = fixed.map("" + _) ::: ( if (hasSeq) List("" + repeated) else Nil ) - - def offeringString = if (isErroneous) "" else typeStrings match { - case Nil => "Boolean" - case tp :: Nil => tp - case tps => tps.mkString("(", ", ", ")") - } - override def toString = "%s => %s".format(whole, offeringString) - } - - final case class TypedPat(pat: Pattern, tpe: Type) { - override def toString = s"$pat: $tpe" - } - - /** If elementArity is... - * 0: A perfect match between extractor and the fixed patterns. 
- * If there is a star pattern it will match any sequence. - * > 0: There are more patterns than products. There will have to be a - * sequence which can populate at least patterns. - * < 0: There are more products than patterns: compile time error. - */ - final case class Aligned(patterns: Patterns, extractor: Extractor) { - def elementArity = patterns.nonStarArity - productArity - def productArity = extractor.productArity - def starArity = patterns.starArity - def totalArity = patterns.totalArity - - def wholeType = extractor.whole - def sequenceType = extractor.sequenceType - def productTypes = extractor.fixed - def extractedTypes = extractor.allTypes - def typedNonStarPatterns = products ::: elements - def typedPatterns = typedNonStarPatterns ::: stars - - def isBool = !isSeq && productArity == 0 - def isSingle = !isSeq && totalArity == 1 - def isStar = patterns.hasStar - def isSeq = extractor.hasSeq - - private def typedAsElement(pat: Pattern) = TypedPat(pat, extractor.elementType) - private def typedAsSequence(pat: Pattern) = TypedPat(pat, extractor.sequenceType) - private def productPats = patterns.fixed take productArity - private def elementPats = patterns.fixed drop productArity - private def products = (productPats, productTypes).zipped map TypedPat - private def elements = elementPats map typedAsElement - private def stars = patterns.starPatterns map typedAsSequence - - override def toString = s""" - |Aligned { - | patterns $patterns - | extractor $extractor - | arities $productArity/$elementArity/$starArity // product/element/star - | typed ${typedPatterns mkString ", "} - |}""".stripMargin.trim - } -} diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala new file mode 100644 index 000000000000..0f7ab169e91a --- /dev/null +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala @@ -0,0 +1,237 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2013 LAMP/EPFL + * @author Paul Phillips + */ + +package scala +package tools +package nsc +package transform +package patmat + +import scala.tools.nsc.typechecker.Contexts + +/** An 'extractor' can be a case class or an unapply or unapplySeq method. + * + * In a case class, the class is the unextracted type and the fixed and + * repeated types are derived from its constructor parameters. + * + * In an unapply, this is reversed: the parameter to the unapply is the + * unextracted type, and the other types are derived based on the return + * type of the unapply method. + * + * An extractor returns: F1, F2, ..., Fi, opt[Seq[E] or E*] + * A case matches: P1, P2, ..., Pj, opt[Seq[E]] + * Put together: P1/F1, P2/F2, ... Pi/Fi, Pi+1/E, Pi+2/E, ... Pj/E, opt[Seq[E]] + * + * Here Pm/Fi is the last pattern to match the fixed arity section. + * + * productArity: the value of i, i.e. the number of non-sequence types in the extractor + * nonStarArity: the value of j, i.e. the number of non-star patterns in the case definition + * elementArity: j - i, i.e. the number of non-star patterns which must match sequence elements + * starArity: 1 or 0 based on whether there is a star (sequence-absorbing) pattern + * totalArity: nonStarArity + starArity, i.e. the number of patterns in the case definition + * + * Note that productArity is a function only of the extractor, and + * nonStar/star/totalArity are all functions of the patterns. 
The key + * value for aligning and typing the patterns is elementArity, as it + * is derived from both sets of information. + * + * If elementArity is... + * - zero: A perfect match between extractor and the fixed patterns. + * If there is a star pattern it will match any sequence. + * - positive: There are more patterns than products. There will have to be a + * sequence which can populate at least `elementArity` patterns. + * - negative: There are more products than patterns: compile time error. + * + */ +trait PatternExpansion { + val global: Global + + import global._ + import definitions._ + import treeInfo._ + + def unapplyFormals(fun: Tree, args: List[Tree])(context: Contexts#Context): List[Type] = + new ExtractorAlignment(fun, args)(context).unapplyFormals.map{case NoType => ErrorType case tp => tp} + + /** The arities we can derive looking only at the subpatterns (the args of the unapply node) */ + trait ExtractorSubPatternAlignment { + /** Args will be broken down into the concatenation of: + * `productArity` product patterns (fixed length, corresponding to fields in case class or tuple components in classic unapply, + * or product selectors in product-based unapply) + * `elementArity` element patterns (explicit patterns that pick off the prefix of the final sequence-valued component of the unapply, + * or a repeated case constructor arg) + * `starArity` star patterns (0 or 1, absorbs the remaining variable-length components) + */ + def args: List[Tree] + + // args.length == nonStarArity + starArity + val (nonStarArity, isStar) = args match { + case init :+ last if treeInfo.isStar(last) => (init.length, true) + case _ => (args.length, false) + } + + def starArity = if (isStar) 1 else 0 + def totalArity = nonStarArity + starArity + } + + // Analyze the fun / args of a case class or extractor pattern in terms of repeated patterns etc. 
+ // Extracts some info from signatures of get/apply/head methods (name-based patmat) + class ExtractorAlignment(val fun: Tree, val args: List[Tree])(context: Contexts#Context) extends ExtractorSubPatternAlignment { + def productArity = productTypes.length // values coming from the fixed-length content + + def elementArity = nonStarArity - productArity // number of elements picked off from the sequence (the variable-length values of the extracted parts) + def isSeq = elementType ne NoType + + def isBool = !isSeq && productTypes.isEmpty + def isSingle = !isSeq && totalArity == 1 // a Tuple1 is not decomposed + + // the expected argument type of the unapply method (or the result type of the case class constructor) + def expectedExtractedType = + if (isUnapply || isUnapplySeq) firstParamType(fun.tpe) + else fun.tpe.finalResultType // result type of the case class constructor + + // expected types for subpatterns (using repeated param type to absorb the + // variable-length content, i.e., the elements and the final star pattern) + def unapplyFormals: List[Type] = + if (isSeq) productTypes :+ repeatedType else productTypes + + def subPatTypes: List[Type] = { + val withoutStar = productTypes ::: List.fill(elementArity)(elementType) + if (isStar) withoutStar :+ sequenceType else withoutStar + } + + def lengthCompareSym = sequenceType member nme.lengthCompare + + // rest is private + private val isUnapply = fun.symbol.name == nme.unapply + private val isUnapplySeq = fun.symbol.name == nme.unapplySeq + private def isBooleanUnapply = isUnapply && unapplyResultWithDummyUnapplySelector =:= BooleanTpe + private def isRepeatedCaseClass = caseCtorParamTypes.exists(tpes => tpes.nonEmpty && isScalaRepeatedParamType(tpes.last)) + + private def caseCtorParamTypes: Option[List[Type]] = + if (isUnapply || isUnapplySeq) None else Some(fun.tpe.paramTypes) + + // TODO: the remainder needs to be reviewed regarding use of unapply-selector as a dummy argument, + // on which the unapply method's result type may depend + private def unapplyResultWithDummyUnapplySelector = fun.tpe.finalResultType + + private def resultOfGetInMonad = elementTypeFromGet(unapplyResultWithDummyUnapplySelector) + + // For a traditional extractor that returns an `Option[TupleN[..Ti..]]`, the component types `..Ti..` + // Note, we do not unwrap a Tuple1... (similar for fromProductSelectors -- see pos/t796) + private def fromTupleComponents: Option[List[Type]] = + resultOfGetInMonad match { + case res if isTupleType(res) => + val components = tupleComponents(res) + if (components.lengthCompare(1) > 0) Some(components) + else None + case _ => None + } + private def tupleValuedUnapply = fromTupleComponents.nonEmpty + + private def fromProductSelectors: Option[List[Type]] = { + val res = resultOfGetInMonad + // Can't only check for _1 thanks to pos/t796. + if (res.hasNonPrivateMember(nme._1) && res.hasNonPrivateMember(nme._2)) + Some(Stream.from(1).map(n => res.nonPrivateMember(newTermName("_" + n))). + takeWhile(m => m.isMethod && m.paramLists.isEmpty).toList.map(m => res.memberType(m).resultType)) + else None + } + + private def booleanUnapply = if (isBooleanUnapply) Some(Nil) else None + + // In terms of the (equivalent -- if we're dealing with an unapply) case class, what are the constructor's parameter types? 
+ private val equivConstrParamTypes = + caseCtorParamTypes orElse + booleanUnapply orElse + fromTupleComponents orElse + fromProductSelectors getOrElse + (resultOfGetInMonad :: Nil) // hope for the best + + // The non-sequence types which are extracted + private val productTypes = + if (equivConstrParamTypes.isEmpty) Nil + else if (isUnapplySeq || (!isUnapply && isRepeatedCaseClass)) equivConstrParamTypes.init + // scala/bug#9029 A pattern with arity-1 that doesn't match the arity of + // the Product-like result of the `get` method, will match that result in its entirety. + // + // ``` + // warning: there was one deprecation warning; re-run with -deprecation for details + // scala> object Extractor { def unapply(a: Any): Option[(Int, String)] = Some((1, "2")) } + // defined object Extractor + // + // scala> "" match { case Extractor(x: Int, y: String) => } + // + // scala> "" match { case Extractor(xy : (Int, String)) => } + // warning: there was one deprecation warning; re-run with -deprecation for details + // ``` + else if (totalArity == 1 && equivConstrParamTypes.tail.nonEmpty) { + warnPatternTupling() + (if (tupleValuedUnapply) tupleType(equivConstrParamTypes) else resultOfGetInMonad) :: Nil + } + else equivConstrParamTypes + + private def notRepeated = (NoType, NoType, NoType) + private val (elementType, sequenceType, repeatedType) = + // case class C() is deprecated, but still need to defend against equivConstrParamTypes.isEmpty + if (isUnapply || equivConstrParamTypes.isEmpty) notRepeated + else { + val lastParamTp = equivConstrParamTypes.last + if (isUnapplySeq) { + val elementTp = + elementTypeFromHead(lastParamTp) orElse + elementTypeFromApply(lastParamTp) orElse + definitions.elementType(ArrayClass, lastParamTp) + + (elementTp, lastParamTp, scalaRepeatedType(elementTp)) + } else { + definitions.elementType(RepeatedParamClass, lastParamTp) match { + case NoType => notRepeated + case elementTp => (elementTp, seqType(elementTp), lastParamTp) + } + } + } + + // errors & warnings + + private def err(msg: String) = context.error(fun.pos,msg) + private def warn(msg: String) = context.warning(fun.pos,msg) + private def depr(msg: String, since: String) = currentRun.reporting.deprecationWarning(fun.pos, fun.symbol.owner, msg, since) + + private def warnPatternTupling() = + if (effectivePatternArity(args) == 1 && tupleValuedUnapply) { + val acceptMessage = + if (equivConstrParamTypes contains NoType) "" + else s" to hold ${equivConstrParamTypes.mkString("(", ", ", ")")}" + val sym = fun.symbol.owner + val arr = equivConstrParamTypes.length + depr(s"${sym} expects $arr patterns$acceptMessage but crushing into $arr-tuple to fit single pattern (scala/bug#6675)", "2.11.0") + } + + private def arityError(mismatch: String) = { + val isErroneous = (productTypes contains NoType) && !(isSeq && (sequenceType ne NoType)) + + val offeringString = if (isErroneous) "" else productTypes match { + case tps if isSeq => (tps.map(_.toString) :+ s"${elementType}*").mkString("(", ", ", ")") + case Nil => "Boolean" + case tp :: Nil => tp + case tps => tps.mkString("(", ", ", ")") + } + val offerString = if (isErroneous) "" else s" offering $offeringString" + val expected = (if (isSeq) "at least " else "") + productArity + err(s"$mismatch patterns for ${fun.symbol.owner}$offerString: expected $expected, found $totalArity") + } + + // emit error/warning on mismatch + if (isStar && !isSeq) err("Star pattern must correspond with varargs or unapplySeq") + else if (equivConstrParamTypes == List(NoType)) err(s"The 
result type of an ${fun.symbol.name} method must contain a member `get` to be used as an extractor pattern, no such member exists in ${unapplyResultWithDummyUnapplySelector}") + else if (elementArity < 0) arityError("not enough") + else if (elementArity > 0 && !isSeq) arityError("too many") + else if (settings.warnStarsAlign && isSeq && productArity > 0 && elementArity > 0) warn( + if (isStar) "Sequence wildcard (_*) does not align with repeated case parameter or extracted sequence; the result may be unexpected." + else "A repeated case parameter or extracted sequence is not matched by a sequence wildcard (_*), and may fail at runtime.") + + } +} diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index 9026221cb813..d60444768cf6 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -47,7 +47,7 @@ trait PatternMatching extends Transform with MatchAnalysis with MatchOptimization with MatchWarnings - with ScalacPatternExpanders { + with PatternExpansion { import global._ val phaseName: String = "patmat" diff --git a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala b/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala deleted file mode 100644 index 902015f3c41e..000000000000 --- a/src/compiler/scala/tools/nsc/transform/patmat/ScalacPatternExpanders.scala +++ /dev/null @@ -1,163 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips - */ - -package scala -package tools -package nsc -package transform -package patmat - -/** This is scalac-specific logic layered on top of the scalac-agnostic - * "matching products to patterns" logic defined in PatternExpander. 
- */ -trait ScalacPatternExpanders { - val global: Global - - import global._ - import definitions._ - import treeInfo._ - import analyzer._ - - type PatternAligned = ScalacPatternExpander#Aligned - - implicit class AlignedOps(val aligned: PatternAligned) { - import aligned._ - def expectedTypes = typedPatterns map (_.tpe) - def unexpandedFormals = extractor.varargsTypes - } - trait ScalacPatternExpander extends PatternExpander[Tree, Type] { - def NoPattern = EmptyTree - def NoType = global.NoType - - def newPatterns(patterns: List[Tree]): Patterns = patterns match { - case init :+ last if isStar(last) => Patterns(init, last) - case _ => Patterns(patterns, NoPattern) - } - def elementTypeOf(tpe: Type) = { - val seq = repeatedToSeq(tpe) - - ( typeOfMemberNamedHead(seq) - orElse typeOfMemberNamedApply(seq) - orElse definitions.elementType(ArrayClass, seq) - ) - } - def newExtractor(whole: Type, fixed: List[Type], repeated: Repeated, typeOfSinglePattern: Type): Extractor = - logResult(s"newExtractor($whole, $fixed, $repeated, $typeOfSinglePattern")(Extractor(whole, fixed, repeated, typeOfSinglePattern)) - def newExtractor(whole: Type, fixed: List[Type], repeated: Repeated): Extractor = newExtractor(whole, fixed, repeated, tupleType(fixed)) - - // Turn Seq[A] into Repeated(Seq[A], A, A*) - def repeatedFromSeq(seqType: Type): Repeated = { - val elem = elementTypeOf(seqType) - val repeated = scalaRepeatedType(elem) - - Repeated(seqType, elem, repeated) - } - // Turn A* into Repeated(Seq[A], A, A*) - def repeatedFromVarargs(repeated: Type): Repeated = - Repeated(repeatedToSeq(repeated), repeatedToSingle(repeated), repeated) - - /** In this case we are basing the pattern expansion on a case class constructor. - * The argument is the MethodType carried by the primary constructor. - */ - def applyMethodTypes(method: Type): Extractor = { - val whole = method.finalResultType - - method.paramTypes match { - case init :+ last if isScalaRepeatedParamType(last) => newExtractor(whole, init, repeatedFromVarargs(last)) - case tps => newExtractor(whole, tps, NoRepeated) - } - } - - /** In this case, expansion is based on an unapply or unapplySeq method. - * Unfortunately the MethodType does not carry the information of whether - * it was unapplySeq, so we have to funnel that information in separately. 
- */ - def unapplyMethodTypes(context: Context, whole: Type, result: Type, isSeq: Boolean): Extractor = { - if (result =:= BooleanTpe) newExtractor(whole, Nil, NoRepeated) - else { - val getResult = typeOfMemberNamedGet(result) - def noGetError() = { - val name = "unapply" + (if (isSeq) "Seq" else "") - context.error(context.tree.pos, s"The result type of an $name method must contain a member `get` to be used as an extractor pattern, no such member exists in ${result}") - } - val expanded = getResult match { - case global.NoType => noGetError(); Nil - case rawGet if !hasSelectors(rawGet) => rawGet :: Nil - case rawGet => typesOfSelectors(rawGet) - } - expanded match { - case init :+ last if isSeq => newExtractor(whole, init, repeatedFromSeq(last), getResult) - case tps => newExtractor(whole, tps, NoRepeated, getResult) - } - } - } - } - object alignPatterns extends ScalacPatternExpander { - private def validateAligned(context: Context, tree: Tree, aligned: Aligned): Aligned = { - import aligned._ - - def owner = tree.symbol.owner - def offering = extractor.offeringString - def symString = tree.symbol.fullLocationString - def offerString = if (extractor.isErroneous) "" else s" offering $offering" - def arityExpected = ( if (extractor.hasSeq) "at least " else "" ) + productArity - - def err(msg: String) = context.error(tree.pos, msg) - def warn(msg: String) = context.warning(tree.pos, msg) - def arityError(what: String) = err(s"$what patterns for $owner$offerString: expected $arityExpected, found $totalArity") - - if (isStar && !isSeq) - err("Star pattern must correspond with varargs or unapplySeq") - else if (elementArity < 0) - arityError("not enough") - else if (elementArity > 0 && !isSeq) - arityError("too many") - else if (settings.warnStarsAlign && isSeq && productArity > 0 && elementArity > 0) warn { - if (isStar) "Sequence wildcard (_*) does not align with repeated case parameter or extracted sequence; the result may be unexpected." - else "A repeated case parameter or extracted sequence is not matched by a sequence wildcard (_*), and may fail at runtime." - } - - aligned - } - - def apply(context: Context, sel: Tree, args: List[Tree]): Aligned = { - val fn = sel match { - case Unapplied(fn) => fn - case _ => sel - } - val patterns = newPatterns(args) - val isUnapply = sel.symbol.name == nme.unapply - - val extractor = sel.symbol.name match { - case nme.unapply => unapplyMethodTypes(context, firstParamType(fn.tpe), sel.tpe, isSeq = false) - case nme.unapplySeq => unapplyMethodTypes(context, firstParamType(fn.tpe), sel.tpe, isSeq = true) - case _ => applyMethodTypes(fn.tpe) - } - - /** Rather than let the error that is scala/bug#6675 pollute the entire matching - * process, we will tuple the extractor before creation Aligned so that - * it contains known good values. 
- */ - def productArity = extractor.productArity - def acceptMessage = if (extractor.isErroneous) "" else s" to hold ${extractor.offeringString}" - val requiresTupling = isUnapply && patterns.totalArity == 1 && productArity > 1 - - val normalizedExtractor = if (requiresTupling) { - val tupled = extractor.asSinglePattern - if (effectivePatternArity(args) == 1 && isTupleType(extractor.typeOfSinglePattern)) { - val sym = sel.symbol.owner - currentRun.reporting.deprecationWarning(sel.pos, sym, s"${sym} expects $productArity patterns$acceptMessage but crushing into $productArity-tuple to fit single pattern (scala/bug#6675)", "2.11.0") - } - tupled - } else extractor - validateAligned(context, fn, Aligned(patterns, normalizedExtractor)) - } - - def apply(context: Context, tree: Tree): Aligned = tree match { - case Apply(fn, args) => apply(context, fn, args) - case UnApply(fn, args) => apply(context, fn, args) - } - } -} diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index 97a972506877..ce9923ee7f05 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -289,9 +289,11 @@ trait Checkable { ) /** TODO: much better error positions. - * Kind of stuck right now because they just pass us the one tree. - * TODO: Eliminate inPattern, canRemedy, which have no place here. - */ + * Kind of stuck right now because they just pass us the one tree. + * TODO: Eliminate inPattern, canRemedy, which have no place here. + * + * Instead of the canRemedy flag, annotate uncheckable types that have become checkable because of the availability of a class tag? + */ def checkCheckable(tree: Tree, P0: Type, X0: Type, inPattern: Boolean, canRemedy: Boolean = false) { if (uncheckedOk(P0)) return def where = if (inPattern) "pattern " else "" diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 5b562dac9980..c0e5aa53dca1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -607,13 +607,13 @@ trait ContextErrors { //doTypedApply - patternMode def TooManyArgsPatternError(fun: Tree) = - NormalTypeError(fun, "too many arguments for unapply pattern, maximum = "+definitions.MaxTupleArity) + issueNormalTypeError(fun, "too many arguments for unapply pattern, maximum = "+definitions.MaxTupleArity) def BlackboxExtractorExpansion(fun: Tree) = - NormalTypeError(fun, "extractor macros can only be whitebox") + issueNormalTypeError(fun, "extractor macros can only be whitebox") def WrongShapeExtractorExpansion(fun: Tree) = - NormalTypeError(fun, "extractor macros can only expand into extractor calls") + issueNormalTypeError(fun, "extractor macros can only expand into extractor calls") def WrongNumberOfArgsError(tree: Tree, fun: Tree) = NormalTypeError(tree, "wrong number of arguments for "+ treeSymTypeMsg(fun)) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index 1f69f28089ff..3ff22a4117d8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -76,7 +76,7 @@ trait PatternTypers { val caseClass = companionSymbolOf(fun.tpe.typeSymbol.sourceModule, context) val member = unapplyMember(fun.tpe) def resultType = (fun.tpe memberType 
member).finalResultType - def isEmptyType = resultOfMatchingMethod(resultType, nme.isEmpty)() + def isEmptyType = resultOfIsEmpty(resultType) def isOkay = ( resultType.isErroneous || (resultType <:< BooleanTpe) @@ -262,73 +262,70 @@ trait PatternTypers { } } - def doTypedUnapply(tree: Tree, fun0: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = { - def duplErrTree = setError(treeCopy.Apply(tree, fun0, args)) - def duplErrorTree(err: AbsTypeError) = { context.issue(err); duplErrTree } - - if (args.length > MaxTupleArity) - return duplErrorTree(TooManyArgsPatternError(fun)) - - def freshArgType(tp: Type): Type = tp match { - case MethodType(param :: _, _) => param.tpe - case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, freshArgType(restpe))(genPolyType) - case OverloadedType(_, _) => OverloadedUnapplyError(fun) ; ErrorType - case _ => UnapplyWithSingleArgError(fun) ; ErrorType - } - val unapplyMethod = unapplyMember(fun.tpe) - val unapplyType = fun.tpe memberType unapplyMethod - val unapplyParamType = firstParamType(unapplyType) - def isSeq = unapplyMethod.name == nme.unapplySeq - - def extractor = extractorForUncheckedType(fun.pos, unapplyParamType) - def canRemedy = unapplyParamType match { - case RefinedType(_, decls) if !decls.isEmpty => false - case RefinedType(parents, _) if parents exists isUncheckable => false - case _ => extractor.nonEmpty - } - - def freshUnapplyArgType(): Type = { - val GenPolyType(freeVars, unappFormal) = freshArgType(unapplyType.skolemizeExistential(context.owner, tree)) - val unapplyContext = context.makeNewScope(context.tree, context.owner) - freeVars foreach unapplyContext.scope.enter - val pattp = newTyper(unapplyContext).infer.inferTypedPattern(tree, unappFormal, pt, canRemedy) - // turn any unresolved type variables in freevars into existential skolems - val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv)) - pattp.substSym(freeVars, skolems) - } - - val unapplyArg = ( - context.owner.newValue(nme.SELECTOR_DUMMY, fun.pos, Flags.SYNTHETIC) setInfo ( - if (isApplicableSafe(Nil, unapplyType, pt :: Nil, WildcardType)) pt - else freshUnapplyArgType() - ) - ) - val unapplyArgTree = Ident(unapplyArg) updateAttachment SubpatternsAttachment(args) - - // clearing the type is necessary so that ref will be stabilized; see bug 881 - val fun1 = typedPos(fun.pos)(Apply(Select(fun.clearType(), unapplyMethod), unapplyArgTree :: Nil)) - - def makeTypedUnapply() = { - // the union of the expected type and the inferred type of the argument to unapply - val glbType = glb(ensureFullyDefined(pt) :: unapplyArg.tpe_* :: Nil) - val wrapInTypeTest = canRemedy && !(fun1.symbol.owner isNonBottomSubClass ClassTagClass) - val formals = patmat.alignPatterns(context.asInstanceOf[analyzer.Context], fun1, args).unexpandedFormals - val args1 = typedArgsForFormals(args, formals, mode) - val result = UnApply(fun1, args1) setPos tree.pos setType glbType - - if (wrapInTypeTest) - wrapClassTagUnapply(result, extractor, glbType) - else - result + def doTypedUnapply(tree: Tree, funOrig: Tree, funOverloadResolved: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = { + def errorTree: Tree = treeCopy.Apply(tree, funOrig, args) setType ErrorType + + if (args.lengthCompare(MaxTupleArity) > 0) { + TooManyArgsPatternError(funOverloadResolved); errorTree + } else { + val extractorPos = funOverloadResolved.pos + val extractorTp = funOverloadResolved.tpe + + val unapplyMethod = unapplyMember(extractorTp) + val unapplyType = extractorTp 
memberType unapplyMethod + + lazy val remedyUncheckedWithClassTag = extractorForUncheckedType(extractorPos, firstParamType(unapplyType)) + def canRemedy = remedyUncheckedWithClassTag != EmptyTree + + val selectorDummySym = + context.owner.newValue(nme.SELECTOR_DUMMY, extractorPos, Flags.SYNTHETIC) setInfo { + if (isApplicableSafe(Nil, unapplyType, pt :: Nil, WildcardType)) pt + else { + def freshArgType(tp: Type): Type = tp match { + case MethodType(param :: _, _) => param.tpe + case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, freshArgType(restpe))(genPolyType) + case OverloadedType(_, _) => OverloadedUnapplyError(funOverloadResolved); ErrorType + case _ => UnapplyWithSingleArgError(funOverloadResolved); ErrorType + } + + val GenPolyType(freeVars, unappFormal) = freshArgType(unapplyType.skolemizeExistential(context.owner, tree)) + val unapplyContext = context.makeNewScope(context.tree, context.owner) + freeVars foreach unapplyContext.scope.enter + val pattp = newTyper(unapplyContext).infer.inferTypedPattern(tree, unappFormal, pt, canRemedy) + // turn any unresolved type variables in freevars into existential skolems + val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv)) + pattp.substSym(freeVars, skolems) + } + } + + // Clearing the type is necessary so that ref will be stabilized; see scala/bug#881. + val selectUnapply = Select(funOverloadResolved.clearType(), unapplyMethod) + + // NOTE: The symbol of unapplyArgTree (``) may be referenced in `fun1.tpe` + // the pattern matcher deals with this in ExtractorCallRegular -- SI-6130 + val unapplyArg = Ident(selectorDummySym) updateAttachment SubpatternsAttachment(args) // attachment is for quasiquotes + + val typedApplied = typedPos(extractorPos)(Apply(selectUnapply, unapplyArg :: Nil)) + + if (typedApplied.tpe.isErroneous || unapplyMethod.isMacro && !typedApplied.isInstanceOf[Apply]) { + if (unapplyMethod.isMacro) { + if (isBlackbox(unapplyMethod)) BlackboxExtractorExpansion(tree) + else WrongShapeExtractorExpansion(tree) + } + errorTree + } else { + val unapplyArgTypeInferred = selectorDummySym.tpe_* + // the union of the expected type and the inferred type of the argument to unapply + val extractedTp = glb(ensureFullyDefined(pt) :: unapplyArgTypeInferred :: Nil) + val formals = patmat.unapplyFormals(typedApplied, args)(context) + val typedUnapply = UnApply(typedApplied, typedArgsForFormals(args, formals, mode)) setPos tree.pos setType extractedTp + + if (canRemedy && !(typedApplied.symbol.owner isNonBottomSubClass ClassTagClass)) + wrapClassTagUnapply(typedUnapply, remedyUncheckedWithClassTag, extractedTp) + else + typedUnapply + } } - - if (fun1.tpe.isErroneous) - duplErrTree - else if (unapplyMethod.isMacro && !fun1.isInstanceOf[Apply]) { - if (isBlackbox(unapplyMethod)) duplErrorTree(BlackboxExtractorExpansion(tree)) - else duplErrorTree(WrongShapeExtractorExpansion(tree)) - } else - makeTypedUnapply() } def wrapClassTagUnapply(uncheckedPattern: Tree, classTagExtractor: Tree, pt: Type): Tree = { diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index a194be0fdf61..c54cf3a88073 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -732,9 +732,6 @@ trait Definitions extends api.StandardDefinitions { case tp => tp } - def getterMemberTypes(tpe: Type, getters: List[Symbol]): List[Type] = - getters map (m => dropNullaryMethod(tpe memberType m)) - def 
dropNullaryMethod(tp: Type) = tp match { case NullaryMethodType(restpe) => restpe case _ => tp @@ -893,16 +890,13 @@ trait Definitions extends api.StandardDefinitions { def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg) def seqType(arg: Type) = appliedType(SeqClass, arg) - // FYI the long clunky name is because it's really hard to put "get" into the - // name of a method without it sounding like the method "get"s something, whereas - // this method is about a type member which just happens to be named get. - def typeOfMemberNamedGet(tp: Type) = typeArgOfBaseTypeOr(tp, OptionClass)(resultOfMatchingMethod(tp, nme.get)()) - def typeOfMemberNamedHead(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.head)()) - def typeOfMemberNamedApply(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.apply)(IntTpe)) - def typeOfMemberNamedDrop(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.drop)(IntTpe)) - def typesOfSelectors(tp: Type) = - if (isTupleType(tp)) tupleComponents(tp) - else getterMemberTypes(tp, productSelectors(tp)) + // For name-based pattern matching, derive the "element type" (type argument of Option/Seq) + // from the relevant part of the signature of various members (get/head/apply/drop) + def elementTypeFromGet(tp: Type) = typeArgOfBaseTypeOr(tp, OptionClass)(resultOfMatchingMethod(tp, nme.get)()) + def elementTypeFromHead(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.head)()) + def elementTypeFromApply(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.apply)(IntTpe)) + def elementTypeFromDrop(tp: Type) = typeArgOfBaseTypeOr(tp, SeqClass)(resultOfMatchingMethod(tp, nme.drop)(IntTpe)) + def resultOfIsEmpty(tp: Type) = resultOfMatchingMethod(tp, nme.isEmpty)() // scala/bug#8128 Still using the type argument of the base type at Seq/Option if this is an old-style (2.10 compatible) // extractor to limit exposure to regressions like the reported problem with existentials. @@ -915,32 +909,12 @@ trait Definitions extends api.StandardDefinitions { case _ => or } - // Can't only check for _1 thanks to pos/t796. - def hasSelectors(tp: Type) = ( - (tp.members containsName nme._1) - && (tp.members containsName nme._2) - ) - - /** Returns the method symbols for members _1, _2, ..., _N - * which exist in the given type. - */ - def productSelectors(tpe: Type): List[Symbol] = { - def loop(n: Int): List[Symbol] = tpe member TermName("_" + n) match { - case NoSymbol => Nil - case m if m.paramss.nonEmpty => Nil - case m => m :: loop(n + 1) - } - // Since ErrorType always returns a symbol from a call to member, we - // had better not start looking for _1, _2, etc. expecting it to run out. - if (tpe.isErroneous) Nil else loop(1) - } - /** If `tp` has a term member `name`, the first parameter list of which * matches `paramTypes`, and which either has no further parameter * lists or only an implicit one, then the result type of the matching * method. Otherwise, NoType. 
*/ - def resultOfMatchingMethod(tp: Type, name: TermName)(paramTypes: Type*): Type = { + private def resultOfMatchingMethod(tp: Type, name: TermName)(paramTypes: Type*): Type = { def matchesParams(member: Symbol) = member.paramss match { case Nil => paramTypes.isEmpty case ps :: rest => (rest.isEmpty || isImplicitParamss(rest)) && (ps corresponds paramTypes)(_.tpe =:= _) diff --git a/test/files/neg/t4425b.check b/test/files/neg/t4425b.check index a204467586e1..79ebe0a0cbba 100644 --- a/test/files/neg/t4425b.check +++ b/test/files/neg/t4425b.check @@ -23,15 +23,9 @@ Note: def unapply(x: String)(y: String): Nothing exists in object X, but it cann println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" }) ^ t4425b.scala:18: error: The result type of an unapply method must contain a member `get` to be used as an extractor pattern, no such member exists in Nothing - println( "" match { case _ X _ => "ok" ; case _ => "fail" }) - ^ -t4425b.scala:18: error: too many patterns for object X offering Boolean: expected 0, found 2 println( "" match { case _ X _ => "ok" ; case _ => "fail" }) ^ t4425b.scala:19: error: The result type of an unapply method must contain a member `get` to be used as an extractor pattern, no such member exists in Nothing - println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" }) - ^ -t4425b.scala:19: error: too many patterns for object X offering Boolean: expected 0, found 2 println((X: Any) match { case _ X _ => "ok" ; case _ => "fail" }) ^ t4425b.scala:20: error: The result type of an unapply method must contain a member `get` to be used as an extractor pattern, no such member exists in Nothing @@ -58,4 +52,4 @@ t4425b.scala:35: error: too many patterns for object X offering Nothing: expecte t4425b.scala:36: error: too many patterns for object X offering Nothing: expected 1, found 2 println((X: Any) match { case X(_, _) => "ok" ; case _ => "fail" }) ^ -18 errors found +16 errors found From 7c50a335a52029e909b4a2ae10c22382d2373c70 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 14 Sep 2017 16:09:48 -0700 Subject: [PATCH 1056/2793] Replace dummy `unapply-selector` by ref to real val Once a pattern match is expanded, we have a `val` in hand that we can use to represent the input to the unapply method. We already spliced that `val` into the call to the extractor, but neglected to substitute in the types. For `unapply`s with dependent result types, that left a bunch of types containing `<unapply-selector>.type` in the trees. No more!
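For a concrete picture, the kind of extractor this is about (a trimmed-down variant of the `run/t6130.scala` test added here) looks roughly like:

```
trait T { type T; val t: T }
object tInt extends T { type T = Int; val t = 1 }

object XT {
  // dependent result type: the element type of the Option mentions the argument `x`
  def unapply(x: T): Option[(x.T, x.T)] = Some((x.t, x.t))
}

class Use {
  // the type of `a` should refer to the real scrutinee val, not to the dummy
  // `<unapply-selector>` symbol that stands in for the argument while typing the pattern
  def t: Int = Some(tInt) match { case Some(XT(a, _)) => a }
}
```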
The substitution is performed in three ways: - when splicing the argument (as mentioned above); - when determining the types of binders for subpatterns (this area was cleaned up in parent commit) - an additional SubstTreeMaker chained before the extractor treemaker (this one is for good measure; we could actually see if we truly need it) --- .../transform/patmat/MatchTranslation.scala | 59 +++++++++++-------- .../transform/patmat/PatternExpansion.scala | 41 +++++++++---- test/files/run/t6130.scala | 58 ++++++++++++++++++ 3 files changed, 121 insertions(+), 37 deletions(-) create mode 100644 test/files/run/t6130.scala diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index d7fa5a6e1592..660e64121b56 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -61,11 +61,6 @@ trait MatchTranslation { } } - def newBoundTree(tree: Tree, pt: Type): BoundTree = tree match { - case SymbolBound(sym, expr) => BoundTree(setVarInfo(sym, pt), expr) - case _ => BoundTree(setVarInfo(freshSym(tree.pos, prefix = "p"), pt), tree) - } - final case class BoundTree(binder: Symbol, tree: Tree) { private lazy val extractor = ExtractorCall(tree) @@ -109,14 +104,14 @@ trait MatchTranslation { // example check: List[Int] <:< ::[Int] private def extractorStep(): TranslationStep = { - import extractor.treeMaker + import extractor.treeMakers // paramType = the type expected by the unapply // TODO: paramType may contain unbound type params (run/t2800, run/t3530) - val makers = { + val (makers, unappBinder) = { val paramType = extractor.expectedExtractedType // Statically conforms to paramType - if (tpe <:< paramType) treeMaker(binder, false, pos) :: Nil + if (tpe <:< paramType) (treeMakers(binder, false, pos), binder) else { // chain a type-testing extractor before the actual extractor call // it tests the type, checks the outer pointer and casts to the expected type @@ -128,10 +123,15 @@ trait MatchTranslation { // check whether typetest implies binder is not null, // even though the eventual null check will be on typeTest.nextBinder // it'll be equal to binder casted to paramType anyway (and the type test is on binder) - typeTest :: treeMaker(typeTest.nextBinder, binderKnownNonNull, pos) :: Nil + val unappBinder = typeTest.nextBinder + (typeTest :: treeMakers(unappBinder, binderKnownNonNull, pos), unappBinder) } } + foreach2(extractor.subBoundTrees, extractor.subPatTypes(unappBinder)) { (bt, pt) => + setVarInfo(bt.binder, pt) + } + step(makers: _*)(extractor.subBoundTrees: _*) } @@ -374,20 +374,17 @@ trait MatchTranslation { // TODO: check unargs == args def apply(tree: Tree): ExtractorCall = tree match { case UnApply(unfun@Unapplied(fun), args) => new ExtractorCallRegular(fun, args)(unfun) // extractor - case Apply(fun, args) => new ExtractorCallProd(fun, args) // case class + case Apply(fun, args) => new ExtractorCallProd(fun, args) // case class } } abstract class ExtractorCall(fun: Tree, args: List[Tree]) extends ExtractorAlignment(fun, args)(context) { - def resultInMonad = if (isBool) UnitTpe else elementTypeFromGet(resultType) - def resultType = fun.tpe.finalResultType - /** Create the TreeMaker that embodies this extractor call * * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null 
check on binder */ - def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker + def treeMakers(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): List[TreeMaker] // `subPatBinders` are the variables bound by this pattern in the following patterns // subPatBinders are replaced by references to the relevant part of the extractor's result (tuple component, seq element, the result as-is) @@ -396,7 +393,10 @@ trait MatchTranslation { // (it will later result in a type test when `tp` is not a subtype of `b.info`) // TODO: can we simplify this, together with the Bound case? def subPatBinders = subBoundTrees map (_.binder) - lazy val subBoundTrees = (args, subPatTypes).zipped map newBoundTree + lazy val subBoundTrees: List[BoundTree] = args map { + case SymbolBound(sym, expr) => BoundTree(sym, expr) + case tree => BoundTree(freshSym(tree.pos, prefix = "p"), tree) + } // never store these in local variables (for PreserveSubPatBinders) lazy val ignoredSubPatBinders: Set[Symbol] = subPatBinders zip args collect { case (b, PatternBoundToUnderscore()) => b } toSet @@ -483,7 +483,7 @@ trait MatchTranslation { * `binderKnownNonNull` indicates whether the cast implies `binder` cannot be null * when `binderKnownNonNull` is `true`, `ProductExtractorTreeMaker` does not do a (redundant) null check on binder */ - def treeMaker(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = { + def treeMakers(binder: Symbol, binderKnownNonNull: Boolean, pos: Position): List[TreeMaker] = { val paramAccessors = expectedExtractedType.typeSymbol.constrParamAccessors val numParams = paramAccessors.length def paramAccessorAt(subPatIndex: Int) = paramAccessors(math.min(subPatIndex, numParams - 1)) @@ -504,7 +504,7 @@ trait MatchTranslation { ) // checks binder ne null before chaining to the next extractor - ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders) + ProductExtractorTreeMaker(binder, lengthGuard(binder))(subPatBinders, subPatRefs(binder), mutableBinders, binderKnownNonNull, ignoredSubPatBinders) :: Nil } // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component @@ -519,9 +519,15 @@ trait MatchTranslation { * * @param fun reference to the unapply method * @param args the subpatterns - * @param funAppliedToUnapplySelector an application of the unapply method to the (dummy) unapply selector + * @param unapplyAppliedToDummy an application of the unapply method to the (dummy) unapply selector */ - class ExtractorCallRegular(fun: Tree, args: List[Tree])(funAppliedToUnapplySelector: Tree) extends ExtractorCall(fun, args) { + class ExtractorCallRegular(fun: Tree, args: List[Tree])(unapplyAppliedToDummy: Tree) extends ExtractorCall(fun, args) { + override lazy val unapplySelector = + unapplyAppliedToDummy match { + case Apply(_, (dummy@Ident(nme.SELECTOR_DUMMY)) :: Nil) => dummy.symbol + case _ => NoSymbol // if the unapply is applied to .toXXXX, we can't use the selector dummy's symbol + } + /** Create the TreeMaker that embodies this extractor call * * `binder` has been casted to `paramType` if necessary @@ -532,14 +538,14 @@ trait MatchTranslation { * case class Binder(sym: Symbol, knownNotNull: Boolean). * Perhaps it hasn't reached critical mass, but it would already clean things up a touch. 
*/ - def treeMaker(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): TreeMaker = { + def treeMakers(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): List[TreeMaker] = { // the extractor call (applied to the binder bound by the flatMap corresponding // to the previous (i.e., enclosing/outer) pattern) val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted)) // can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely // wrong when isSeq, and resultInMonad should always be correct since it comes // directly from the extractor's result type - val binder = freshSym(pos, pureType(resultInMonad)) + val binder = freshSym(pos, pureType(resultInMonad(patBinderOrCasted))) val potentiallyMutableBinders: Set[Symbol] = if (extractorApply.tpe.typeSymbol.isNonBottomSubClass(OptionClass) && !isSeq) Set.empty @@ -547,7 +553,8 @@ trait MatchTranslation { // Ensures we capture unstable bound variables eagerly. These can arise under name based patmat or by indexing into mutable Seqs. See run t9003.scala subPatBinders.toSet - ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)( + // types may refer to the dummy symbol unapplySelector (in case of dependent method type for the unapply method) + SubstOnlyTreeMaker(unapplySelector, patBinderOrCasted) :: ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)( subPatBinders, subPatRefs(binder), potentiallyMutableBinders, @@ -555,7 +562,7 @@ trait MatchTranslation { checkedLength, patBinderOrCasted, ignoredSubPatBinders - ) + ) :: Nil } override protected def seqTree(binder: Symbol): Tree = @@ -575,15 +582,17 @@ trait MatchTranslation { override def transform(t: Tree) = t match { // duplicated with the extractor Unapplied case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) => - treeCopy.Apply(t, x, binderRef(i.pos) :: Nil) + // in case the result type depended on the unapply's argument, plug in the new symbol + treeCopy.Apply(t, x, binderRef(i.pos) :: Nil) modifyType(_.substSym(List(i.symbol), List(binder))) // scala/bug#7868 Account for numeric widening, e.g. 
.toInt case Apply(x, List(i @ (sel @ Select(Ident(nme.SELECTOR_DUMMY), name)))) => + // not substituting `binder` for `i.symbol`: widening conversion implies the binder could not be used as a path treeCopy.Apply(t, x, treeCopy.Select(sel, binderRef(i.pos), name) :: Nil) case _ => super.transform(t) } } - splice transform funAppliedToUnapplySelector + splice transform unapplyAppliedToDummy } } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala index 0f7ab169e91a..e56110cb6bb2 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala @@ -52,6 +52,10 @@ trait PatternExpansion { import definitions._ import treeInfo._ + // SI-6130 -- TODO: what should we do when a type in `formals` depends on the symbol `unapplyArg` (that references the unapply selector) + // One solution could be to widen all expected types for sub-patterns since the extractor's result type + // may contain singleton types that depend on `arg` () + // `formals mapConserve (_.widen)` def unapplyFormals(fun: Tree, args: List[Tree])(context: Contexts#Context): List[Type] = new ExtractorAlignment(fun, args)(context).unapplyFormals.map{case NoType => ErrorType case tp => tp} @@ -79,6 +83,8 @@ trait PatternExpansion { // Analyze the fun / args of a case class or extractor pattern in terms of repeated patterns etc. // Extracts some info from signatures of get/apply/head methods (name-based patmat) class ExtractorAlignment(val fun: Tree, val args: List[Tree])(context: Contexts#Context) extends ExtractorSubPatternAlignment { + def unapplySelector: Symbol = NoSymbol + def productArity = productTypes.length // values coming from the fixed-length content def elementArity = nonStarArity - productArity // number of elements picked off from the sequence (the variable-length values of the extracted parts) @@ -92,14 +98,21 @@ trait PatternExpansion { if (isUnapply || isUnapplySeq) firstParamType(fun.tpe) else fun.tpe.finalResultType // result type of the case class constructor + def resultInMonad(extractedBinder: Symbol) = + if (isBool) UnitTpe else resultOfGetInMonad(extractedBinder) + // expected types for subpatterns (using repeated param type to absorb the // variable-length content, i.e., the elements and the final star pattern) def unapplyFormals: List[Type] = if (isSeq) productTypes :+ repeatedType else productTypes - def subPatTypes: List[Type] = { + def subPatTypes(extractedBinder: Symbol): List[Type] = { + def replaceUnapplySelector(tps: List[Type]) = + if (unapplySelector == NoSymbol) tps + else tps.map(_.substSym(List(unapplySelector), List(extractedBinder))) + val withoutStar = productTypes ::: List.fill(elementArity)(elementType) - if (isStar) withoutStar :+ sequenceType else withoutStar + replaceUnapplySelector(if (isStar) withoutStar :+ sequenceType else withoutStar) } def lengthCompareSym = sequenceType member nme.lengthCompare @@ -107,22 +120,26 @@ trait PatternExpansion { // rest is private private val isUnapply = fun.symbol.name == nme.unapply private val isUnapplySeq = fun.symbol.name == nme.unapplySeq - private def isBooleanUnapply = isUnapply && unapplyResultWithDummyUnapplySelector =:= BooleanTpe + private def isBooleanUnapply = isUnapply && unapplyResultType() =:= BooleanTpe private def isRepeatedCaseClass = caseCtorParamTypes.exists(tpes => tpes.nonEmpty && isScalaRepeatedParamType(tpes.last)) private def 
caseCtorParamTypes: Option[List[Type]] = if (isUnapply || isUnapplySeq) None else Some(fun.tpe.paramTypes) - // TODO: the remainder needs to be reviewed regarding use of unapply-selector as a dummy argument, - // on which the unapply method's result type may depend - private def unapplyResultWithDummyUnapplySelector = fun.tpe.finalResultType + // bug#6130 can't really say what the result type is without referring to the binder we're extracting, + // as an unapply's result type could depend on its argument, e.g. crazy stuff like `def unapply(x: T): Option[(x.T, x.U)]` + // NOTE: we skip a potential implicit method type here -- could this be another avenue of craziness where the result type depends on the input? + private def unapplyResultType(extractedBinder: Symbol = unapplySelector): Type = + if (extractedBinder == NoSymbol) fun.tpe.finalResultType + else fun.tpe.resultType(List(SingleType(NoPrefix, extractedBinder))).finalResultType - private def resultOfGetInMonad = elementTypeFromGet(unapplyResultWithDummyUnapplySelector) + private def resultOfGetInMonad(arg: Symbol = unapplySelector) = + elementTypeFromGet(unapplyResultType(arg)) // For a traditional extractor that returns an `Option[TupleN[..Ti..]]`, the component types `..Ti..` // Note, we do not unwrap a Tuple1... (similar for fromProductSelectors -- see pos/t796) private def fromTupleComponents: Option[List[Type]] = - resultOfGetInMonad match { + resultOfGetInMonad() match { case res if isTupleType(res) => val components = tupleComponents(res) if (components.lengthCompare(1) > 0) Some(components) @@ -132,7 +149,7 @@ trait PatternExpansion { private def tupleValuedUnapply = fromTupleComponents.nonEmpty private def fromProductSelectors: Option[List[Type]] = { - val res = resultOfGetInMonad + val res = resultOfGetInMonad() // Can't only check for _1 thanks to pos/t796. if (res.hasNonPrivateMember(nme._1) && res.hasNonPrivateMember(nme._2)) Some(Stream.from(1).map(n => res.nonPrivateMember(newTermName("_" + n))). 
@@ -148,7 +165,7 @@ trait PatternExpansion { booleanUnapply orElse fromTupleComponents orElse fromProductSelectors getOrElse - (resultOfGetInMonad :: Nil) // hope for the best + (resultOfGetInMonad() :: Nil) // hope for the best // The non-sequence types which are extracted private val productTypes = @@ -169,7 +186,7 @@ // ``` else if (totalArity == 1 && equivConstrParamTypes.tail.nonEmpty) { warnPatternTupling() - (if (tupleValuedUnapply) tupleType(equivConstrParamTypes) else resultOfGetInMonad) :: Nil + (if (tupleValuedUnapply) tupleType(equivConstrParamTypes) else resultOfGetInMonad()) :: Nil } else equivConstrParamTypes @@ -226,7 +243,7 @@ // emit error/warning on mismatch if (isStar && !isSeq) err("Star pattern must correspond with varargs or unapplySeq") - else if (equivConstrParamTypes == List(NoType)) err(s"The result type of an ${fun.symbol.name} method must contain a member `get` to be used as an extractor pattern, no such member exists in ${unapplyResultWithDummyUnapplySelector}") + else if (equivConstrParamTypes == List(NoType)) err(s"The result type of an ${fun.symbol.name} method must contain a member `get` to be used as an extractor pattern, no such member exists in ${unapplyResultType()}") else if (elementArity < 0) arityError("not enough") else if (elementArity > 0 && !isSeq) arityError("too many") else if (settings.warnStarsAlign && isSeq && productArity > 0 && elementArity > 0) warn( diff --git a/test/files/run/t6130.scala b/test/files/run/t6130.scala new file mode 100644 index 000000000000..d20ff9208d91 --- /dev/null +++ b/test/files/run/t6130.scala @@ -0,0 +1,58 @@ +import scala.tools.partest._ + +object Test extends StoreReporterDirectTest { + override def extraSettings: String = "-usejavacp -Xprint:patmat -Ystop-after:patmat" + + override def code = + """trait T { type T ; val t: T } + |object tInt extends T { type T = Int; val t = 1 } + | + |trait TU { type U } + | + |object XT { + | def unapply(x: T): Option[(x.T, x.T)] = Some(((x.t, x.t))) + |} + | + |object XTU { + | def unapply(t: TU): Option[t.U] = ??? + |} + | + |object XA { + | def unapply(x: AnyRef): Option[x.type] = Some(x) + |} + | + | + |// TODO: show that `<unapply-selector>` is gone from the following lines (after patmat) + |class Test { + | // val o9: scala.this.Option[scala.this.Tuple2[<unapply-selector>.T,<unapply-selector>.T]] = XT.unapply(p2); + | // val a: <unapply-selector>.T = o9.get._1; + | def t: Int = Some(tInt) match { case Some(XT(a, _ )) => a } + | + | def tu = (null: Any) match { + | // val o8: scala.this.Option[<unapply-selector>.U] = XTU.unapply(x2); + | case XTU(otherExRep) => + | // val otherExRep: <unapply-selector>.U = o8.get; + | println(otherExRep) + | } + | + | def anyref(z: AnyRef) = { + | z match { + | // val o8: scala.this.Option[<unapply-selector>.type] = XA.unapply(x1); + | case XA(x) => x + | case _ => () + | } + | } + |} + | + | + """.stripMargin + + def show(): Unit = { + val baos = new java.io.ByteArrayOutputStream() + Console.withOut(baos)(Console.withErr(baos)(compile())) + val out = baos.toString("UTF-8") + + val unapplySelectorDummies = out.lines.filter(_.contains("<unapply-selector>")).map(_.trim).toList + assert(unapplySelectorDummies.isEmpty, unapplySelectorDummies) + } +} From d831810956f6f940fa46dd23807ed724a28763fc Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 26 Sep 2017 14:51:20 +0200 Subject: [PATCH 1057/2793] Make Lazy* classes serializable The instances may be captured by closures, which should be serializable.
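A minimal sketch of the failure mode (the new `run/t10522.scala` test below exercises the full round trip): a lazy val defined inside a method is compiled to a holder such as `scala.runtime.LazyInt`, and a closure over that lazy val captures the holder, so serializing the closure requires the holder class to be serializable.

```
import java.io._

object LazyCaptureDemo extends App {
  def f: () => Int = {
    lazy val x = { println("init x"); 1 }
    // the returned closure captures the compiler-generated LazyInt holder for `x`
    () => x
  }

  // Before this change, writing the closure failed with a NotSerializableException
  // on the Lazy* holder; with serializable holders the round trip works.
  val out = new ObjectOutputStream(new ByteArrayOutputStream)
  out.writeObject(f)
  println("closure serialized")
}
```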
Fixes scala/bug#10522 --- .../mima-filters/2.12.0.forwards.excludes | 24 +++++++++++- src/library/scala/runtime/LazyRef.scala | 30 ++++++++++----- test/files/run/t10522.check | 12 ++++++ test/files/run/t10522.scala | 38 +++++++++++++++++++ 4 files changed, 93 insertions(+), 11 deletions(-) create mode 100644 test/files/run/t10522.check create mode 100644 test/files/run/t10522.scala diff --git a/src/library/mima-filters/2.12.0.forwards.excludes b/src/library/mima-filters/2.12.0.forwards.excludes index 9d4ddfbb14e7..dbd58849da8a 100644 --- a/src/library/mima-filters/2.12.0.forwards.excludes +++ b/src/library/mima-filters/2.12.0.forwards.excludes @@ -15,4 +15,26 @@ ProblemFilters.exclude[MissingClassProblem]("scala.annotation.showAsInfix$") ProblemFilters.exclude[MissingClassProblem]("scala.annotation.showAsInfix") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.util.PropertiesTrait.coloredOutputEnabled") -ProblemFilters.exclude[DirectMissingMethodProblem]("scala.util.Properties.coloredOutputEnabled") \ No newline at end of file +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.util.Properties.coloredOutputEnabled") + +# https://github.com/scala/scala/pull/6101 +ProblemFilters.exclude[MissingTypesProblem]("scala.runtime.LazyRef") +ProblemFilters.exclude[MissingTypesProblem]("scala.runtime.LazyDouble") +ProblemFilters.exclude[MissingTypesProblem]("scala.runtime.LazyChar") +ProblemFilters.exclude[MissingTypesProblem]("scala.runtime.LazyUnit") +ProblemFilters.exclude[MissingTypesProblem]("scala.runtime.LazyShort") +ProblemFilters.exclude[MissingTypesProblem]("scala.runtime.LazyInt") +ProblemFilters.exclude[MissingTypesProblem]("scala.runtime.LazyByte") +ProblemFilters.exclude[MissingTypesProblem]("scala.runtime.LazyLong") +ProblemFilters.exclude[MissingTypesProblem]("scala.runtime.LazyBoolean") +ProblemFilters.exclude[MissingTypesProblem]("scala.runtime.LazyFloat") +ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.LazyRef.serialVersionUID") +ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.LazyDouble.serialVersionUID") +ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.LazyChar.serialVersionUID") +ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.LazyUnit.serialVersionUID") +ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.LazyShort.serialVersionUID") +ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.LazyInt.serialVersionUID") +ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.LazyByte.serialVersionUID") +ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.LazyLong.serialVersionUID") +ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.LazyBoolean.serialVersionUID") +ProblemFilters.exclude[MissingFieldProblem]("scala.runtime.LazyFloat.serialVersionUID") diff --git a/src/library/scala/runtime/LazyRef.scala b/src/library/scala/runtime/LazyRef.scala index 5a0bd5442c64..6057afef7594 100644 --- a/src/library/scala/runtime/LazyRef.scala +++ b/src/library/scala/runtime/LazyRef.scala @@ -10,7 +10,8 @@ package scala.runtime /** Classes used as holders for lazy vals defined in methods. 
*/ -class LazyRef[T] { +@SerialVersionUID(1l) +class LazyRef[T] extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -25,7 +26,8 @@ class LazyRef[T] { override def toString = s"LazyRef ${if (_initialized) s"of: ${_value}" else "thunk"}" } -class LazyBoolean { +@SerialVersionUID(1l) +class LazyBoolean extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -40,7 +42,8 @@ class LazyBoolean { override def toString = s"LazyBoolean ${if (_initialized) s"of: ${_value}" else "thunk"}" } -class LazyByte { +@SerialVersionUID(1l) +class LazyByte extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -57,7 +60,8 @@ class LazyByte { override def toString = s"LazyByte ${if (_initialized) s"of: ${_value}" else "thunk"}" } -class LazyChar { +@SerialVersionUID(1l) +class LazyChar extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -72,7 +76,8 @@ class LazyChar { override def toString = s"LazyChar ${if (_initialized) s"of: ${_value}" else "thunk"}" } -class LazyShort { +@SerialVersionUID(1l) +class LazyShort extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -87,7 +92,8 @@ class LazyShort { override def toString = s"LazyShort ${if (_initialized) s"of: ${_value}" else "thunk"}" } -class LazyInt { +@SerialVersionUID(1l) +class LazyInt extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -102,7 +108,8 @@ class LazyInt { override def toString = s"LazyInt ${if (_initialized) s"of: ${_value}" else "thunk"}" } -class LazyLong { +@SerialVersionUID(1l) +class LazyLong extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -117,7 +124,8 @@ class LazyLong { override def toString = s"LazyLong ${if (_initialized) s"of: ${_value}" else "thunk"}" } -class LazyFloat { +@SerialVersionUID(1l) +class LazyFloat extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -132,7 +140,8 @@ class LazyFloat { override def toString = s"LazyFloat ${if (_initialized) s"of: ${_value}" else "thunk"}" } -class LazyDouble { +@SerialVersionUID(1l) +class LazyDouble extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized @@ -147,7 +156,8 @@ class LazyDouble { override def toString = s"LazyDouble ${if (_initialized) s"of: ${_value}" else "thunk"}" } -class LazyUnit { +@SerialVersionUID(1l) +class LazyUnit extends Serializable { @volatile private[this] var _initialized: Boolean = _ def initialized = _initialized diff --git a/test/files/run/t10522.check b/test/files/run/t10522.check new file mode 100644 index 000000000000..bd34339bde05 --- /dev/null +++ b/test/files/run/t10522.check @@ -0,0 +1,12 @@ +gi init x +1 +1 +gi init x +1 +1 +gs init x +hi +hi +gs init x +hi +hi diff --git a/test/files/run/t10522.scala b/test/files/run/t10522.scala new file mode 100644 index 000000000000..7e801a580834 --- /dev/null +++ b/test/files/run/t10522.scala @@ -0,0 +1,38 @@ +object Test { + def serializeDeserialize[T <: AnyRef](obj: T): T = { + import java.io._ + val buffer = new ByteArrayOutputStream + val out = new ObjectOutputStream(buffer) + out.writeObject(obj) + val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray)) + 
in.readObject.asInstanceOf[T] + } + + def gi: () => Int = { + lazy val x = { println("gi init x"); 1 } + serializeDeserialize(() => x) + } + + def gs: () => String = { + lazy val x = { println("gs init x"); "hi" } + serializeDeserialize(() => x) + } + + def main(args: Array[String]): Unit = { + val fi1 = gi + println(fi1()) + println(fi1()) + + val fi2 = gi + println(fi2()) + println(fi2()) + + val fs1 = gs + println(fs1()) + println(fs1()) + + val fs2 = gs + println(fs2()) + println(fs2()) + } +} \ No newline at end of file From 6ae05173af6498f8ff72004cf1d21484417680d6 Mon Sep 17 00:00:00 2001 From: jvican Date: Wed, 30 Aug 2017 15:12:06 +0200 Subject: [PATCH 1058/2793] Initialize statistics per global As described in the bug report, statistics are currently shared by different instances of `Global` because they are located in objects. This approach has an important disadvantage. Statistics from different global instances step on each other's toes, rendering the statistics infrastructure unusable in most Scala developers' workflows (think of sbt running several compilations in parallel). When the data is not merely useless, it produces race conditions and kills compilation. This patch addresses the issue by creating a protected abstract `statistics` field in `SymbolTable` that is overridden in `Global` and `JavaUniverse` and hence available to most of the compiler internals. This object contains all the statistics at the known definition site (`Global` defines more statistics that are created in `scala.tools.nsc` instead of `scala.reflect`). All statistics are accessible via `import statistics._`, following the existing idiom for other pieces of the compiler like `definitions`. The main goal of this implementation was to avoid moving the definition of statistics from the places where they are used. Currently, they are defined alongside the classes that have the call-sites (see, for example, `Implicits.scala`, `Macros.scala` or `Types.scala`). This commit produces a binary-incompatible change because of the removal of `IOStats`. This change is whitelisted because `IOStats` is `private[io]`. `AliasingFrame` and `IOStats` are removed because the stats defined there are completely disconnected from `Global` and `SymbolTable`. Later approaches will try to bring `IOStats` back to life, since it is the more useful of the two. Fixes scala/bug#10460.
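Reduced to a self-contained sketch (names are simplified here; the real pieces are `scala.reflect.internal.util.Statistics`, per-area traits such as `BackendStats`, and the `statistics` object in `Global`), the shape of the change is: statistics live in an instance owned by each `Global`, assembled from mixin traits, rather than in shared singletons.

```
class Statistics {
  final class Timer(val prefix: String) { var nanos = 0L }

  private var timers = List.empty[Timer]

  // `phase` is ignored in this sketch; the real implementation uses it to
  // decide at which phases a quantity is reported.
  def newTimer(prefix: String, phase: String): Timer = {
    val t = new Timer(prefix)
    timers ::= t
    t
  }
}

// Per-area statistics are traits with a Statistics self type, as in the patch.
trait BackendStats { self: Statistics =>
  val bcodeTimer = newTimer("time in backend", "jvm")
}

// Each compiler instance owns its own statistics object.
class MyGlobal {
  object statistics extends Statistics with BackendStats
}

object StatsDemo extends App {
  val g1 = new MyGlobal
  val g2 = new MyGlobal
  g1.statistics.bcodeTimer.nanos += 42
  println(g1.statistics.bcodeTimer.nanos) // 42
  println(g2.statistics.bcodeTimer.nanos) // 0 -- no cross-Global sharing
}
```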
--- src/compiler/scala/tools/nsc/Global.scala | 59 +++++++-- src/compiler/scala/tools/nsc/MainBench.scala | 3 +- .../tools/nsc/backend/jvm/BackendStats.scala | 11 +- .../nsc/backend/jvm/ClassfileWriter.scala | 7 +- .../tools/nsc/backend/jvm/GenBCode.scala | 14 +- .../tools/nsc/backend/jvm/PostProcessor.scala | 9 +- .../backend/jvm/analysis/AliasingFrame.scala | 11 -- .../tools/nsc/settings/ScalaSettings.scala | 6 +- .../tools/nsc/symtab/SymbolLoaders.scala | 17 +-- .../tools/nsc/transform/patmat/Logic.scala | 8 +- .../nsc/transform/patmat/MatchAnalysis.scala | 10 +- .../transform/patmat/MatchTranslation.scala | 6 +- .../transform/patmat/PatternMatching.scala | 17 +-- .../tools/nsc/transform/patmat/Solving.scala | 8 +- .../tools/nsc/typechecker/Analyzer.scala | 7 +- .../tools/nsc/typechecker/Implicits.scala | 120 +++++++++--------- .../scala/tools/nsc/typechecker/Macros.scala | 16 +-- .../scala/tools/nsc/typechecker/Typers.scala | 79 ++++++------ .../scala/tools/nsc/util/StatisticsInfo.scala | 40 ------ .../mima-filters/2.12.0.backwards.excludes | 3 + .../mima-filters/2.12.0.forwards.excludes | 1 + .../scala/reflect/internal/BaseTypeSeqs.scala | 15 ++- .../scala/reflect/internal/SymbolTable.scala | 22 +++- .../scala/reflect/internal/Symbols.scala | 44 +++---- .../scala/reflect/internal/Trees.scala | 14 +- .../scala/reflect/internal/Types.scala | 102 +++++++-------- .../internal/settings/MutableSettings.scala | 5 + .../reflect/internal/tpe/FindMembers.scala | 13 +- .../scala/reflect/internal/tpe/GlbLubs.scala | 20 +-- .../reflect/internal/tpe/TypeComparers.scala | 7 +- .../reflect/internal/util/Statistics.scala | 24 +++- .../scala/reflect/io/AbstractFile.scala | 3 +- src/reflect/scala/reflect/io/IOStats.scala | 13 +- src/reflect/scala/reflect/io/Path.scala | 11 +- .../scala/reflect/runtime/JavaUniverse.scala | 2 + .../reflect/runtime/JavaUniverseForce.scala | 1 + .../symtab/SymbolTableForUnitTesting.scala | 3 + 37 files changed, 384 insertions(+), 367 deletions(-) delete mode 100644 src/compiler/scala/tools/nsc/util/StatisticsInfo.scala diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 779165a2b7bc..ba4b0754f374 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -13,7 +13,7 @@ import java.nio.charset.{Charset, CharsetDecoder, IllegalCharsetNameException, U import scala.collection.{immutable, mutable} import io.{AbstractFile, Path, SourceReader} import reporters.Reporter -import util.{ClassPath, StatisticsInfo, returning} +import util.{ClassPath, returning} import scala.reflect.ClassTag import scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, ScalaClassLoader, ScriptSourceFile, SourceFile} import scala.reflect.internal.pickling.PickleBuffer @@ -26,7 +26,7 @@ import typechecker._ import transform.patmat.PatternMatching import transform._ import backend.{JavaPlatform, ScalaPrimitives} -import backend.jvm.GenBCode +import backend.jvm.{GenBCode, BackendStats} import scala.concurrent.Future import scala.language.postfixOps import scala.tools.nsc.ast.{TreeGen => AstTreeGen} @@ -159,10 +159,19 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // Components for collecting and generating output - /** Some statistics (normally disabled) set with -Ystatistics */ - object statistics extends { - val global: Global.this.type = Global.this - } with StatisticsInfo + import scala.reflect.internal.util.Statistics + import 
scala.tools.nsc.transform.patmat.PatternMatchingStats + trait GlobalStats extends ReflectStats + with TypersStats + with ImplicitsStats + with MacrosStats + with BackendStats + with PatternMatchingStats { self: Statistics => } + + /** Redefine statistics to include all known global + reflect stats. */ + object statistics extends Statistics(Global.this, settings) with GlobalStats + + // Components for collecting and generating output /** Print tree in detailed form */ object nodePrinters extends { @@ -1214,10 +1223,15 @@ class Global(var currentSettings: Settings, var reporter: Reporter) checkPhaseSettings(including = true, inclusions.toSeq: _*) checkPhaseSettings(including = false, exclusions map (_.value): _*) + // Enable statistics if settings are true + if (settings.YstatisticsEnabled) + statistics.enabled = true + if (settings.YhotStatisticsEnabled) + statistics.hotEnabled = true + // Report the overhead of statistics measurements per every run - import scala.reflect.internal.util.Statistics - if (Statistics.canEnable) - Statistics.reportStatisticsOverhead(reporter) + if (statistics.canEnable) + statistics.reportStatisticsOverhead(reporter) phase = first //parserPhase first @@ -1465,8 +1479,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) runCheckers() // output collected statistics - if (settings.YstatisticsEnabled) - statistics.print(phase) + if (settings.YstatisticsEnabled && settings.Ystatistics.contains(phase.name)) + printStatisticsFor(phase) advancePhase() } @@ -1558,6 +1572,29 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } if (!pclazz.isRoot) resetPackageClass(pclazz.owner) } + + private val parserStats = { + import statistics._ + Seq(treeNodeCount, nodeByType, retainedCount, retainedByType) + } + + final def printStatisticsFor(phase: Phase) = { + inform("*** Cumulative statistics at phase " + phase) + + if (settings.YhotStatisticsEnabled) { + // High overhead, only enable retained stats under hot stats + statistics.retainedCount.value = 0 + for (c <- statistics.retainedByType.keys) + statistics.retainedByType(c).value = 0 + for (u <- currentRun.units; t <- u.body) { + statistics.retainedCount.value += 1 + statistics.retainedByType(t.getClass).value += 1 + } + } + + val quants = if (phase.name == "parser") parserStats else statistics.allQuantities + for (q <- quants if q.showAt(phase.name)) inform(q.line) + } } // class Run def printAllUnits() { diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala index f01de0cbe118..3bfb24699e75 100644 --- a/src/compiler/scala/tools/nsc/MainBench.scala +++ b/src/compiler/scala/tools/nsc/MainBench.scala @@ -25,7 +25,8 @@ object MainBench extends Driver with EvalLoop { for (i <- 0 until NIter) { if (i == NIter-1) { theCompiler.settings.Ystatistics.default.get foreach theCompiler.settings.Ystatistics.add - Statistics.enabled = true + theCompiler.statistics.enabled = true + theCompiler.statistics.hotEnabled = true } process(args) val end = System.nanoTime() diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala index 8d0547b60731..9f4af0b7993e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala @@ -9,17 +9,12 @@ package backend.jvm import scala.reflect.internal.util.Statistics // Enable with `-Ystatistics:jvm` -object BackendStats { - import Statistics.{newTimer, newSubTimer} - val 
bcodeTimer = newTimer("time in backend", "jvm") +trait BackendStats { + self: Statistics => + val bcodeTimer = newTimer("time in backend", "jvm") val bcodeInitTimer = newSubTimer("bcode initialization", bcodeTimer) val bcodeGenStat = newSubTimer("code generation", bcodeTimer) val methodOptTimer = newSubTimer("intra-method optimizations", bcodeTimer) val bcodeWriteTimer = newSubTimer("classfile writing", bcodeTimer) - - def timed[T](timer: Statistics.Timer)(body: => T): T = { - val start = Statistics.startTimer(timer) - try body finally Statistics.stopTimer(timer, start) - } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriter.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriter.scala index 3c42c9af11eb..a7b32b597eeb 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriter.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriter.scala @@ -10,7 +10,8 @@ import scala.reflect.io._ import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.io.{AbstractFile, Jar, JarWriter} -class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess) { +class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess, + statistics: Statistics with BackendStats) { import frontendAccess.{backendReporting, compilerSettings} // if non-null, asm text files are written to this directory @@ -90,7 +91,7 @@ class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess) { } def write(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): Unit = try { - val writeStart = Statistics.startTimer(BackendStats.bcodeWriteTimer) + val writeStart = statistics.startTimer(statistics.bcodeWriteTimer) if (jarWriter == null) { val outFolder = compilerSettings.outputDirectoryFor(sourceFile) val outFile = getFile(outFolder, className, ".class") @@ -101,7 +102,7 @@ class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess) { try out.write(bytes, 0, bytes.length) finally out.flush() } - Statistics.stopTimer(BackendStats.bcodeWriteTimer, writeStart) + statistics.stopTimer(statistics.bcodeWriteTimer, writeStart) if (asmOutputDir != null) { val asmpFile = getFile(asmOutputDir, className, ".asmp") diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala index 6fc3d7aebd47..f4c21449de18 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala @@ -7,12 +7,12 @@ package scala.tools.nsc package backend package jvm -import scala.reflect.internal.util.Statistics import scala.tools.asm.Opcodes abstract class GenBCode extends SubComponent { self => import global._ + import statistics._ val postProcessorFrontendAccess: PostProcessorFrontendAccess = new PostProcessorFrontendAccess.PostProcessorFrontendAccessImpl(global) @@ -20,7 +20,9 @@ abstract class GenBCode extends SubComponent { val codeGen: CodeGen[global.type] = new { val bTypes: self.bTypes.type = self.bTypes } with CodeGen[global.type](global) - val postProcessor: PostProcessor { val bTypes: self.bTypes.type } = new { val bTypes: self.bTypes.type = self.bTypes } with PostProcessor + val postProcessor: PostProcessor { val bTypes: self.bTypes.type } = new { + val bTypes: self.bTypes.type = self.bTypes + } with PostProcessor(statistics) val phaseName = "jvm" @@ -37,7 +39,7 @@ abstract class GenBCode extends SubComponent { } def apply(unit: CompilationUnit): Unit = { - val generated = BackendStats.timed(BackendStats.bcodeGenStat) { + val generated 
= statistics.timed(bcodeGenStat) { codeGen.genUnit(unit) } if (globalOptsEnabled) postProcessor.generatedClasses ++= generated @@ -45,7 +47,7 @@ abstract class GenBCode extends SubComponent { } override def run(): Unit = { - BackendStats.timed(BackendStats.bcodeTimer) { + statistics.timed(bcodeTimer) { try { initialize() super.run() // invokes `apply` for each compilation unit @@ -63,13 +65,13 @@ abstract class GenBCode extends SubComponent { * it depends on frontend data that may change between runs: Symbols, Types, Settings. */ private def initialize(): Unit = { - val initStart = Statistics.startTimer(BackendStats.bcodeInitTimer) + val initStart = statistics.startTimer(bcodeInitTimer) scalaPrimitives.init() bTypes.initialize() codeGen.initialize() postProcessorFrontendAccess.initialize() postProcessor.initialize() - Statistics.stopTimer(BackendStats.bcodeInitTimer, initStart) + statistics.stopTimer(bcodeInitTimer, initStart) } } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index ed9cca7637db..e14b0824072b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -2,7 +2,7 @@ package scala.tools.nsc package backend.jvm import scala.collection.mutable.ListBuffer -import scala.reflect.internal.util.NoPosition +import scala.reflect.internal.util.{NoPosition, Statistics} import scala.reflect.io.AbstractFile import scala.tools.asm.ClassWriter import scala.tools.asm.tree.ClassNode @@ -13,7 +13,7 @@ import scala.tools.nsc.backend.jvm.opt._ * Implements late stages of the backend that don't depend on a Global instance, i.e., * optimizations, post-processing and classfile serialization and writing. 
*/ -abstract class PostProcessor extends PerRunInit { +abstract class PostProcessor(statistics: Statistics with BackendStats) extends PerRunInit { self => val bTypes: BTypes @@ -30,7 +30,8 @@ abstract class PostProcessor extends PerRunInit { val bTypesFromClassfile : BTypesFromClassfile { val postProcessor: self.type } = new { val postProcessor: self.type = self } with BTypesFromClassfile // re-initialized per run because it reads compiler settings that might change - lazy val classfileWriter: LazyVar[ClassfileWriter] = perRunLazy(this)(new ClassfileWriter(frontendAccess)) + lazy val classfileWriter: LazyVar[ClassfileWriter] = + perRunLazy(this)(new ClassfileWriter(frontendAccess, statistics)) lazy val generatedClasses = recordPerRunCache(new ListBuffer[GeneratedClass]) @@ -91,7 +92,7 @@ abstract class PostProcessor extends PerRunInit { } def localOptimizations(classNode: ClassNode): Unit = { - BackendStats.timed(BackendStats.methodOptTimer)(localOpt.methodOptimizations(classNode)) + statistics.timed(statistics.methodOptTimer)(localOpt.methodOptimizations(classNode)) } def setInnerClasses(classNode: ClassNode): Unit = { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala index 086946e4e368..db14c1fe683e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala @@ -388,17 +388,6 @@ class AliasingFrame[V <: Value](nLocals: Int, nStack: Int) extends Frame[V](nLoc } } -object AliasingFrame { -// val start1 = AliasingFrame.timer1.start() -// AliasingFrame.timer1.stop(start1) - import scala.reflect.internal.util.Statistics._ - val timer1 = newTimer("t1", "jvm") - val timer2 = newTimer("t2", "jvm") - val timer3 = newTimer("t3", "jvm") - val timers = List(timer1, timer2, timer3) - def reset(): Unit = for (t <- timers) { t.nanos = 0; t.timings = 0 } -} - /** * An analyzer that uses AliasingFrames instead of bare Frames. This can be used when an analysis * needs to track aliases, but doesn't require a more specific Frame subclass. diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 28e6e5dd2430..e687476a7ef6 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -380,12 +380,12 @@ trait ScalaSettings extends AbsScalaSettings descr = description, domain = YstatisticsPhases, default = Some(List("_")) - ).withPostSetHook(_ => Statistics.enabled = true) + ) } + override def YstatisticsEnabled = Ystatistics.value.nonEmpty - def YstatisticsEnabled = Ystatistics.value.nonEmpty val YhotStatistics = BooleanSetting("-Yhot-statistics", "Print hot compiler statistics for all relevant phases") - .withPostSetHook(_ => Statistics.hotEnabled = true) + override def YhotStatisticsEnabled = YhotStatistics.value val YprofileEnabled = BooleanSetting("-Yprofile-enabled", "Enable profiling.") val YprofileDestination = StringSetting("-Yprofile-destination", "file", "where to send profiling output - specify a file, default is to the console.", ""). 
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index a69d4c05cca7..44a9c62b0e59 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -9,9 +9,10 @@ package symtab import classfile.ClassfileParser import java.io.IOException import scala.reflect.internal.MissingRequirementError -import scala.reflect.internal.util.Statistics import scala.reflect.io.{AbstractFile, NoAbstractFile} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} +import scala.reflect.internal.TypesStats +import scala.reflect.internal.util.Statistics /** This class ... * @@ -25,7 +26,9 @@ abstract class SymbolLoaders { val platform: backend.Platform { val symbolTable: SymbolLoaders.this.symbolTable.type } + import symbolTable._ + /** * Required by ClassfileParser. Check documentation in that class for details. */ @@ -36,7 +39,6 @@ abstract class SymbolLoaders { * interface. */ protected def compileLate(srcfile: AbstractFile): Unit - import SymbolLoadersStats._ protected def enterIfNew(owner: Symbol, member: Symbol, completer: SymbolLoader): Symbol = { assert(owner.info.decls.lookup(member.name) == NoSymbol, owner.fullName + "." + member.name) @@ -312,7 +314,7 @@ abstract class SymbolLoaders { protected def description = "class file "+ classfile.toString protected def doComplete(root: Symbol) { - val start = if (Statistics.canEnable) Statistics.startTimer(classReadNanos) else null + val start = if (statistics.canEnable) statistics.startTimer(statistics.classReadNanos) else null classfileParser.parse(classfile, clazz, module) if (root.associatedFile eq NoAbstractFile) { root match { @@ -324,7 +326,7 @@ abstract class SymbolLoaders { debuglog("Not setting associatedFile to %s because %s is a %s".format(classfile, root.name, root.shortSymbolClass)) } } - if (Statistics.canEnable) Statistics.stopTimer(classReadNanos, start) + if (statistics.canEnable) statistics.stopTimer(statistics.classReadNanos, start) } override def sourcefile: Option[AbstractFile] = classfileParser.srcfile } @@ -344,9 +346,4 @@ abstract class SymbolLoaders { /** used from classfile parser to avoid cycles */ var parentsLevel = 0 var pendingLoadActions: List[() => Unit] = Nil -} - -object SymbolLoadersStats { - import scala.reflect.internal.TypesStats.typerNanos - val classReadNanos = Statistics.newSubTimer ("time classfilereading", typerNanos) -} +} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index 4599917e19c5..d791af802241 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -10,10 +10,10 @@ package tools.nsc.transform.patmat import scala.language.postfixOps import scala.collection.mutable -import scala.reflect.internal.util.{NoPosition, Position, Statistics, HashSet} +import scala.reflect.internal.util.{NoPosition, Position, HashSet} trait Logic extends Debugging { - import PatternMatchingStats._ + import global.statistics private def max(xs: Seq[Int]) = if (xs isEmpty) 0 else xs max private def alignedColumns(cols: Seq[Any]): Seq[String] = { @@ -334,7 +334,7 @@ trait Logic extends Debugging { // V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable) // may throw an AnalysisBudget.Exception def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, 
List[Prop]) = { - val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaVarEq) else null + val start = if (statistics.canEnable) statistics.startTimer(statistics.patmatAnaVarEq) else null val vars = new mutable.HashSet[Var] @@ -404,7 +404,7 @@ trait Logic extends Debugging { debug.patmat(s"eqAxioms:\n${eqAxioms.mkString("\n")}") debug.patmat(s"pure:${pure.mkString("\n")}") - if (Statistics.canEnable) Statistics.stopTimer(patmatAnaVarEq, start) + if (statistics.canEnable) statistics.stopTimer(statistics.patmatAnaVarEq, start) (And(eqAxioms: _*), pure) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index 2523afe55098..76da534f0144 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -7,7 +7,6 @@ package scala.tools.nsc.transform.patmat import scala.collection.mutable -import scala.reflect.internal.util.Statistics trait TreeAndTypeAnalysis extends Debugging { import global._ @@ -426,7 +425,6 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT } trait MatchAnalysis extends MatchApproximation { - import PatternMatchingStats._ import global._ import global.definitions._ @@ -450,7 +448,7 @@ trait MatchAnalysis extends MatchApproximation { // thus, the case is unreachable if there is no model for -(-P /\ C), // or, equivalently, P \/ -C, or C => P def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Int] = { - val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaReach) else null + val start = if (statistics.canEnable) statistics.startTimer(statistics.patmatAnaReach) else null // use the same approximator so we share variables, // but need different conditions depending on whether we're conservatively looking for failure or success @@ -499,7 +497,7 @@ trait MatchAnalysis extends MatchApproximation { } } - if (Statistics.canEnable) Statistics.stopTimer(patmatAnaReach, start) + if (statistics.canEnable) statistics.stopTimer(statistics.patmatAnaReach, start) if (reachable) None else Some(caseIndex) } catch { @@ -518,7 +516,7 @@ trait MatchAnalysis extends MatchApproximation { // - back off (to avoid crying exhaustive too often) when: // - there are guards --> // - there are extractor calls (that we can't secretly/soundly) rewrite - val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaExhaust) else null + val start = if (statistics.canEnable) statistics.startTimer(statistics.patmatAnaExhaust) else null var backoff = false val approx = new TreeMakersToPropsIgnoreNullChecks(prevBinder) @@ -572,7 +570,7 @@ trait MatchAnalysis extends MatchApproximation { // since e.g. 
List(_, _) would cover List(1, _) val pruned = CounterExample.prune(counterExamples.sortBy(_.toString)).map(_.toString) - if (Statistics.canEnable) Statistics.stopTimer(patmatAnaExhaust, start) + if (statistics.canEnable) statistics.stopTimer(statistics.patmatAnaExhaust, start) pruned } catch { case ex: AnalysisBudget.Exception => diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 6e19a73d6b40..8f1ff629b204 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -8,14 +8,12 @@ package scala.tools.nsc.transform.patmat import scala.language.postfixOps -import scala.reflect.internal.util.Statistics /** Translate typed Trees that represent pattern matches into the patternmatching IR, defined by TreeMakers. */ trait MatchTranslation { self: PatternMatching => - import PatternMatchingStats._ import global._ import definitions._ import treeInfo.{ Unapplied, unbind } @@ -211,7 +209,7 @@ trait MatchTranslation { debug.patmat("translating "+ cases.mkString("{", "\n", "}")) - val start = if (Statistics.canEnable) Statistics.startTimer(patmatNanos) else null + val start = if (statistics.canEnable) statistics.startTimer(statistics.patmatNanos) else null val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations)) @@ -227,7 +225,7 @@ trait MatchTranslation { // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, matchOwner, defaultOverride) - if (Statistics.canEnable) Statistics.stopTimer(patmatNanos, start) + if (statistics.canEnable) statistics.stopTimer(statistics.patmatNanos, start) combined } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index 9026221cb813..6bee1dd4ece6 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -273,12 +273,13 @@ trait Interface extends ast.TreeDSL { } } -object PatternMatchingStats { - val patmatNanos = Statistics.newTimer ("time spent in patmat", "patmat") - val patmatAnaDPLL = Statistics.newSubTimer (" of which DPLL", patmatNanos) - val patmatCNF = Statistics.newSubTimer (" of which in CNF conversion", patmatNanos) - val patmatCNFSizes = Statistics.newQuantMap[Int, Statistics.Counter](" CNF size counts", "patmat")(Statistics.newCounter("")) - val patmatAnaVarEq = Statistics.newSubTimer (" of which variable equality", patmatNanos) - val patmatAnaExhaust = Statistics.newSubTimer (" of which in exhaustivity", patmatNanos) - val patmatAnaReach = Statistics.newSubTimer (" of which in unreachability", patmatNanos) +trait PatternMatchingStats { + self: Statistics => + val patmatNanos = newTimer ("time spent in patmat", "patmat") + val patmatAnaDPLL = newSubTimer (" of which DPLL", patmatNanos) + val patmatCNF = newSubTimer (" of which in CNF conversion", patmatNanos) + val patmatCNFSizes = newQuantMap[Int, Counter](" CNF size counts", "patmat")(newCounter("")) + val patmatAnaVarEq = newSubTimer (" of which variable equality", patmatNanos) + val patmatAnaExhaust = newSubTimer (" of which in exhaustivity", patmatNanos) + val patmatAnaReach = newSubTimer (" of which in unreachability", 
patmatNanos) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala index 09c7f4961f53..7f3451fe3fd2 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala @@ -7,7 +7,6 @@ package scala.tools.nsc.transform.patmat import scala.collection.mutable.ArrayBuffer -import scala.reflect.internal.util.Statistics import scala.language.postfixOps import scala.collection.mutable import scala.reflect.internal.util.Collections._ @@ -33,8 +32,7 @@ object Lit { /** Solve pattern matcher exhaustivity problem via DPLL. */ trait Solving extends Logic { - - import PatternMatchingStats._ + import global.statistics trait CNF extends PropositionalLogic { @@ -473,7 +471,7 @@ trait Solving extends Logic { debug.patmat(s"DPLL\n${cnfString(clauses)}") - val start = if (Statistics.canEnable) Statistics.startTimer(patmatAnaDPLL) else null + val start = if (statistics.canEnable) statistics.startTimer(statistics.patmatAnaDPLL) else null val satisfiableWithModel: TseitinModel = if (clauses isEmpty) EmptyTseitinModel @@ -509,7 +507,7 @@ trait Solving extends Logic { } } - if (Statistics.canEnable) Statistics.stopTimer(patmatAnaDPLL, start) + if (statistics.canEnable) statistics.stopTimer(statistics.patmatAnaDPLL, start) satisfiableWithModel } diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index b8ef439e03a3..c0f6cad29ff1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -6,7 +6,6 @@ package scala.tools.nsc package typechecker -import scala.reflect.internal.util.Statistics /** The main attribution phase. */ @@ -76,7 +75,7 @@ trait Analyzer extends AnyRef object typerFactory extends { val global: Analyzer.this.global.type = Analyzer.this.global } with SubComponent { - import scala.reflect.internal.TypesStats.typerNanos + import global.statistics val phaseName = "typer" val runsAfter = List[String]() val runsRightAfter = Some("packageobjects") @@ -88,13 +87,13 @@ trait Analyzer extends AnyRef // compiler run). This is good enough for the resident compiler, which was the most affected. undoLog.clear() override def run() { - val start = if (Statistics.canEnable) Statistics.startTimer(typerNanos) else null + val start = if (statistics.canEnable) statistics.startTimer(statistics.typerNanos) else null global.echoPhaseSummary(this) for (unit <- currentRun.units) { applyPhase(unit) undoLog.clear() } - if (Statistics.canEnable) Statistics.stopTimer(typerNanos, start) + if (statistics.canEnable) statistics.stopTimer(statistics.typerNanos, start) } def apply(unit: CompilationUnit) { try { diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 5a44b062cfc1..9e2ddee950e2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -18,6 +18,7 @@ import mutable.{ LinkedHashMap, ListBuffer } import scala.util.matching.Regex import symtab.Flags._ import scala.reflect.internal.util.{TriState, Statistics} +import scala.reflect.internal.TypesStats import scala.language.implicitConversions /** This trait provides methods to find various kinds of implicits. 
@@ -30,7 +31,7 @@ trait Implicits { import global._ import definitions._ - import ImplicitsStats._ + import statistics._ import typingStack.printTyping import typeDebug._ @@ -82,10 +83,10 @@ trait Implicits { // Note that the isInvalidConversionTarget seems to make a lot more sense right here, before all the // work is performed, than at the point where it presently exists. val shouldPrint = printTypings && !context.undetparams.isEmpty - val rawTypeStart = if (Statistics.canEnable) Statistics.startCounter(rawTypeImpl) else null - val findMemberStart = if (Statistics.canEnable) Statistics.startCounter(findMemberImpl) else null - val subtypeStart = if (Statistics.canEnable) Statistics.startCounter(subtypeImpl) else null - val start = if (Statistics.canEnable) Statistics.startTimer(implicitNanos) else null + val rawTypeStart = if (statistics.canEnable) statistics.startCounter(rawTypeImpl) else null + val findMemberStart = if (statistics.canEnable) statistics.startCounter(findMemberImpl) else null + val subtypeStart = if (statistics.canEnable) statistics.startCounter(subtypeImpl) else null + val start = if (statistics.canEnable) statistics.startTimer(implicitNanos) else null if (shouldPrint) typingStack.printTyping(tree, "typing implicit: %s %s".format(tree, context.undetparamsString)) val implicitSearchContext = context.makeImplicit(reportAmbiguous) @@ -100,10 +101,10 @@ trait Implicits { // and then filter out any which *were* inferred and are part of the substitutor in the implicit search result. context.undetparams = ((context.undetparams ++ result.undetparams) filterNot result.subst.from.contains).distinct - if (Statistics.canEnable) Statistics.stopTimer(implicitNanos, start) - if (Statistics.canEnable) Statistics.stopCounter(rawTypeImpl, rawTypeStart) - if (Statistics.canEnable) Statistics.stopCounter(findMemberImpl, findMemberStart) - if (Statistics.canEnable) Statistics.stopCounter(subtypeImpl, subtypeStart) + if (statistics.canEnable) statistics.stopTimer(implicitNanos, start) + if (statistics.canEnable) statistics.stopCounter(rawTypeImpl, rawTypeStart) + if (statistics.canEnable) statistics.stopCounter(findMemberImpl, findMemberStart) + if (statistics.canEnable) statistics.stopCounter(subtypeImpl, subtypeStart) result } @@ -369,7 +370,7 @@ trait Implicits { } import infer._ - if (Statistics.canEnable) Statistics.incCounter(implicitSearchCount) + if (statistics.canEnable) statistics.incCounter(implicitSearchCount) /** The type parameters to instantiate */ val undetParams = if (isView) Nil else context.outer.undetparams @@ -397,12 +398,12 @@ trait Implicits { /** Is implicit info `info1` better than implicit info `info2`? */ def improves(info1: ImplicitInfo, info2: ImplicitInfo) = { - if (Statistics.canEnable) Statistics.incCounter(improvesCount) + if (statistics.canEnable) statistics.incCounter(improvesCount) (info2 == NoImplicitInfo) || (info1 != NoImplicitInfo) && { if (info1.sym.isStatic && info2.sym.isStatic) { improvesCache get ((info1, info2)) match { - case Some(b) => if (Statistics.canEnable) Statistics.incCounter(improvesCachedCount); b + case Some(b) => if (statistics.canEnable) statistics.incCounter(improvesCachedCount); b case None => val result = isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym) improvesCache((info1, info2)) = result @@ -518,14 +519,14 @@ trait Implicits { * This method is performance critical: 5-8% of typechecking time. 
*/ private def matchesPt(tp: Type, pt: Type, undet: List[Symbol]): Boolean = { - val start = if (Statistics.canEnable) Statistics.startTimer(matchesPtNanos) else null + val start = if (statistics.canEnable) statistics.startTimer(matchesPtNanos) else null val result = normSubType(tp, pt) || isView && { pt match { case Function1(arg1, arg2) => matchesPtView(tp, arg1, arg2, undet) case _ => false } } - if (Statistics.canEnable) Statistics.stopTimer(matchesPtNanos, start) + if (statistics.canEnable) statistics.stopTimer(matchesPtNanos, start) result } private def matchesPt(info: ImplicitInfo): Boolean = ( @@ -622,7 +623,7 @@ trait Implicits { } private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean, isLocalToCallsite: Boolean): SearchResult = { - if (Statistics.canEnable) Statistics.incCounter(plausiblyCompatibleImplicits) + if (statistics.canEnable) statistics.incCounter(plausiblyCompatibleImplicits) val ok = ptChecked || matchesPt(info) && { def word = if (isLocalToCallsite) "local " else "" typingLog("match", s"$word$info") @@ -632,7 +633,7 @@ trait Implicits { } private def typedImplicit1(info: ImplicitInfo, isLocalToCallsite: Boolean): SearchResult = { - if (Statistics.canEnable) Statistics.incCounter(matchingImplicits) + if (statistics.canEnable) statistics.incCounter(matchingImplicits) // workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints val isScaladoc = context.tree == EmptyTree @@ -688,7 +689,7 @@ trait Implicits { case None => } - if (Statistics.canEnable) Statistics.incCounter(typedImplicits) + if (statistics.canEnable) statistics.incCounter(typedImplicits) val itree3 = if (isView) treeInfo.dissectApplied(itree2).callee else adapt(itree2, EXPRmode, wildPt) @@ -767,7 +768,7 @@ trait Implicits { fail("typing TypeApply reported errors for the implicit tree: " + err.errMsg) case None => val result = new SearchResult(unsuppressMacroExpansion(itree3), subst, context.undetparams) - if (Statistics.canEnable) Statistics.incCounter(foundImplicits) + if (statistics.canEnable) statistics.incCounter(foundImplicits) typingLog("success", s"inferred value of type $ptInstantiated is $result") result } @@ -1013,11 +1014,11 @@ trait Implicits { * @return map from infos to search results */ def applicableInfos(iss: Infoss, isLocalToCallsite: Boolean): mutable.LinkedHashMap[ImplicitInfo, SearchResult] = { - val start = if (Statistics.canEnable) Statistics.startCounter(subtypeAppInfos) else null + val start = if (statistics.canEnable) statistics.startCounter(subtypeAppInfos) else null val computation = new ImplicitComputation(iss, isLocalToCallsite) { } val applicable = computation.findAll() - if (Statistics.canEnable) Statistics.stopCounter(subtypeAppInfos, start) + if (statistics.canEnable) statistics.stopCounter(subtypeAppInfos, start) applicable } @@ -1146,13 +1147,13 @@ trait Implicits { * such that some part of `tp` has C as one of its superclasses. 
*/ private def implicitsOfExpectedType: Infoss = { - if (Statistics.canEnable) Statistics.incCounter(implicitCacheAccs) + if (statistics.canEnable) statistics.incCounter(implicitCacheAccs) implicitsCache get pt match { case Some(implicitInfoss) => - if (Statistics.canEnable) Statistics.incCounter(implicitCacheHits) + if (statistics.canEnable) statistics.incCounter(implicitCacheHits) implicitInfoss case None => - val start = if (Statistics.canEnable) Statistics.startTimer(subtypeETNanos) else null + val start = if (statistics.canEnable) statistics.startTimer(subtypeETNanos) else null // val implicitInfoss = companionImplicits(pt) val implicitInfoss1 = companionImplicitMap(pt).valuesIterator.toList // val is1 = implicitInfoss.flatten.toSet @@ -1161,7 +1162,7 @@ trait Implicits { // if (!(is2 contains i)) println("!!! implicit infos of "+pt+" differ, new does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1) // for (i <- is2) // if (!(is1 contains i)) println("!!! implicit infos of "+pt+" differ, old does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1) - if (Statistics.canEnable) Statistics.stopTimer(subtypeETNanos, start) + if (statistics.canEnable) statistics.stopTimer(subtypeETNanos, start) implicitsCache(pt) = implicitInfoss1 if (implicitsCache.size >= sizeLimit) implicitsCache -= implicitsCache.keysIterator.next @@ -1388,23 +1389,23 @@ trait Implicits { * If all fails return SearchFailure */ def bestImplicit: SearchResult = { - val stats = Statistics.canEnable - val failstart = if (stats) Statistics.startTimer(inscopeFailNanos) else null - val succstart = if (stats) Statistics.startTimer(inscopeSucceedNanos) else null + val stats = statistics.canEnable + val failstart = if (stats) statistics.startTimer(inscopeFailNanos) else null + val succstart = if (stats) statistics.startTimer(inscopeSucceedNanos) else null var result = searchImplicit(context.implicitss, isLocalToCallsite = true) if (stats) { - if (result.isFailure) Statistics.stopTimer(inscopeFailNanos, failstart) + if (result.isFailure) statistics.stopTimer(inscopeFailNanos, failstart) else { - Statistics.stopTimer(inscopeSucceedNanos, succstart) - Statistics.incCounter(inscopeImplicitHits) + statistics.stopTimer(inscopeSucceedNanos, succstart) + statistics.incCounter(inscopeImplicitHits) } } if (result.isFailure) { - val failstart = if (stats) Statistics.startTimer(oftypeFailNanos) else null - val succstart = if (stats) Statistics.startTimer(oftypeSucceedNanos) else null + val failstart = if (stats) statistics.startTimer(oftypeFailNanos) else null + val succstart = if (stats) statistics.startTimer(oftypeSucceedNanos) else null // scala/bug#6667, never search companions after an ambiguous error in in-scope implicits val wasAmbiguous = result.isAmbiguousFailure @@ -1424,10 +1425,10 @@ trait Implicits { context.reporter ++= previousErrs if (stats) { - if (result.isFailure) Statistics.stopTimer(oftypeFailNanos, failstart) + if (result.isFailure) statistics.stopTimer(oftypeFailNanos, failstart) else { - Statistics.stopTimer(oftypeSucceedNanos, succstart) - Statistics.incCounter(oftypeImplicitHits) + statistics.stopTimer(oftypeSucceedNanos, succstart) + statistics.incCounter(oftypeImplicitHits) } } } @@ -1557,31 +1558,30 @@ trait Implicits { } } -object ImplicitsStats { +trait ImplicitsStats { + self: TypesStats with Statistics => - import scala.reflect.internal.TypesStats._ - - val rawTypeImpl = Statistics.newSubCounter (" of which in implicits", rawTypeCount) - val subtypeImpl = 
Statistics.newSubCounter(" of which in implicit", subtypeCount) - val findMemberImpl = Statistics.newSubCounter(" of which in implicit", findMemberCount) - val subtypeAppInfos = Statistics.newSubCounter(" of which in app impl", subtypeCount) - val implicitSearchCount = Statistics.newCounter ("#implicit searches", "typer") + val rawTypeImpl = newSubCounter (" of which in implicits", rawTypeCount) + val subtypeImpl = newSubCounter(" of which in implicit", subtypeCount) + val findMemberImpl = newSubCounter(" of which in implicit", findMemberCount) + val subtypeAppInfos = newSubCounter(" of which in app impl", subtypeCount) + val implicitSearchCount = newCounter ("#implicit searches", "typer") val plausiblyCompatibleImplicits - = Statistics.newSubCounter(" #plausibly compatible", implicitSearchCount) - val matchingImplicits = Statistics.newSubCounter(" #matching", implicitSearchCount) - val typedImplicits = Statistics.newSubCounter(" #typed", implicitSearchCount) - val foundImplicits = Statistics.newSubCounter(" #found", implicitSearchCount) - val improvesCount = Statistics.newSubCounter("implicit improves tests", implicitSearchCount) - val improvesCachedCount = Statistics.newSubCounter("#implicit improves cached ", implicitSearchCount) - val inscopeImplicitHits = Statistics.newSubCounter("#implicit inscope hits", implicitSearchCount) - val oftypeImplicitHits = Statistics.newSubCounter("#implicit oftype hits ", implicitSearchCount) - val implicitNanos = Statistics.newSubTimer ("time spent in implicits", typerNanos) - val inscopeSucceedNanos = Statistics.newSubTimer (" successful in scope", typerNanos) - val inscopeFailNanos = Statistics.newSubTimer (" failed in scope", typerNanos) - val oftypeSucceedNanos = Statistics.newSubTimer (" successful of type", typerNanos) - val oftypeFailNanos = Statistics.newSubTimer (" failed of type", typerNanos) - val subtypeETNanos = Statistics.newSubTimer (" assembling parts", typerNanos) - val matchesPtNanos = Statistics.newSubTimer (" matchesPT", typerNanos) - val implicitCacheAccs = Statistics.newCounter ("implicit cache accesses", "typer") - val implicitCacheHits = Statistics.newSubCounter("implicit cache hits", implicitCacheAccs) + = newSubCounter(" #plausibly compatible", implicitSearchCount) + val matchingImplicits = newSubCounter(" #matching", implicitSearchCount) + val typedImplicits = newSubCounter(" #typed", implicitSearchCount) + val foundImplicits = newSubCounter(" #found", implicitSearchCount) + val improvesCount = newSubCounter("implicit improves tests", implicitSearchCount) + val improvesCachedCount = newSubCounter("#implicit improves cached ", implicitSearchCount) + val inscopeImplicitHits = newSubCounter("#implicit inscope hits", implicitSearchCount) + val oftypeImplicitHits = newSubCounter("#implicit oftype hits ", implicitSearchCount) + val implicitNanos = newSubTimer ("time spent in implicits", typerNanos) + val inscopeSucceedNanos = newSubTimer (" successful in scope", typerNanos) + val inscopeFailNanos = newSubTimer (" failed in scope", typerNanos) + val oftypeSucceedNanos = newSubTimer (" successful of type", typerNanos) + val oftypeFailNanos = newSubTimer (" failed of type", typerNanos) + val subtypeETNanos = newSubTimer (" assembling parts", typerNanos) + val matchesPtNanos = newSubTimer (" matchesPT", typerNanos) + val implicitCacheAccs = newCounter ("implicit cache accesses", "typer") + val implicitCacheHits = newSubCounter("implicit cache hits", implicitCacheAccs) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala 
b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 27f466690d60..82cdc6b3fadd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -6,6 +6,7 @@ import symtab.Flags._ import scala.reflect.internal.util.ScalaClassLoader import scala.reflect.runtime.ReflectionUtils import scala.reflect.internal.util.Statistics +import scala.reflect.internal.TypesStats import scala.reflect.macros.util._ import scala.util.control.ControlThrowable import scala.reflect.internal.util.ListOfNil @@ -45,7 +46,6 @@ trait Macros extends MacroRuntimes with Traces with Helpers { import global._ import definitions._ import treeInfo.{isRepeatedParamType => _, _} - import MacrosStats._ lazy val fastTrack = new FastTrack[self.type](self) @@ -575,8 +575,8 @@ trait Macros extends MacroRuntimes with Traces with Helpers { if (macroDebugVerbose) println(s"macroExpand: ${summary()}") linkExpandeeAndDesugared(expandee, desugared) - val start = if (Statistics.canEnable) Statistics.startTimer(macroExpandNanos) else null - if (Statistics.canEnable) Statistics.incCounter(macroExpandCount) + val start = if (statistics.canEnable) statistics.startTimer(statistics.macroExpandNanos) else null + if (statistics.canEnable) statistics.incCounter(statistics.macroExpandCount) try { withInfoLevel(nodePrinters.InfoLevel.Quiet) { // verbose printing might cause recursive macro expansions if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) { @@ -609,7 +609,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { } } } finally { - if (Statistics.canEnable) Statistics.stopTimer(macroExpandNanos, start) + if (statistics.canEnable) statistics.stopTimer(statistics.macroExpandNanos, start) } } } @@ -911,10 +911,10 @@ trait Macros extends MacroRuntimes with Traces with Helpers { }.transform(expandee) } -object MacrosStats { - import scala.reflect.internal.TypesStats.typerNanos - val macroExpandCount = Statistics.newCounter ("#macro expansions", "typer") - val macroExpandNanos = Statistics.newSubTimer("time spent in macroExpand", typerNanos) +trait MacrosStats { + self: TypesStats with Statistics => + val macroExpandCount = newCounter ("#macro expansions", "typer") + val macroExpandNanos = newSubTimer("time spent in macroExpand", typerNanos) } class Fingerprint private[Fingerprint](val value: Int) extends AnyVal { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 0c81eb9b391f..57e46d95d7e8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -15,6 +15,7 @@ package typechecker import scala.collection.{immutable, mutable} import scala.reflect.internal.util.{ListOfNil, Statistics} +import scala.reflect.internal.TypesStats import mutable.ListBuffer import symtab.Flags._ import Mode._ @@ -32,7 +33,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper import global._ import definitions._ - import TypersStats._ + import statistics._ final def forArgMode(fun: Tree, mode: Mode) = if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode else mode @@ -671,15 +672,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def silent[T](op: Typer => T, reportAmbiguousErrors: Boolean = context.ambiguousErrors, newtree: Tree = context.tree): SilentResult[T] = { - val rawTypeStart = if (Statistics.canEnable) Statistics.startCounter(rawTypeFailed) 
else null - val findMemberStart = if (Statistics.canEnable) Statistics.startCounter(findMemberFailed) else null - val subtypeStart = if (Statistics.canEnable) Statistics.startCounter(subtypeFailed) else null - val failedSilentStart = if (Statistics.canEnable) Statistics.startTimer(failedSilentNanos) else null + val rawTypeStart = if (statistics.canEnable) statistics.startCounter(rawTypeFailed) else null + val findMemberStart = if (statistics.canEnable) statistics.startCounter(findMemberFailed) else null + val subtypeStart = if (statistics.canEnable) statistics.startCounter(subtypeFailed) else null + val failedSilentStart = if (statistics.canEnable) statistics.startTimer(failedSilentNanos) else null def stopStats() = { - if (Statistics.canEnable) Statistics.stopCounter(rawTypeFailed, rawTypeStart) - if (Statistics.canEnable) Statistics.stopCounter(findMemberFailed, findMemberStart) - if (Statistics.canEnable) Statistics.stopCounter(subtypeFailed, subtypeStart) - if (Statistics.canEnable) Statistics.stopTimer(failedSilentNanos, failedSilentStart) + if (statistics.canEnable) statistics.stopCounter(rawTypeFailed, rawTypeStart) + if (statistics.canEnable) statistics.stopCounter(findMemberFailed, findMemberStart) + if (statistics.canEnable) statistics.stopCounter(subtypeFailed, subtypeStart) + if (statistics.canEnable) statistics.stopTimer(failedSilentNanos, failedSilentStart) } @inline def wrapResult(reporter: ContextReporter, result: T) = if (reporter.hasErrors) { @@ -3885,9 +3886,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def isCapturedExistential(sym: Symbol) = ( (sym hasAllFlags EXISTENTIAL | CAPTURED) && { - val start = if (Statistics.canEnable) Statistics.startTimer(isReferencedNanos) else null + val start = if (statistics.canEnable) statistics.startTimer(isReferencedNanos) else null try !isReferencedFrom(context, sym) - finally if (Statistics.canEnable) Statistics.stopTimer(isReferencedNanos, start) + finally if (statistics.canEnable) statistics.stopTimer(isReferencedNanos, start) } ) @@ -4580,10 +4581,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * insert an implicit conversion. */ def tryTypedApply(fun: Tree, args: List[Tree]): Tree = { - val start = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null + val start = if (statistics.canEnable) statistics.startTimer(failedApplyNanos) else null def onError(typeErrors: Seq[AbsTypeError], warnings: Seq[(Position, String)]): Tree = { - if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, start) + if (statistics.canEnable) statistics.stopTimer(failedApplyNanos, start) // If the problem is with raw types, convert to existentials and try again. 
// See #4712 for a case where this situation arises, @@ -4644,8 +4645,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // TODO: replace `fun.symbol.isStable` by `treeInfo.isStableIdentifierPattern(fun)` val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable val funpt = if (mode.inPatternMode) pt else WildcardType - val appStart = if (Statistics.canEnable) Statistics.startTimer(failedApplyNanos) else null - val opeqStart = if (Statistics.canEnable) Statistics.startTimer(failedOpEqNanos) else null + val appStart = if (statistics.canEnable) statistics.startTimer(failedApplyNanos) else null + val opeqStart = if (statistics.canEnable) statistics.startTimer(failedOpEqNanos) else null def isConversionCandidate(qual: Tree, name: Name): Boolean = !mode.inPatternMode && nme.isOpAssignmentName(TermName(name.decode)) && !qual.exists(_.isErroneous) @@ -4675,7 +4676,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case Select(qual, name) if isConversionCandidate(qual, name) => val qual1 = typedQualifier(qual) if (treeInfo.isVariableOrGetter(qual1)) { - if (Statistics.canEnable) Statistics.stopTimer(failedOpEqNanos, opeqStart) + if (statistics.canEnable) statistics.stopTimer(failedOpEqNanos, opeqStart) val erred = qual1.exists(_.isErroneous) || args.exists(_.isErroneous) if (erred) reportError(error) else { val convo = convertToAssignment(fun, qual1, name, args) @@ -4687,7 +4688,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } } else { - if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart) + if (statistics.canEnable) statistics.stopTimer(failedApplyNanos, appStart) val Apply(Select(qual2, _), args2) = tree val erred = qual2.exists(_.isErroneous) || args2.exists(_.isErroneous) reportError { @@ -4695,7 +4696,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } case _ => - if (Statistics.canEnable) Statistics.stopTimer(failedApplyNanos, appStart) + if (statistics.canEnable) statistics.stopTimer(failedApplyNanos, appStart) reportError(error) } val silentResult = silent( @@ -4706,7 +4707,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper silentResult match { case SilentResultValue(fun1) => val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1 - if (Statistics.canEnable) Statistics.incCounter(typedApplyCount) + if (statistics.canEnable) statistics.incCounter(typedApplyCount) val noSecondTry = ( isPastTyper || context.inSecondTry @@ -4998,7 +4999,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (name.isTypeName) typedSelect(tree, typedTypeSelectionQualifier(tree.qualifier, WildcardType), name) else { - if (Statistics.canEnable) Statistics.incCounter(typedSelectCount) + if (statistics.canEnable) statistics.incCounter(typedSelectCount) val qualTyped = checkDead(typedQualifier(qual, mode)) val tree1 = typedSelect(tree, qualTyped, name) @@ -5086,7 +5087,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedIdentOrWildcard(tree: Ident) = { val name = tree.name - if (Statistics.canEnable) Statistics.incCounter(typedIdentCount) + if (statistics.canEnable) statistics.incCounter(typedIdentCount) if ((name == nme.WILDCARD && mode.typingPatternNotConstructor) || (name == tpnme.WILDCARD && mode.inTypeMode)) tree setType makeFullyDefined(pt) @@ -5550,10 +5551,10 @@ trait Typers extends 
Adaptations with Tags with TypersTracking with PatternTyper else typedInternal(tree, mode, pt) ) - val startByType = if (Statistics.hotEnabled) Statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null - if (Statistics.hotEnabled) Statistics.incCounter(visitsByType, tree.getClass) + val startByType = if (statistics.hotEnabled) statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null + if (statistics.hotEnabled) statistics.incCounter(visitsByType, tree.getClass) try body - finally if (Statistics.hotEnabled) Statistics.popTimer(byTypeStack, startByType) + finally if (statistics.hotEnabled) statistics.popTimer(byTypeStack, startByType) } private def typedInternal(tree: Tree, mode: Mode, pt: Type): Tree = { @@ -5795,19 +5796,19 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } -object TypersStats { - import scala.reflect.internal.TypesStats._ - val typedIdentCount = Statistics.newCounter("#typechecked identifiers") - val typedSelectCount = Statistics.newCounter("#typechecked selections") - val typedApplyCount = Statistics.newCounter("#typechecked applications") - val rawTypeFailed = Statistics.newSubCounter (" of which in failed", rawTypeCount) - val subtypeFailed = Statistics.newSubCounter(" of which in failed", subtypeCount) - val findMemberFailed = Statistics.newSubCounter(" of which in failed", findMemberCount) - val failedSilentNanos = Statistics.newSubTimer("time spent in failed", typerNanos) - val failedApplyNanos = Statistics.newSubTimer(" failed apply", typerNanos) - val failedOpEqNanos = Statistics.newSubTimer(" failed op=", typerNanos) - val isReferencedNanos = Statistics.newSubTimer("time spent ref scanning", typerNanos) - val visitsByType = Statistics.newByClass("#visits by tree node", "typer")(Statistics.newCounter("")) - val byTypeNanos = Statistics.newByClass("time spent by tree node", "typer")(Statistics.newStackableTimer("", typerNanos)) - val byTypeStack = Statistics.newTimerStack() +trait TypersStats { + self: TypesStats with Statistics => + val typedIdentCount = newCounter("#typechecked identifiers") + val typedSelectCount = newCounter("#typechecked selections") + val typedApplyCount = newCounter("#typechecked applications") + val rawTypeFailed = newSubCounter (" of which in failed", rawTypeCount) + val subtypeFailed = newSubCounter(" of which in failed", subtypeCount) + val findMemberFailed = newSubCounter(" of which in failed", findMemberCount) + val failedSilentNanos = newSubTimer("time spent in failed", typerNanos) + val failedApplyNanos = newSubTimer(" failed apply", typerNanos) + val failedOpEqNanos = newSubTimer(" failed op=", typerNanos) + val isReferencedNanos = newSubTimer("time spent ref scanning", typerNanos) + val visitsByType = newByClass("#visits by tree node", "typer")(newCounter("")) + val byTypeNanos = newByClass("time spent by tree node", "typer")(newStackableTimer("", typerNanos)) + val byTypeStack = newTimerStack() } diff --git a/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala b/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala deleted file mode 100644 index b1a060ae5df0..000000000000 --- a/src/compiler/scala/tools/nsc/util/StatisticsInfo.scala +++ /dev/null @@ -1,40 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky - */ - -package scala.tools.nsc -package util - -import scala.reflect.internal.util.Statistics - -abstract class StatisticsInfo { - - val global: Global - import global._ - import 
scala.reflect.internal.TreesStats.nodeByType - - val retainedCount = Statistics.newCounter("#retained tree nodes") - val retainedByType = Statistics.newByClass("#retained tree nodes by type")(Statistics.newCounter("")) - - def print(phase: Phase) = if (settings.Ystatistics contains phase.name) { - inform("*** Cumulative statistics at phase " + phase) - - if (settings.YhotStatistics.value) { - // High overhead, only enable retained stats under hot stats - retainedCount.value = 0 - for (c <- retainedByType.keys) - retainedByType(c).value = 0 - for (u <- currentRun.units; t <- u.body) { - retainedCount.value += 1 - retainedByType(t.getClass).value += 1 - } - } - - val quants = - if (phase.name == "parser") Seq(treeNodeCount, nodeByType, retainedCount, retainedByType) - else Statistics.allQuantities - - for (q <- quants if q.showAt(phase.name)) inform(q.line) - } -} diff --git a/src/reflect/mima-filters/2.12.0.backwards.excludes b/src/reflect/mima-filters/2.12.0.backwards.excludes index 579dd33644ca..c476274834f4 100644 --- a/src/reflect/mima-filters/2.12.0.backwards.excludes +++ b/src/reflect/mima-filters/2.12.0.backwards.excludes @@ -6,3 +6,6 @@ ProblemFilters.exclude[IncompatibleMethTypeProblem]("scala.reflect.runtime.Symbo ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedOps#SynchronizedBaseTypeSeq.lateMap") ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.scala$reflect$runtime$SynchronizedSymbols$SynchronizedSymbol$$super$exists") + +ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.IOStats") +ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.IOStats$") diff --git a/src/reflect/mima-filters/2.12.0.forwards.excludes b/src/reflect/mima-filters/2.12.0.forwards.excludes index 0f4142213f96..d3bea5e2cf9a 100644 --- a/src/reflect/mima-filters/2.12.0.forwards.excludes +++ b/src/reflect/mima-filters/2.12.0.forwards.excludes @@ -8,6 +8,7 @@ ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.PlainNioFile") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedOps.newMappedBaseTypeSeq") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.JavaUniverse.newMappedBaseTypeSeq") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.JavaUniverse.statistics") ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.FileZipArchive$LazyEntry") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.closeZipFile") diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala index 67ebb90f7806..7dcc2ebf0ecb 100644 --- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala @@ -28,7 +28,7 @@ import util.Statistics trait BaseTypeSeqs { this: SymbolTable => import definitions._ - import BaseTypeSeqsStats._ + import statistics._ protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) = new BaseTypeSeq(parents, elems) @@ -42,8 +42,8 @@ trait BaseTypeSeqs { */ class BaseTypeSeq protected[reflect] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) { self => - if (Statistics.canEnable) Statistics.incCounter(baseTypeSeqCount) - if (Statistics.canEnable) Statistics.incCounter(baseTypeSeqLenTotal, elems.length) + if (statistics.canEnable) statistics.incCounter(baseTypeSeqCount) + if (statistics.canEnable) 
statistics.incCounter(baseTypeSeqLenTotal, elems.length) private[this] val typeSymbols = { val tmp = new Array[Int](elems.length) var i = 0 @@ -171,7 +171,7 @@ trait BaseTypeSeqs { /** A marker object for a base type sequence that's no yet computed. * used to catch inheritance cycles */ - val undetBaseTypeSeq: BaseTypeSeq = newBaseTypeSeq(List(), Array()) + lazy val undetBaseTypeSeq: BaseTypeSeq = newBaseTypeSeq(List(), Array()) /** Create a base type sequence consisting of a single type */ def baseTypeSingletonSeq(tp: Type): BaseTypeSeq = newBaseTypeSeq(List(), Array(tp)) @@ -265,7 +265,8 @@ trait BaseTypeSeqs { val CyclicInheritance = new Throwable } -object BaseTypeSeqsStats { - val baseTypeSeqCount = Statistics.newCounter("#base type seqs") - val baseTypeSeqLenTotal = Statistics.newRelCounter("avg base type seq length", baseTypeSeqCount) +trait BaseTypeSeqsStats { + self: Statistics => + val baseTypeSeqCount = newCounter("#base type seqs") + val baseTypeSeqLenTotal = newRelCounter("avg base type seq length", baseTypeSeqCount) } diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 9636a84b08f0..f8220acf99e5 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -51,6 +51,15 @@ abstract class SymbolTable extends macros.Universe val gen = new InternalTreeGen { val global: SymbolTable.this.type = SymbolTable.this } + trait ReflectStats extends BaseTypeSeqsStats + with TypesStats + with SymbolTableStats + with TreesStats + with SymbolsStats { self: Statistics => } + + /** Some statistics (normally disabled) set with -Ystatistics */ + val statistics: Statistics with ReflectStats + def log(msg: => AnyRef): Unit protected def elapsedMessage(msg: String, start: Long) = @@ -178,8 +187,8 @@ abstract class SymbolTable extends macros.Universe final def atPhaseStack: List[Phase] = List.tabulate(phStackIndex)(i => phStack(i)) final def phase: Phase = { - if (Statistics.canEnable) - Statistics.incCounter(SymbolTableStats.phaseCounter) + if (statistics.canEnable) + statistics.incCounter(statistics.phaseCounter) ph } @@ -432,6 +441,11 @@ abstract class SymbolTable extends macros.Universe implicit val StringContextStripMarginOps: StringContext => StringContextStripMarginOps = util.StringContextStripMarginOps } -object SymbolTableStats { - val phaseCounter = Statistics.newCounter("#phase calls") +trait SymbolTableStats { + self: TypesStats with Statistics => + + val phaseCounter = newCounter("#phase calls") + // Defined here because `SymbolLoaders` is defined in `scala.tools.nsc` + // and only has access to the `statistics` definition from `scala.reflect`. 
+ val classReadNanos = newSubTimer("time classfilereading", typerNanos) } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index c9300a9a78f9..6e72a62a1d06 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -17,9 +17,10 @@ import Variance._ trait Symbols extends api.Symbols { self: SymbolTable => import definitions._ - import SymbolsStats._ + import statistics._ protected var ids = 0 + def getCurrentSymbolIdCount: Int = ids protected def nextId() = { ids += 1; ids } @@ -766,7 +767,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } final def flags: Long = { - if (Statistics.canEnable) Statistics.incCounter(flagsCount) + if (statistics.canEnable) statistics.incCounter(flagsCount) val fs = _rawflags & phase.flagMask (fs | ((fs & LateFlags) >>> LateShift)) & ~((fs & AntiFlags) >>> AntiShift) } @@ -1196,7 +1197,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * `assertOwner` aborts compilation immediately if called on NoSymbol. */ def owner: Symbol = { - if (Statistics.canEnable) Statistics.incCounter(ownerCount) + if (statistics.canEnable) statistics.incCounter(ownerCount) rawowner } final def safeOwner: Symbol = if (this eq NoSymbol) NoSymbol else owner @@ -2765,7 +2766,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => private[this] var _rawname: TermName = initName def rawname = _rawname def name = { - if (Statistics.canEnable) Statistics.incCounter(nameCount) + if (statistics.canEnable) statistics.incCounter(nameCount) _rawname } override def name_=(name: Name) { @@ -2899,13 +2900,13 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def moduleClass = referenced override def owner = { - if (Statistics.canEnable) Statistics.incCounter(ownerCount) + if (statistics.canEnable) statistics.incCounter(ownerCount) // a non-static module symbol gets the METHOD flag in uncurry's info transform -- see isModuleNotMethod if (!isMethod && needsFlatClasses) rawowner.owner else rawowner } override def name: TermName = { - if (Statistics.canEnable) Statistics.incCounter(nameCount) + if (statistics.canEnable) statistics.incCounter(nameCount) if (!isMethod && needsFlatClasses) { if (flatname eq null) flatname = nme.flattenedName(rawowner.name, rawname) @@ -3037,7 +3038,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def rawname = _rawname def name = { - if (Statistics.canEnable) Statistics.incCounter(nameCount) + if (statistics.canEnable) statistics.incCounter(nameCount) _rawname } final def asNameType(n: Name) = n.toTypeName @@ -3164,7 +3165,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * info for T in Test1 should be >: Nothing <: Test3[_] */ - if (Statistics.canEnable) Statistics.incCounter(typeSymbolCount) + if (statistics.canEnable) statistics.incCounter(typeSymbolCount) } implicit val TypeSymbolTag = ClassTag[TypeSymbol](classOf[TypeSymbol]) @@ -3324,12 +3325,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => } override def owner: Symbol = { - if (Statistics.canEnable) Statistics.incCounter(ownerCount) + if (statistics.canEnable) statistics.incCounter(ownerCount) if (needsFlatClasses) rawowner.owner else rawowner } override def name: TypeName = { - if (Statistics.canEnable) Statistics.incCounter(nameCount) + if (statistics.canEnable) statistics.incCounter(nameCount) if (needsFlatClasses) { if (flatname eq null) flatname = tpnme.flattenedName(rawowner.name, rawname) @@ -3385,7 
+3386,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => else super.toString ) - if (Statistics.canEnable) Statistics.incCounter(classSymbolCount) + if (statistics.canEnable) statistics.incCounter(classSymbolCount) } implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol]) @@ -3719,12 +3720,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => case _ => true } - -// -------------- Statistics -------------------------------------------------------- - - Statistics.newView("#symbols")(ids) - - // -------------- Completion -------------------------------------------------------- // is used to differentiate levels of thread-safety in `Symbol.isThreadsafe` @@ -3743,10 +3738,13 @@ trait Symbols extends api.Symbols { self: SymbolTable => def markAllCompleted(syms: Symbol*): Unit = forEachRelevantSymbols(syms, _.markAllCompleted) } -object SymbolsStats { - val typeSymbolCount = Statistics.newCounter("#type symbols") - val classSymbolCount = Statistics.newCounter("#class symbols") - val flagsCount = Statistics.newCounter("#flags ops") - val ownerCount = Statistics.newCounter("#owner ops") - val nameCount = Statistics.newCounter("#name ops") +trait SymbolsStats { + self: Statistics => + val symbolTable: SymbolTable + val symbolsCount = newView("#symbols")(symbolTable.getCurrentSymbolIdCount) + val typeSymbolCount = newCounter("#type symbols") + val classSymbolCount = newCounter("#class symbols") + val flagsCount = newCounter("#flags ops") + val ownerCount = newCounter("#owner ops") + val nameCount = newCounter("#name ops") } diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 59e936f03992..bae5d438356a 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -38,7 +38,7 @@ trait Trees extends api.Trees { val id = nodeCount // TODO: add to attachment? 
nodeCount += 1 - if (Statistics.hotEnabled) Statistics.incCounter(TreesStats.nodeByType, getClass) + if (statistics.hotEnabled) statistics.incCounter(statistics.nodeByType, getClass) final override def pos: Position = rawatt.pos @@ -1914,11 +1914,13 @@ trait Trees extends api.Trees { implicit val UnApplyTag = ClassTag[UnApply](classOf[UnApply]) implicit val ValDefTag = ClassTag[ValDef](classOf[ValDef]) implicit val ValOrDefDefTag = ClassTag[ValOrDefDef](classOf[ValOrDefDef]) - - val treeNodeCount = Statistics.newView("#created tree nodes")(nodeCount) } -object TreesStats { - // statistics - val nodeByType = Statistics.newByClass("#created tree nodes by type")(Statistics.newCounter("")) +trait TreesStats { + self: Statistics => + val symbolTable: SymbolTable + val treeNodeCount = newView("#created tree nodes")(symbolTable.nodeCount) + val nodeByType = newByClass("#created tree nodes by type")(newCounter("")) + val retainedCount = newCounter("#retained tree nodes") + val retainedByType = newByClass("#retained tree nodes by type")(newCounter("")) } diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 4510b1dbcf0c..d1f7e2573836 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -86,7 +86,7 @@ trait Types with util.Collections { self: SymbolTable => import definitions._ - import TypesStats._ + import statistics._ private var explainSwitch = false private final val emptySymbolSet = immutable.Set.empty[Symbol] @@ -680,7 +680,7 @@ trait Types * = Int */ def asSeenFrom(pre: Type, clazz: Symbol): Type = { - val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, asSeenFromNanos) else null + val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, asSeenFromNanos) else null try { val trivial = ( this.isTrivial @@ -696,7 +696,7 @@ trait Types if (m.capturedSkolems.isEmpty) tp1 else deriveType(m.capturedSkolems, _.cloneSymbol setFlag CAPTURED)(tp1) } - } finally if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) + } finally if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) } /** The info of `sym`, seen as a member of this type. @@ -802,7 +802,7 @@ trait Types /** Is this type a subtype of that type? */ def <:<(that: Type): Boolean = { - if (Statistics.canEnable) stat_<:<(that) + if (statistics.canEnable) stat_<:<(that) else { (this eq that) || (if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that) @@ -834,26 +834,26 @@ trait Types }) def stat_<:<(that: Type): Boolean = { - if (Statistics.canEnable) Statistics.incCounter(subtypeCount) - val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, subtypeNanos) else null + if (statistics.canEnable) statistics.incCounter(subtypeCount) + val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, subtypeNanos) else null val result = (this eq that) || (if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that) else isSubType(this, that)) - if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) + if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) result } /** Is this type a weak subtype of that type? True also for numeric types, i.e. Int weak_<:< Long. 
*/ def weak_<:<(that: Type): Boolean = { - if (Statistics.canEnable) Statistics.incCounter(subtypeCount) - val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, subtypeNanos) else null + if (statistics.canEnable) statistics.incCounter(subtypeCount) + val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, subtypeNanos) else null val result = ((this eq that) || (if (explainSwitch) explain("weak_<:", isWeakSubType, this, that) else isWeakSubType(this, that))) - if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) + if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) result } @@ -1097,7 +1097,7 @@ trait Types override def isTrivial = false override def widen: Type = underlying.widen override def baseTypeSeq: BaseTypeSeq = { - if (Statistics.canEnable) Statistics.incCounter(singletonBaseTypeSeqCount) + if (statistics.canEnable) statistics.incCounter(singletonBaseTypeSeqCount) underlying.baseTypeSeq prepend this } override def isHigherKinded = false // singleton type classifies objects, thus must be kind * @@ -1500,8 +1500,8 @@ trait Types val bts = copyRefinedType(tpe.asInstanceOf[RefinedType], tpe.parents map varToParam, varToParam mapOver tpe.decls).baseTypeSeq tpe.baseTypeSeqCache = bts lateMap paramToVar } else { - if (Statistics.canEnable) Statistics.incCounter(compoundBaseTypeSeqCount) - val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null + if (statistics.canEnable) statistics.incCounter(compoundBaseTypeSeqCount) + val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null try { tpe.baseTypeSeqCache = undetBaseTypeSeq tpe.baseTypeSeqCache = @@ -1510,7 +1510,7 @@ trait Types else compoundBaseTypeSeq(tpe) } finally { - if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) + if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) } // [Martin] suppressing memoization solves the problem with "same type after erasure" errors // when compiling with @@ -1577,13 +1577,13 @@ trait Types else { tpe.baseClassesPeriod = currentPeriod if (!isValidForBaseClasses(period)) { - val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseClassesNanos) else null + val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, baseClassesNanos) else null try { tpe.baseClassesCache = null tpe.baseClassesCache = tpe.memo(computeBaseClasses(tpe))(tpe.typeSymbol :: _.baseClasses.tail) } finally { - if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) + if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) } } } @@ -2469,13 +2469,13 @@ trait Types if (period != currentPeriod) { tpe.baseTypeSeqPeriod = currentPeriod if (!isValidForBaseClasses(period)) { - if (Statistics.canEnable) Statistics.incCounter(typerefBaseTypeSeqCount) - val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null + if (statistics.canEnable) statistics.incCounter(typerefBaseTypeSeqCount) + val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null try { tpe.baseTypeSeqCache = undetBaseTypeSeq tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl } finally { - if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) + if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) } } } @@ -3805,8 +3805,10 @@ trait Types private var uniques: util.WeakHashSet[Type] = _ private var uniqueRunId = NoRunId + final def 
howManyUniqueTypes: Int = if (uniques == null) 0 else uniques.size + protected def unique[T <: Type](tp: T): T = { - if (Statistics.canEnable) Statistics.incCounter(rawTypeCount) + if (statistics.canEnable) statistics.incCounter(rawTypeCount) if (uniqueRunId != currentRunId) { uniques = util.WeakHashSet[Type](initialUniquesCapacity) // JZ: We used to register this as a perRunCache so it would be cleared eagerly at @@ -4815,11 +4817,6 @@ trait Types implicit val TypeBoundsTag = ClassTag[TypeBounds](classOf[TypeBounds]) implicit val TypeRefTag = ClassTag[TypeRef](classOf[TypeRef]) implicit val TypeTagg = ClassTag[Type](classOf[Type]) - -// -------------- Statistics -------------------------------------------------------- - - Statistics.newView("#unique types") { if (uniques == null) 0 else uniques.size } - } object TypeConstants { @@ -4828,35 +4825,28 @@ object TypeConstants { final val LogVolatileThreshold = DefaultLogThreshhold } -object TypesStats { - import BaseTypeSeqsStats._ - val rawTypeCount = Statistics.newCounter ("#raw type creations") - val subtypeCount = Statistics.newCounter ("#subtype ops") - val sametypeCount = Statistics.newCounter ("#sametype ops") - val lubCount = Statistics.newCounter ("#toplevel lubs/glbs") - val nestedLubCount = Statistics.newCounter ("#all lubs/glbs") - val findMemberCount = Statistics.newCounter ("#findMember ops") - val findMembersCount = Statistics.newCounter ("#findMembers ops") - val noMemberCount = Statistics.newSubCounter(" of which not found", findMemberCount) - val multMemberCount = Statistics.newSubCounter(" of which multiple overloaded", findMemberCount) - val typerNanos = Statistics.newTimer ("time spent typechecking", "typer") - val lubNanos = Statistics.newStackableTimer("time spent in lubs", typerNanos) - val subtypeNanos = Statistics.newStackableTimer("time spent in <:<", typerNanos) - val findMemberNanos = Statistics.newStackableTimer("time spent in findmember", typerNanos) - val findMembersNanos = Statistics.newStackableTimer("time spent in findmembers", typerNanos) - val asSeenFromNanos = Statistics.newStackableTimer("time spent in asSeenFrom", typerNanos) - val baseTypeSeqNanos = Statistics.newStackableTimer("time spent in baseTypeSeq", typerNanos) - val baseClassesNanos = Statistics.newStackableTimer("time spent in baseClasses", typerNanos) - val compoundBaseTypeSeqCount = Statistics.newSubCounter(" of which for compound types", baseTypeSeqCount) - val typerefBaseTypeSeqCount = Statistics.newSubCounter(" of which for typerefs", baseTypeSeqCount) - val singletonBaseTypeSeqCount = Statistics.newSubCounter(" of which for singletons", baseTypeSeqCount) - val typeOpsStack = Statistics.newTimerStack() - - /* Commented out, because right now this does not inline, so creates a closure which will distort statistics - @inline final def timedTypeOp[T](c: Statistics.StackableTimer)(op: => T): T = { - val start = Statistics.pushTimer(typeOpsStack, c) - try op - finally - } - */ +trait TypesStats { + self: BaseTypeSeqsStats with Statistics => + val uniqueTypesView = newView ("#unique types")(symbolTable.howManyUniqueTypes) + val rawTypeCount = newCounter ("#raw type creations") + val subtypeCount = newCounter ("#subtype ops") + val sametypeCount = newCounter ("#sametype ops") + val lubCount = newCounter ("#toplevel lubs/glbs") + val nestedLubCount = newCounter ("#all lubs/glbs") + val findMemberCount = newCounter ("#findMember ops") + val findMembersCount = newCounter ("#findMembers ops") + val noMemberCount = newSubCounter(" of which not 
found", findMemberCount) + val multMemberCount = newSubCounter(" of which multiple overloaded", findMemberCount) + val typerNanos = newTimer ("time spent typechecking", "typer") + val lubNanos = newStackableTimer("time spent in lubs", typerNanos) + val subtypeNanos = newStackableTimer("time spent in <:<", typerNanos) + val findMemberNanos = newStackableTimer("time spent in findmember", typerNanos) + val findMembersNanos = newStackableTimer("time spent in findmembers", typerNanos) + val asSeenFromNanos = newStackableTimer("time spent in asSeenFrom", typerNanos) + val baseTypeSeqNanos = newStackableTimer("time spent in baseTypeSeq", typerNanos) + val baseClassesNanos = newStackableTimer("time spent in baseClasses", typerNanos) + val compoundBaseTypeSeqCount = newSubCounter(" of which for compound types", baseTypeSeqCount) + val typerefBaseTypeSeqCount = newSubCounter(" of which for typerefs", baseTypeSeqCount) + val singletonBaseTypeSeqCount = newSubCounter(" of which for singletons", baseTypeSeqCount) + val typeOpsStack = newTimerStack() } diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index ab933ae61709..d9c174e9f2d0 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -56,6 +56,11 @@ abstract class MutableSettings extends AbsSettings { def YpartialUnification: BooleanSetting def Yvirtpatmat: BooleanSetting + // Define them returning a `Boolean` to avoid breaking bincompat change + // TODO: Add these fields typed as `BooleanSetting` for 2.13.x + def YhotStatisticsEnabled: Boolean = false + def YstatisticsEnabled: Boolean = false + def Yrecursion: IntSetting def maxClassfileName: IntSetting diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala index 4711dc2961ea..29f0dd1f38f4 100644 --- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala +++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala @@ -6,11 +6,10 @@ package scala.reflect.internal package tpe import Flags._ -import util.Statistics -import TypesStats._ trait FindMembers { this: SymbolTable => + import statistics._ /** Implementation of `Type#{findMember, findMembers}` */ private[internal] abstract class FindMemberBase[T](tpe: Type, name: Name, excludedFlags: Long, requiredFlags: Long) { @@ -43,10 +42,10 @@ trait FindMembers { // Main entry point def apply(): T = { - if (Statistics.canEnable) Statistics.incCounter(findMemberCount) - val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, findMemberNanos) else null + if (statistics.canEnable) statistics.incCounter(findMemberCount) + val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, findMemberNanos) else null try searchConcreteThenDeferred - finally if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) + finally if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) } protected def result: T @@ -276,11 +275,11 @@ trait FindMembers { // Assemble the result from the hand-rolled ListBuffer protected def result: Symbol = if (members eq null) { if (member0 == NoSymbol) { - if (Statistics.canEnable) Statistics.incCounter(noMemberCount) + if (statistics.canEnable) statistics.incCounter(noMemberCount) NoSymbol } else member0 } else { - if (Statistics.canEnable) Statistics.incCounter(multMemberCount) + if (statistics.canEnable) 
statistics.incCounter(multMemberCount) lastM.tl = Nil initBaseClasses.head.newOverloaded(tpe, members) } diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index 93edab99b6a8..1aafde5d6076 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -5,13 +5,13 @@ package tpe import scala.collection.mutable import scala.annotation.tailrec -import util.Statistics import Variance._ private[internal] trait GlbLubs { self: SymbolTable => + import definitions._ - import TypesStats._ + import statistics._ private final val printLubs = scala.sys.props contains "scalac.debug.lub" private final val strictInference = settings.strictInference @@ -254,8 +254,8 @@ private[internal] trait GlbLubs { case Nil => NothingTpe case t :: Nil => t case _ => - if (Statistics.canEnable) Statistics.incCounter(lubCount) - val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null + if (statistics.canEnable) statistics.incCounter(lubCount) + val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, lubNanos) else null try { val res = lub(ts, lubDepth(ts)) // If the number of unapplied type parameters in all incoming @@ -273,7 +273,7 @@ private[internal] trait GlbLubs { finally { lubResults.clear() glbResults.clear() - if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) + if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) } } @@ -396,7 +396,7 @@ private[internal] trait GlbLubs { indent = indent + " " assert(indent.length <= 100) } - if (Statistics.canEnable) Statistics.incCounter(nestedLubCount) + if (statistics.canEnable) statistics.incCounter(nestedLubCount) val res = lub0(ts) if (printLubs) { indent = indent stripSuffix " " @@ -421,14 +421,14 @@ private[internal] trait GlbLubs { case List() => AnyTpe case List(t) => t case ts0 => - if (Statistics.canEnable) Statistics.incCounter(lubCount) - val start = if (Statistics.canEnable) Statistics.pushTimer(typeOpsStack, lubNanos) else null + if (statistics.canEnable) statistics.incCounter(lubCount) + val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, lubNanos) else null try { glbNorm(ts0, lubDepth(ts0)) } finally { lubResults.clear() glbResults.clear() - if (Statistics.canEnable) Statistics.popTimer(typeOpsStack, start) + if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) } } @@ -542,7 +542,7 @@ private[internal] trait GlbLubs { } } // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG - if (Statistics.canEnable) Statistics.incCounter(nestedLubCount) + if (statistics.canEnable) statistics.incCounter(nestedLubCount) glb0(ts) // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index 37d05c08a7c2..82541bdf5cb2 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -4,13 +4,14 @@ package internal package tpe import scala.collection.{ mutable } -import util.{ Statistics, TriState } +import util.TriState import scala.annotation.tailrec trait TypeComparers { self: SymbolTable => + import definitions._ - import TypesStats._ + import statistics._ private final val 
LogPendingSubTypesThreshold = TypeConstants.DefaultLogThreshhold @@ -90,7 +91,7 @@ trait TypeComparers { /** Do `tp1` and `tp2` denote equivalent types? */ def isSameType(tp1: Type, tp2: Type): Boolean = try { - if (Statistics.canEnable) Statistics.incCounter(sametypeCount) + if (statistics.canEnable) statistics.incCounter(sametypeCount) subsametypeRecursions += 1 //OPT cutdown on Function0 allocation //was: diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index b15ae31044df..146f16883b07 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -3,9 +3,21 @@ package reflect.internal.util import scala.collection.mutable +import scala.reflect.internal.SymbolTable +import scala.reflect.internal.settings.MutableSettings import java.lang.invoke.{SwitchPoint, MethodHandle, MethodHandles, MethodType} -object Statistics { +abstract class Statistics(val symbolTable: SymbolTable, settings: MutableSettings) { + + init() + + def init(): Unit = { + // Turn on statistics on this class if settings say so + if (settings.YstatisticsEnabled) + enabled = true + if (settings.YhotStatisticsEnabled) + hotEnabled = true + } type TimerSnapshot = (Long, Long) @@ -112,7 +124,7 @@ quant) * Quantities with non-empty prefix are printed in the statistics info. */ trait Quantity { - if (enabled && prefix.nonEmpty) { + if (canEnable && prefix.nonEmpty) { val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix" qs(key) = this } @@ -275,7 +287,7 @@ quant) import scala.reflect.internal.Reporter /** Reports the overhead of measuring statistics via the nanoseconds variation. */ - def reportStatisticsOverhead(reporter: Reporter): Unit = { + final def reportStatisticsOverhead(reporter: Reporter): Unit = { val start = System.nanoTime() var total = 0L for (i <- 1 to 10000) { @@ -286,4 +298,10 @@ quant) val variation = s"${total/10000.0}ns to ${total2/10000.0}ns" reporter.echo(NoPosition, s"Enabling statistics, measuring overhead = $variation per timer") } + + /** Helper for measuring the overhead of a concrete thunk `body`. */ + final def timed[T](timer: Timer)(body: => T): T = { + val start = startTimer(timer) + try body finally stopTimer(timer, start) + } } diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index ee0bc129f848..e56aa0fc9b1f 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -11,7 +11,6 @@ package io import java.io.{ IOException, InputStream, OutputStream, BufferedOutputStream, ByteArrayOutputStream } import java.io.{ File => JFile } import java.net.URL -import scala.reflect.internal.util.Statistics /** * An abstraction over files for use in the reflection/compiler libraries. @@ -116,7 +115,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] { /** Does this abstract file denote an existing file? 
*/ def exists: Boolean = { - if (Statistics.canEnable) Statistics.incCounter(IOStats.fileExistsCount) + //if (statistics.canEnable) statistics.incCounter(IOStats.fileExistsCount) (file eq null) || file.exists } diff --git a/src/reflect/scala/reflect/io/IOStats.scala b/src/reflect/scala/reflect/io/IOStats.scala index 71f8be330d02..0e4b9690cabd 100644 --- a/src/reflect/scala/reflect/io/IOStats.scala +++ b/src/reflect/scala/reflect/io/IOStats.scala @@ -25,8 +25,11 @@ import scala.reflect.internal.util.Statistics // + final val canEnable = true // _enabled // // We can commit this change as the first diff reverts a fix for an IDE memory leak. -private[io] object IOStats { - val fileExistsCount = Statistics.newCounter("# File.exists calls") - val fileIsDirectoryCount = Statistics.newCounter("# File.isDirectory calls") - val fileIsFileCount = Statistics.newCounter("# File.isFile calls") -} + +// The following has been commented out because IOStats cannot be used in the +// call-sites since they are disconnected from the statistics infrastructure. +//private[io] object IOStats { +// val fileExistsCount = Statistics.newCounter("# File.exists calls") +// val fileIsDirectoryCount = Statistics.newCounter("# File.isDirectory calls") +// val fileIsFileCount = Statistics.newCounter("# File.isFile calls") +//} diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala index c5b5ae24baeb..a2b4ee4ab719 100644 --- a/src/reflect/scala/reflect/io/Path.scala +++ b/src/reflect/scala/reflect/io/Path.scala @@ -12,7 +12,6 @@ import scala.language.implicitConversions import java.io.{ RandomAccessFile, File => JFile } import java.net.{ URI, URL } import scala.util.Random.alphanumeric -import scala.reflect.internal.util.Statistics /** An abstraction for filesystem paths. The differences between * Path, File, and Directory are primarily to communicate intent. @@ -58,12 +57,12 @@ object Path { def apply(path: String): Path = apply(new JFile(path)) def apply(jfile: JFile): Path = try { def isFile = { - if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount) + //if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount) jfile.isFile } def isDirectory = { - if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount) + //if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount) jfile.isDirectory } @@ -199,16 +198,16 @@ class Path private[io] (val jfile: JFile) { def canRead = jfile.canRead() def canWrite = jfile.canWrite() def exists = { - if (Statistics.canEnable) Statistics.incCounter(IOStats.fileExistsCount) + //if (Statistics.canEnable) Statistics.incCounter(IOStats.fileExistsCount) try jfile.exists() catch { case ex: SecurityException => false } } def isFile = { - if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount) + //if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount) try jfile.isFile() catch { case ex: SecurityException => false } } def isDirectory = { - if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount) + //if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount) try jfile.isDirectory() catch { case ex: SecurityException => jfile.getPath == "." 
} } def isAbsolute = jfile.isAbsolute() diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala index cef4ded30852..f39a82a570d7 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala @@ -6,6 +6,7 @@ import scala.reflect.internal.{TreeInfo, SomePhase} import scala.reflect.internal.{SymbolTable => InternalSymbolTable} import scala.reflect.runtime.{SymbolTable => RuntimeSymbolTable} import scala.reflect.api.{TypeCreator, Universe} +import scala.reflect.internal.util.Statistics /** An implementation of [[scala.reflect.api.Universe]] for runtime reflection using JVM classloaders. * @@ -18,6 +19,7 @@ class JavaUniverse extends InternalSymbolTable with JavaUniverseForce with Refle def erasurePhase = SomePhase lazy val settings = new Settings + override val statistics = new Statistics(JavaUniverse.this, settings) with ReflectStats private val isLogging = sys.props contains "scala.debug.reflect" def log(msg: => AnyRef): Unit = if (isLogging) Console.err.println("[reflect] " + msg) diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index ab6145b28e79..e7e57d556c87 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -123,6 +123,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.termNames this.nme this.sn + this.undetBaseTypeSeq this.Constant this.definitions this.LookupSucceeded diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala index fb05ab8d5a2a..7e2028eefb39 100644 --- a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala +++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala @@ -3,6 +3,7 @@ package symtab import scala.reflect.ClassTag import scala.reflect.internal.{NoPhase, Phase, SomePhase} +import scala.reflect.internal.util.Statistics import scala.tools.util.PathResolver import util.ClassPath import io.AbstractFile @@ -73,6 +74,8 @@ class SymbolTableForUnitTesting extends SymbolTable { s } + override lazy val statistics = new Statistics(this, settings) with ReflectStats + // Members declared in scala.reflect.internal.Required def picklerPhase: scala.reflect.internal.Phase = SomePhase def erasurePhase: scala.reflect.internal.Phase = SomePhase From 4eab2f740f9fe0d8708d6dc389183ff0b7bd7772 Mon Sep 17 00:00:00 2001 From: jvican Date: Fri, 1 Sep 2017 13:38:11 +0200 Subject: [PATCH 1059/2793] Allow interactive enabling/disabling of statistics So that we enable/disable statistics depending on the current settings, which are mutable and may be changed by the driver of the compiler. 
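For illustration only, a minimal sketch of the interactive usage this enables, assuming a driver that holds a `Global` instance and mutates its `settings` between runs (only `statistics.initFromSettings` comes from this change; the surrounding driver code and `fileNames` are hypothetical):

```scala
import scala.tools.nsc.{Global, Settings}
import scala.tools.nsc.reporters.ConsoleReporter

val settings = new Settings()
val global   = new Global(settings, new ConsoleReporter(settings))

// ... later, the driver flips a statistics-related setting ...
global.statistics.initFromSettings(settings) // re-reads YstatisticsEnabled / YhotStatisticsEnabled
new global.Run().compile(fileNames)          // fileNames: List[String], a placeholder; this run
                                             // now records (or skips) statistics accordingly
```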
--- src/compiler/scala/tools/nsc/Global.scala | 7 ++----- .../scala/reflect/internal/util/Statistics.scala | 13 +++++-------- .../scala/reflect/runtime/JavaUniverse.scala | 1 + 3 files changed, 8 insertions(+), 13 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index ba4b0754f374..62ae82e81ec7 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1223,11 +1223,8 @@ class Global(var currentSettings: Settings, var reporter: Reporter) checkPhaseSettings(including = true, inclusions.toSeq: _*) checkPhaseSettings(including = false, exclusions map (_.value): _*) - // Enable statistics if settings are true - if (settings.YstatisticsEnabled) - statistics.enabled = true - if (settings.YhotStatisticsEnabled) - statistics.hotEnabled = true + // Enable or disable depending on the current setting -- useful for interactive behaviour + statistics.initFromSettings(settings) // Report the overhead of statistics measurements per every run if (statistics.canEnable) diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index 146f16883b07..377f19e7f623 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -9,14 +9,11 @@ import java.lang.invoke.{SwitchPoint, MethodHandle, MethodHandles, MethodType} abstract class Statistics(val symbolTable: SymbolTable, settings: MutableSettings) { - init() - - def init(): Unit = { - // Turn on statistics on this class if settings say so - if (settings.YstatisticsEnabled) - enabled = true - if (settings.YhotStatisticsEnabled) - hotEnabled = true + initFromSettings(settings) + + def initFromSettings(currentSettings: MutableSettings): Unit = { + enabled = currentSettings.YstatisticsEnabled + hotEnabled = currentSettings.YhotStatisticsEnabled } type TimerSnapshot = (Long, Long) diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala index f39a82a570d7..80f2dc7b10ed 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala @@ -5,6 +5,7 @@ package runtime import scala.reflect.internal.{TreeInfo, SomePhase} import scala.reflect.internal.{SymbolTable => InternalSymbolTable} import scala.reflect.runtime.{SymbolTable => RuntimeSymbolTable} +import scala.reflect.internal.util.Statistics import scala.reflect.api.{TypeCreator, Universe} import scala.reflect.internal.util.Statistics From 3d432633641521cb0080b406986ec6a247587499 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 29 Apr 2017 14:13:39 -0700 Subject: [PATCH 1060/2793] Conditionally lint macros and expansions Adds a flag `-Ywarn-macros:none,before,after,both`. Don't lint macros unless they request it. If warning, then traverse the original tree of macro expansions to witness usages. Optionally do it again for expanded trees. The default is to lint what the user wrote, "before" expansion. 
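To make the modes concrete, a sketch distilled from the `t10296` fixtures added in this patch (the macro implementation, `UnusedMacro.macroImpl`, is assumed to discard its argument, as in the accompanying `UnusedMacro_1.scala`):

```scala
import scala.language.experimental.macros

object Unused extends App {
  def m(body: Int): Int = macro UnusedMacro.macroImpl // the expansion ignores `body`

  private def g(): Int = 17

  // g is referenced before expansion, but the expanded tree no longer uses it
  def f(): Int = m(g())

  println(f())
}
// -Ywarn-macros:before (default): no warning; the user-written tree references g
// -Ywarn-macros:after:            warns that private method g is never used
```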
Fixes scala/bug#10296 --- .../scala/tools/nsc/settings/Warnings.scala | 14 ++++++++- .../nsc/typechecker/TypeDiagnostics.scala | 30 +++++++++++++++---- test/files/neg/t10296-after.check | 6 ++++ test/files/neg/t10296-after.flags | 1 + .../neg/t10296-after/UnusedMacro_1.scala | 10 +++++++ test/files/neg/t10296-after/Unused_2.scala | 13 ++++++++ test/files/neg/t10296-both.check | 9 ++++++ test/files/neg/t10296-both.flags | 1 + .../files/neg/t10296-both/UnusedMacro_1.scala | 10 +++++++ test/files/neg/t10296-both/Unused_2.scala | 14 +++++++++ test/files/neg/t10296-warn.check | 6 ++++ test/files/neg/t10296-warn.flags | 1 + .../files/neg/t10296-warn/UnusedMacro_1.scala | 9 ++++++ test/files/neg/t10296-warn/Unused_2.scala | 12 ++++++++ test/files/pos/t10296-before.flags | 1 + .../pos/t10296-before/UnusedMacro_1.scala | 10 +++++++ test/files/pos/t10296-before/Unused_2.scala | 13 ++++++++ test/files/pos/t10296.flags | 1 + test/files/pos/t10296/UnusedMacro_1.scala | 9 ++++++ test/files/pos/t10296/Unused_2.scala | 8 +++++ 20 files changed, 172 insertions(+), 6 deletions(-) create mode 100644 test/files/neg/t10296-after.check create mode 100644 test/files/neg/t10296-after.flags create mode 100644 test/files/neg/t10296-after/UnusedMacro_1.scala create mode 100644 test/files/neg/t10296-after/Unused_2.scala create mode 100644 test/files/neg/t10296-both.check create mode 100644 test/files/neg/t10296-both.flags create mode 100644 test/files/neg/t10296-both/UnusedMacro_1.scala create mode 100644 test/files/neg/t10296-both/Unused_2.scala create mode 100755 test/files/neg/t10296-warn.check create mode 100644 test/files/neg/t10296-warn.flags create mode 100644 test/files/neg/t10296-warn/UnusedMacro_1.scala create mode 100644 test/files/neg/t10296-warn/Unused_2.scala create mode 100644 test/files/pos/t10296-before.flags create mode 100644 test/files/pos/t10296-before/UnusedMacro_1.scala create mode 100644 test/files/pos/t10296-before/Unused_2.scala create mode 100644 test/files/pos/t10296.flags create mode 100644 test/files/pos/t10296/UnusedMacro_1.scala create mode 100644 test/files/pos/t10296/Unused_2.scala diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index d2f0a5d7ee81..dc553ebda6c3 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -16,7 +16,19 @@ trait Warnings { val fatalWarnings = BooleanSetting("-Xfatal-warnings", "Fail the compilation if there are any warnings.") // Non-lint warnings. - + val warnMacros = ChoiceSetting( + name = "-Ywarn-macros", + helpArg = "mode", + descr = "Enable lint warnings on macro expansions.", + choices = List("none", "before", "after", "both"), + default = "before", + choicesHelp = List( + "Do not inspect expansions or their original trees when generating unused symbol warnings.", + "Only inspect unexpanded user-written code for unused symbols.", + "Only inspect expanded trees when generating unused symbol warnings.", + "Inspect both user-written code and expanded trees when generating unused symbol warnings." 
+ ) + ) val warnDeadCode = BooleanSetting("-Ywarn-dead-code", "Warn when dead code is identified.") val warnValueDiscard = BooleanSetting("-Ywarn-value-discard", "Warn when non-Unit expression results are unused.") val warnNumericWiden = BooleanSetting("-Ywarn-numeric-widen", "Warn when numerics are widened.") diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index fd6e2f40e777..47a77691d585 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -473,7 +473,7 @@ trait TypeDiagnostics { "readResolve", "readObject", "writeObject", "writeReplace" ).map(TermName(_)) - class UnusedPrivates extends Traverser { + class UnusedPrivates(traverseCheck: Tree => Tree, isOriginal: Boolean) extends Traverser { val defnTrees = ListBuffer[MemberDef]() val targets = mutable.Set[Symbol]() val setVars = mutable.Set[Symbol]() @@ -492,6 +492,7 @@ trait TypeDiagnostics { && !sym.isParamAccessor // could improve this, but it's a pain && !sym.isEarlyInitialized // lots of false positives in the way these are encoded && !(sym.isGetter && sym.accessed.isEarlyInitialized) + && (isOriginal || !sym.isMacro) ) def qualifiesType(sym: Symbol) = !sym.isDefinedInPackage def qualifies(sym: Symbol) = ( @@ -499,7 +500,8 @@ trait TypeDiagnostics { && (sym.isTerm && qualifiesTerm(sym) || sym.isType && qualifiesType(sym)) ) - override def traverse(t: Tree): Unit = { + override def traverse(t0: Tree): Unit = { + val t = traverseCheck(t0) val sym = t.symbol t match { case m: MemberDef if qualifies(t.symbol) => @@ -606,9 +608,7 @@ trait TypeDiagnostics { warnUnusedPatVars || warnUnusedPrivates || warnUnusedLocals || warnUnusedParams } - def apply(unit: CompilationUnit): Unit = if (warningsEnabled && !unit.isJava) { - val p = new UnusedPrivates - p.traverse(unit.body) + def process(p: UnusedPrivates): Unit = { if (settings.warnUnusedLocals || settings.warnUnusedPrivates) { for (defn: DefTree <- p.unusedTerms) { val sym = defn.symbol @@ -676,6 +676,26 @@ trait TypeDiagnostics { context.warning(s.pos, s"parameter $s in ${s.owner} is never used") } } + def apply(unit: CompilationUnit): Unit = if (warningsEnabled && !unit.isJava) { + settings.warnMacros.value match { + case "none" => + val only = new UnusedPrivates((t: Tree) => if (hasMacroExpansionAttachment(t)) EmptyTree else t, isOriginal = true) + only.traverse(unit.body) + process(only) + case "before" | "both" => + val first = new UnusedPrivates((t: Tree) => if (hasMacroExpansionAttachment(t)) macroExpandee(t) else t, isOriginal = true) + first.traverse(unit.body) + process(first) + case _ => () + } + settings.warnMacros.value match { + case "after" | "both" => + val second = new UnusedPrivates((t: Tree) => t, isOriginal = false) + second.traverse(unit.body) + process(second) + case _ => () + } + } } object checkDead { diff --git a/test/files/neg/t10296-after.check b/test/files/neg/t10296-after.check new file mode 100644 index 000000000000..6faec910abbd --- /dev/null +++ b/test/files/neg/t10296-after.check @@ -0,0 +1,6 @@ +Unused_2.scala:7: warning: private method g in object Unused is never used + private def g(): Int = 17 + ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+one warning found +one error found diff --git a/test/files/neg/t10296-after.flags b/test/files/neg/t10296-after.flags new file mode 100644 index 000000000000..84830317e3fc --- /dev/null +++ b/test/files/neg/t10296-after.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Xlint:unused -Ywarn-macros:after diff --git a/test/files/neg/t10296-after/UnusedMacro_1.scala b/test/files/neg/t10296-after/UnusedMacro_1.scala new file mode 100644 index 000000000000..9e042f803a86 --- /dev/null +++ b/test/files/neg/t10296-after/UnusedMacro_1.scala @@ -0,0 +1,10 @@ + +import scala.reflect.macros.whitebox.Context + +object UnusedMacro { + def macroImpl(c: Context)(body: c.Expr[Int]): c.Tree = { + import c.universe._ + val _ = body + Literal(Constant(42)) + } +} diff --git a/test/files/neg/t10296-after/Unused_2.scala b/test/files/neg/t10296-after/Unused_2.scala new file mode 100644 index 000000000000..56feb4a3740c --- /dev/null +++ b/test/files/neg/t10296-after/Unused_2.scala @@ -0,0 +1,13 @@ + +import scala.language.experimental.macros + +object Unused extends App { + def m(body: Int): Int = macro UnusedMacro.macroImpl + + private def g(): Int = 17 + + // g is used before but not after expansion + def f(): Int = m(g()) + + println(f()) +} diff --git a/test/files/neg/t10296-both.check b/test/files/neg/t10296-both.check new file mode 100644 index 000000000000..0c8364996ebb --- /dev/null +++ b/test/files/neg/t10296-both.check @@ -0,0 +1,9 @@ +Unused_2.scala:8: warning: private method k in object Unused is never used + private def k(): Int = 17 + ^ +Unused_2.scala:7: warning: private method g in object Unused is never used + private def g(): Int = 17 + ^ +error: No warnings can be incurred under -Xfatal-warnings. +two warnings found +one error found diff --git a/test/files/neg/t10296-both.flags b/test/files/neg/t10296-both.flags new file mode 100644 index 000000000000..3b72954724c4 --- /dev/null +++ b/test/files/neg/t10296-both.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Xlint:unused -Ywarn-macros:both diff --git a/test/files/neg/t10296-both/UnusedMacro_1.scala b/test/files/neg/t10296-both/UnusedMacro_1.scala new file mode 100644 index 000000000000..b636ff0fc88a --- /dev/null +++ b/test/files/neg/t10296-both/UnusedMacro_1.scala @@ -0,0 +1,10 @@ + +import scala.reflect.macros.whitebox.Context + +object UnusedMacro { + def macroImpl(c: Context)(body: c.Expr[Int]): c.Tree = { + import c.universe._ + val _ = body + q"k()" + } +} diff --git a/test/files/neg/t10296-both/Unused_2.scala b/test/files/neg/t10296-both/Unused_2.scala new file mode 100644 index 000000000000..b9cfe5f2e392 --- /dev/null +++ b/test/files/neg/t10296-both/Unused_2.scala @@ -0,0 +1,14 @@ + +import scala.language.experimental.macros + +object Unused extends App { + def m(body: Int): Int = macro UnusedMacro.macroImpl + + private def g(): Int = 17 + private def k(): Int = 17 + + // g is used before but not after expansion + def f(): Int = m(g()) + + println(f()) +} diff --git a/test/files/neg/t10296-warn.check b/test/files/neg/t10296-warn.check new file mode 100755 index 000000000000..b609c44d1be2 --- /dev/null +++ b/test/files/neg/t10296-warn.check @@ -0,0 +1,6 @@ +Unused_2.scala:9: warning: private method unusedMacro in object Unused is never used + private def unusedMacro(): Unit = macro UnusedMacro.usedMacroImpl + ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+one warning found +one error found diff --git a/test/files/neg/t10296-warn.flags b/test/files/neg/t10296-warn.flags new file mode 100644 index 000000000000..ce85ee757bfa --- /dev/null +++ b/test/files/neg/t10296-warn.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Xlint:unused diff --git a/test/files/neg/t10296-warn/UnusedMacro_1.scala b/test/files/neg/t10296-warn/UnusedMacro_1.scala new file mode 100644 index 000000000000..d3576ee731f0 --- /dev/null +++ b/test/files/neg/t10296-warn/UnusedMacro_1.scala @@ -0,0 +1,9 @@ + +import scala.reflect.macros.blackbox + +object UnusedMacro { + def usedMacroImpl(c: blackbox.Context)(): c.Tree = { + import c.universe._ + q"""println("apparently unused macro")""" + } +} diff --git a/test/files/neg/t10296-warn/Unused_2.scala b/test/files/neg/t10296-warn/Unused_2.scala new file mode 100644 index 000000000000..382004f24dc9 --- /dev/null +++ b/test/files/neg/t10296-warn/Unused_2.scala @@ -0,0 +1,12 @@ + +import scala.language.experimental.macros + +object Unused { + // seen as used before expansion + private def usedMacro(): Unit = macro UnusedMacro.usedMacroImpl + + // never used + private def unusedMacro(): Unit = macro UnusedMacro.usedMacroImpl + + def f() = usedMacro() +} diff --git a/test/files/pos/t10296-before.flags b/test/files/pos/t10296-before.flags new file mode 100644 index 000000000000..7a639c3fb054 --- /dev/null +++ b/test/files/pos/t10296-before.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Xlint:unused -Ywarn-macros:before diff --git a/test/files/pos/t10296-before/UnusedMacro_1.scala b/test/files/pos/t10296-before/UnusedMacro_1.scala new file mode 100644 index 000000000000..8d08c39ce102 --- /dev/null +++ b/test/files/pos/t10296-before/UnusedMacro_1.scala @@ -0,0 +1,10 @@ + +import scala.reflect.macros.whitebox.Context + +object UnusedMacro { + def macroImpl(c: Context)(body: c.Expr[Int]): c.Tree = { + import c.universe._ + val _ = body + q"42" + } +} diff --git a/test/files/pos/t10296-before/Unused_2.scala b/test/files/pos/t10296-before/Unused_2.scala new file mode 100644 index 000000000000..56feb4a3740c --- /dev/null +++ b/test/files/pos/t10296-before/Unused_2.scala @@ -0,0 +1,13 @@ + +import scala.language.experimental.macros + +object Unused extends App { + def m(body: Int): Int = macro UnusedMacro.macroImpl + + private def g(): Int = 17 + + // g is used before but not after expansion + def f(): Int = m(g()) + + println(f()) +} diff --git a/test/files/pos/t10296.flags b/test/files/pos/t10296.flags new file mode 100644 index 000000000000..ae548523beb5 --- /dev/null +++ b/test/files/pos/t10296.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Xlint:unused diff --git a/test/files/pos/t10296/UnusedMacro_1.scala b/test/files/pos/t10296/UnusedMacro_1.scala new file mode 100644 index 000000000000..d3576ee731f0 --- /dev/null +++ b/test/files/pos/t10296/UnusedMacro_1.scala @@ -0,0 +1,9 @@ + +import scala.reflect.macros.blackbox + +object UnusedMacro { + def usedMacroImpl(c: blackbox.Context)(): c.Tree = { + import c.universe._ + q"""println("apparently unused macro")""" + } +} diff --git a/test/files/pos/t10296/Unused_2.scala b/test/files/pos/t10296/Unused_2.scala new file mode 100644 index 000000000000..51d191f1a4b2 --- /dev/null +++ b/test/files/pos/t10296/Unused_2.scala @@ -0,0 +1,8 @@ + +import scala.language.experimental.macros + +object Unused { + private def usedMacro(): Unit = macro UnusedMacro.usedMacroImpl + + def f() = usedMacro() +} From 1e09de17a3473efb26db535a71f9ec8b03018ac2 Mon Sep 17 00:00:00 2001 From: jvican Date: Wed, 27 Sep 2017 11:12:10 +0200 Subject: 
[PATCH 1061/2793] Massage performance with statistics optimizations The following commit applies some minor optimizations to the statistics code and adds a heuristic to make sure that impact of runtime is as minimal as possible. These changes are motivated by a recorded performance degradation of around 2-3% when statistics are not stored in an object with static values. The idea of the heuristic is to have a filter that is true almost all the times, with a static `areSomeColdStatsEnabled`. This static will be true whenever a `Global` instance has enabled statistics. If it is true, then the statistics infrastructure will check if the actual global should record statistics, or it's another global instance the one that enabled them. Therefore, when enabling statistics in one global, we'll pay an overall performance degradation of 2/3% for all globals of a given classloaded scalac + the statistics overhead in the global that wants to record statistics. --- src/compiler/scala/tools/nsc/Global.scala | 6 +-- .../tools/nsc/symtab/SymbolLoaders.scala | 6 +-- .../tools/nsc/transform/patmat/Logic.scala | 7 ++- .../nsc/transform/patmat/MatchAnalysis.scala | 9 ++-- .../transform/patmat/MatchTranslation.scala | 5 +- .../tools/nsc/transform/patmat/Solving.scala | 5 +- .../tools/nsc/typechecker/Analyzer.scala | 5 +- .../tools/nsc/typechecker/Implicits.scala | 50 +++++++++---------- .../scala/tools/nsc/typechecker/Macros.scala | 8 +-- .../scala/tools/nsc/typechecker/Typers.scala | 49 +++++++++--------- .../scala/reflect/internal/BaseTypeSeqs.scala | 6 +-- .../scala/reflect/internal/SymbolTable.scala | 2 +- .../scala/reflect/internal/Symbols.scala | 22 ++++---- .../scala/reflect/internal/Trees.scala | 5 +- .../scala/reflect/internal/Types.scala | 40 +++++++-------- .../reflect/internal/tpe/FindMembers.scala | 11 ++-- .../scala/reflect/internal/tpe/GlbLubs.scala | 17 ++++--- .../reflect/internal/tpe/TypeComparers.scala | 3 +- .../reflect/internal/util/Statistics.scala | 44 +++++++++------- .../internal/util/StatisticsStatics.java | 4 +- .../scala/reflect/io/AbstractFile.scala | 2 +- src/reflect/scala/reflect/io/Path.scala | 10 ++-- .../scala/reflect/runtime/JavaUniverse.scala | 2 +- 23 files changed, 167 insertions(+), 151 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 62ae82e81ec7..85d085fc1ec7 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -15,7 +15,7 @@ import io.{AbstractFile, Path, SourceReader} import reporters.Reporter import util.{ClassPath, returning} import scala.reflect.ClassTag -import scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, ScalaClassLoader, ScriptSourceFile, SourceFile} +import scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, ScalaClassLoader, ScriptSourceFile, SourceFile, StatisticsStatics} import scala.reflect.internal.pickling.PickleBuffer import symtab.{Flags, SymbolTable, SymbolTrackers} import symtab.classfile.Pickler @@ -169,7 +169,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) with PatternMatchingStats { self: Statistics => } /** Redefine statistics to include all known global + reflect stats. 
*/ - object statistics extends Statistics(Global.this, settings) with GlobalStats + final object statistics extends Statistics(Global.this, settings) with GlobalStats // Components for collecting and generating output @@ -1227,7 +1227,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) statistics.initFromSettings(settings) // Report the overhead of statistics measurements per every run - if (statistics.canEnable) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.reportStatisticsOverhead(reporter) phase = first //parserPhase diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 44a9c62b0e59..85ea78c912a7 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -12,7 +12,7 @@ import scala.reflect.internal.MissingRequirementError import scala.reflect.io.{AbstractFile, NoAbstractFile} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} import scala.reflect.internal.TypesStats -import scala.reflect.internal.util.Statistics +import scala.reflect.internal.util.StatisticsStatics /** This class ... * @@ -314,7 +314,7 @@ abstract class SymbolLoaders { protected def description = "class file "+ classfile.toString protected def doComplete(root: Symbol) { - val start = if (statistics.canEnable) statistics.startTimer(statistics.classReadNanos) else null + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.classReadNanos) else null classfileParser.parse(classfile, clazz, module) if (root.associatedFile eq NoAbstractFile) { root match { @@ -326,7 +326,7 @@ abstract class SymbolLoaders { debuglog("Not setting associatedFile to %s because %s is a %s".format(classfile, root.name, root.shortSymbolClass)) } } - if (statistics.canEnable) statistics.stopTimer(statistics.classReadNanos, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.classReadNanos, start) } override def sourcefile: Option[AbstractFile] = classfileParser.srcfile } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index d791af802241..aeaf2bcdb960 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -8,9 +8,8 @@ package scala package tools.nsc.transform.patmat import scala.language.postfixOps - import scala.collection.mutable -import scala.reflect.internal.util.{NoPosition, Position, HashSet} +import scala.reflect.internal.util.{HashSet, NoPosition, Position, StatisticsStatics} trait Logic extends Debugging { import global.statistics @@ -334,7 +333,7 @@ trait Logic extends Debugging { // V1 = Nil implies -(V2 = Ci) for all Ci in V2's domain (i.e., it is unassignable) // may throw an AnalysisBudget.Exception def removeVarEq(props: List[Prop], modelNull: Boolean = false): (Prop, List[Prop]) = { - val start = if (statistics.canEnable) statistics.startTimer(statistics.patmatAnaVarEq) else null + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaVarEq) else null val vars = new mutable.HashSet[Var] @@ -404,7 +403,7 @@ trait Logic extends Debugging { debug.patmat(s"eqAxioms:\n${eqAxioms.mkString("\n")}") debug.patmat(s"pure:${pure.mkString("\n")}") - if (statistics.canEnable) statistics.stopTimer(statistics.patmatAnaVarEq, start) + if 
(StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaVarEq, start) (And(eqAxioms: _*), pure) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index 76da534f0144..ac3f4ff93c6b 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -7,6 +7,7 @@ package scala.tools.nsc.transform.patmat import scala.collection.mutable +import scala.reflect.internal.util.StatisticsStatics trait TreeAndTypeAnalysis extends Debugging { import global._ @@ -448,7 +449,7 @@ trait MatchAnalysis extends MatchApproximation { // thus, the case is unreachable if there is no model for -(-P /\ C), // or, equivalently, P \/ -C, or C => P def unreachableCase(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Int] = { - val start = if (statistics.canEnable) statistics.startTimer(statistics.patmatAnaReach) else null + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaReach) else null // use the same approximator so we share variables, // but need different conditions depending on whether we're conservatively looking for failure or success @@ -497,7 +498,7 @@ trait MatchAnalysis extends MatchApproximation { } } - if (statistics.canEnable) statistics.stopTimer(statistics.patmatAnaReach, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaReach, start) if (reachable) None else Some(caseIndex) } catch { @@ -516,7 +517,7 @@ trait MatchAnalysis extends MatchApproximation { // - back off (to avoid crying exhaustive too often) when: // - there are guards --> // - there are extractor calls (that we can't secretly/soundly) rewrite - val start = if (statistics.canEnable) statistics.startTimer(statistics.patmatAnaExhaust) else null + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaExhaust) else null var backoff = false val approx = new TreeMakersToPropsIgnoreNullChecks(prevBinder) @@ -570,7 +571,7 @@ trait MatchAnalysis extends MatchApproximation { // since e.g. List(_, _) would cover List(1, _) val pruned = CounterExample.prune(counterExamples.sortBy(_.toString)).map(_.toString) - if (statistics.canEnable) statistics.stopTimer(statistics.patmatAnaExhaust, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaExhaust, start) pruned } catch { case ex: AnalysisBudget.Exception => diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 8f1ff629b204..1c04be0f294f 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -7,6 +7,7 @@ package scala.tools.nsc.transform.patmat import scala.language.postfixOps +import scala.reflect.internal.util.StatisticsStatics /** Translate typed Trees that represent pattern matches into the patternmatching IR, defined by TreeMakers. 
@@ -209,7 +210,7 @@ trait MatchTranslation { debug.patmat("translating "+ cases.mkString("{", "\n", "}")) - val start = if (statistics.canEnable) statistics.startTimer(statistics.patmatNanos) else null + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatNanos) else null val selectorTp = repeatedToSeq(elimAnonymousClass(selector.tpe.widen.withoutAnnotations)) @@ -225,7 +226,7 @@ trait MatchTranslation { // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, matchOwner, defaultOverride) - if (statistics.canEnable) statistics.stopTimer(statistics.patmatNanos, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatNanos, start) combined } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala index 7f3451fe3fd2..b1eadd14e690 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala @@ -11,6 +11,7 @@ import scala.language.postfixOps import scala.collection.mutable import scala.reflect.internal.util.Collections._ import scala.reflect.internal.util.Position +import scala.reflect.internal.util.StatisticsStatics // a literal is a (possibly negated) variable class Lit(val v: Int) extends AnyVal { @@ -471,7 +472,7 @@ trait Solving extends Logic { debug.patmat(s"DPLL\n${cnfString(clauses)}") - val start = if (statistics.canEnable) statistics.startTimer(statistics.patmatAnaDPLL) else null + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaDPLL) else null val satisfiableWithModel: TseitinModel = if (clauses isEmpty) EmptyTseitinModel @@ -507,7 +508,7 @@ trait Solving extends Logic { } } - if (statistics.canEnable) statistics.stopTimer(statistics.patmatAnaDPLL, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaDPLL, start) satisfiableWithModel } diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index c0f6cad29ff1..0f8e9eee2396 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -6,6 +6,7 @@ package scala.tools.nsc package typechecker +import scala.reflect.internal.util.StatisticsStatics /** The main attribution phase. */ @@ -87,13 +88,13 @@ trait Analyzer extends AnyRef // compiler run). This is good enough for the resident compiler, which was the most affected. 
undoLog.clear() override def run() { - val start = if (statistics.canEnable) statistics.startTimer(statistics.typerNanos) else null + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.typerNanos) else null global.echoPhaseSummary(this) for (unit <- currentRun.units) { applyPhase(unit) undoLog.clear() } - if (statistics.canEnable) statistics.stopTimer(statistics.typerNanos, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.typerNanos, start) } def apply(unit: CompilationUnit) { try { diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 9e2ddee950e2..94f58335c7e6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -17,7 +17,7 @@ import scala.collection.mutable import mutable.{ LinkedHashMap, ListBuffer } import scala.util.matching.Regex import symtab.Flags._ -import scala.reflect.internal.util.{TriState, Statistics} +import scala.reflect.internal.util.{TriState, Statistics, StatisticsStatics} import scala.reflect.internal.TypesStats import scala.language.implicitConversions @@ -83,10 +83,10 @@ trait Implicits { // Note that the isInvalidConversionTarget seems to make a lot more sense right here, before all the // work is performed, than at the point where it presently exists. val shouldPrint = printTypings && !context.undetparams.isEmpty - val rawTypeStart = if (statistics.canEnable) statistics.startCounter(rawTypeImpl) else null - val findMemberStart = if (statistics.canEnable) statistics.startCounter(findMemberImpl) else null - val subtypeStart = if (statistics.canEnable) statistics.startCounter(subtypeImpl) else null - val start = if (statistics.canEnable) statistics.startTimer(implicitNanos) else null + val rawTypeStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(rawTypeImpl) else null + val findMemberStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(findMemberImpl) else null + val subtypeStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(subtypeImpl) else null + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(implicitNanos) else null if (shouldPrint) typingStack.printTyping(tree, "typing implicit: %s %s".format(tree, context.undetparamsString)) val implicitSearchContext = context.makeImplicit(reportAmbiguous) @@ -101,10 +101,10 @@ trait Implicits { // and then filter out any which *were* inferred and are part of the substitutor in the implicit search result. 
context.undetparams = ((context.undetparams ++ result.undetparams) filterNot result.subst.from.contains).distinct - if (statistics.canEnable) statistics.stopTimer(implicitNanos, start) - if (statistics.canEnable) statistics.stopCounter(rawTypeImpl, rawTypeStart) - if (statistics.canEnable) statistics.stopCounter(findMemberImpl, findMemberStart) - if (statistics.canEnable) statistics.stopCounter(subtypeImpl, subtypeStart) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(implicitNanos, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(rawTypeImpl, rawTypeStart) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(findMemberImpl, findMemberStart) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(subtypeImpl, subtypeStart) result } @@ -370,7 +370,7 @@ trait Implicits { } import infer._ - if (statistics.canEnable) statistics.incCounter(implicitSearchCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(implicitSearchCount) /** The type parameters to instantiate */ val undetParams = if (isView) Nil else context.outer.undetparams @@ -398,12 +398,12 @@ trait Implicits { /** Is implicit info `info1` better than implicit info `info2`? */ def improves(info1: ImplicitInfo, info2: ImplicitInfo) = { - if (statistics.canEnable) statistics.incCounter(improvesCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(improvesCount) (info2 == NoImplicitInfo) || (info1 != NoImplicitInfo) && { if (info1.sym.isStatic && info2.sym.isStatic) { improvesCache get ((info1, info2)) match { - case Some(b) => if (statistics.canEnable) statistics.incCounter(improvesCachedCount); b + case Some(b) => if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(improvesCachedCount); b case None => val result = isStrictlyMoreSpecific(info1.tpe, info2.tpe, info1.sym, info2.sym) improvesCache((info1, info2)) = result @@ -519,14 +519,14 @@ trait Implicits { * This method is performance critical: 5-8% of typechecking time. 
*/ private def matchesPt(tp: Type, pt: Type, undet: List[Symbol]): Boolean = { - val start = if (statistics.canEnable) statistics.startTimer(matchesPtNanos) else null + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(matchesPtNanos) else null val result = normSubType(tp, pt) || isView && { pt match { case Function1(arg1, arg2) => matchesPtView(tp, arg1, arg2, undet) case _ => false } } - if (statistics.canEnable) statistics.stopTimer(matchesPtNanos, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(matchesPtNanos, start) result } private def matchesPt(info: ImplicitInfo): Boolean = ( @@ -623,7 +623,7 @@ trait Implicits { } private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean, isLocalToCallsite: Boolean): SearchResult = { - if (statistics.canEnable) statistics.incCounter(plausiblyCompatibleImplicits) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(plausiblyCompatibleImplicits) val ok = ptChecked || matchesPt(info) && { def word = if (isLocalToCallsite) "local " else "" typingLog("match", s"$word$info") @@ -633,7 +633,7 @@ trait Implicits { } private def typedImplicit1(info: ImplicitInfo, isLocalToCallsite: Boolean): SearchResult = { - if (statistics.canEnable) statistics.incCounter(matchingImplicits) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchingImplicits) // workaround for deficient context provided by ModelFactoryImplicitSupport#makeImplicitConstraints val isScaladoc = context.tree == EmptyTree @@ -689,7 +689,7 @@ trait Implicits { case None => } - if (statistics.canEnable) statistics.incCounter(typedImplicits) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedImplicits) val itree3 = if (isView) treeInfo.dissectApplied(itree2).callee else adapt(itree2, EXPRmode, wildPt) @@ -768,7 +768,7 @@ trait Implicits { fail("typing TypeApply reported errors for the implicit tree: " + err.errMsg) case None => val result = new SearchResult(unsuppressMacroExpansion(itree3), subst, context.undetparams) - if (statistics.canEnable) statistics.incCounter(foundImplicits) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(foundImplicits) typingLog("success", s"inferred value of type $ptInstantiated is $result") result } @@ -1014,11 +1014,11 @@ trait Implicits { * @return map from infos to search results */ def applicableInfos(iss: Infoss, isLocalToCallsite: Boolean): mutable.LinkedHashMap[ImplicitInfo, SearchResult] = { - val start = if (statistics.canEnable) statistics.startCounter(subtypeAppInfos) else null + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(subtypeAppInfos) else null val computation = new ImplicitComputation(iss, isLocalToCallsite) { } val applicable = computation.findAll() - if (statistics.canEnable) statistics.stopCounter(subtypeAppInfos, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(subtypeAppInfos, start) applicable } @@ -1147,13 +1147,13 @@ trait Implicits { * such that some part of `tp` has C as one of its superclasses. 
*/ private def implicitsOfExpectedType: Infoss = { - if (statistics.canEnable) statistics.incCounter(implicitCacheAccs) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(implicitCacheAccs) implicitsCache get pt match { case Some(implicitInfoss) => - if (statistics.canEnable) statistics.incCounter(implicitCacheHits) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(implicitCacheHits) implicitInfoss case None => - val start = if (statistics.canEnable) statistics.startTimer(subtypeETNanos) else null + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(subtypeETNanos) else null // val implicitInfoss = companionImplicits(pt) val implicitInfoss1 = companionImplicitMap(pt).valuesIterator.toList // val is1 = implicitInfoss.flatten.toSet @@ -1162,7 +1162,7 @@ trait Implicits { // if (!(is2 contains i)) println("!!! implicit infos of "+pt+" differ, new does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1) // for (i <- is2) // if (!(is1 contains i)) println("!!! implicit infos of "+pt+" differ, old does not contain "+i+",\nold: "+implicitInfoss+",\nnew: "+implicitInfoss1) - if (statistics.canEnable) statistics.stopTimer(subtypeETNanos, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(subtypeETNanos, start) implicitsCache(pt) = implicitInfoss1 if (implicitsCache.size >= sizeLimit) implicitsCache -= implicitsCache.keysIterator.next @@ -1389,7 +1389,7 @@ trait Implicits { * If all fails return SearchFailure */ def bestImplicit: SearchResult = { - val stats = statistics.canEnable + val stats = StatisticsStatics.areSomeColdStatsEnabled val failstart = if (stats) statistics.startTimer(inscopeFailNanos) else null val succstart = if (stats) statistics.startTimer(inscopeSucceedNanos) else null diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 82cdc6b3fadd..e9682d221a50 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -5,7 +5,7 @@ import java.lang.Math.min import symtab.Flags._ import scala.reflect.internal.util.ScalaClassLoader import scala.reflect.runtime.ReflectionUtils -import scala.reflect.internal.util.Statistics +import scala.reflect.internal.util.{Statistics, StatisticsStatics} import scala.reflect.internal.TypesStats import scala.reflect.macros.util._ import scala.util.control.ControlThrowable @@ -575,8 +575,8 @@ trait Macros extends MacroRuntimes with Traces with Helpers { if (macroDebugVerbose) println(s"macroExpand: ${summary()}") linkExpandeeAndDesugared(expandee, desugared) - val start = if (statistics.canEnable) statistics.startTimer(statistics.macroExpandNanos) else null - if (statistics.canEnable) statistics.incCounter(statistics.macroExpandCount) + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.macroExpandNanos) else null + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(statistics.macroExpandCount) try { withInfoLevel(nodePrinters.InfoLevel.Quiet) { // verbose printing might cause recursive macro expansions if (expandee.symbol.isErroneous || (expandee exists (_.isErroneous))) { @@ -609,7 +609,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { } } } finally { - if (statistics.canEnable) statistics.stopTimer(statistics.macroExpandNanos, start) + if (StatisticsStatics.areSomeColdStatsEnabled) 
statistics.stopTimer(statistics.macroExpandNanos, start) } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 57e46d95d7e8..1d20e6b8e069 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -14,7 +14,7 @@ package tools.nsc package typechecker import scala.collection.{immutable, mutable} -import scala.reflect.internal.util.{ListOfNil, Statistics} +import scala.reflect.internal.util.{ListOfNil, Statistics, StatisticsStatics} import scala.reflect.internal.TypesStats import mutable.ListBuffer import symtab.Flags._ @@ -672,15 +672,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def silent[T](op: Typer => T, reportAmbiguousErrors: Boolean = context.ambiguousErrors, newtree: Tree = context.tree): SilentResult[T] = { - val rawTypeStart = if (statistics.canEnable) statistics.startCounter(rawTypeFailed) else null - val findMemberStart = if (statistics.canEnable) statistics.startCounter(findMemberFailed) else null - val subtypeStart = if (statistics.canEnable) statistics.startCounter(subtypeFailed) else null - val failedSilentStart = if (statistics.canEnable) statistics.startTimer(failedSilentNanos) else null + val rawTypeStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(rawTypeFailed) else null + val findMemberStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(findMemberFailed) else null + val subtypeStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startCounter(subtypeFailed) else null + val failedSilentStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedSilentNanos) else null def stopStats() = { - if (statistics.canEnable) statistics.stopCounter(rawTypeFailed, rawTypeStart) - if (statistics.canEnable) statistics.stopCounter(findMemberFailed, findMemberStart) - if (statistics.canEnable) statistics.stopCounter(subtypeFailed, subtypeStart) - if (statistics.canEnable) statistics.stopTimer(failedSilentNanos, failedSilentStart) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(rawTypeFailed, rawTypeStart) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(findMemberFailed, findMemberStart) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(subtypeFailed, subtypeStart) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedSilentNanos, failedSilentStart) } @inline def wrapResult(reporter: ContextReporter, result: T) = if (reporter.hasErrors) { @@ -3886,9 +3886,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def isCapturedExistential(sym: Symbol) = ( (sym hasAllFlags EXISTENTIAL | CAPTURED) && { - val start = if (statistics.canEnable) statistics.startTimer(isReferencedNanos) else null + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(isReferencedNanos) else null try !isReferencedFrom(context, sym) - finally if (statistics.canEnable) statistics.stopTimer(isReferencedNanos, start) + finally if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(isReferencedNanos, start) } ) @@ -4581,10 +4581,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * insert an implicit conversion. 
*/ def tryTypedApply(fun: Tree, args: List[Tree]): Tree = { - val start = if (statistics.canEnable) statistics.startTimer(failedApplyNanos) else null + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedApplyNanos) else null def onError(typeErrors: Seq[AbsTypeError], warnings: Seq[(Position, String)]): Tree = { - if (statistics.canEnable) statistics.stopTimer(failedApplyNanos, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedApplyNanos, start) // If the problem is with raw types, convert to existentials and try again. // See #4712 for a case where this situation arises, @@ -4645,8 +4645,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // TODO: replace `fun.symbol.isStable` by `treeInfo.isStableIdentifierPattern(fun)` val stableApplication = (fun.symbol ne null) && fun.symbol.isMethod && fun.symbol.isStable val funpt = if (mode.inPatternMode) pt else WildcardType - val appStart = if (statistics.canEnable) statistics.startTimer(failedApplyNanos) else null - val opeqStart = if (statistics.canEnable) statistics.startTimer(failedOpEqNanos) else null + val appStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedApplyNanos) else null + val opeqStart = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(failedOpEqNanos) else null def isConversionCandidate(qual: Tree, name: Name): Boolean = !mode.inPatternMode && nme.isOpAssignmentName(TermName(name.decode)) && !qual.exists(_.isErroneous) @@ -4676,7 +4676,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case Select(qual, name) if isConversionCandidate(qual, name) => val qual1 = typedQualifier(qual) if (treeInfo.isVariableOrGetter(qual1)) { - if (statistics.canEnable) statistics.stopTimer(failedOpEqNanos, opeqStart) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedOpEqNanos, opeqStart) val erred = qual1.exists(_.isErroneous) || args.exists(_.isErroneous) if (erred) reportError(error) else { val convo = convertToAssignment(fun, qual1, name, args) @@ -4688,7 +4688,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } } else { - if (statistics.canEnable) statistics.stopTimer(failedApplyNanos, appStart) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedApplyNanos, appStart) val Apply(Select(qual2, _), args2) = tree val erred = qual2.exists(_.isErroneous) || args2.exists(_.isErroneous) reportError { @@ -4696,7 +4696,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } case _ => - if (statistics.canEnable) statistics.stopTimer(failedApplyNanos, appStart) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(failedApplyNanos, appStart) reportError(error) } val silentResult = silent( @@ -4707,7 +4707,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper silentResult match { case SilentResultValue(fun1) => val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1 - if (statistics.canEnable) statistics.incCounter(typedApplyCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedApplyCount) val noSecondTry = ( isPastTyper || context.inSecondTry @@ -4999,7 +4999,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (name.isTypeName) typedSelect(tree, typedTypeSelectionQualifier(tree.qualifier, WildcardType), name) else { - if 
(statistics.canEnable) statistics.incCounter(typedSelectCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedSelectCount) val qualTyped = checkDead(typedQualifier(qual, mode)) val tree1 = typedSelect(tree, qualTyped, name) @@ -5087,7 +5087,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedIdentOrWildcard(tree: Ident) = { val name = tree.name - if (statistics.canEnable) statistics.incCounter(typedIdentCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedIdentCount) if ((name == nme.WILDCARD && mode.typingPatternNotConstructor) || (name == tpnme.WILDCARD && mode.inTypeMode)) tree setType makeFullyDefined(pt) @@ -5551,10 +5551,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else typedInternal(tree, mode, pt) ) - val startByType = if (statistics.hotEnabled) statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null - if (statistics.hotEnabled) statistics.incCounter(visitsByType, tree.getClass) + val statsEnabled = StatisticsStatics.areSomeHotStatsEnabled() && statistics.areHotStatsLocallyEnabled + val startByType = if (statsEnabled) statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null + if (statsEnabled) statistics.incCounter(visitsByType, tree.getClass) try body - finally if (statistics.hotEnabled) statistics.popTimer(byTypeStack, startByType) + finally if (statsEnabled) statistics.popTimer(byTypeStack, startByType) } private def typedInternal(tree: Tree, mode: Mode, pt: Type): Tree = { diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala index 7dcc2ebf0ecb..d165840aa385 100644 --- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala @@ -8,7 +8,7 @@ package internal // todo implement in terms of BitSet import scala.collection.mutable -import util.Statistics +import util.{Statistics, StatisticsStatics} /** A base type sequence (BaseTypeSeq) is an ordered sequence spanning all the base types * of a type. 
It characterized by the following two laws: @@ -42,8 +42,8 @@ trait BaseTypeSeqs { */ class BaseTypeSeq protected[reflect] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) { self => - if (statistics.canEnable) statistics.incCounter(baseTypeSeqCount) - if (statistics.canEnable) statistics.incCounter(baseTypeSeqLenTotal, elems.length) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(baseTypeSeqCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(baseTypeSeqLenTotal, elems.length) private[this] val typeSymbols = { val tmp = new Array[Int](elems.length) var i = 0 diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index f8220acf99e5..494bdc4e6f83 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -187,7 +187,7 @@ abstract class SymbolTable extends macros.Universe final def atPhaseStack: List[Phase] = List.tabulate(phStackIndex)(i => phStack(i)) final def phase: Phase = { - if (statistics.canEnable) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(statistics.phaseCounter) ph } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 6e72a62a1d06..c3f01a6f0557 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -9,7 +9,7 @@ package internal import scala.collection.immutable import scala.collection.mutable.ListBuffer -import util.{ Statistics, shortClassOfInstance } +import util.{ Statistics, shortClassOfInstance, StatisticsStatics } import Flags._ import scala.annotation.tailrec import scala.reflect.io.{ AbstractFile, NoAbstractFile } @@ -767,7 +767,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } final def flags: Long = { - if (statistics.canEnable) statistics.incCounter(flagsCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(flagsCount) val fs = _rawflags & phase.flagMask (fs | ((fs & LateFlags) >>> LateShift)) & ~((fs & AntiFlags) >>> AntiShift) } @@ -1197,7 +1197,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * `assertOwner` aborts compilation immediately if called on NoSymbol. 
*/ def owner: Symbol = { - if (statistics.canEnable) statistics.incCounter(ownerCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(ownerCount) rawowner } final def safeOwner: Symbol = if (this eq NoSymbol) NoSymbol else owner @@ -2766,7 +2766,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => private[this] var _rawname: TermName = initName def rawname = _rawname def name = { - if (statistics.canEnable) statistics.incCounter(nameCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nameCount) _rawname } override def name_=(name: Name) { @@ -2900,13 +2900,13 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def moduleClass = referenced override def owner = { - if (statistics.canEnable) statistics.incCounter(ownerCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(ownerCount) // a non-static module symbol gets the METHOD flag in uncurry's info transform -- see isModuleNotMethod if (!isMethod && needsFlatClasses) rawowner.owner else rawowner } override def name: TermName = { - if (statistics.canEnable) statistics.incCounter(nameCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nameCount) if (!isMethod && needsFlatClasses) { if (flatname eq null) flatname = nme.flattenedName(rawowner.name, rawname) @@ -3038,7 +3038,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def rawname = _rawname def name = { - if (statistics.canEnable) statistics.incCounter(nameCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nameCount) _rawname } final def asNameType(n: Name) = n.toTypeName @@ -3165,7 +3165,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * info for T in Test1 should be >: Nothing <: Test3[_] */ - if (statistics.canEnable) statistics.incCounter(typeSymbolCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typeSymbolCount) } implicit val TypeSymbolTag = ClassTag[TypeSymbol](classOf[TypeSymbol]) @@ -3325,12 +3325,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => } override def owner: Symbol = { - if (statistics.canEnable) statistics.incCounter(ownerCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(ownerCount) if (needsFlatClasses) rawowner.owner else rawowner } override def name: TypeName = { - if (statistics.canEnable) statistics.incCounter(nameCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nameCount) if (needsFlatClasses) { if (flatname eq null) flatname = tpnme.flattenedName(rawowner.name, rawname) @@ -3386,7 +3386,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => else super.toString ) - if (statistics.canEnable) statistics.incCounter(classSymbolCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(classSymbolCount) } implicit val ClassSymbolTag = ClassTag[ClassSymbol](classOf[ClassSymbol]) diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index bae5d438356a..76787aeafa4f 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -10,7 +10,7 @@ package internal import Flags._ import scala.collection.mutable import scala.reflect.macros.Attachments -import util.Statistics +import util.{Statistics, StatisticsStatics} trait Trees extends api.Trees { self: SymbolTable => @@ -38,7 +38,8 @@ trait Trees extends api.Trees { val id = nodeCount // TODO: add to attachment? 
nodeCount += 1 - if (statistics.hotEnabled) statistics.incCounter(statistics.nodeByType, getClass) + if (StatisticsStatics.areSomeHotStatsEnabled()) + statistics.incCounter(statistics.nodeByType, getClass) final override def pos: Position = rawatt.pos diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index d1f7e2573836..ef293e2fe703 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -13,7 +13,7 @@ import mutable.ListBuffer import Flags._ import scala.util.control.ControlThrowable import scala.annotation.tailrec -import util.Statistics +import util.{Statistics, StatisticsStatics} import util.ThreeValues._ import Variance._ import Depth._ @@ -680,7 +680,7 @@ trait Types * = Int */ def asSeenFrom(pre: Type, clazz: Symbol): Type = { - val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, asSeenFromNanos) else null + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, asSeenFromNanos) else null try { val trivial = ( this.isTrivial @@ -696,7 +696,7 @@ trait Types if (m.capturedSkolems.isEmpty) tp1 else deriveType(m.capturedSkolems, _.cloneSymbol setFlag CAPTURED)(tp1) } - } finally if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) + } finally if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) } /** The info of `sym`, seen as a member of this type. @@ -802,7 +802,7 @@ trait Types /** Is this type a subtype of that type? */ def <:<(that: Type): Boolean = { - if (statistics.canEnable) stat_<:<(that) + if (StatisticsStatics.areSomeColdStatsEnabled) stat_<:<(that) else { (this eq that) || (if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that) @@ -834,26 +834,26 @@ trait Types }) def stat_<:<(that: Type): Boolean = { - if (statistics.canEnable) statistics.incCounter(subtypeCount) - val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, subtypeNanos) else null + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(subtypeCount) + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, subtypeNanos) else null val result = (this eq that) || (if (explainSwitch) explain("<:", isSubType(_: Type, _: Type), this, that) else isSubType(this, that)) - if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) result } /** Is this type a weak subtype of that type? True also for numeric types, i.e. Int weak_<:< Long. 
*/ def weak_<:<(that: Type): Boolean = { - if (statistics.canEnable) statistics.incCounter(subtypeCount) - val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, subtypeNanos) else null + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(subtypeCount) + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, subtypeNanos) else null val result = ((this eq that) || (if (explainSwitch) explain("weak_<:", isWeakSubType, this, that) else isWeakSubType(this, that))) - if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) result } @@ -1097,7 +1097,7 @@ trait Types override def isTrivial = false override def widen: Type = underlying.widen override def baseTypeSeq: BaseTypeSeq = { - if (statistics.canEnable) statistics.incCounter(singletonBaseTypeSeqCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(singletonBaseTypeSeqCount) underlying.baseTypeSeq prepend this } override def isHigherKinded = false // singleton type classifies objects, thus must be kind * @@ -1500,8 +1500,8 @@ trait Types val bts = copyRefinedType(tpe.asInstanceOf[RefinedType], tpe.parents map varToParam, varToParam mapOver tpe.decls).baseTypeSeq tpe.baseTypeSeqCache = bts lateMap paramToVar } else { - if (statistics.canEnable) statistics.incCounter(compoundBaseTypeSeqCount) - val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(compoundBaseTypeSeqCount) + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null try { tpe.baseTypeSeqCache = undetBaseTypeSeq tpe.baseTypeSeqCache = @@ -1510,7 +1510,7 @@ trait Types else compoundBaseTypeSeq(tpe) } finally { - if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) } // [Martin] suppressing memoization solves the problem with "same type after erasure" errors // when compiling with @@ -1577,13 +1577,13 @@ trait Types else { tpe.baseClassesPeriod = currentPeriod if (!isValidForBaseClasses(period)) { - val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, baseClassesNanos) else null + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, baseClassesNanos) else null try { tpe.baseClassesCache = null tpe.baseClassesCache = tpe.memo(computeBaseClasses(tpe))(tpe.typeSymbol :: _.baseClasses.tail) } finally { - if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) } } } @@ -2469,13 +2469,13 @@ trait Types if (period != currentPeriod) { tpe.baseTypeSeqPeriod = currentPeriod if (!isValidForBaseClasses(period)) { - if (statistics.canEnable) statistics.incCounter(typerefBaseTypeSeqCount) - val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typerefBaseTypeSeqCount) + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null try { tpe.baseTypeSeqCache = undetBaseTypeSeq tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl } finally { - if (statistics.canEnable) 
statistics.popTimer(typeOpsStack, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) } } } @@ -3808,7 +3808,7 @@ trait Types final def howManyUniqueTypes: Int = if (uniques == null) 0 else uniques.size protected def unique[T <: Type](tp: T): T = { - if (statistics.canEnable) statistics.incCounter(rawTypeCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(rawTypeCount) if (uniqueRunId != currentRunId) { uniques = util.WeakHashSet[Type](initialUniquesCapacity) // JZ: We used to register this as a perRunCache so it would be cleared eagerly at diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala index 29f0dd1f38f4..cbf87fc0c615 100644 --- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala +++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala @@ -5,6 +5,7 @@ package scala.reflect.internal package tpe +import util.StatisticsStatics import Flags._ trait FindMembers { @@ -42,10 +43,10 @@ trait FindMembers { // Main entry point def apply(): T = { - if (statistics.canEnable) statistics.incCounter(findMemberCount) - val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, findMemberNanos) else null + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(findMemberCount) + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, findMemberNanos) else null try searchConcreteThenDeferred - finally if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) + finally if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) } protected def result: T @@ -275,11 +276,11 @@ trait FindMembers { // Assemble the result from the hand-rolled ListBuffer protected def result: Symbol = if (members eq null) { if (member0 == NoSymbol) { - if (statistics.canEnable) statistics.incCounter(noMemberCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(noMemberCount) NoSymbol } else member0 } else { - if (statistics.canEnable) statistics.incCounter(multMemberCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(multMemberCount) lastM.tl = Nil initBaseClasses.head.newOverloaded(tpe, members) } diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index 1aafde5d6076..814e1640e0b6 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -5,6 +5,7 @@ package tpe import scala.collection.mutable import scala.annotation.tailrec +import scala.reflect.internal.util.StatisticsStatics import Variance._ private[internal] trait GlbLubs { @@ -254,8 +255,8 @@ private[internal] trait GlbLubs { case Nil => NothingTpe case t :: Nil => t case _ => - if (statistics.canEnable) statistics.incCounter(lubCount) - val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, lubNanos) else null + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(lubCount) + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, lubNanos) else null try { val res = lub(ts, lubDepth(ts)) // If the number of unapplied type parameters in all incoming @@ -273,7 +274,7 @@ private[internal] trait GlbLubs { finally { lubResults.clear() glbResults.clear() - if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) + if 
(StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) } } @@ -396,7 +397,7 @@ private[internal] trait GlbLubs { indent = indent + " " assert(indent.length <= 100) } - if (statistics.canEnable) statistics.incCounter(nestedLubCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nestedLubCount) val res = lub0(ts) if (printLubs) { indent = indent stripSuffix " " @@ -421,14 +422,14 @@ private[internal] trait GlbLubs { case List() => AnyTpe case List(t) => t case ts0 => - if (statistics.canEnable) statistics.incCounter(lubCount) - val start = if (statistics.canEnable) statistics.pushTimer(typeOpsStack, lubNanos) else null + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(lubCount) + val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, lubNanos) else null try { glbNorm(ts0, lubDepth(ts0)) } finally { lubResults.clear() glbResults.clear() - if (statistics.canEnable) statistics.popTimer(typeOpsStack, start) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.popTimer(typeOpsStack, start) } } @@ -542,7 +543,7 @@ private[internal] trait GlbLubs { } } // if (settings.debug.value) { println(indent + "glb of " + ts + " at depth "+depth); indent = indent + " " } //DEBUG - if (statistics.canEnable) statistics.incCounter(nestedLubCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nestedLubCount) glb0(ts) // if (settings.debug.value) { indent = indent.substring(0, indent.length() - 2); log(indent + "glb of " + ts + " is " + res) }//DEBUG } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index 82541bdf5cb2..60d4fc4df6f4 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -6,6 +6,7 @@ package tpe import scala.collection.{ mutable } import util.TriState import scala.annotation.tailrec +import scala.reflect.internal.util.StatisticsStatics trait TypeComparers { self: SymbolTable => @@ -91,7 +92,7 @@ trait TypeComparers { /** Do `tp1` and `tp2` denote equivalent types? 
*/ def isSameType(tp1: Type, tp2: Type): Boolean = try { - if (statistics.canEnable) statistics.incCounter(sametypeCount) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(sametypeCount) subsametypeRecursions += 1 //OPT cutdown on Function0 allocation //was: diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index 377f19e7f623..c43b9235d22c 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -20,45 +20,45 @@ abstract class Statistics(val symbolTable: SymbolTable, settings: MutableSetting /** If enabled, increment counter by one */ @inline final def incCounter(c: Counter) { - if (canEnable && c != null) c.value += 1 + if (areStatisticsLocallyEnabled && c != null) c.value += 1 } /** If enabled, increment counter by given delta */ @inline final def incCounter(c: Counter, delta: Int) { - if (canEnable && c != null) c.value += delta + if (areStatisticsLocallyEnabled && c != null) c.value += delta } /** If enabled, increment counter in map `ctrs` at index `key` by one */ @inline final def incCounter[K](ctrs: QuantMap[K, Counter], key: K) = - if (canEnable && ctrs != null) ctrs(key).value += 1 + if (areStatisticsLocallyEnabled && ctrs != null) ctrs(key).value += 1 /** If enabled, start subcounter. While active it will track all increments of * its base counter. */ @inline final def startCounter(sc: SubCounter): (Int, Int) = - if (canEnable && sc != null) sc.start() else null + if (areStatisticsLocallyEnabled && sc != null) sc.start() else null /** If enabled, stop subcounter from tracking its base counter. */ @inline final def stopCounter(sc: SubCounter, start: (Int, Int)) { - if (canEnable && sc != null) sc.stop(start) + if (areStatisticsLocallyEnabled && sc != null) sc.stop(start) } /** If enabled, start timer */ @inline final def startTimer(tm: Timer): TimerSnapshot = - if (canEnable && tm != null) tm.start() else null + if (areStatisticsLocallyEnabled && tm != null) tm.start() else null /** If enabled, stop timer */ @inline final def stopTimer(tm: Timer, start: TimerSnapshot) { - if (canEnable && tm != null) tm.stop(start) + if (areStatisticsLocallyEnabled && tm != null) tm.stop(start) } /** If enabled, push and start a new timer in timer stack */ @inline final def pushTimer(timers: TimerStack, timer: => StackableTimer): TimerSnapshot = - if (canEnable && timers != null) timers.push(timer) else null + if (areStatisticsLocallyEnabled && timers != null) timers.push(timer) else null /** If enabled, stop and pop timer from timer stack */ @inline final def popTimer(timers: TimerStack, prev: TimerSnapshot) { - if (canEnable && timers != null) timers.pop(prev) + if (areStatisticsLocallyEnabled && timers != null) timers.pop(prev) } /** Create a new counter that shows as `prefix` and is active in given phases */ @@ -121,7 +121,7 @@ quant) * Quantities with non-empty prefix are printed in the statistics info. */ trait Quantity { - if (canEnable && prefix.nonEmpty) { + if (areStatisticsLocallyEnabled && prefix.nonEmpty) { val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix" qs(key) = this } @@ -259,29 +259,37 @@ quant) } private val qs = new mutable.HashMap[String, Quantity] + private[scala] var areColdStatsLocallyEnabled: Boolean = false + private[scala] var areHotStatsLocallyEnabled: Boolean = false /** Represents whether normal statistics can or cannot be enabled. 
*/ - @inline final def canEnable: Boolean = StatisticsStatics.areColdStatsEnabled() - - @inline def enabled = canEnable + @inline final def enabled: Boolean = areColdStatsLocallyEnabled def enabled_=(cond: Boolean) = { - if (cond && !canEnable) { + if (cond && !enabled) { StatisticsStatics.enableColdStats() - } else if (!cond && canEnable) { + areColdStatsLocallyEnabled = true + } else if (!cond && enabled) { StatisticsStatics.disableColdStats() + areColdStatsLocallyEnabled = false } } /** Represents whether hot statistics can or cannot be enabled. */ - @inline def hotEnabled: Boolean = canEnable && StatisticsStatics.areHotStatsEnabled() + @inline final def hotEnabled: Boolean = enabled && areHotStatsLocallyEnabled def hotEnabled_=(cond: Boolean) = { - if (cond && !hotEnabled) { + if (cond && enabled && !hotEnabled) { StatisticsStatics.enableHotStats() - } else if (!cond && hotEnabled) { + areHotStatsLocallyEnabled = true + } else if (!cond && enabled && hotEnabled) { StatisticsStatics.disableHotStats() + areHotStatsLocallyEnabled = false } } + /** Tells whether statistics should be definitely reported to the user for this `Global` instance. */ + @inline final def areStatisticsLocallyEnabled: Boolean = + areColdStatsLocallyEnabled || areHotStatsLocallyEnabled + import scala.reflect.internal.Reporter /** Reports the overhead of measuring statistics via the nanoseconds variation. */ final def reportStatisticsOverhead(reporter: Reporter): Unit = { diff --git a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java index cc3249125c55..a7a2e02f7144 100644 --- a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java +++ b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java @@ -31,7 +31,7 @@ protected BooleanContainer initialValue() { private static final MethodHandle COLD_STATS_GETTER = COLD_STATS.createGetter(); private static final MethodHandle HOT_STATS_GETTER = HOT_STATS.createGetter(); - public static boolean areColdStatsEnabled() { + public static boolean areSomeColdStatsEnabled() { try { return ((BooleanContainer)(Object) COLD_STATS_GETTER.invokeExact()).isEnabledNow(); } catch (Throwable e) { @@ -39,7 +39,7 @@ public static boolean areColdStatsEnabled() { } } - public static boolean areHotStatsEnabled() { + public static boolean areSomeHotStatsEnabled() { try { return ((BooleanContainer)(Object) HOT_STATS_GETTER.invokeExact()).isEnabledNow(); } catch (Throwable e) { diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index e56aa0fc9b1f..e77dd6846c09 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -115,7 +115,7 @@ abstract class AbstractFile extends Iterable[AbstractFile] { /** Does this abstract file denote an existing file? 
*/ def exists: Boolean = { - //if (statistics.canEnable) statistics.incCounter(IOStats.fileExistsCount) + //if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(IOStats.fileExistsCount) (file eq null) || file.exists } diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala index a2b4ee4ab719..ff834ced28c4 100644 --- a/src/reflect/scala/reflect/io/Path.scala +++ b/src/reflect/scala/reflect/io/Path.scala @@ -57,12 +57,12 @@ object Path { def apply(path: String): Path = apply(new JFile(path)) def apply(jfile: JFile): Path = try { def isFile = { - //if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount) + //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsFileCount) jfile.isFile } def isDirectory = { - //if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount) + //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsDirectoryCount) jfile.isDirectory } @@ -198,16 +198,16 @@ class Path private[io] (val jfile: JFile) { def canRead = jfile.canRead() def canWrite = jfile.canWrite() def exists = { - //if (Statistics.canEnable) Statistics.incCounter(IOStats.fileExistsCount) + //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileExistsCount) try jfile.exists() catch { case ex: SecurityException => false } } def isFile = { - //if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsFileCount) + //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsFileCount) try jfile.isFile() catch { case ex: SecurityException => false } } def isDirectory = { - //if (Statistics.canEnable) Statistics.incCounter(IOStats.fileIsDirectoryCount) + //if (StatisticsStatics.areSomeColdStatsEnabled) Statistics.incCounter(IOStats.fileIsDirectoryCount) try jfile.isDirectory() catch { case ex: SecurityException => jfile.getPath == "." } } def isAbsolute = jfile.isAbsolute() diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala index 80f2dc7b10ed..81c662d2da89 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala @@ -20,7 +20,7 @@ class JavaUniverse extends InternalSymbolTable with JavaUniverseForce with Refle def erasurePhase = SomePhase lazy val settings = new Settings - override val statistics = new Statistics(JavaUniverse.this, settings) with ReflectStats + override final val statistics = new Statistics(JavaUniverse.this, settings) with ReflectStats private val isLogging = sys.props contains "scala.debug.reflect" def log(msg: => AnyRef): Unit = if (isLogging) Console.err.println("[reflect] " + msg) From 71ae0c0a0a4caa7844815ad2c6c3ac637ba44a52 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 28 Sep 2017 12:24:13 -0700 Subject: [PATCH 1062/2793] Review #5876: OOPify Good old overrides are sometimes easier to follow than boolean flags and function arguments. Also, incorporate som-snytt's review of the review. 
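
For readers skimming the diff below, here is a minimal, self-contained sketch of the refactoring pattern being applied (the toy `Tree` type and all names are illustrative only, not the actual compiler API): behaviour that was previously selected through a boolean constructor flag and a `Tree => Tree` argument becomes a plain base traverser plus small singleton subclasses that override the relevant hook.

```scala
object OverridesInsteadOfFlags {
  final case class Tree(sym: String, fromMacro: Boolean, children: List[Tree] = Nil)

  // One plain traverser; variants are expressed as overrides rather than
  // as boolean constructor parameters or a function argument.
  class UnusedTraverser {
    def qualifies(t: Tree): Boolean = t.sym.nonEmpty
    def traverse(t: Tree): Unit = {
      if (qualifies(t)) println(s"unused candidate: ${t.sym}")
      t.children.foreach(traverse)
    }
  }

  // Skip symbols coming from macros, analogous to `skipMacroCall` below.
  object skipMacroTrees extends UnusedTraverser {
    override def qualifies(t: Tree): Boolean = super.qualifies(t) && !t.fromMacro
  }

  def main(args: Array[String]): Unit = {
    val body = Tree("f", fromMacro = false, List(Tree("expanded$1", fromMacro = true)))
    skipMacroTrees.traverse(body) // prints only "unused candidate: f"
  }
}
```

Each variant reads as a single override instead of a combination of flag values threaded through the constructor.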
--- .../scala/tools/nsc/settings/Warnings.scala | 2 +- .../nsc/typechecker/TypeDiagnostics.scala | 55 ++++++++++--------- 2 files changed, 30 insertions(+), 27 deletions(-) diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index dc553ebda6c3..0ff46e21b622 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -15,7 +15,7 @@ trait Warnings { // Warning semantics. val fatalWarnings = BooleanSetting("-Xfatal-warnings", "Fail the compilation if there are any warnings.") - // Non-lint warnings. + // Non-lint warnings. -- TODO turn into MultiChoiceEnumeration val warnMacros = ChoiceSetting( name = "-Ywarn-macros", helpArg = "mode", diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 47a77691d585..4e208a1cf71a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -473,7 +473,7 @@ trait TypeDiagnostics { "readResolve", "readObject", "writeObject", "writeReplace" ).map(TermName(_)) - class UnusedPrivates(traverseCheck: Tree => Tree, isOriginal: Boolean) extends Traverser { + class UnusedPrivates extends Traverser { val defnTrees = ListBuffer[MemberDef]() val targets = mutable.Set[Symbol]() val setVars = mutable.Set[Symbol]() @@ -492,7 +492,6 @@ trait TypeDiagnostics { && !sym.isParamAccessor // could improve this, but it's a pain && !sym.isEarlyInitialized // lots of false positives in the way these are encoded && !(sym.isGetter && sym.accessed.isEarlyInitialized) - && (isOriginal || !sym.isMacro) ) def qualifiesType(sym: Symbol) = !sym.isDefinedInPackage def qualifies(sym: Symbol) = ( @@ -500,8 +499,7 @@ trait TypeDiagnostics { && (sym.isTerm && qualifiesTerm(sym) || sym.isType && qualifiesType(sym)) ) - override def traverse(t0: Tree): Unit = { - val t = traverseCheck(t0) + override def traverse(t: Tree): Unit = { val sym = t.symbol t match { case m: MemberDef if qualifies(t.symbol) => @@ -602,15 +600,30 @@ trait TypeDiagnostics { def unusedPatVars = patvars.toList.filter(p => isUnusedTerm(p) && !inDefinedAt(p)).sortBy(sympos) } + object skipMacroCall extends UnusedPrivates { + override def qualifiesTerm(sym: Symbol): Boolean = + super.qualifiesTerm(sym) && !sym.isMacro + } + object skipMacroExpansion extends UnusedPrivates { + override def traverse(t: Tree): Unit = + if (!hasMacroExpansionAttachment(t)) super.traverse(t) + } + object checkMacroExpandee extends UnusedPrivates { + override def traverse(t: Tree): Unit = + super.traverse(if (hasMacroExpansionAttachment(t)) macroExpandee(t) else t) + } + private def warningsEnabled: Boolean = { val ss = settings import ss._ warnUnusedPatVars || warnUnusedPrivates || warnUnusedLocals || warnUnusedParams } - def process(p: UnusedPrivates): Unit = { + def run(unusedPrivates: UnusedPrivates)(body: Tree): Unit = { + unusedPrivates.traverse(body) + if (settings.warnUnusedLocals || settings.warnUnusedPrivates) { - for (defn: DefTree <- p.unusedTerms) { + for (defn: DefTree <- unusedPrivates.unusedTerms) { val sym = defn.symbol val pos = ( if (defn.pos.isDefined) defn.pos @@ -640,10 +653,10 @@ trait TypeDiagnostics { ) context.warning(pos, s"$why $what in ${sym.owner} is never used") } - for (v <- p.unsetVars) { + for (v <- unusedPrivates.unsetVars) { context.warning(v.pos, s"local var ${v.name} in ${v.owner} is never set: consider 
using immutable val") } - for (t <- p.unusedTypes) { + for (t <- unusedPrivates.unusedTypes) { val sym = t.symbol val wrn = if (sym.isPrivate) settings.warnUnusedPrivates else settings.warnUnusedLocals if (wrn) { @@ -653,7 +666,7 @@ trait TypeDiagnostics { } } if (settings.warnUnusedPatVars) { - for (v <- p.unusedPatVars) + for (v <- unusedPrivates.unusedPatVars) context.warning(v.pos, s"pattern var ${v.name} in ${v.owner} is never used; `${v.name}@_' suppresses this warning") } if (settings.warnUnusedParams) { @@ -672,28 +685,18 @@ trait TypeDiagnostics { && !isImplementation(s.owner) && !isConvention(s) ) - for (s <- p.unusedParams if warnable(s)) + for (s <- unusedPrivates.unusedParams if warnable(s)) context.warning(s.pos, s"parameter $s in ${s.owner} is never used") } } def apply(unit: CompilationUnit): Unit = if (warningsEnabled && !unit.isJava) { + val body = unit.body + // TODO the message should distinguish whether the unusage is before or after macro expansion. settings.warnMacros.value match { - case "none" => - val only = new UnusedPrivates((t: Tree) => if (hasMacroExpansionAttachment(t)) EmptyTree else t, isOriginal = true) - only.traverse(unit.body) - process(only) - case "before" | "both" => - val first = new UnusedPrivates((t: Tree) => if (hasMacroExpansionAttachment(t)) macroExpandee(t) else t, isOriginal = true) - first.traverse(unit.body) - process(first) - case _ => () - } - settings.warnMacros.value match { - case "after" | "both" => - val second = new UnusedPrivates((t: Tree) => t, isOriginal = false) - second.traverse(unit.body) - process(second) - case _ => () + case "none" => run(skipMacroExpansion)(body) + case "before" => run(checkMacroExpandee)(body) + case "after" => run(skipMacroCall)(body) + case "both" => run(checkMacroExpandee)(body) ; run(skipMacroCall)(body) } } } From e7d39c36d500b065f6f8e4cd35fcce151274883d Mon Sep 17 00:00:00 2001 From: jvican Date: Mon, 4 Sep 2017 11:04:25 +0200 Subject: [PATCH 1063/2793] Add statistics to scopes handling This commit adds two important things to the scopes handling in scalac: * Count how many scopes are created. * Time common scope operations, like population and lookup so that we can effectively measure the impact of unused imports and the like. --- .../scala/reflect/internal/Scopes.scala | 23 +++++++++++++++++++ .../scala/reflect/internal/SymbolTable.scala | 3 ++- 2 files changed, 25 insertions(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 0b0a4c56407f..8aa9a6d41e75 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -8,9 +8,18 @@ package reflect package internal import scala.annotation.tailrec +import scala.collection.generic.Clearable +import scala.reflect.internal.util.{Statistics, StatisticsStatics} trait Scopes extends api.Scopes { self: SymbolTable => + // Reset `scopeCount` per every run + private[scala] var scopeCount = 0 + perRunCaches.recordCache { + val clearCount: Clearable = () => {scopeCount = 0} + clearCount + } + /** An ADT to represent the results of symbol name lookups. 
*/ sealed trait NameLookup { def symbol: Symbol ; def isSuccess = false } @@ -50,6 +59,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => */ class Scope protected[Scopes]() extends ScopeApi with MemberScopeApi { + scopeCount += 1 private[scala] var elems: ScopeEntry = _ /** The number of times this scope is nested in another @@ -297,6 +307,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => * change to use iterators as too costly. */ def lookupEntry(name: Name): ScopeEntry = { + val startTime = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.scopeLookupTime) else null var e: ScopeEntry = null if (hashtable ne null) { e = hashtable(name.start & HASHMASK) @@ -309,6 +320,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => e = e.next } } + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.scopeLookupTime, startTime) e } @@ -452,18 +464,22 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** Create a new scope nested in another one with which it shares its elements */ final def newNestedScope(outer: Scope): Scope = { + val startTime = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.scopePopulationTime) else null val nested = newScope // not `new Scope`, we must allow the runtime reflection universe to mixin SynchronizedScopes! nested.elems = outer.elems nested.nestinglevel = outer.nestinglevel + 1 if (outer.hashtable ne null) nested.hashtable = java.util.Arrays.copyOf(outer.hashtable, outer.hashtable.length) + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.scopePopulationTime, startTime) nested } /** Create a new scope with given initial elements */ def newScopeWith(elems: Symbol*): Scope = { + val startTime = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.scopePopulationTime) else null val scope = newScope elems foreach scope.enter + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.scopePopulationTime, startTime) scope } @@ -490,3 +506,10 @@ trait Scopes extends api.Scopes { self: SymbolTable => private final val maxRecursions = 1000 } + +trait ScopeStats { + self: Statistics => + val scopeCountView = newView("#created scopes")(symbolTable.scopeCount) + val scopePopulationTime = newTimer("time spent in scope population") + val scopeLookupTime = newTimer("time spent in scope lookup") +} diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 494bdc4e6f83..3e78a60a8ce8 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -55,7 +55,8 @@ abstract class SymbolTable extends macros.Universe with TypesStats with SymbolTableStats with TreesStats - with SymbolsStats { self: Statistics => } + with SymbolsStats + with ScopeStats { self: Statistics => } /** Some statistics (normally disabled) set with -Ystatistics */ val statistics: Statistics with ReflectStats From d2075855601741f071e3f62aa0f5804d76c897b8 Mon Sep 17 00:00:00 2001 From: jvican Date: Mon, 4 Sep 2017 16:24:54 +0200 Subject: [PATCH 1064/2793] Add padding to implicits timers --- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index d24234a5a2b0..616616c5a27b 100644 --- 
a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1581,10 +1581,10 @@ trait ImplicitsStats { val matchingImplicits = newSubCounter(" #matching", implicitSearchCount) val typedImplicits = newSubCounter(" #typed", implicitSearchCount) val foundImplicits = newSubCounter(" #found", implicitSearchCount) - val improvesCount = newSubCounter("implicit improves tests", implicitSearchCount) - val improvesCachedCount = newSubCounter("#implicit improves cached ", implicitSearchCount) - val inscopeImplicitHits = newSubCounter("#implicit inscope hits", implicitSearchCount) - val oftypeImplicitHits = newSubCounter("#implicit oftype hits ", implicitSearchCount) + val improvesCount = newSubCounter(" #implicit improves tests", implicitSearchCount) + val improvesCachedCount = newSubCounter(" #implicit improves cached ", implicitSearchCount) + val inscopeImplicitHits = newSubCounter(" #implicit inscope hits", implicitSearchCount) + val oftypeImplicitHits = newSubCounter(" #implicit oftype hits ", implicitSearchCount) val implicitNanos = newSubTimer ("time spent in implicits", typerNanos) val inscopeSucceedNanos = newSubTimer (" successful in scope", typerNanos) val inscopeFailNanos = newSubTimer (" failed in scope", typerNanos) From ce0ed00a2fb79c6ff6a787e9b455ad6eb7349040 Mon Sep 17 00:00:00 2001 From: jvican Date: Wed, 16 Aug 2017 15:42:39 +0200 Subject: [PATCH 1065/2793] Print timers of phases at the end The following commit adds the ability to print the timers for every phase at the end of the `compileSources` execution. This is useful because you can have an idea of how the running times of the phases are distributed. It also works for phases injected by the incremental compiler and compiler plugins (which allows devs to estimate how much time do their compiler plugins take out of the total compile time). It also removes the previous infrastructure to print these timings under the verbose flag, and now reuses the full statistics infrastructure for doing so. 
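
As a rough, self-contained sketch of the shape of this change (simplified types; the real code uses the `Statistics` timers, the compiler's actual phase list, and `informTime`), the idea is one total timer with a sub-timer per phase, all reported once compilation finishes:

```scala
object PhaseTimingSketch {
  final class Timer(val name: String) { var nanos = 0L }

  def timed[A](t: Timer)(body: => A): A = {
    val start = System.nanoTime()
    try body finally t.nanos += System.nanoTime() - start
  }

  def main(args: Array[String]): Unit = {
    val total    = new Timer("#total compile time")
    val phases   = List("parser", "namer", "typer") // stand-ins for the real phase list
    val perPhase = phases.map(p => new Timer(s"  $p"))
    timed(total) {
      phases.zip(perPhase).foreach { case (_, t) => timed(t)(Thread.sleep(1)) } // run the phase here
    }
    // Cumulative timers are printed at the end, as compileUnitsInternal now does.
    (total :: perPhase).foreach(t => println(f"${t.name}%-22s ${t.nanos / 1e6}%.2f ms"))
  }
}
```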
--- src/compiler/scala/tools/nsc/Global.scala | 29 ++++++++++++++++------- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 85d085fc1ec7..359477460ab6 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1430,26 +1430,33 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } } + private final val GlobalPhaseName = "global (synthetic)" + protected final val totalCompileTime = statistics.newTimer("#total compile time", GlobalPhaseName) + def compileUnits(units: List[CompilationUnit], fromPhase: Phase): Unit = compileUnitsInternal(units,fromPhase) private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) { - def currentTime = java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) - units foreach addUnit - val startTime = currentTime - reporter.reset() warnDeprecatedAndConflictingSettings() globalPhase = fromPhase + val timePhases = StatisticsStatics.areSomeColdStatsEnabled || settings.verbose + val startTotal = if (timePhases) statistics.startTimer(totalCompileTime) else null + while (globalPhase.hasNext && !reporter.hasErrors) { - val startTime = currentTime phase = globalPhase + val phaseTimer = if (timePhases) statistics.newSubTimer(s" ${phase.name}", totalCompileTime) else null + val startPhase = if (timePhases) statistics.startTimer(phaseTimer) else null + val profileBefore=profiler.beforePhase(phase) - globalPhase.run() + try globalPhase.run() + finally if (timePhases) statistics.stopTimer(phaseTimer, startPhase) else () profiler.afterPhase(phase, profileBefore) + if (timePhases) + informTime(globalPhase.description, phaseTimer.nanos) + // progress update - informTime(globalPhase.description, startTime) if ((settings.Xprint containsPhase globalPhase) || settings.printLate && runIsAt(cleanupPhase)) { // print trees if (settings.Xshowtrees || settings.XshowtreesCompact || settings.XshowtreesStringified) nodePrinters.printAll() @@ -1502,7 +1509,13 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } symSource.keys foreach (x => resetPackageClass(x.owner)) - informTime("total", startTime) + if (timePhases) { + statistics.stopTimer(totalCompileTime, startTotal) + informTime("total", totalCompileTime.nanos) + inform("*** Cumulative timers for phases") + for (q <- statistics.allQuantities if q.phases == List(GlobalPhaseName)) + inform(q.line) + } // Clear any sets or maps created via perRunCaches. perRunCaches.clearAll() From 181e341ef6667a50d60f0faa2aedd478f85407c7 Mon Sep 17 00:00:00 2001 From: jvican Date: Fri, 29 Sep 2017 13:03:06 +0200 Subject: [PATCH 1066/2793] Hot fix registration of timers and counters Because of initialization order of the cake, `statistics` is initialized after the timers/counters in the cake are, so when it hits the constructor of `Quantity` those are not registered in `qs`. This meant that even though those objects were initialized, statistics were not reported. This change hot fixes it so that they are indeed reported. It does so by removing the guard that checked whether statistics were enabled. From now on, for simplicity and correctness, we will always register timers and counters that are initialized. This should have no impact in performance, since it's done only once when everything is initialized, and it's just an addition to a map. 
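
A minimal illustration of the initialization-order issue (toy names; the real registration happens in `Quantity`'s constructor and the real flag lives elsewhere in the cake): a guard that reads a flag which is only flipped after construction silently drops the registration, whereas unconditional registration is a single map insertion and is always correct.

```scala
object RegistrationOrderSketch {
  private val registry = scala.collection.mutable.Map.empty[String, AnyRef]
  private var statsEnabled = false // in scalac this is only set once settings are processed

  final class Counter(prefix: String, guardRegistration: Boolean) {
    // With the guard, a counter constructed before `statsEnabled` is set is
    // never registered and therefore never reported, even if it was updated.
    if (!guardRegistration || statsEnabled) registry(prefix) = this
  }

  def main(args: Array[String]): Unit = {
    new Counter("#guarded", guardRegistration = true)
    new Counter("#always", guardRegistration = false)
    statsEnabled = true // too late for the guarded counter
    println(registry.keys.toList.sorted) // List(#always)
  }
}
```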
--- src/reflect/scala/reflect/internal/util/Statistics.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index c43b9235d22c..dd1cdc755b77 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -121,7 +121,7 @@ quant) * Quantities with non-empty prefix are printed in the statistics info. */ trait Quantity { - if (areStatisticsLocallyEnabled && prefix.nonEmpty) { + if (prefix.nonEmpty) { val key = s"${if (underlying != this) underlying.prefix else ""}/$prefix" qs(key) = this } From 64e0d91ba7d46d0142c066993e18608d49b6749b Mon Sep 17 00:00:00 2001 From: jvican Date: Thu, 28 Sep 2017 15:25:22 +0200 Subject: [PATCH 1067/2793] Allow `AnalyzerPlugin`s to hook into implicit search The following commit allows an external analyzer plugin to hook into scalac's implicit search. This change replaces explicit instrumentation of this part to capture statistics, and has been therefore extended to allow more generic scenarios, leaving the tasks of capturing the data to external third parties (via compiler plugins). The change adds two new members to the public API of `AnalyzerPlugin`: 1. `pluginsImplicitSearch`: `ImplicitSearch => ()`. 1. `pluginsImplicitSearchResult`: `SearchResult` => `()`. `ImplicitSearch` is the data structure that contains all the necessary information to perform implicit search, whereas `SearchResult` is the resulting found implicit instance. These two methods allow the analyzer plugin to access the full context of implicit search. In order to have access to the parameters of `ImplicitSearch`, this commit also makes private parameters accessible by converting them to `val`s. --- .../nsc/typechecker/AnalyzerPlugins.scala | 32 +++++++++++++++++++ .../tools/nsc/typechecker/Implicits.scala | 7 ++-- 2 files changed, 37 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala index e9cce9509685..1ec9de99b4bb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala +++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala @@ -153,6 +153,26 @@ trait AnalyzerPlugins { self: Analyzer => * @param pt The return type of the enclosing method */ def pluginsTypedReturn(tpe: Type, typer: Typer, tree: Return, pt: Type): Type = tpe + + /** + * Access the search instance that will be used for the implicit search. + * + * The motivation of this method is to allow analyzer plugins to control when/where + * implicit search are triggered, and access their environment for data capturing purposes. + * + * @param search The instance that holds all the information about a given implicit search. + */ + def pluginsNotifyImplicitSearch(search: ImplicitSearch): Unit = () + + /** + * Access the implicit search result from Scalac's typechecker. + * + * The motivation of this method is to allow analyzer plugins to control when/where + * implicit search results are returned, and inspec them for data capturing purposes. + * + * @param result The result to a given implicit search. 
+ */ + def pluginsNotifyImplicitSearchResult(result: SearchResult): Unit = () } /** @@ -349,6 +369,18 @@ trait AnalyzerPlugins { self: Analyzer => def accumulate = (tpe, p) => p.pluginsTypedReturn(tpe, typer, tree, pt) }) + /** @see AnalyzerPlugin.pluginsImplicitSearch */ + def pluginsNotifyImplicitSearch(search: ImplicitSearch): Unit = invoke(new CumulativeOp[Unit] { + def default = () + def accumulate = (_, p) => p.pluginsNotifyImplicitSearch(search) + }) + + /** @see AnalyzerPlugin.pluginsImplicitSearchResult */ + def pluginsNotifyImplicitSearchResult(result: SearchResult): Unit = invoke(new CumulativeOp[Unit] { + def default = () + def accumulate = (_, p) => p.pluginsNotifyImplicitSearchResult(result) + }) + /** A list of registered macro plugins */ private var macroPlugins: List[MacroPlugin] = Nil diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 616616c5a27b..b2e01aa203db 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -90,7 +90,10 @@ trait Implicits { if (shouldPrint) typingStack.printTyping(tree, "typing implicit: %s %s".format(tree, context.undetparamsString)) val implicitSearchContext = context.makeImplicit(reportAmbiguous) - val result = new ImplicitSearch(tree, pt, isView, implicitSearchContext, pos).bestImplicit + val search = new ImplicitSearch(tree, pt, isView, implicitSearchContext, pos) + pluginsNotifyImplicitSearch(search) + val result = search.bestImplicit + pluginsNotifyImplicitSearchResult(result) if (result.isFailure && saveAmbiguousDivergent && implicitSearchContext.reporter.hasErrors) implicitSearchContext.reporter.propagateImplicitTypeErrorsTo(context.reporter) @@ -362,7 +365,7 @@ trait Implicits { * (useful when we infer synthetic stuff and pass EmptyTree in the `tree` argument) * If it's set to NoPosition, then position-based services will use `tree.pos` */ - class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context, pos0: Position = NoPosition) extends Typer(context0) with ImplicitsContextErrors { + class ImplicitSearch(val tree: Tree, val pt: Type, val isView: Boolean, val context0: Context, val pos0: Position = NoPosition) extends Typer(context0) with ImplicitsContextErrors { val searchId = implicitSearchId() private def typingLog(what: String, msg: => String) = { if (printingOk(tree)) From 33478bdc9792ee13baa8208e326278695b1bd4e4 Mon Sep 17 00:00:00 2001 From: Tomas Mikula Date: Tue, 12 Sep 2017 01:05:21 +0200 Subject: [PATCH 1068/2793] Higher-kinded type variable unification. Can cause ambiguous implicits, so is under the compiler flag -Xsource:2.13 Fixes scala/bug#10185 Fixes scala/bug#10195 Fixes scala/bug#10197 Fixes scala/bug#10213 Fixes scala/bug#10238 Fixes scala/bug#10372 Presents an alternative fix to scala/bug#6895. 
--- .../mima-filters/2.12.0.forwards.excludes | 3 +- .../scala/reflect/internal/Types.scala | 2 +- .../internal/settings/MutableSettings.scala | 1 + .../reflect/internal/tpe/TypeComparers.scala | 29 ++- .../scala/reflect/runtime/Settings.scala | 1 + test/files/neg/hk-typevar-unification.check | 22 +++ test/files/neg/hk-typevar-unification.flags | 1 + test/files/neg/hk-typevar-unification.scala | 18 ++ test/files/pos/patmat-hk.flags | 1 + test/files/pos/patmat-hk.scala | 13 ++ test/files/pos/t10185.flags | 1 + test/files/pos/t10185.scala | 10 + test/files/pos/t10195.flags | 1 + test/files/pos/t10195.scala | 11 ++ test/files/pos/t10195b.flags | 1 + test/files/pos/t10195b.scala | 19 ++ test/files/pos/t10197.flags | 1 + test/files/pos/t10197.scala | 38 ++++ test/files/pos/t10213.flags | 1 + test/files/pos/t10213.scala | 53 ++++++ test/files/pos/t10238.flags | 1 + test/files/pos/t10238.scala | 36 ++++ test/files/pos/t10372.flags | 1 + test/files/pos/t10372.scala | 16 ++ test/files/pos/t6895b-2.flags | 1 + test/files/pos/t6895b-2.scala | 39 ++++ test/files/run/hk-typevar-unification.check | 8 + test/files/run/hk-typevar-unification.flags | 1 + test/files/run/hk-typevar-unification.scala | 83 +++++++++ .../scala/reflect/internal/TypesTest.scala | 176 +++++++++++++++++- .../tools/nsc/settings/SettingsTest.scala | 1 + 31 files changed, 585 insertions(+), 5 deletions(-) create mode 100644 test/files/neg/hk-typevar-unification.check create mode 100644 test/files/neg/hk-typevar-unification.flags create mode 100644 test/files/neg/hk-typevar-unification.scala create mode 100644 test/files/pos/patmat-hk.flags create mode 100644 test/files/pos/patmat-hk.scala create mode 100644 test/files/pos/t10185.flags create mode 100644 test/files/pos/t10185.scala create mode 100644 test/files/pos/t10195.flags create mode 100644 test/files/pos/t10195.scala create mode 100644 test/files/pos/t10195b.flags create mode 100644 test/files/pos/t10195b.scala create mode 100644 test/files/pos/t10197.flags create mode 100644 test/files/pos/t10197.scala create mode 100644 test/files/pos/t10213.flags create mode 100644 test/files/pos/t10213.scala create mode 100644 test/files/pos/t10238.flags create mode 100644 test/files/pos/t10238.scala create mode 100644 test/files/pos/t10372.flags create mode 100644 test/files/pos/t10372.scala create mode 100644 test/files/pos/t6895b-2.flags create mode 100644 test/files/pos/t6895b-2.scala create mode 100644 test/files/run/hk-typevar-unification.check create mode 100644 test/files/run/hk-typevar-unification.flags create mode 100644 test/files/run/hk-typevar-unification.scala diff --git a/src/reflect/mima-filters/2.12.0.forwards.excludes b/src/reflect/mima-filters/2.12.0.forwards.excludes index 0f4142213f96..d905f61dd561 100644 --- a/src/reflect/mima-filters/2.12.0.forwards.excludes +++ b/src/reflect/mima-filters/2.12.0.forwards.excludes @@ -13,4 +13,5 @@ ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.FileZipArchive$Laz ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.closeZipFile") ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.FileZipArchive$LeakyEntry") -ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.exists") \ No newline at end of file +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.exists") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.Settings.isScala213") diff --git 
a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index a4413e0d479b..80a268925390 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -3325,7 +3325,7 @@ trait Types ) override def etaExpand: Type = ( if (!isHigherKinded) this - else logResult("Normalizing HK $this")(typeFun(params, applyArgs(params map (_.typeConstructor)))) + else logResult(s"Normalizing HK $this")(typeFun(params, applyArgs(params map (_.typeConstructor)))) ) override def typeSymbol = origin.typeSymbol diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index ab933ae61709..955c083295aa 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -61,6 +61,7 @@ abstract class MutableSettings extends AbsSettings { def isScala211: Boolean def isScala212: Boolean + private[scala] def isScala213: Boolean } object MutableSettings { diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index 37d05c08a7c2..de4ca640590b 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -365,7 +365,32 @@ trait TypeComparers { // @assume tp1.isHigherKinded || tp2.isHigherKinded def isHKSubType(tp1: Type, tp2: Type, depth: Depth): Boolean = { - def isSub(ntp1: Type, ntp2: Type) = (ntp1.withoutAnnotations, ntp2.withoutAnnotations) match { + + def isSubHKTypeVar(tp1: Type, tp2: Type) = (tp1, tp2) match { + case (tv1 @ TypeVar(_, _), tv2 @ TypeVar(_, _)) => + reporter.warning(tv1.typeSymbol.pos, + sm"""|compiler bug: Unexpected code path: testing two type variables for subtype relation: + | ${tv1} <:< ${tv2} + |Please report bug at https://github.com/scala/bug/issues + """.trim) + false + case (tp1, tv2 @ TypeVar(_, _)) => + val ntp1 = tp1.normalize + (tv2.params corresponds ntp1.typeParams)(methodHigherOrderTypeParamsSubVariance) && + { tv2.addLoBound(ntp1); true } + case (tv1 @ TypeVar(_, _), tp2) => + val ntp2 = tp2.normalize + (ntp2.typeParams corresponds tv1.params)(methodHigherOrderTypeParamsSubVariance) && + { tv1.addHiBound(ntp2); true } + case _ => + false + } + + def isSub(tp1: Type, tp2: Type) = + settings.isScala213 && isSubHKTypeVar(tp1, tp2) || + isSub2(tp1.normalize, tp2.normalize) // @M! normalize reduces higher-kinded case to PolyType's + + def isSub2(ntp1: Type, ntp2: Type) = (ntp1, ntp2) match { case (TypeRef(_, AnyClass, _), _) => false // avoid some warnings when Nothing/Any are on the other side case (_, TypeRef(_, NothingClass, _)) => false case (pt1: PolyType, pt2: PolyType) => isPolySubType(pt1, pt2) // @assume both .isHigherKinded (both normalized to PolyType) @@ -381,7 +406,7 @@ trait TypeComparers { || (if (isNoArgStaticClassTypeRef(tp1) && isNoArgStaticClassTypeRef(tp2)) tp1.typeSymbolDirect.isNonBottomSubClass(tp2.typeSymbolDirect) // OPT faster than comparing eta-expanded types else - isSub(tp1.normalize, tp2.normalize) && annotationsConform(tp1, tp2) // @M! 
normalize reduces higher-kinded case to PolyType's + isSub(tp1.withoutAnnotations, tp2.withoutAnnotations) && annotationsConform(tp1, tp2) ) ) } diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala index 2d8bacd3b2e0..6b129f6ec51a 100644 --- a/src/reflect/scala/reflect/runtime/Settings.scala +++ b/src/reflect/scala/reflect/runtime/Settings.scala @@ -54,4 +54,5 @@ private[reflect] class Settings extends MutableSettings { val maxClassfileName = new IntSetting(255) def isScala211 = true def isScala212 = true + private[scala] def isScala213 = false } diff --git a/test/files/neg/hk-typevar-unification.check b/test/files/neg/hk-typevar-unification.check new file mode 100644 index 000000000000..96dfedda4eaf --- /dev/null +++ b/test/files/neg/hk-typevar-unification.check @@ -0,0 +1,22 @@ +hk-typevar-unification.scala:14: error: inferred kinds of the type arguments ([_ <: B]Foo[_]) do not conform to the expected kinds of the type parameters (type F). +[_ <: B]Foo[_]'s type parameters do not match type F's expected parameters: +type _ (in class Foo)'s bounds <: B are stricter than type _'s declared bounds >: Nothing <: Any + f(tcFoo) + ^ +hk-typevar-unification.scala:14: error: type mismatch; + found : TC[Foo] + required: TC[F] + f(tcFoo) + ^ +hk-typevar-unification.scala:17: error: inferred kinds of the type arguments ([_ <: B]Foo[_]) do not conform to the expected kinds of the type parameters (type F). +[_ <: B]Foo[_]'s type parameters do not match type F's expected parameters: +type _ (in class Foo) is invariant, but type _ is declared covariant +type _ (in class Foo)'s bounds <: B are stricter than type _'s declared bounds >: Nothing <: Any + g(tcFoo) + ^ +hk-typevar-unification.scala:17: error: type mismatch; + found : TC[Foo] + required: TC[F] + g(tcFoo) + ^ +four errors found diff --git a/test/files/neg/hk-typevar-unification.flags b/test/files/neg/hk-typevar-unification.flags new file mode 100644 index 000000000000..714bbf5125f3 --- /dev/null +++ b/test/files/neg/hk-typevar-unification.flags @@ -0,0 +1 @@ +-Xsource:2.13 diff --git a/test/files/neg/hk-typevar-unification.scala b/test/files/neg/hk-typevar-unification.scala new file mode 100644 index 000000000000..abc22db48924 --- /dev/null +++ b/test/files/neg/hk-typevar-unification.scala @@ -0,0 +1,18 @@ +class A +class B +trait TC[F[_ <: A]] +class Foo[_ <: B] + +object Test { + + def f[F[ _]](tc: TC[F]): Unit = () + def g[F[+_]](tc: TC[F]): Unit = () + + val tcFoo: TC[Foo] = new TC[Foo] {} + + // incompatible bounds + f(tcFoo) + + // incompatible variance + g(tcFoo) +} diff --git a/test/files/pos/patmat-hk.flags b/test/files/pos/patmat-hk.flags new file mode 100644 index 000000000000..714bbf5125f3 --- /dev/null +++ b/test/files/pos/patmat-hk.flags @@ -0,0 +1 @@ +-Xsource:2.13 diff --git a/test/files/pos/patmat-hk.scala b/test/files/pos/patmat-hk.scala new file mode 100644 index 000000000000..701a9e7aaf25 --- /dev/null +++ b/test/files/pos/patmat-hk.scala @@ -0,0 +1,13 @@ +case class Foo[F[_]]() + +case class APair[F[_], G[_], A](f: F[A], g: G[A]) + +object Test { + Foo[({ type L[a] = (a, Int) })#L]() match { + case Foo() => () + } + + APair[({ type L[a] = (Boolean, a) })#L, ({ type L[a] = a => Int })#L, String]((true, "two"), _.length) match { + case APair((b, s), f) => () + } +} diff --git a/test/files/pos/t10185.flags b/test/files/pos/t10185.flags new file mode 100644 index 000000000000..714bbf5125f3 --- /dev/null +++ b/test/files/pos/t10185.flags @@ -0,0 +1 @@ +-Xsource:2.13 
diff --git a/test/files/pos/t10185.scala b/test/files/pos/t10185.scala new file mode 100644 index 000000000000..28bc78a72068 --- /dev/null +++ b/test/files/pos/t10185.scala @@ -0,0 +1,10 @@ +sealed trait Foo[A, F[_ <: A]] +case class Bar[A, F[_ <: A]]() extends Foo[A, F] + +class F[S <: String] + +object Test { + def f(foo: Foo[String, F]): Unit = foo match { + case Bar() => () + } +} diff --git a/test/files/pos/t10195.flags b/test/files/pos/t10195.flags new file mode 100644 index 000000000000..714bbf5125f3 --- /dev/null +++ b/test/files/pos/t10195.flags @@ -0,0 +1 @@ +-Xsource:2.13 diff --git a/test/files/pos/t10195.scala b/test/files/pos/t10195.scala new file mode 100644 index 000000000000..c0682c4c1d69 --- /dev/null +++ b/test/files/pos/t10195.scala @@ -0,0 +1,11 @@ +sealed trait Foo[F[_]] +case class Bar[F[_]]() extends Foo[F] + +object Test { + + val foo: Foo[({ type Out[X] = String })#Out] = ??? + + foo match { + case Bar() => + } +} diff --git a/test/files/pos/t10195b.flags b/test/files/pos/t10195b.flags new file mode 100644 index 000000000000..714bbf5125f3 --- /dev/null +++ b/test/files/pos/t10195b.flags @@ -0,0 +1 @@ +-Xsource:2.13 diff --git a/test/files/pos/t10195b.scala b/test/files/pos/t10195b.scala new file mode 100644 index 000000000000..9c1eaeb8a70a --- /dev/null +++ b/test/files/pos/t10195b.scala @@ -0,0 +1,19 @@ +sealed trait Foo[F[_]] +case class Bar[F[_]]() extends Foo[F] + +trait TC[A, B] { + type F[X] = B +} + +object TC { + implicit val intInstance: TC[Int, String] = + new TC[Int, String] {} + + implicit class Ops[A, B](a: A)(implicit val tc: TC[A, B]) { + def getFoo: Foo[tc.F] = ??? + } + + 1.getFoo match { + case Bar() => + } +} diff --git a/test/files/pos/t10197.flags b/test/files/pos/t10197.flags new file mode 100644 index 000000000000..714bbf5125f3 --- /dev/null +++ b/test/files/pos/t10197.flags @@ -0,0 +1 @@ +-Xsource:2.13 diff --git a/test/files/pos/t10197.scala b/test/files/pos/t10197.scala new file mode 100644 index 000000000000..54d7d6db20b7 --- /dev/null +++ b/test/files/pos/t10197.scala @@ -0,0 +1,38 @@ +import scala.language.higherKinds + +final case class Getter[S, A](get: S => A) + +final case class Wrap[F[_], A](value: F[A]) + +object Wrap { + // Helper to defer specifying second argument to Wrap. + // Basically a type lambda specialized for Wrap. 
+ // Wr[F]#ap[A] =:= Wrap[F, A] + type Wr[F[_]] = { type ap[A] = Wrap[F, A] } + + implicit def unwrapper[F[_], A]: Getter[Wrap[F, A], F[A]] = + Getter(w => w.value) +} + +object Test { + import Wrap._ + + type Foo[A] = List[A] + type Bar[A] = String + + type WrapFoo1[A] = Wrap[Foo, A] + type WrapBar1[A] = Wrap[Bar, A] + + implicitly[Getter[WrapFoo1[Int], Foo[Int]]] + implicitly[Getter[WrapBar1[Int], Bar[Int]]] + + type WrapFoo2[A] = Wr[Foo]#ap[A] + type WrapBar2[A] = Wr[Bar]#ap[A] + + // here's evidence that the new types are the same as the old ones + implicitly[WrapFoo2[Int] =:= WrapFoo1[Int]] + implicitly[WrapBar2[Int] =:= WrapBar1[Int]] + + implicitly[Getter[WrapFoo2[Int], Foo[Int]]] + implicitly[Getter[WrapBar2[Int], Bar[Int]]] +} diff --git a/test/files/pos/t10213.flags b/test/files/pos/t10213.flags new file mode 100644 index 000000000000..714bbf5125f3 --- /dev/null +++ b/test/files/pos/t10213.flags @@ -0,0 +1 @@ +-Xsource:2.13 diff --git a/test/files/pos/t10213.scala b/test/files/pos/t10213.scala new file mode 100644 index 000000000000..7f5a44197eef --- /dev/null +++ b/test/files/pos/t10213.scala @@ -0,0 +1,53 @@ +import scala.language.higherKinds + +final case class Coproduct[F[_], G[_], A](run: Either[F[A], G[A]]) + +object Coproduct { + + sealed trait Builder { + type Out[_] + } + + sealed trait :++:[F[_], G[_]] extends Builder { + type Out[A] = Coproduct[F, G, A] + } + + sealed trait :+:[F[_], B <: Builder] extends Builder { + type Out[A] = Coproduct[F, B#Out, A] + } +} + +trait Inject[F[_], H[_]] { + def inj[A](fa: F[A]): H[A] +} + +object Inject { + import Coproduct._ + + implicit def reflexiveInject[F[_]]: Inject[F, F] = + new Inject[F, F] { + def inj[A](fa: F[A]): F[A] = fa + } + + implicit def injectLeft[F[_], G[_]]: Inject[F, (F :++: G)#Out] = + new Inject[F, (F :++: G)#Out] { + def inj[A](fa: F[A]): Coproduct[F, G, A] = Coproduct(Left(fa)) + } + + implicit def injectRight[F[_], G[_], H[_]](implicit I: Inject[F, H]): Inject[F, (G :++: H)#Out] = + new Inject[F, (G :++: H)#Out] { + def inj[A](fa: F[A]): Coproduct[G, H , A] = Coproduct(Right(I.inj(fa))) + } +} + +object Test1 { + import Coproduct.{:++:, :+:} + + class Foo[A] + class Bar[A] + class Baz[A] + + implicitly[Inject[Baz, (Foo :+: Bar :++: Baz)#Out]] + + implicitly[Inject[Baz, ({ type Out[A] = Coproduct[Foo, ({ type Out1[a] = Coproduct[Bar, Baz, a] })#Out1, A] })#Out]] +} diff --git a/test/files/pos/t10238.flags b/test/files/pos/t10238.flags new file mode 100644 index 000000000000..714bbf5125f3 --- /dev/null +++ b/test/files/pos/t10238.flags @@ -0,0 +1 @@ +-Xsource:2.13 diff --git a/test/files/pos/t10238.scala b/test/files/pos/t10238.scala new file mode 100644 index 000000000000..4fa06af7b5cb --- /dev/null +++ b/test/files/pos/t10238.scala @@ -0,0 +1,36 @@ +object Test { + + // Data types + + type Id[A] = A + + class MaybeT[F[_], A] + + type Maybe[A] = MaybeT[Id, A] + + type MaybeMaybe[A] = MaybeT[Maybe, A] + + + // Typeclass + + trait Monad[F[_]] + + + // Instances + + implicit val monadId: Monad[Id] = ??? + + implicit def monadMaybeT[F[_]: Monad]: Monad[({ type λ[A] = MaybeT[F, A] })#λ] = ??? + + implicit val monadOption: Monad[Option] = ??? 
+ + + // Implicit search tests + + implicitly[Monad[Id]] + implicitly[Monad[({ type λ[A] = A })#λ]] + implicitly[Monad[Maybe]] + implicitly[Monad[({ type λ[A] = MaybeT[Id, A] })#λ]] + implicitly[Monad[MaybeMaybe]] + implicitly[Monad[({ type λ[A] = MaybeT[Maybe, A] })#λ]] +} diff --git a/test/files/pos/t10372.flags b/test/files/pos/t10372.flags new file mode 100644 index 000000000000..714bbf5125f3 --- /dev/null +++ b/test/files/pos/t10372.flags @@ -0,0 +1 @@ +-Xsource:2.13 diff --git a/test/files/pos/t10372.scala b/test/files/pos/t10372.scala new file mode 100644 index 000000000000..9923457ebc57 --- /dev/null +++ b/test/files/pos/t10372.scala @@ -0,0 +1,16 @@ +import scala.language.higherKinds +import scala.language.implicitConversions + +object Test { + class Expected[T, Func[_]] + implicit def conv[T, Func[_]](i : Int) : Expected[T, Func] = ??? + type FuncId[T] = T + + object DoesNotCompile { + class Bla { + type Alias[T] = Expected[T, FuncId] + def bla[T](expected : Alias[T]) : Unit = {} + } + (new Bla).bla(2) + } +} diff --git a/test/files/pos/t6895b-2.flags b/test/files/pos/t6895b-2.flags new file mode 100644 index 000000000000..714bbf5125f3 --- /dev/null +++ b/test/files/pos/t6895b-2.flags @@ -0,0 +1 @@ +-Xsource:2.13 diff --git a/test/files/pos/t6895b-2.scala b/test/files/pos/t6895b-2.scala new file mode 100644 index 000000000000..3be68cd3bf9a --- /dev/null +++ b/test/files/pos/t6895b-2.scala @@ -0,0 +1,39 @@ +trait Foo[F[_]] +trait Bar[F[_], A] + +trait Or[A, B] + +class Test { + implicit def orFoo[A]: Foo[({type L[X] = Or[A, X]})#L] = ??? + implicit def barFoo[F[_]](implicit f: Foo[F]): Foo[({type L[X] = Bar[F, X]})#L] = ??? + + // Now we can define a couple of type aliases: + type StringOr[X] = Or[String, X] + type BarStringOr[X] = Bar[StringOr, X] + + // ok + implicitly[Foo[BarStringOr]] + barFoo[StringOr](null) : Foo[BarStringOr] + barFoo(null) : Foo[BarStringOr] + + // nok + implicitly[Foo[({type L[X] = Bar[StringOr, X]})#L]] + // Let's write the application explicitly, and then + // compile with just this line enabled and -explaintypes. + barFoo(null) : Foo[({type L[X] = Bar[StringOr, X]})#L] + + // Foo[[X]Bar[F,X]] <: Foo[[X]Bar[[X]Or[String,X],X]]? + // Bar[[X]Or[String,X],X] <: Bar[F,X]? + // F[_] <: Or[String,_]? + // false + // false + // false + + // Note that the type annotation above is typechecked as + // Foo[[X]Bar[[X]Or[String,X],X]], ie the type alias `L` + // is eta expanded. + // + // This is done so that it does not escape its defining scope. 
+ // However, one this is done, higher kinded inference + // no longer is able to unify F with `StringOr` (scala/bug#2712) +} diff --git a/test/files/run/hk-typevar-unification.check b/test/files/run/hk-typevar-unification.check new file mode 100644 index 000000000000..3b7cea967342 --- /dev/null +++ b/test/files/run/hk-typevar-unification.check @@ -0,0 +1,8 @@ +Some(1) +Some(1) +Some((hi,5)) +Some((hi,5)) +Some(X) +Some(X) +Some(X) +Some(X) diff --git a/test/files/run/hk-typevar-unification.flags b/test/files/run/hk-typevar-unification.flags new file mode 100644 index 000000000000..714bbf5125f3 --- /dev/null +++ b/test/files/run/hk-typevar-unification.flags @@ -0,0 +1 @@ +-Xsource:2.13 diff --git a/test/files/run/hk-typevar-unification.scala b/test/files/run/hk-typevar-unification.scala new file mode 100644 index 000000000000..a8d895f2f948 --- /dev/null +++ b/test/files/run/hk-typevar-unification.scala @@ -0,0 +1,83 @@ +import scala.language.higherKinds + +trait Forall[F[_]] { + def instantiate[A]: F[A] +} + +object Forall { + implicit class Ops[F[_]](f: Forall[F]) { + def apply[A]: F[A] = f.instantiate[A] + } +} + +trait Forall2[F[_, _]] { + def instantiate[A, B]: F[A, B] +} + +object Forall2 { + implicit class Ops[F[_, _]](f: Forall2[F]) { + def apply[A, B]: F[A, B] = f.instantiate[A, B] + } +} + +trait FlatMap[F[_]] { + def flatMap[A, B](fa: F[A])(f: A => F[B]): F[B] +} + +object FlatMap { + implicit val optionInstance: FlatMap[Option] = new FlatMap[Option] { + def flatMap[A, B](fa: Option[A])(f: A => Option[B]) = fa.flatMap(f) + } +} + +object Test extends App { + + // natural transformation + type ~>[F[_], G[_]] = Forall[({ type L[A] = F[A] => G[A] })#L] + + // binatural transformation + type ~~>[F[_, _], G[_, _]] = Forall2[({ type L[A, B] = F[A, B] => G[A, B] })#L] + + + type RightAction[G[_], F[_, _]] = Forall2[({ type L[A, B] = (G[A], F[A, B]) => G[B] })#L] + type LeftAction[G[_], F[_, _]] = Forall2[({ type L[A, B] = (F[A, B], G[B]) => G[A] })#L] + + + val headOpt = new (List ~> Option) { + def instantiate[A]: List[A] => Option[A] = _.headOption + } + + // tests that implicit Forall.Ops is found + println(headOpt.apply(List(1, 2, 3))) + println(headOpt[Int](List(1, 2, 3))) + + val someEntry = new (Map ~~> ({ type L[K, V] = Option[(K, V)] })#L) { + def instantiate[K, V]: Map[K, V] => Option[(K, V)] = _.headOption + } + + // tests that implicit Forall2.Ops is found + println(someEntry.apply(Map(("hi", 5)))) + println(someEntry[String, Int](Map(("hi", 5)))) + + def kleisliPostCompose[F[_], Z](implicit F: FlatMap[F]) = + new RightAction[({ type L[A] = Z => F[A] })#L, ({ type L[A, B] = A => F[B] })#L] { + def instantiate[A, B]: (Z => F[A], A => F[B]) => (Z => F[B]) = (f, g) => (z => F.flatMap(f(z))(g)) + } + + def kleisliPreCompose[F[_], C](implicit F: FlatMap[F]) = + new LeftAction[({ type L[B] = B => F[C] })#L, ({ type L[A, B] = A => F[B] })#L] { + def instantiate[A, B]: (A => F[B], B => F[C]) => (A => F[C]) = (f, g) => (a => F.flatMap(f(a))(g)) + } + + def parseInt(s: String): Option[Int] = Some(42) + def toChar(i: Int): Option[Char] = Some('X') + + val ra = kleisliPostCompose[Option, String] + val la = kleisliPreCompose[Option, Char] + + // tests that implicit Forall2.Ops is found + println( ra.apply(parseInt(_), toChar(_)).apply("") ) + println( ra[Int, Char](parseInt(_), toChar(_))("") ) + println( la.apply(parseInt(_), toChar(_))("") ) + println( la[String, Int](parseInt(_), toChar(_))("") ) +} diff --git a/test/junit/scala/reflect/internal/TypesTest.scala 
b/test/junit/scala/reflect/internal/TypesTest.scala index d02422c8a4e9..e70a5badc6ce 100644 --- a/test/junit/scala/reflect/internal/TypesTest.scala +++ b/test/junit/scala/reflect/internal/TypesTest.scala @@ -1,10 +1,11 @@ package scala.reflect.internal import org.junit.Assert._ -import org.junit.{Assert, Test} +import org.junit.{After, Assert, Before, Test} import org.junit.runner.RunWith import org.junit.runners.JUnit4 import scala.collection.mutable +import scala.tools.nsc.settings.ScalaVersion import scala.tools.nsc.symtab.SymbolTableForUnitTesting @RunWith(classOf[JUnit4]) @@ -139,4 +140,177 @@ class TypesTest { assert(ts.forall(_ <:< merged2)) assert(merged1 =:= merged2) } + + + + class Foo[A] + class Bar[+T, A] + class Baz { + def f[F[_]] = () + def g[G[_, _]] = () + } + + var storedXsource: ScalaVersion = null + @Before + def storeXsource: Unit = { + storedXsource = settings.source.value + } + @After + def restoreXsource: Unit = { + settings.source.value = storedXsource + } + + @Test + def testHigherKindedTypeVarUnification(): Unit = { + import rootMirror.EmptyPackageClass + import Flags._ + + val FooTpe = typeOf[Foo[Int]] match { + case TypeRef(pre, sym, _) => + sym.typeParams // doing it for the side effect + TypeRef(pre, sym, Nil) + } + val BarTpe = typeOf[Bar[Int, Int]] match { + case TypeRef(pre, sym, _) => + sym.typeParams // doing it for the side effect + TypeRef(pre, sym, Nil) + } + + // apply Foo to type arugment A + def Foo(A: Type) = FooTpe match { + case TypeRef(pre, sym, Nil) => TypeRef(pre, sym, A :: Nil) + } + + // apply Bar to type arguments A, B + def Bar(A: Type, B: Type) = BarTpe match { + case TypeRef(pre, sym, Nil) => TypeRef(pre, sym, A :: B :: Nil) + } + + val F0 = typeOf[Baz].member(TermName("f")).typeSignature.typeParams.head + val G0 = typeOf[Baz].member(TermName("g")).typeSignature.typeParams.head + + // since TypeVars are mutable, we will be creating fresh ones + def F() = TypeVar(F0) + def G() = TypeVar(G0) + + def polyType(f: TypeVar => Type, flags: Long = 0L): Type = { + val A = EmptyPackageClass.newTypeParameter(newTypeName("A"), newFlags = flags) + A.setInfo(TypeBounds.empty) + val A_ = TypeVar(A) + PolyType(A :: Nil, f(A_)) + } + + def coPolyType(f: TypeVar => Type): Type = + polyType(f, COVARIANT) + + def polyType2(f: (TypeVar, TypeVar) => Type): Type = { + val A = EmptyPackageClass.newTypeParameter(newTypeName("A")) + val B = EmptyPackageClass.newTypeParameter(newTypeName("B")) + A.setInfo(TypeBounds.empty) + B.setInfo(TypeBounds.empty) + val A_ = TypeVar(A) + val B_ = TypeVar(B) + PolyType(A :: B :: Nil, f(A_, B_)) + } + + val Any = typeOf[Any] + val Int = typeOf[Int] + + settings.source.value = ScalaVersion("2.13") + + // test that ?F unifies with Foo + assert(F() <:< FooTpe) + assert(FooTpe <:< F()) + assert(F() =:= FooTpe) + assert(FooTpe =:= F) + + // test that ?F unifies with [A]Foo[A] + assert(F() <:< polyType(A => Foo(A))) + assert(polyType(A => Foo(A)) <:< F()) + assert(F() =:= polyType(A => Foo(A))) + assert(polyType(A => Foo(A)) =:= F()) + + // test that ?F unifies with [A]Bar[Int, A] + assert(F() <:< polyType(A => Bar(Int, A))) + assert(polyType(A => Bar(Int, A)) <:< F()) + assert(F() =:= polyType(A => Bar(Int, A))) + assert(polyType(A => Bar(Int, A)) =:= F()) + + // test that ?F unifies with [A]Bar[A, Int] + assert(F() <:< polyType(A => Bar(A, Int))) + assert(polyType(A => Bar(A, Int)) <:< F()) + assert(F() =:= polyType(A => Bar(A, Int))) + assert(polyType(A => Bar(A, Int)) =:= F()) + + // test that ?F unifies with [+A]Bar[A, Int] + 
assert(F() <:< coPolyType(A => Bar(A, Int))) + assert(coPolyType(A => Bar(A, Int)) <:< F()) + assert(F() =:= coPolyType(A => Bar(A, Int))) + assert(coPolyType(A => Bar(A, Int)) =:= F()) + + // test that ?F unifies with [A]Foo[Foo[A]] + assert(F() <:< polyType(A => Foo(Foo(A)))) + assert(polyType(A => Foo(Foo(A))) <:< F()) + assert(F() =:= polyType(A => Foo(Foo(A)))) + assert(polyType(A => Foo(Foo(A))) =:= F()) + + // test that ?F unifies with [A]Foo[Bar[A, A]] + assert(F() <:< polyType(A => Foo(Bar(A, A)))) + assert(polyType(A => Foo(Bar(A, A))) <:< F()) + assert(F() =:= polyType(A => Foo(Bar(A, A)))) + assert(polyType(A => Foo(Bar(A, A))) =:= F()) + + // test that ?F unifies with [A]Bar[Foo[A], Foo[A]] + assert(F() <:< polyType(A => Bar(Foo(A), Foo(A)))) + assert(polyType(A => Bar(Foo(A), Foo(A))) <:< F()) + assert(F() =:= polyType(A => Bar(Foo(A), Foo(A)))) + assert(polyType(A => Bar(Foo(A), Foo(A))) =:= F()) + + // test that ?F unifies with [A]A + assert(F() <:< polyType(A => A)) + assert(polyType(A => A) <:< F()) + assert(F() =:= polyType(A => A)) + assert(polyType(A => A) =:= F()) + + // test that ?F unifies with [A]Int + assert(F() <:< polyType(A => Int)) + assert(polyType(A => Int) <:< F()) + assert(F() =:= polyType(A => Int)) + assert(polyType(A => Int) =:= F()) + + // test that ?F unifies with [A]Foo[Int] + assert(F() <:< polyType(A => Foo(Int))) + assert(polyType(A => Foo(Int)) <:< F()) + assert(F() =:= polyType(A => Foo(Int))) + assert(polyType(A => Foo(Int)) =:= F()) + + // test that ?G unifies with Bar + assert(G() <:< BarTpe) + assert(BarTpe <:< G()) + assert(G() =:= BarTpe) + assert(BarTpe =:= G()) + + // test that ?G unifies with [A, B]Bar[A, B] + assert(G() <:< polyType2((A, B) => Bar(A, B))) + assert(polyType2((A, B) => Bar(A, B)) <:< G()) + assert(G() =:= polyType2((A, B) => Bar(A, B))) + assert(polyType2((A, B) => Bar(A, B)) =:= G()) + + // test that ?G unifies with [A, B]Bar[B, A] + assert(G() <:< polyType2((A, B) => Bar(B, A))) + assert(polyType2((B, A) => Bar(A, B)) <:< G()) + assert(G() =:= polyType2((A, B) => Bar(B, A))) + assert(polyType2((B, A) => Bar(A, B)) =:= G()) + + // test that ?G unifies with [A, B]Bar[Bar[B, A], A] + assert(G() <:< polyType2((A, B) => Bar(Bar(B, A), A))) + assert(polyType2((A, B) => Bar(Bar(B, A), A)) <:< G()) + assert(G() =:= polyType2((A, B) => Bar(Bar(B, A), A))) + assert(polyType2((A, B) => Bar(Bar(B, A), A)) =:= G()) + + // test that [A]Bar[Int, A] <:< ?F <:< [A]Bar[Any, A] + F() match { case _F => + assert(polyType(A => Bar(Int, A)) <:< _F && _F <:< polyType(A => Bar(Any, A))) + } + } } diff --git a/test/junit/scala/tools/nsc/settings/SettingsTest.scala b/test/junit/scala/tools/nsc/settings/SettingsTest.scala index 24bfb3dcde5d..6a568012a653 100644 --- a/test/junit/scala/tools/nsc/settings/SettingsTest.scala +++ b/test/junit/scala/tools/nsc/settings/SettingsTest.scala @@ -176,6 +176,7 @@ class SettingsTest { check(expected = "2.11.0", "-Xsource:2.11") check(expected = "2.10", "-Xsource:2.10.0") check(expected = "2.12", "-Xsource:2.12") + check(expected = "2.13", "-Xsource:2.13") assertThrows[IllegalArgumentException](check(expected = "2.11", "-Xsource"), _ == "-Xsource requires an argument, the syntax is -Xsource:") assertThrows[IllegalArgumentException](check(expected = "2.11", "-Xsource", "2.11"), _ == "-Xsource requires an argument, the syntax is -Xsource:") assertThrows[IllegalArgumentException](check(expected = "2.11", "-Xsource:2.invalid"), _ contains "Bad version (2.invalid)") From 62e221a3caddd4bb2761514304b227d232e807f7 Mon 
Sep 17 00:00:00 2001 From: jvican Date: Fri, 29 Sep 2017 14:15:06 +0200 Subject: [PATCH 1069/2793] Don't display hot counters if hot statistics are disabled This is purely a cosmetic change to make sure that these counters, which will always be zero or empty, are not displayed to users in case hot statistics are disabled. --- src/compiler/scala/tools/nsc/Global.scala | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 359477460ab6..f7f7bd448ef1 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1583,9 +1583,12 @@ class Global(var currentSettings: Settings, var reporter: Reporter) if (!pclazz.isRoot) resetPackageClass(pclazz.owner) } + private val hotCounters = + List(statistics.retainedCount, statistics.retainedByType, statistics.nodeByType) private val parserStats = { - import statistics._ - Seq(treeNodeCount, nodeByType, retainedCount, retainedByType) + import statistics.treeNodeCount + if (settings.YhotStatisticsEnabled) treeNodeCount :: hotCounters + else List(treeNodeCount) } final def printStatisticsFor(phase: Phase) = { @@ -1602,7 +1605,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } } - val quants = if (phase.name == "parser") parserStats else statistics.allQuantities + val quants: Iterable[statistics.Quantity] = + if (phase.name == "parser") parserStats + else if (settings.YhotStatisticsEnabled) statistics.allQuantities + else statistics.allQuantities.filterNot(q => hotCounters.contains(q)) for (q <- quants if q.showAt(phase.name)) inform(q.line) } } // class Run From f357e9a69aea259bac3054102ada1451b41a327d Mon Sep 17 00:00:00 2001 From: jvican Date: Fri, 29 Sep 2017 15:22:16 +0200 Subject: [PATCH 1070/2793] Change name and description of `-Yhot-statistics-enabled` This is a pure UI change that makes it clear that `-Yhot-statistics-enabled` is a complement of `-Ystatistics`, not a replacement. Therefore, users need to use it together with `-Ystatistics`, otherwise `-Yhot-statistics-enabled` won't do anything. --- src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index e687476a7ef6..de79ac93152b 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -384,7 +384,7 @@ trait ScalaSettings extends AbsScalaSettings } override def YstatisticsEnabled = Ystatistics.value.nonEmpty - val YhotStatistics = BooleanSetting("-Yhot-statistics", "Print hot compiler statistics for all relevant phases") + val YhotStatistics = BooleanSetting("-Yhot-statistics-enabled", s"Enable `${Ystatistics.name}` to print hot statistics.") override def YhotStatisticsEnabled = YhotStatistics.value val YprofileEnabled = BooleanSetting("-Yprofile-enabled", "Enable profiling.") From 8fae21b96b3d1098f48dbc73a46fe5f33b7d690a Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 28 Sep 2017 16:01:19 -0700 Subject: [PATCH 1071/2793] Return in PartialFun relative to right enclosing method Where the right enclosing method is the lexically enclosing one, in the code as written by the user, not the one the body actually ends up being type checked in (`applyOrElse`). 
Since the current owner impacts more than just type checking `Return` trees, we adjust the currently logically enclosing method as we do for lazy getters (that adjustment happens in Context, but retronym pointed out we can do it closer to where it's needed in this case -- TODO: can we do the same for lazy vals). --- .../scala/tools/nsc/typechecker/Contexts.scala | 7 ++++++- .../scala/tools/nsc/typechecker/Typers.scala | 13 ++++++++++--- test/files/run/t10291.scala | 8 ++++++++ 3 files changed, 24 insertions(+), 4 deletions(-) create mode 100644 test/files/run/t10291.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 9c93ad2a1ed0..b0f66d185cca 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -214,7 +214,12 @@ trait Contexts { self: Analyzer => /** Is this context in all modes in the given `mask`? */ def apply(mask: ContextMode): Boolean = contextMode.inAll(mask) - /** The next outer context whose tree is a method */ + /** The next (logical) outer context whose tree is a method. + * + * NOTE: this is the "logical" enclosing method, which may not be the actual enclosing method when we + * synthesize a nested method, such as for lazy val getters (scala/bug#8245) or the methods that + * implement a PartialFunction literal (scala/bug#10291). + */ var enclMethod: Context = _ /** Variance relative to enclosing class */ diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index e076f19f6bdc..6e1a118c974d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2649,6 +2649,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper CaseDef(Bind(nme.DEFAULT_CASE, Ident(nme.WILDCARD)), body) } + def synthMethodTyper(methodSym: MethodSymbol) = { + val ctx = context.makeNewScope(context.tree, methodSym) + // scala/bug#10291 make sure `Return`s are linked to the original enclosing method, not the one we're synthesizing + ctx.enclMethod = context.enclMethod + newTyper(ctx) + } + // `def applyOrElse[A1 <: $argTp, B1 >: $matchResTp](x: A1, default: A1 => B1): B1 = // ${`$selector match { $cases; case default$ => default(x) }` def applyOrElseMethodDef = { @@ -2665,7 +2672,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val paramSyms = List(x, default) methodSym setInfo genPolyType(List(A1, B1), MethodType(paramSyms, B1.tpe)) - val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) + val methodBodyTyper = synthMethodTyper(methodSym) if (!paramSynthetic) methodBodyTyper.context.scope enter x // First, type without the default case; only the cases provided @@ -2745,7 +2752,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val methodSym = anonClass.newMethod(nme.isDefinedAt, tree.pos.makeTransparent, FINAL) val paramSym = mkParam(methodSym) - val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it) + val methodBodyTyper = synthMethodTyper(methodSym) // should use the DefDef for the context's tree, but it doesn't exist yet (we need the typer we're creating to create it) if (!paramSynthetic) methodBodyTyper.context.scope enter paramSym 
methodSym setInfo MethodType(List(paramSym), BooleanTpe) @@ -2763,7 +2770,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper methodSym setInfo MethodType(List(paramSym), AnyTpe) - val methodBodyTyper = newTyper(context.makeNewScope(context.tree, methodSym)) + val methodBodyTyper = synthMethodTyper(methodSym) if (!paramSynthetic) methodBodyTyper.context.scope enter paramSym val match_ = methodBodyTyper.typedMatch(selector(paramSym), cases, mode, resTp) diff --git a/test/files/run/t10291.scala b/test/files/run/t10291.scala new file mode 100644 index 000000000000..aaec772aeac5 --- /dev/null +++ b/test/files/run/t10291.scala @@ -0,0 +1,8 @@ +object Test { + def main(args: Array[String]): Unit = { + def partially: Any = List(1).collect { case _ => return "a" } + def totally: Any = List(1).map { case _ => return "a" } + assert( partially == "a" ) + assert( totally == "a" ) + } +} From 1c47b2ffad20b6f589255b3c935c8538fd714403 Mon Sep 17 00:00:00 2001 From: jvican Date: Fri, 29 Sep 2017 18:14:56 +0200 Subject: [PATCH 1072/2793] Avoid disabling statistics This commit stops disabling statistics, to keep the JVM from destabilizing all the time and to prevent misbehaviours in concurrent builds recycling the same classloader. The main problem here is that if one global has statistics enabled and the second one doesn't, our logic of `enabled` was setting the pseudo-static to false, preventing the correct recording of statistics in the global that does want to record them. Now the logic to disable these pseudo-statics when `statistics = false` (which is almost always the case) has been removed. TL;DR: Once a global enables statistics, the rest has to pay the price of a 2/3% performance hit. There is no going back. --- src/compiler/scala/tools/nsc/Global.scala | 4 ++-- .../scala/reflect/internal/util/Statistics.scala | 11 ++--------- 2 files changed, 4 insertions(+), 11 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index f7f7bd448ef1..f834ec985156 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1227,7 +1227,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) statistics.initFromSettings(settings) // Report the overhead of statistics measurements per every run - if (StatisticsStatics.areSomeColdStatsEnabled) + if (statistics.areStatisticsLocallyEnabled) statistics.reportStatisticsOverhead(reporter) phase = first //parserPhase @@ -1440,7 +1440,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) warnDeprecatedAndConflictingSettings() globalPhase = fromPhase - val timePhases = StatisticsStatics.areSomeColdStatsEnabled || settings.verbose + val timePhases = statistics.areStatisticsLocallyEnabled val startTotal = if (timePhases) statistics.startTimer(totalCompileTime) else null while (globalPhase.hasNext && !reporter.hasErrors) { diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index dd1cdc755b77..6e09bbbb5e2d 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -268,27 +268,20 @@ quant) if (cond && !enabled) { StatisticsStatics.enableColdStats() areColdStatsLocallyEnabled = true - } else if (!cond && enabled) { - StatisticsStatics.disableColdStats() - areColdStatsLocallyEnabled = false } } /** Represents whether hot statistics can or cannot be enabled.
*/ @inline final def hotEnabled: Boolean = enabled && areHotStatsLocallyEnabled def hotEnabled_=(cond: Boolean) = { - if (cond && enabled && !hotEnabled) { + if (cond && enabled && !areHotStatsLocallyEnabled) { StatisticsStatics.enableHotStats() areHotStatsLocallyEnabled = true - } else if (!cond && enabled && hotEnabled) { - StatisticsStatics.disableHotStats() - areHotStatsLocallyEnabled = false } } /** Tells whether statistics should be definitely reported to the user for this `Global` instance. */ - @inline final def areStatisticsLocallyEnabled: Boolean = - areColdStatsLocallyEnabled || areHotStatsLocallyEnabled + @inline final def areStatisticsLocallyEnabled: Boolean = areColdStatsLocallyEnabled import scala.reflect.internal.Reporter /** Reports the overhead of measuring statistics via the nanoseconds variation. */ From 6de63ebe265369258ddde909ee44535b99160dd5 Mon Sep 17 00:00:00 2001 From: jvican Date: Mon, 25 Sep 2017 15:46:51 +0200 Subject: [PATCH 1073/2793] Make `completingStack` global --- .../scala/reflect/internal/pickling/UnPickler.scala | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index f05dc8a39d08..2710bbca34b3 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -45,6 +45,12 @@ abstract class UnPickler { } } + /** Keep track of the symbols pending to be initialized. + * + * Useful for reporting on stub errors and cyclic errors. + */ + private val completingStack = new mutable.ArrayBuffer[Symbol](24) + class Scan(_bytes: Array[Byte], offset: Int, classRoot: ClassSymbol, moduleRoot: ModuleSymbol, filename: String) extends PickleBuffer(_bytes, offset, -1) { //println("unpickle " + classRoot + " and " + moduleRoot)//debug @@ -699,12 +705,6 @@ abstract class UnPickler { new TypeError(e.msg) } - /** Keep track of the symbols pending to be initialized. - * - * Useful for reporting on stub errors and cyclic errors. - */ - private var completingStack = new mutable.ArrayBuffer[Symbol](128) - /** A lazy type which when completed returns type at index `i`. 
*/ private class LazyTypeRef(i: Int) extends LazyType with FlagAgnosticCompleter { private val definedAtRunId = currentRunId From 28300088401fd3262b5ae1a0681cfdf94d78be27 Mon Sep 17 00:00:00 2001 From: Cong Zhao Date: Sat, 30 Sep 2017 07:08:02 +0800 Subject: [PATCH 1074/2793] Avoid repr call in ArrayOps --- .../scala/collection/mutable/ArrayOps.scala | 17 +++--- .../mutable/ArrayOpsBenchmark.scala | 53 +++++++++++++++++++ 2 files changed, 63 insertions(+), 7 deletions(-) create mode 100644 test/benchmarks/src/main/scala/scala/collection/mutable/ArrayOpsBenchmark.scala diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala index 0f83fd92c172..6c8f9815e3e1 100644 --- a/src/library/scala/collection/mutable/ArrayOps.scala +++ b/src/library/scala/collection/mutable/ArrayOps.scala @@ -42,12 +42,13 @@ sealed trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomPara } override def slice(from: Int, until: Int): Array[T] = { + val reprVal = repr val lo = math.max(from, 0) - val hi = math.min(math.max(until, 0), repr.length) + val hi = math.min(math.max(until, 0), reprVal.length) val size = math.max(hi - lo, 0) val result = java.lang.reflect.Array.newInstance(elementClass, size) if (size > 0) { - Array.copy(repr, lo, result, 0, size) + Array.copy(reprVal, lo, result, 0, size) } result.asInstanceOf[Array[T]] } @@ -61,16 +62,18 @@ sealed trait ArrayOps[T] extends Any with ArrayLike[T, Array[T]] with CustomPara } def :+[B >: T: ClassTag](elem: B): Array[B] = { - val result = Array.ofDim[B](repr.length + 1) - Array.copy(repr, 0, result, 0, repr.length) - result(repr.length) = elem + val currentLength = repr.length + val result = Array.ofDim[B](currentLength + 1) + Array.copy(repr, 0, result, 0, currentLength) + result(currentLength) = elem result } def +:[B >: T: ClassTag](elem: B): Array[B] = { - val result = Array.ofDim[B](repr.length + 1) + val currentLength = repr.length + val result = Array.ofDim[B](currentLength + 1) result(0) = elem - Array.copy(repr, 0, result, 1, repr.length) + Array.copy(repr, 0, result, 1, currentLength) result } diff --git a/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayOpsBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayOpsBenchmark.scala new file mode 100644 index 000000000000..fbc4a0c9596c --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/mutable/ArrayOpsBenchmark.scala @@ -0,0 +1,53 @@ +package scala.collection.mutable + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class ArrayOpsBenchmark { + @Param(Array("1000000")) + var size: Int = _ + + val integers = (1 to size).toList + val strings = integers.map(_.toString) + + @Benchmark def appendInteger(bh: Blackhole): Unit = { + var arr = Array.empty[Int] + integers foreach { i => + arr = arr.:+(i) + } + bh.consume(arr) + } + + @Benchmark def appendString(bh: Blackhole): Unit = { + var arr = Array.empty[String] + strings foreach { i => + arr = arr.:+(i) + } + bh.consume(arr) + } + + @Benchmark def insertInteger(bh: Blackhole): Unit = { + var arr = Array.empty[Int] + integers foreach { i => + arr = arr.+:(i) + } + bh.consume(arr) + } + + @Benchmark def insertString(bh: Blackhole): Unit = { + var arr = Array.empty[String] + strings 
foreach { i => + arr = arr.+:(i) + } + bh.consume(arr) + } +} From fbc8abbacb5c44ef781642731f49ab26ecf0b41b Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 29 Sep 2017 18:10:28 -0700 Subject: [PATCH 1075/2793] Revert "Add infer Product with Serializable linter flag" --- .../scala/tools/nsc/settings/Warnings.scala | 4 +- .../scala/tools/nsc/typechecker/Infer.scala | 38 ++++++------------- .../scala/tools/nsc/typechecker/Namers.scala | 19 +--------- .../scala/reflect/internal/Definitions.scala | 1 - .../reflect/runtime/JavaUniverseForce.scala | 1 - test/files/neg/warn-inferred-any.check | 14 +------ test/files/neg/warn-inferred-any.scala | 18 --------- test/files/neg/warn-inferred-pws.check | 15 -------- test/files/neg/warn-inferred-pws.flags | 1 - test/files/neg/warn-inferred-pws.scala | 28 -------------- 10 files changed, 14 insertions(+), 125 deletions(-) delete mode 100644 test/files/neg/warn-inferred-pws.check delete mode 100644 test/files/neg/warn-inferred-pws.flags delete mode 100644 test/files/neg/warn-inferred-pws.scala diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index 40e12988f61a..0ff46e21b622 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -87,8 +87,7 @@ trait Warnings { val NullaryUnit = LintWarning("nullary-unit", "Warn when nullary methods return Unit.", true) val Inaccessible = LintWarning("inaccessible", "Warn about inaccessible types in method signatures.", true) val NullaryOverride = LintWarning("nullary-override", "Warn when non-nullary `def f()' overrides nullary `def f'.", true) - val InferAny = LintWarning("infer-any", "Warn when a type argument, variable definition or method definition is inferred to be `Any`.", true) - val InferPwS = LintWarning("infer-pws", "Warn when a type argument, variable definition, or method definition is inferred to be `Product with Serializable`.") + val InferAny = LintWarning("infer-any", "Warn when a type argument is inferred to be `Any`.", true) val MissingInterpolator = LintWarning("missing-interpolator", "A string literal appears to be missing an interpolator id.") val DocDetached = LintWarning("doc-detached", "A Scaladoc comment appears to be detached from its element.") val PrivateShadow = LintWarning("private-shadow", "A private field (or class parameter) shadows a superclass field.") @@ -112,7 +111,6 @@ trait Warnings { def warnInaccessible = lint contains Inaccessible def warnNullaryOverride = lint contains NullaryOverride def warnInferAny = lint contains InferAny - def warnInferPwS = lint contains InferPwS def warnMissingInterpolator = lint contains MissingInterpolator def warnDocDetached = lint contains DocDetached def warnPrivateShadow = lint contains PrivateShadow diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 48776fe96073..e766b1544223 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -551,37 +551,21 @@ trait Infer extends Checkable { } } val targs = solvedTypes(tvars, tparams, tparams map varianceInTypes(formals), upper = false, lubDepth(formals) max lubDepth(argtpes)) - def warnIfInferred(warn: Type => Boolean) = { - if (context.reportErrors && !fn.isEmpty) { - targs.withFilter(warn).foreach { targ => - reporter.warning(fn.pos, s"a type was inferred to be `$targ`; this may indicate a programming error.") - 
} - } - } - def canWarnAbout(explicitlyTyped: List[Type] => Boolean): Boolean = { - val loBounds = tparams map (_.info.bounds.lo) - val hasExplicitType = pt :: restpe :: formals ::: argtpes ::: loBounds exists (tp => explicitlyTyped(tp.dealiasWidenChain)) - !hasExplicitType - } // Can warn about inferring Any/AnyVal as long as they don't appear // explicitly anywhere amongst the formal, argument, result, or expected type. // ...or lower bound of a type param, since they're asking for it. - def canWarnAboutAny = canWarnAbout(_ exists (t => (t contains AnyClass) || (t contains AnyValClass))) - if (settings.warnInferAny && canWarnAboutAny) { - warnIfInferred { - _.typeSymbol match { - case AnyClass | AnyValClass => true - case _ => false - } - } + def canWarnAboutAny = { + val loBounds = tparams map (_.info.bounds.lo) + def containsAny(t: Type) = (t contains AnyClass) || (t contains AnyValClass) + val hasAny = pt :: restpe :: formals ::: argtpes ::: loBounds exists (_.dealiasWidenChain exists containsAny) + !hasAny } - // Ditto for Product with Serializable - def canWarnAboutPwS = canWarnAbout(tps => (tps exists (_ contains ProductRootClass)) && (tps exists (_ contains SerializableClass))) - if (settings.warnInferPwS && canWarnAboutPwS) { - warnIfInferred { - case RefinedType(ProductRootTpe :: SerializableTpe :: _, scope) if scope.isEmpty => true - case _ => false - } + if (settings.warnInferAny && context.reportErrors && !fn.isEmpty && canWarnAboutAny) { + targs.foreach(_.typeSymbol match { + case sym @ (AnyClass | AnyValClass) => + reporter.warning(fn.pos, s"a type was inferred to be `${sym.name}`; this may indicate a programming error.") + case _ => + }) } adjustTypeArgs(tparams, tvars, targs, restpe) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index efca7816bb69..78c9d2964e69 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1079,24 +1079,7 @@ trait Namers extends MethodSynthesis { val defnTpe = widenIfNecessary(tree.symbol, rhsTpe, pt) tree.tpt defineType defnTpe setPos tree.pos.focus - val tpe = tree.tpt.tpe - // if enabled, validate that the now inferred val or def type isn't PwS - if (settings.warnInferPwS && context.reportErrors) { - tpe match { - case RefinedType(ProductRootTpe :: SerializableTpe :: _, scope) if scope.isEmpty => - reporter.warning(tree.pos, s"a type was inferred to be `$tpe`; this may indicate a programming error") - case _ => - } - } - // if enabled, validate the now inferred type isn't Any or AnyVal - if (settings.warnInferAny && context.reportErrors) { - tpe match { - case AnyTpe | AnyValTpe => - reporter.warning(tree.pos, s"a type was inferred to be `$tpe`; this may indicate a programming error") - case _ => - } - } - tpe + tree.tpt.tpe } // owner is the class with the self type diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index c54cf3a88073..eeff6776b85c 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -712,7 +712,6 @@ trait Definitions extends api.StandardDefinitions { def tupleComponents(tp: Type) = tp.dealiasWiden.typeArgs lazy val ProductRootClass: ClassSymbol = requiredClass[scala.Product] - lazy val ProductRootTpe: Type = ProductRootClass.tpe def Product_productArity = getMemberMethod(ProductRootClass, nme.productArity) def 
Product_productElement = getMemberMethod(ProductRootClass, nme.productElement) def Product_iterator = getMemberMethod(ProductRootClass, nme.productIterator) diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index ca74826d9fd4..e7e57d556c87 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -340,7 +340,6 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.AbstractFunctionClass definitions.MacroContextType definitions.ProductRootClass - definitions.ProductRootTpe definitions.Any_$eq$eq definitions.Any_$bang$eq definitions.Any_equals diff --git a/test/files/neg/warn-inferred-any.check b/test/files/neg/warn-inferred-any.check index 68bebcd09da5..2b321a83c99a 100644 --- a/test/files/neg/warn-inferred-any.check +++ b/test/files/neg/warn-inferred-any.check @@ -10,18 +10,6 @@ warn-inferred-any.scala:17: warning: a type was inferred to be `AnyVal`; this ma warn-inferred-any.scala:25: warning: a type was inferred to be `Any`; this may indicate a programming error. def za = f(1, "one") ^ -warn-inferred-any.scala:30: warning: a type was inferred to be `AnyVal`; this may indicate a programming error - def get(b: Boolean) = if (b) 42 else true // warn (AnyVal) - ^ -warn-inferred-any.scala:31: warning: a type was inferred to be `Any`; this may indicate a programming error - def got(b: Boolean) = if (b) 42 else "42" // warn (Any) - ^ -warn-inferred-any.scala:35: warning: a type was inferred to be `AnyVal`; this may indicate a programming error - val foo = if (true) 42 else false // warn (AnyVal) - ^ -warn-inferred-any.scala:36: warning: a type was inferred to be `Any`; this may indicate a programming error - val bar = if (true) 42 else "42" // warn (Any) - ^ error: No warnings can be incurred under -Xfatal-warnings. -8 warnings found +four warnings found one error found diff --git a/test/files/neg/warn-inferred-any.scala b/test/files/neg/warn-inferred-any.scala index 6ca6eb6200aa..693c33e7be06 100644 --- a/test/files/neg/warn-inferred-any.scala +++ b/test/files/neg/warn-inferred-any.scala @@ -25,21 +25,3 @@ trait Zs { def za = f(1, "one") def zu = g(1, "one") } - -trait DefAny { - def get(b: Boolean) = if (b) 42 else true // warn (AnyVal) - def got(b: Boolean) = if (b) 42 else "42" // warn (Any) -} - -trait ValAny { - val foo = if (true) 42 else false // warn (AnyVal) - val bar = if (true) 42 else "42" // warn (Any) -} - -// these should not warn due to explicit types -trait ExplicitAny { - def get(b: Boolean): AnyVal = if (b) 42 else true - def got(b: Boolean): Any = if (b) 42 else "42" - val foo: AnyVal = if (true) 42 else false - val bar: Any = if (true) 42 else "42" -} diff --git a/test/files/neg/warn-inferred-pws.check b/test/files/neg/warn-inferred-pws.check deleted file mode 100644 index a1da084e5317..000000000000 --- a/test/files/neg/warn-inferred-pws.check +++ /dev/null @@ -1,15 +0,0 @@ -warn-inferred-pws.scala:2: warning: a type was inferred to be `Product with Serializable`; this may indicate a programming error - def get(list: Boolean) = if (list) List(1, 2, 3) else (1, 2, 3) // warn - ^ -warn-inferred-pws.scala:6: warning: a type was inferred to be `Product with Serializable`; this may indicate a programming error - val foo = if (true) List(1, 2) else (1, 2) // warn - ^ -warn-inferred-pws.scala:11: warning: a type was inferred to be `Product with Serializable`; this may indicate a programming error. 
- val g = f((1, 2), List(1, 2)) // warn - ^ -warn-inferred-pws.scala:15: warning: a type was inferred to be `Product with Serializable`; this may indicate a programming error. - { List(List(1, 2)) contains ((1, 2)) } // warn - ^ -error: No warnings can be incurred under -Xfatal-warnings. -four warnings found -one error found diff --git a/test/files/neg/warn-inferred-pws.flags b/test/files/neg/warn-inferred-pws.flags deleted file mode 100644 index d310af0a5805..000000000000 --- a/test/files/neg/warn-inferred-pws.flags +++ /dev/null @@ -1 +0,0 @@ --Xfatal-warnings -Xlint:infer-pws diff --git a/test/files/neg/warn-inferred-pws.scala b/test/files/neg/warn-inferred-pws.scala deleted file mode 100644 index 8ff9d3501fb3..000000000000 --- a/test/files/neg/warn-inferred-pws.scala +++ /dev/null @@ -1,28 +0,0 @@ -trait DefPwS { - def get(list: Boolean) = if (list) List(1, 2, 3) else (1, 2, 3) // warn -} - -trait ValPwS { - val foo = if (true) List(1, 2) else (1, 2) // warn -} - -trait ParamPwS { - def f[A](as: A*) = 42 - val g = f((1, 2), List(1, 2)) // warn -} - -trait GenericTraitPwS[+A] { - { List(List(1, 2)) contains ((1, 2)) } // warn -} - -// these should not warn as they have explicit types -trait NoWarning { - def get(list: Boolean): Product with Serializable = - if (list) List(1, 2) else (1, 2) - lazy val foo: Product with Serializable = if (true) List(1, 2) else (1, 2) - lazy val bar: Any = if (true) List(1, 2) else (1, 2) - def f[A](as: A*) = 42 - lazy val baz = f[Product with Serializable]((1, 2), List(1, 2)) - def g[A >: Product with Serializable](as: A*) = 42 - lazy val biz = g((1, 2), List(1, 2)) -} From 097c047d3e8aca4f259c9a452bfab7c4319558dd Mon Sep 17 00:00:00 2001 From: Mike Date: Mon, 2 Oct 2017 21:06:18 -0500 Subject: [PATCH 1076/2793] Fix typo in OpenHashMap comments --- src/library/scala/collection/mutable/OpenHashMap.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala index b2e9ee27b940..16e5866c4f01 100644 --- a/src/library/scala/collection/mutable/OpenHashMap.scala +++ b/src/library/scala/collection/mutable/OpenHashMap.scala @@ -79,7 +79,7 @@ extends AbstractMap[Key, Value] private var _size = 0 private var deleted = 0 - // Used for tracking inserts so that iterators can determine in concurrent modification has occurred. + // Used for tracking inserts so that iterators can determine if concurrent modification has occurred. private[this] var modCount = 0 override def size = _size From dfb497543469e3e7f49c1ee779ca269ea77edbe8 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 4 Oct 2017 10:47:13 +1000 Subject: [PATCH 1077/2793] Only add SubstOnlyTreeMaker when dealing with an dep. 
typed unapply --- .../transform/patmat/MatchTranslation.scala | 24 ++++++++++++++----- 1 file changed, 18 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 1e204671b561..7a84f14942f8 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -540,7 +540,7 @@ trait MatchTranslation { def treeMakers(patBinderOrCasted: Symbol, binderKnownNonNull: Boolean, pos: Position): List[TreeMaker] = { // the extractor call (applied to the binder bound by the flatMap corresponding // to the previous (i.e., enclosing/outer) pattern) - val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted)) + val (extractorApply, needsSubst) = spliceApply(pos, patBinderOrCasted) // can't simplify this when subPatBinders.isEmpty, since UnitTpe is definitely // wrong when isSeq, and resultInMonad should always be correct since it comes // directly from the extractor's result type @@ -553,7 +553,7 @@ trait MatchTranslation { subPatBinders.toSet // types may refer to the dummy symbol unapplySelector (in case of dependent method type for the unapply method) - SubstOnlyTreeMaker(unapplySelector, patBinderOrCasted) :: ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)( + val extractorTreeMaker = ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder)( subPatBinders, subPatRefs(binder), potentiallyMutableBinders, @@ -561,7 +561,11 @@ trait MatchTranslation { checkedLength, patBinderOrCasted, ignoredSubPatBinders - ) :: Nil + ) + if (needsSubst) + SubstOnlyTreeMaker(unapplySelector, patBinderOrCasted) :: extractorTreeMaker :: Nil + else + extractorTreeMaker :: Nil } override protected def seqTree(binder: Symbol): Tree = @@ -574,7 +578,8 @@ trait MatchTranslation { if (isSingle) REF(binder) :: Nil // special case for extractors else super.subPatRefs(binder) - protected def spliceApply(binder: Symbol): Tree = { + protected def spliceApply(pos: Position, binder: Symbol): (Tree, Boolean) = { + var needsSubst = false object splice extends Transformer { def binderRef(pos: Position): Tree = REF(binder) setPos pos @@ -582,7 +587,14 @@ trait MatchTranslation { // duplicated with the extractor Unapplied case Apply(x, List(i @ Ident(nme.SELECTOR_DUMMY))) => // in case the result type depended on the unapply's argument, plug in the new symbol - treeCopy.Apply(t, x, binderRef(i.pos) :: Nil) modifyType(_.substSym(List(i.symbol), List(binder))) + val apply = treeCopy.Apply(t, x, binderRef(i.pos) :: Nil) + val tpe = apply.tpe + val substedTpe = tpe.substSym(List(i.symbol), List(binder)) + if (tpe ne substedTpe) { + needsSubst = true + apply.setType(substedTpe) + } + apply // scala/bug#7868 Account for numeric widening, e.g. 
.toInt case Apply(x, List(i @ (sel @ Select(Ident(nme.SELECTOR_DUMMY), name)))) => // not substituting `binder` for `i.symbol`: widening conversion implies the binder could not be used as a path @@ -591,7 +603,7 @@ trait MatchTranslation { super.transform(t) } } - splice transform unapplyAppliedToDummy + (atPos(pos)(splice transform unapplyAppliedToDummy), needsSubst) } } From 8c2bf41a56253a5d356038aece3357840f108597 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 3 Oct 2017 19:42:11 +1000 Subject: [PATCH 1078/2793] Reduce the overhead of deferred macro expansion on typechecking --- src/compiler/scala/tools/nsc/typechecker/Analyzer.scala | 2 ++ src/compiler/scala/tools/nsc/typechecker/Macros.scala | 4 +++- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 4 +++- 3 files changed, 8 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index 0f8e9eee2396..ccd414cc457d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -94,6 +94,8 @@ trait Analyzer extends AnyRef applyPhase(unit) undoLog.clear() } + // defensive measure in case the bookkeeping in deferred macro expansion is buggy + clearDelayed() if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.typerNanos, start) } def apply(unit: CompilationUnit) { diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index e9682d221a50..637864c92c85 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -853,10 +853,12 @@ trait Macros extends MacroRuntimes with Traces with Helpers { * 1) type vars (tpe.isInstanceOf[TypeVar]) // [Eugene] this check is disabled right now, because TypeVars seem to be created from undetparams anyways * 2) undetparams (sym.isTypeParameter && !sym.isSkolem) */ - var hasPendingMacroExpansions = false + var hasPendingMacroExpansions = false // JZ this is never reset to false. What is its purpose? Should it not be stored in Context? 
+ def typerShouldExpandDeferredMacros: Boolean = hasPendingMacroExpansions && !delayed.isEmpty private val forced = perRunCaches.newWeakSet[Tree] private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]() private def isDelayed(expandee: Tree) = delayed contains expandee + def clearDelayed(): Unit = delayed.clear() private def calculateUndetparams(expandee: Tree): scala.collection.mutable.Set[Int] = if (forced(expandee)) scala.collection.mutable.Set[Int]() else delayed.getOrElse(expandee, { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 6e1a118c974d..95c58faed2db 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5596,7 +5596,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (tree1.isEmpty) tree1 else { val result = adapt(tree1, mode, ptPlugins, tree) - if (hasPendingMacroExpansions) macroExpandAll(this, result) else result + if (typerShouldExpandDeferredMacros) { + macroExpandAll(this, result) + } else result } if (shouldPrint) From 120a721a32995b28c92bc3c78a1f668a38517ec9 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 4 Oct 2017 17:52:04 +1000 Subject: [PATCH 1079/2793] Avoid intermetiate Set creation in pattern match analysis --- src/compiler/scala/tools/nsc/transform/patmat/Solving.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala index b1eadd14e690..93b1c746af14 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala @@ -361,7 +361,7 @@ trait Solving extends Logic { case And(props) => // scala/bug#6942: // CNF(P1 /\ ... /\ PN) == CNF(P1) ++ CNF(...) ++ CNF(PN) - props.map(cnfFor).reduce(_ ++ _) + props.iterator.map(cnfFor).reduce(_ ++ _) case p => cnfFor(p) } From 66d594683a2c4b01cb2e810d14253a91c50f1681 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 4 Oct 2017 20:18:38 -0700 Subject: [PATCH 1080/2793] lengthen timeout on some Future tests these have repeatedly (though intermittently) failed on Jenkins, e.g. 
https://scala-ci.typesafe.com/job/scala-2.11.x-integrate-windows/795/ not *that* often, but just often enough to be annoying perhaps longer timeouts will make it happen less often --- test/files/jvm/future-spec/FutureTests.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/files/jvm/future-spec/FutureTests.scala b/test/files/jvm/future-spec/FutureTests.scala index efe9c59d7a87..dd487701b1fc 100644 --- a/test/files/jvm/future-spec/FutureTests.scala +++ b/test/files/jvm/future-spec/FutureTests.scala @@ -61,7 +61,7 @@ class FutureTests extends MinimalScalaTest { val waiting = Future { Thread.sleep(1000) } - Await.ready(waiting, 2000 millis) + Await.ready(waiting, 4000 millis) ms.size mustBe (4) ec.shutdownNow() @@ -95,7 +95,7 @@ class FutureTests extends MinimalScalaTest { val t = new InterruptedException() val f = Future(throw t)(ec) - Await.result(p.future, 2.seconds) mustBe t + Await.result(p.future, 4.seconds) mustBe t } } From b64ad85d1cfdfff29d0836a66736d6d2b0830c0e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 2 Oct 2017 10:06:55 +1000 Subject: [PATCH 1081/2793] Move compilation daemon portfile under `~/.scalac/` Store the compilation daemon's administrativia (port file, redirection) under `~/.scalac/`, instead of the less standard `/tmp/scala-devel/${USER:shared}/scalac-compile-server-port`. On creation, remove group- and other-permissions from these private files, ditto for the repl's history file. On Java 6 on Windows, opt in to compilation daemon using `-nc:false`. --- .../scala/tools/nsc/CompileServer.scala | 37 +++--- .../scala/tools/nsc/CompileSocket.scala | 71 ++++++----- .../tools/nsc/GenericRunnerSettings.scala | 10 +- src/compiler/scala/tools/nsc/Properties.scala | 5 + .../scala/tools/nsc/ScriptRunner.scala | 7 +- .../internal/util/OwnerOnlyChmod.scala | 110 ++++++++++++++++++ .../interpreter/jline/FileBackedHistory.scala | 32 +++-- 7 files changed, 213 insertions(+), 59 deletions(-) create mode 100644 src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala index aa02957a6c82..748393236fa2 100644 --- a/src/compiler/scala/tools/nsc/CompileServer.scala +++ b/src/compiler/scala/tools/nsc/CompileServer.scala @@ -6,11 +6,13 @@ package scala.tools.nsc import java.io.PrintStream -import io.Directory -import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} -import scala.reflect.internal.util.{FakePos, Position} + +import scala.reflect.internal.util.FakePos +import scala.tools.nsc.Properties.scalacDir +import scala.tools.nsc.io.Directory +import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} +import scala.tools.nsc.settings.FscSettings import scala.tools.util.SocketServer -import settings.FscSettings /** * The server part of the fsc offline compiler. It awaits compilation @@ -33,7 +35,7 @@ class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) { val MaxCharge = 0.8 private val runtime = Runtime.getRuntime() - import runtime.{ totalMemory, freeMemory, maxMemory } + import runtime.{freeMemory, maxMemory, totalMemory} /** Create a new compiler instance */ def newGlobal(settings: Settings, reporter: Reporter) = @@ -178,14 +180,15 @@ object CompileServer { execute(() => (), args) /** - * Used for internal testing. The callback is called upon - * server start, notifying the caller that the server is - * ready to run. 
WARNING: the callback runs in the - * server's thread, blocking the server from doing any work - * until the callback is finished. Callbacks should be kept - * simple and clients should not try to interact with the - * server while the callback is processing. - */ + * The server's main loop. + * + * `startupCallback` is used for internal testing; it's called upon server start, + * notifying the caller that the server is ready to run. + * + * WARNING: the callback runs in the server's thread, blocking the server from doing any work + * until the callback is finished. Callbacks should be kept simple and clients should not try to + * interact with the server while the callback is processing. + */ def execute(startupCallback : () => Unit, args: Array[String]) { val debug = args contains "-v" var port = 0 @@ -193,14 +196,14 @@ object CompileServer { val i = args.indexOf("-p") if (i >= 0 && args.length > i + 1) { scala.util.control.Exception.ignoring(classOf[NumberFormatException]) { - port = args(i + 1).toInt + port = args(i + 1).toInt } } - + // Create instance rather than extend to pass a port parameter. val server = new StandardCompileServer(port) - val redirectDir = (server.compileSocket.tmpDir / "output-redirects").createDirectory() - + val redirectDir = server.compileSocket.mkDaemonDir("fsc_redirects") + if (debug) { server.echo("Starting CompileServer on port " + server.port) server.echo("Redirect dir is " + redirectDir) diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala index 27a14141faee..27e11d12fa41 100644 --- a/src/compiler/scala/tools/nsc/CompileSocket.scala +++ b/src/compiler/scala/tools/nsc/CompileSocket.scala @@ -5,12 +5,16 @@ package scala.tools.nsc -import java.io.{ FileNotFoundException, PrintWriter, FileOutputStream } +import java.math.BigInteger import java.security.SecureRandom -import io.{ File, Path, Directory, Socket } -import scala.tools.util.CompileOutputCommon + +import scala.reflect.internal.util.OwnerOnlyChmod import scala.reflect.internal.util.StringOps.splitWhere import scala.sys.process._ +import scala.tools.nsc.Properties.scalacDir +import scala.tools.nsc.io.{File, Socket} +import scala.tools.util.CompileOutputCommon +import scala.util.control.NonFatal trait HasCompileSocket { def compileSocket: CompileSocket @@ -46,14 +50,11 @@ trait HasCompileSocket { class CompileSocket extends CompileOutputCommon { protected lazy val compileClient: StandardCompileClient = CompileClient def verbose = compileClient.verbose - + def verbose_=(v: Boolean) = compileClient.verbose = v + /* Fixes the port where to start the server, 0 yields some free port */ var fixPort = 0 - /** The prefix of the port identification file, which is followed - * by the port number. 
- */ - protected lazy val dirName = "scalac-compile-server-port" protected def cmdName = Properties.scalaCmd /** The vm part of the command to start a new scala compile server */ @@ -67,22 +68,10 @@ class CompileSocket extends CompileOutputCommon { /** The class name of the scala compile server */ protected val serverClass = "scala.tools.nsc.CompileServer" - protected def serverClassArgs = (if (verbose) List("-v") else Nil) ::: (if (fixPort > 0) List("-p", fixPort.toString) else Nil) - - /** A temporary directory to use */ - val tmpDir = { - val udir = Option(Properties.userName) getOrElse "shared" - val f = (Path(Properties.tmpDir) / ("scala-devel" + udir)).createDirectory() - - if (f.isDirectory && f.canWrite) { - info("[Temp directory: " + f + "]") - f - } - else fatal("Could not find a directory for temporary files") - } + protected def serverClassArgs = (if (verbose) List("-v") else Nil) ::: (if (fixPort > 0) List("-p", fixPort.toString) else Nil) /* A directory holding port identification files */ - val portsDir = (tmpDir / dirName).createDirectory() + private lazy val portsDir = mkDaemonDir("fsc_port") /** The command which starts the compile server, given vm arguments. * @@ -104,7 +93,7 @@ class CompileSocket extends CompileOutputCommon { } /** The port identification file */ - def portFile(port: Int) = portsDir / File(port.toString) + def portFile(port: Int): File = portsDir / File(port.toString) /** Poll for a server port number; return -1 if none exists yet */ private def pollPort(): Int = if (fixPort > 0) { @@ -138,19 +127,19 @@ class CompileSocket extends CompileOutputCommon { } info("[Port number: " + port + "]") if (port < 0) - fatal("Could not connect to compilation daemon after " + attempts + " attempts.") + fatal(s"Could not connect to compilation daemon after $attempts attempts. 
To run without it, use `-nocompdaemon` or `-nc`.") port } /** Set the port number to which a scala compile server is connected */ - def setPort(port: Int) { + def setPort(port: Int): Unit = { val file = portFile(port) - val secret = new SecureRandom().nextInt.toString + val secretBytes = new Array[Byte](16) + new SecureRandom().nextBytes(secretBytes) + val secretDigits = new BigInteger(secretBytes).toString().getBytes("UTF-8") - try file writeAll secret catch { - case e @ (_: FileNotFoundException | _: SecurityException) => - fatal("Cannot create file: %s".format(file.path)) - } + try OwnerOnlyChmod().chmodAndWrite(file.jfile, secretDigits) + catch chmodFailHandler(s"Cannot create file: ${file}") } /** Delete the port number to which a scala compile server was connected */ @@ -196,7 +185,7 @@ class CompileSocket extends CompileOutputCommon { catch { case _: NumberFormatException => None } def getSocket(serverAdr: String): Option[Socket] = ( - for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield + for ((name, portStr) <- splitWhere(serverAdr, _ == ':', doDropIndex = true) ; port <- parseInt(portStr)) yield getSocket(name, port) ) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr) @@ -205,7 +194,7 @@ class CompileSocket extends CompileOutputCommon { if (sock.isEmpty) warn("Unable to establish connection to server %s:%d".format(hostName, port)) sock } - + def getPassword(port: Int): String = { val ff = portFile(port) val f = ff.bufferedReader() @@ -223,6 +212,24 @@ class CompileSocket extends CompileOutputCommon { f.close() result } + + private def chmodFailHandler(msg: String): PartialFunction[Throwable, Unit] = { + case NonFatal(e) => + if (verbose) e.printStackTrace() + fatal(msg) + } + + def mkDaemonDir(name: String) = { + val dir = (scalacDir / name).createDirectory() + + if (dir.isDirectory && dir.canWrite) info(s"[Temp directory: $dir]") + else fatal(s"Could not create compilation daemon directory $dir") + + try OwnerOnlyChmod().chmod(dir.jfile) + catch chmodFailHandler(s"Failed to change permissions on $dir. The compilation daemon requires a secure directory; use -nc to disable the daemon.") + dir + } + } diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala index d1f8db048bad..ebdfaad17b52 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala @@ -45,5 +45,13 @@ class GenericRunnerSettings(error: String => Unit) extends Settings(error) { val nc = BooleanSetting( "-nc", - "do not use the fsc compilation daemon") withAbbreviation "-nocompdaemon" + "do not use the fsc compilation daemon") withAbbreviation "-nocompdaemon" withPostSetHook((x: BooleanSetting) => {_useCompDaemon = !x.value }) + + private def defaultUseCompdaemon = { + // can't reliably lock down permissions on the portfile in this environment => disable by default. 
+ !scala.util.Properties.isWin || scala.util.Properties.isJavaAtLeast("7") + } + private[this] var _useCompDaemon = defaultUseCompdaemon + + def useCompDaemon: Boolean = _useCompDaemon } diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala index cb523edfe593..334158982bbb 100644 --- a/src/compiler/scala/tools/nsc/Properties.scala +++ b/src/compiler/scala/tools/nsc/Properties.scala @@ -5,6 +5,8 @@ package scala.tools.nsc +import scala.tools.nsc.io.Path + /** Loads `compiler.properties` from the jar archive file. */ object Properties extends scala.util.PropertiesTrait { @@ -28,4 +30,7 @@ object Properties extends scala.util.PropertiesTrait { // derived values def isEmacsShell = propOrEmpty("env.emacs") != "" + + // Where we keep fsc's state (ports/redirection) + lazy val scalacDir = (Path(Properties.userHome) / ".scalac").createDirectory(force = false) } diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index bf93ad30bc92..ff3c054d8f98 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -67,7 +67,10 @@ class ScriptRunner extends HasCompileSocket { val coreCompArgs = compSettings flatMap (_.unparse) val compArgs = coreCompArgs ++ List("-Xscript", scriptMain(settings), scriptFile) - CompileSocket getOrCreateSocket "" match { + // TODO: untangle this mess of top-level objects with their own little view of the mutable world of settings + compileSocket.verbose = settings.verbose.value + + compileSocket getOrCreateSocket "" match { case Some(sock) => compileOnServer(sock, compArgs) case _ => false } @@ -99,7 +102,7 @@ class ScriptRunner extends HasCompileSocket { settings.outdir.value = compiledPath.path - if (settings.nc) { + if (!settings.useCompDaemon) { /* Setting settings.script.value informs the compiler this is not a * self contained compilation unit. */ diff --git a/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala new file mode 100644 index 000000000000..9ac125d90504 --- /dev/null +++ b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala @@ -0,0 +1,110 @@ +/* NSC -- new Scala compiler + * Copyright 2017 LAMP/EPFL + * @author Martin Odersky + */ +package scala.reflect.internal.util + +import java.io.{File, FileOutputStream, IOException} + + +trait OwnerOnlyChmod { + /** Remove group/other permisisons for `file`, it if exists */ + def chmod(file: java.io.File): Unit + + /** Delete `file` if it exists, recreate it with no group/other permissions, and write `contents` */ + final def chmodAndWrite(file: File, contents: Array[Byte]): Unit = { + file.delete() + val fos = new FileOutputStream(file) + fos.close() + chmod(file) + val fos2 = new FileOutputStream(file) + try { + fos2.write(contents) + } finally { + fos2.close() + } + } + + // TODO: use appropriate NIO call instead of two-step exists?/create! 
+ final def chmodOrCreateEmpty(file: File): Unit = + if (!file.exists()) chmodAndWrite(file, Array[Byte]()) else chmod(file) + +} + +object OwnerOnlyChmod { + def apply(): OwnerOnlyChmod = { + if (!util.Properties.isWin) Java6UnixChmod + else if (util.Properties.isJavaAtLeast("7")) new NioAclChmodReflective + else NoOpOwnerOnlyChmod + } +} + +object NoOpOwnerOnlyChmod extends OwnerOnlyChmod { + override def chmod(file: File): Unit = () +} + + +/** Adjust permissions with `File.{setReadable, setWritable}` */ +object Java6UnixChmod extends OwnerOnlyChmod { + + def chmod(file: File): Unit = if (file.exists()) { + def clearAndSetOwnerOnly(f: (Boolean, Boolean) => Boolean): Unit = { + def fail() = throw new IOException("Unable to modify permissions of " + file) + // attribute = false, ownerOwnly = false + if (!f(false, false)) fail() + // attribute = true, ownerOwnly = true + if (!f(true, true)) fail() + } + if (file.isDirectory) { + clearAndSetOwnerOnly(file.setExecutable) + } + clearAndSetOwnerOnly(file.setReadable) + clearAndSetOwnerOnly(file.setWritable) + } +} + + +object NioAclChmodReflective { + private class Reflectors { + val file_toPath = classOf[java.io.File].getMethod("toPath") + val files = Class.forName("java.nio.file.Files") + val path_class = Class.forName("java.nio.file.Path") + val getFileAttributeView = files.getMethod("getFileAttributeView", path_class, classOf[Class[_]], Class.forName("[Ljava.nio.file.LinkOption;")) + val linkOptionEmptyArray = java.lang.reflect.Array.newInstance(Class.forName("java.nio.file.LinkOption"), 0) + val aclFileAttributeView_class = Class.forName("java.nio.file.attribute.AclFileAttributeView") + val aclEntry_class = Class.forName("java.nio.file.attribute.AclEntry") + val aclEntryBuilder_class = Class.forName("java.nio.file.attribute.AclEntry$Builder") + val newBuilder = aclEntry_class.getMethod("newBuilder") + val aclEntryBuilder_build = aclEntryBuilder_class.getMethod("build") + val userPrinciple_class = Class.forName("java.nio.file.attribute.UserPrincipal") + val setPrincipal = aclEntryBuilder_class.getMethod("setPrincipal", userPrinciple_class) + val setPermissions = aclEntryBuilder_class.getMethod("setPermissions", Class.forName("[Ljava.nio.file.attribute.AclEntryPermission;")) + val aclEntryType_class = Class.forName("java.nio.file.attribute.AclEntryType") + val setType = aclEntryBuilder_class.getMethod("setType", aclEntryType_class) + val aclEntryPermission_class = Class.forName("java.nio.file.attribute.AclEntryPermission") + val aclEntryPermissionValues = aclEntryPermission_class.getDeclaredMethod("values") + val aclEntryType_ALLOW = aclEntryType_class.getDeclaredField("ALLOW") + } + private val reflectors = try { new Reflectors } catch { case ex: Throwable => null } +} + +/** Reflective version of `NioAclChmod` */ +final class NioAclChmodReflective extends OwnerOnlyChmod { + import NioAclChmodReflective.reflectors._ + def chmod(file: java.io.File): Unit = { + val path = file_toPath.invoke(file) + val view = getFileAttributeView.invoke(null, path, aclFileAttributeView_class, linkOptionEmptyArray) + val setAcl = aclFileAttributeView_class.getMethod("setAcl", classOf[java.util.List[_]]) + val getOwner = aclFileAttributeView_class.getMethod("getOwner") + val owner = getOwner.invoke(view) + setAcl.invoke(view, acls(owner)) + } + + private def acls(owner: Object) = { + val builder = newBuilder.invoke(null) + setPrincipal.invoke(builder, owner) + setPermissions.invoke(builder, aclEntryPermissionValues.invoke(null)) + setType.invoke(builder, 
aclEntryType_ALLOW.get(null)) + java.util.Collections.singletonList(aclEntryBuilder_build.invoke(builder)) + } +} diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala index 53a06ca97258..9f7b5e46bccf 100644 --- a/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala +++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala @@ -8,15 +8,37 @@ package scala.tools.nsc.interpreter.jline import _root_.jline.console.history.PersistentHistory import scala.tools.nsc.interpreter -import scala.reflect.io.{ File, Path } -import scala.tools.nsc.Properties.{ propOrNone, userHome } +import scala.reflect.io.{File, Path} +import scala.tools.nsc.Properties.{propOrNone, userHome} +import scala.reflect.internal.util.OwnerOnlyChmod +import scala.util.control.NonFatal /** TODO: file locking. */ trait FileBackedHistory extends JLineHistory with PersistentHistory { def maxSize: Int - protected lazy val historyFile: File = FileBackedHistory.defaultFile + // For a history file in the standard location, always try to restrict permission, + // creating an empty file if none exists. + // For a user-specified location, only lock down permissions on if we're the ones + // creating it, otherwise responsibility for permissions is up to the caller. + protected lazy val historyFile: File = File { + propOrNone("scala.shell.histfile").map(Path.apply) match { + case Some(p) => if (!p.exists) secure(p) else p + case None => secure(Path(userHome) / FileBackedHistory.defaultFileName) + } + } + + private def secure(p: Path): Path = { + try OwnerOnlyChmod().chmodOrCreateEmpty(p.jfile) + catch { case NonFatal(e) => + if (interpreter.isReplDebug) e.printStackTrace() + interpreter.replinfo(s"Warning: history file ${p}'s permissions could not be restricted to owner-only.") + } + + p + } + private var isPersistent = true locally { @@ -86,8 +108,4 @@ object FileBackedHistory { // val ContinuationNL: String = Array('\003', '\n').mkString final val defaultFileName = ".scala_history" - - def defaultFile: File = File( - propOrNone("scala.shell.histfile") map (Path.apply) getOrElse (Path(userHome) / defaultFileName) - ) } From 48afc40f61d45b5d5608c7f415111f9034f0bbfc Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 25 Sep 2017 14:00:06 +1000 Subject: [PATCH 1082/2793] [backport] Fix runtime refletion of empty package members under Java 9. We used to rely on `cls.getPackage == null` for `cls` defined in the empty package. Under Java 9, we actually get the empty package back from that call. This commit ensures we use the one true empty package symbol on either Java 8 or 9. 
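For reference, a minimal sketch of the JDK behaviour difference that motivated the fix and how it can be observed through runtime reflection. This is not part of the patch; the `Probe`/`PackageProbe` names are made up, and the printed values are the typically observed ones, not guaranteed output:

```scala
// Compiled without a package clause, so Probe lives in the empty package.
class Probe

object PackageProbe {
  def main(args: Array[String]): Unit = {
    // On Java 8 this typically prints "null"; on Java 9 it prints the unnamed
    // package, whose name is the empty string -- the case the patched
    // makeScalaPackage now maps straight to EmptyPackage.
    println(classOf[Probe].getPackage)

    val mirror = scala.reflect.runtime.universe.runtimeMirror(getClass.getClassLoader)
    // With the fix, the owner resolves to the canonical empty-package symbol
    // on both JDKs instead of a freshly created package named "".
    println(mirror.reflect(new Probe).symbol.owner)
  }
}
```

The bundled `sd304` test exercises the same path from Java code, reflecting a class defined in the default package.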
(cherry picked from commit b81bc778822de33e73fda59d5014baa1292856d4) --- src/reflect/scala/reflect/runtime/JavaMirrors.scala | 2 +- test/files/run/sd304.check | 1 + test/files/run/sd304/ReflectTest.scala | 8 ++++++++ test/files/run/sd304/Test.java | 5 +++++ 4 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 test/files/run/sd304.check create mode 100644 test/files/run/sd304/ReflectTest.scala create mode 100644 test/files/run/sd304/Test.java diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 50442519f2ca..7cfb5434db1a 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -937,7 +937,7 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive * The Scala package with given fully qualified name. Unlike `packageNameToScala`, * this one bypasses the cache. */ - private[JavaMirrors] def makeScalaPackage(fullname: String): ModuleSymbol = gilSynchronized { + private[JavaMirrors] def makeScalaPackage(fullname: String): ModuleSymbol = if (fullname == "") EmptyPackage else gilSynchronized { val split = fullname lastIndexOf '.' val ownerModule: ModuleSymbol = if (split > 0) packageNameToScala(fullname take split) else this.RootPackage diff --git a/test/files/run/sd304.check b/test/files/run/sd304.check new file mode 100644 index 000000000000..be7795442a7a --- /dev/null +++ b/test/files/run/sd304.check @@ -0,0 +1 @@ +class Test diff --git a/test/files/run/sd304/ReflectTest.scala b/test/files/run/sd304/ReflectTest.scala new file mode 100644 index 000000000000..7685227b7de6 --- /dev/null +++ b/test/files/run/sd304/ReflectTest.scala @@ -0,0 +1,8 @@ +package p1 + +class ReflectTest { + def test(a: AnyRef): Unit = { + val mirror = reflect.runtime.universe.runtimeMirror(a.getClass.getClassLoader) + println(mirror.reflect(a).symbol) + } +} diff --git a/test/files/run/sd304/Test.java b/test/files/run/sd304/Test.java new file mode 100644 index 000000000000..97d523f8fb4b --- /dev/null +++ b/test/files/run/sd304/Test.java @@ -0,0 +1,5 @@ +public class Test { + public static void main(String[] args) { + new p1.ReflectTest().test(new Test()); + } +} From e43d48c8657eb6eca17bf2f054f565a0abef7632 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 25 Sep 2017 14:00:06 +1000 Subject: [PATCH 1083/2793] [backport] Fix runtime refletion of empty package members under Java 9. We used to rely on `cls.getPackage == null` for `cls` defined in the empty package. Under Java 9, we actually get the empty package back from that call. This commit ensures we use the one true empty package symbol on either Java 8 or 9. 
(cherry picked from commit b81bc778822de33e73fda59d5014baa1292856d4) --- src/reflect/scala/reflect/runtime/JavaMirrors.scala | 2 +- test/files/run/sd304.check | 1 + test/files/run/sd304/ReflectTest.scala | 8 ++++++++ test/files/run/sd304/Test.java | 5 +++++ 4 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 test/files/run/sd304.check create mode 100644 test/files/run/sd304/ReflectTest.scala create mode 100644 test/files/run/sd304/Test.java diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 9c0781ca06fc..ff19dcd6408d 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -896,7 +896,7 @@ private[reflect] trait JavaMirrors extends internal.SymbolTable with api.JavaUni * The Scala package with given fully qualified name. Unlike `packageNameToScala`, * this one bypasses the cache. */ - private[JavaMirrors] def makeScalaPackage(fullname: String): ModuleSymbol = { + private[JavaMirrors] def makeScalaPackage(fullname: String): ModuleSymbol = if (fullname == "") EmptyPackage else { val split = fullname lastIndexOf '.' val ownerModule: ModuleSymbol = if (split > 0) packageNameToScala(fullname take split) else this.RootPackage diff --git a/test/files/run/sd304.check b/test/files/run/sd304.check new file mode 100644 index 000000000000..be7795442a7a --- /dev/null +++ b/test/files/run/sd304.check @@ -0,0 +1 @@ +class Test diff --git a/test/files/run/sd304/ReflectTest.scala b/test/files/run/sd304/ReflectTest.scala new file mode 100644 index 000000000000..7685227b7de6 --- /dev/null +++ b/test/files/run/sd304/ReflectTest.scala @@ -0,0 +1,8 @@ +package p1 + +class ReflectTest { + def test(a: AnyRef): Unit = { + val mirror = reflect.runtime.universe.runtimeMirror(a.getClass.getClassLoader) + println(mirror.reflect(a).symbol) + } +} diff --git a/test/files/run/sd304/Test.java b/test/files/run/sd304/Test.java new file mode 100644 index 000000000000..97d523f8fb4b --- /dev/null +++ b/test/files/run/sd304/Test.java @@ -0,0 +1,5 @@ +public class Test { + public static void main(String[] args) { + new p1.ReflectTest().test(new Test()); + } +} From aa133c9e9dd73bc82eb2f9cce0e3e02c91542090 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 5 Oct 2017 17:13:04 -0700 Subject: [PATCH 1084/2793] Address SethTisue's review feedback --- .../scala/tools/nsc/CompileSocket.scala | 11 +++-- .../tools/nsc/GenericRunnerSettings.scala | 7 +-- .../scala/tools/nsc/ScriptRunner.scala | 9 ++++ .../internal/util/OwnerOnlyChmod.scala | 47 +++++++++---------- .../interpreter/jline/FileBackedHistory.scala | 2 +- 5 files changed, 39 insertions(+), 37 deletions(-) diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala index 27e11d12fa41..63fcc09c8f06 100644 --- a/src/compiler/scala/tools/nsc/CompileSocket.scala +++ b/src/compiler/scala/tools/nsc/CompileSocket.scala @@ -8,6 +8,7 @@ package scala.tools.nsc import java.math.BigInteger import java.security.SecureRandom +import scala.io.Codec import scala.reflect.internal.util.OwnerOnlyChmod import scala.reflect.internal.util.StringOps.splitWhere import scala.sys.process._ @@ -133,10 +134,10 @@ class CompileSocket extends CompileOutputCommon { /** Set the port number to which a scala compile server is connected */ def setPort(port: Int): Unit = { - val file = portFile(port) - val secretBytes = new Array[Byte](16) - new SecureRandom().nextBytes(secretBytes) - val 
secretDigits = new BigInteger(secretBytes).toString().getBytes("UTF-8") + val file = portFile(port) + // 128 bits of delicious randomness, suitable for printing with println over a socket, + // and storage in a file -- see getPassword + val secretDigits = new BigInteger(128, new SecureRandom()).toString.getBytes("UTF-8") try OwnerOnlyChmod().chmodAndWrite(file.jfile, secretDigits) catch chmodFailHandler(s"Cannot create file: ${file}") @@ -197,7 +198,7 @@ class CompileSocket extends CompileOutputCommon { def getPassword(port: Int): String = { val ff = portFile(port) - val f = ff.bufferedReader() + val f = ff.bufferedReader(Codec.UTF8) // allow some time for the server to start up def check = { diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala index ebdfaad17b52..d33f5530b9ec 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala @@ -47,11 +47,6 @@ class GenericRunnerSettings(error: String => Unit) extends Settings(error) { "-nc", "do not use the fsc compilation daemon") withAbbreviation "-nocompdaemon" withPostSetHook((x: BooleanSetting) => {_useCompDaemon = !x.value }) - private def defaultUseCompdaemon = { - // can't reliably lock down permissions on the portfile in this environment => disable by default. - !scala.util.Properties.isWin || scala.util.Properties.isJavaAtLeast("7") - } - private[this] var _useCompDaemon = defaultUseCompdaemon - + private[this] var _useCompDaemon = true def useCompDaemon: Boolean = _useCompDaemon } diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index ff3c054d8f98..1d0a71036c57 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -102,6 +102,12 @@ class ScriptRunner extends HasCompileSocket { settings.outdir.value = compiledPath.path + // can't reliably lock down permissions on the portfile in this environment => disable by default. + // not the cleanest to do this here, but I don't see where else to decide this and emit the warning below + val cantLockdown = !settings.nc.isSetByUser && scala.util.Properties.isWin && !scala.util.Properties.isJavaAtLeast("7") + + if (cantLockdown) settings.nc.value = true + if (!settings.useCompDaemon) { /* Setting settings.script.value informs the compiler this is not a * self contained compilation unit. @@ -110,6 +116,9 @@ class ScriptRunner extends HasCompileSocket { val reporter = new ConsoleReporter(settings) val compiler = newGlobal(settings, reporter) + if (cantLockdown) + reporter.echo("[info] The compilation daemon is disabled by default on this platform. 
To force its usage, use `-nocompdaemon:false`.") + new compiler.Run compile List(scriptFile) if (reporter.hasErrors) None else Some(compiledPath) } diff --git a/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala index 9ac125d90504..c0da65db3873 100644 --- a/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala +++ b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala @@ -8,7 +8,7 @@ import java.io.{File, FileOutputStream, IOException} trait OwnerOnlyChmod { - /** Remove group/other permisisons for `file`, it if exists */ + /** Remove group/other permissions for `file`, it if exists */ def chmod(file: java.io.File): Unit /** Delete `file` if it exists, recreate it with no group/other permissions, and write `contents` */ @@ -50,9 +50,9 @@ object Java6UnixChmod extends OwnerOnlyChmod { def chmod(file: File): Unit = if (file.exists()) { def clearAndSetOwnerOnly(f: (Boolean, Boolean) => Boolean): Unit = { def fail() = throw new IOException("Unable to modify permissions of " + file) - // attribute = false, ownerOwnly = false + // attribute = false, ownerOnly = false if (!f(false, false)) fail() - // attribute = true, ownerOwnly = true + // attribute = true, ownerOnly = true if (!f(true, true)) fail() } if (file.isDirectory) { @@ -65,32 +65,29 @@ object Java6UnixChmod extends OwnerOnlyChmod { object NioAclChmodReflective { - private class Reflectors { - val file_toPath = classOf[java.io.File].getMethod("toPath") - val files = Class.forName("java.nio.file.Files") - val path_class = Class.forName("java.nio.file.Path") - val getFileAttributeView = files.getMethod("getFileAttributeView", path_class, classOf[Class[_]], Class.forName("[Ljava.nio.file.LinkOption;")) - val linkOptionEmptyArray = java.lang.reflect.Array.newInstance(Class.forName("java.nio.file.LinkOption"), 0) - val aclFileAttributeView_class = Class.forName("java.nio.file.attribute.AclFileAttributeView") - val aclEntry_class = Class.forName("java.nio.file.attribute.AclEntry") - val aclEntryBuilder_class = Class.forName("java.nio.file.attribute.AclEntry$Builder") - val newBuilder = aclEntry_class.getMethod("newBuilder") - val aclEntryBuilder_build = aclEntryBuilder_class.getMethod("build") - val userPrinciple_class = Class.forName("java.nio.file.attribute.UserPrincipal") - val setPrincipal = aclEntryBuilder_class.getMethod("setPrincipal", userPrinciple_class) - val setPermissions = aclEntryBuilder_class.getMethod("setPermissions", Class.forName("[Ljava.nio.file.attribute.AclEntryPermission;")) - val aclEntryType_class = Class.forName("java.nio.file.attribute.AclEntryType") - val setType = aclEntryBuilder_class.getMethod("setType", aclEntryType_class) - val aclEntryPermission_class = Class.forName("java.nio.file.attribute.AclEntryPermission") - val aclEntryPermissionValues = aclEntryPermission_class.getDeclaredMethod("values") - val aclEntryType_ALLOW = aclEntryType_class.getDeclaredField("ALLOW") - } - private val reflectors = try { new Reflectors } catch { case ex: Throwable => null } + val file_toPath = classOf[java.io.File].getMethod("toPath") + val files = Class.forName("java.nio.file.Files") + val path_class = Class.forName("java.nio.file.Path") + val getFileAttributeView = files.getMethod("getFileAttributeView", path_class, classOf[Class[_]], Class.forName("[Ljava.nio.file.LinkOption;")) + val linkOptionEmptyArray = java.lang.reflect.Array.newInstance(Class.forName("java.nio.file.LinkOption"), 0) + val aclFileAttributeView_class = 
Class.forName("java.nio.file.attribute.AclFileAttributeView") + val aclEntry_class = Class.forName("java.nio.file.attribute.AclEntry") + val aclEntryBuilder_class = Class.forName("java.nio.file.attribute.AclEntry$Builder") + val newBuilder = aclEntry_class.getMethod("newBuilder") + val aclEntryBuilder_build = aclEntryBuilder_class.getMethod("build") + val userPrinciple_class = Class.forName("java.nio.file.attribute.UserPrincipal") + val setPrincipal = aclEntryBuilder_class.getMethod("setPrincipal", userPrinciple_class) + val setPermissions = aclEntryBuilder_class.getMethod("setPermissions", Class.forName("[Ljava.nio.file.attribute.AclEntryPermission;")) + val aclEntryType_class = Class.forName("java.nio.file.attribute.AclEntryType") + val setType = aclEntryBuilder_class.getMethod("setType", aclEntryType_class) + val aclEntryPermission_class = Class.forName("java.nio.file.attribute.AclEntryPermission") + val aclEntryPermissionValues = aclEntryPermission_class.getDeclaredMethod("values") + val aclEntryType_ALLOW = aclEntryType_class.getDeclaredField("ALLOW") } /** Reflective version of `NioAclChmod` */ final class NioAclChmodReflective extends OwnerOnlyChmod { - import NioAclChmodReflective.reflectors._ + import NioAclChmodReflective._ def chmod(file: java.io.File): Unit = { val path = file_toPath.invoke(file) val view = getFileAttributeView.invoke(null, path, aclFileAttributeView_class, linkOptionEmptyArray) diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala index 9f7b5e46bccf..b215b26f4ec1 100644 --- a/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala +++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala @@ -20,7 +20,7 @@ trait FileBackedHistory extends JLineHistory with PersistentHistory { // For a history file in the standard location, always try to restrict permission, // creating an empty file if none exists. - // For a user-specified location, only lock down permissions on if we're the ones + // For a user-specified location, only lock down permissions if we're the ones // creating it, otherwise responsibility for permissions is up to the caller. 
protected lazy val historyFile: File = File { propOrNone("scala.shell.histfile").map(Path.apply) match { From cf2d62936bb147c8fa1693cabd0e3659e8dfb9b2 Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Fri, 6 Oct 2017 10:57:20 +0100 Subject: [PATCH 1085/2793] Ensure display order of note tags and others matches source definition Fixes scala/bug#10325 --- .../nsc/doc/base/CommentFactoryBase.scala | 4 +-- .../tools/nsc/scaladoc/HtmlFactoryTest.scala | 2 +- test/scaladoc/resources/t10325.scala | 36 +++++++++++++++++++ test/scaladoc/run/t10325.check | 16 +++++++++ test/scaladoc/run/t10325.scala | 33 +++++++++++++++++ 5 files changed, 88 insertions(+), 3 deletions(-) create mode 100644 test/scaladoc/resources/t10325.scala create mode 100644 test/scaladoc/run/t10325.check create mode 100644 test/scaladoc/run/t10325.scala diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala index a5d3cbca5ab5..b1bb842453c3 100644 --- a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala @@ -1,5 +1,5 @@ /* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL + * Copyright 2007-2017 LAMP/EPFL * @author Manohar Jonnalagedda */ @@ -343,7 +343,7 @@ trait CommentFactoryBase { this: MemberLookupBase => } def allTags(key: SimpleTagKey): List[Body] = - (bodyTags remove key).getOrElse(Nil).filterNot(_.blocks.isEmpty) + (bodyTags remove key).getOrElse(Nil).filterNot(_.blocks.isEmpty).reverse def allSymsOneTag(key: TagKey, filterEmpty: Boolean = true): Map[String, Body] = { val keys: Seq[SymbolTagKey] = diff --git a/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala b/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala index 13aa76415803..91a38084c92a 100644 --- a/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala +++ b/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala @@ -770,7 +770,7 @@ object HtmlFactoryTest extends Properties("HtmlFactory") { property("scala/bug#9599 Multiple @todo formatted with comma on separate line") = { createTemplates("t9599.scala")("X.html") match { - case node: scala.xml.Node => node.text.contains("todo3todo2todo1") + case node: scala.xml.Node => node.text.contains("todo1todo2todo3") case _ => false } } diff --git a/test/scaladoc/resources/t10325.scala b/test/scaladoc/resources/t10325.scala new file mode 100644 index 000000000000..e610a82f7aff --- /dev/null +++ b/test/scaladoc/resources/t10325.scala @@ -0,0 +1,36 @@ +package scala.test.scaladoc + +/** + * @note Note B + * @note Note A + * @note Note C + */ +trait Note + +/** + * @author Author B + * @author Author A + * @author Author C + */ +trait Author + +/** + * @see See B + * @see See A + * @see See C + */ +trait See + +/** + * @todo Todo B + * @todo Todo C + * @todo Todo A + */ +trait Todo + +/** + * @example Example B + * @example Example C + * @example Example A + */ +trait Example diff --git a/test/scaladoc/run/t10325.check b/test/scaladoc/run/t10325.check new file mode 100644 index 000000000000..33c2904da37d --- /dev/null +++ b/test/scaladoc/run/t10325.check @@ -0,0 +1,16 @@ +Body(List(Paragraph(Chain(List(Summary(Text(Note B))))))) +Body(List(Paragraph(Chain(List(Summary(Text(Note A))))))) +Body(List(Paragraph(Chain(List(Summary(Text(Note C))))))) +Body(List(Paragraph(Chain(List(Summary(Text(Author B))))))) +Body(List(Paragraph(Chain(List(Summary(Text(Author A))))))) 
+Body(List(Paragraph(Chain(List(Summary(Text(Author C))))))) +Body(List(Paragraph(Chain(List(Summary(Text(See B))))))) +Body(List(Paragraph(Chain(List(Summary(Text(See A))))))) +Body(List(Paragraph(Chain(List(Summary(Text(See C))))))) +Body(List(Paragraph(Chain(List(Summary(Text(Todo B))))))) +Body(List(Paragraph(Chain(List(Summary(Text(Todo C))))))) +Body(List(Paragraph(Chain(List(Summary(Text(Todo A))))))) +Body(List(Paragraph(Chain(List(Summary(Text(Example B))))))) +Body(List(Paragraph(Chain(List(Summary(Text(Example C))))))) +Body(List(Paragraph(Chain(List(Summary(Text(Example A))))))) +Done. diff --git a/test/scaladoc/run/t10325.scala b/test/scaladoc/run/t10325.scala new file mode 100644 index 000000000000..e1fce45f148d --- /dev/null +++ b/test/scaladoc/run/t10325.scala @@ -0,0 +1,33 @@ +import scala.tools.nsc.doc.model._ +import scala.tools.partest.ScaladocModelTest +import language._ +import scala.tools.nsc.doc.base.comment.Comment + +object Test extends ScaladocModelTest { + + override def resourceFile = "t10325.scala" + + override def scaladocSettings = "" + + def testModel(root: Package) = { + import access._ + + val base = root._package("scala")._package("test")._package("scaladoc") + + def printTags(tag: String) = { + val t = base._trait(tag) + val comment: Comment = t.comment.get + comment.note foreach println + comment.authors foreach println + comment.see foreach println + comment.todo foreach println + comment.example foreach println + } + + printTags("Note") + printTags("Author") + printTags("See") + printTags("Todo") + printTags("Example") + } +} From e01e9d9fa87567299279f4c323339d5b5ee979a3 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 6 Oct 2017 12:52:54 +0200 Subject: [PATCH 1086/2793] Upgrade scala-asm to 5.2-scala-2 --- versions.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/versions.properties b/versions.properties index 445101916902..25032ac4d124 100644 --- a/versions.properties +++ b/versions.properties @@ -23,5 +23,5 @@ scala-xml.version.number=1.0.6 scala-parser-combinators.version.number=1.0.6 scala-swing.version.number=2.0.0 partest.version.number=1.1.1 -scala-asm.version=5.2.0-scala-1 +scala-asm.version=5.2.0-scala-2 jline.version=2.14.4 From 42e9a64e60c38bc77e67a4517ece097cf6a06cdf Mon Sep 17 00:00:00 2001 From: Rex Kerr Date: Sat, 7 Oct 2017 04:51:47 -0700 Subject: [PATCH 1087/2793] Fix for scala/bug#10540 - AnyRefMap dropped entries with one hash code. Changed the internal hash code calculation to not produce zero values. 
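To make the failure mode concrete, here is a small standalone sketch contrasting the old and new ordering of the mask and the zero check. It is not part of the patch; `HashOfSketch` and its method names are made up, and only the mixing step of `hashOf` is reproduced (the null-key branch is omitted):

```scala
object HashOfSketch {
  // Old ordering: zero-check first, then mask. If the mixed value is
  // Int.MinValue (0x80000000), the mask turns it into 0 -- the map's
  // reserved "no element" marker -- so the entry silently disappears.
  def oldHashOf(h: Int): Int = {
    val i = (h ^ (h >>> 16)) * 0x85EBCA6B
    val j = i ^ (i >>> 13)
    if (j == 0) 0x41081989 else j & 0x7FFFFFFF
  }

  // New ordering: mask first, then substitute for 0, so 0 can never escape.
  def newHashOf(h: Int): Int = {
    val i = (h ^ (h >>> 16)) * 0x85EBCA6B
    val j = (i ^ (i >>> 13)) & 0x7FFFFFFF
    if (j == 0) 0x41081989 else j
  }

  def main(args: Array[String]): Unit = {
    val h = -2105619938 // the hash code from the scala/bug#10540 report
    println(f"old: 0x${oldHashOf(h)}%08x") // old: 0x00000000
    println(f"new: 0x${newHashOf(h)}%08x") // new: 0x41081989
  }
}
```

For this input the mixed value `i ^ (i >>> 13)` is exactly `Int.MinValue`, so the old ordering masks it down to 0 and lookups treat the slot as empty; masking before the zero check closes that hole.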
--- .../scala/collection/mutable/AnyRefMap.scala | 5 ++-- .../collection/mutable/AnyRefMapTest.scala | 24 +++++++++++++++++++ 2 files changed, 27 insertions(+), 2 deletions(-) create mode 100644 test/junit/scala/collection/mutable/AnyRefMapTest.scala diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala index 6ff79dd1b87f..3550afeda4a6 100644 --- a/src/library/scala/collection/mutable/AnyRefMap.scala +++ b/src/library/scala/collection/mutable/AnyRefMap.scala @@ -81,13 +81,14 @@ extends AbstractMap[K, V] (_size + _vacant) > 0.5*mask || _vacant > _size private def hashOf(key: K): Int = { + // Note: this method must not return 0 or Int.MinValue, as these indicate no element if (key eq null) 0x41081989 else { val h = key.hashCode // Part of the MurmurHash3 32 bit finalizer val i = (h ^ (h >>> 16)) * 0x85EBCA6B - val j = (i ^ (i >>> 13)) - if (j==0) 0x41081989 else j & 0x7FFFFFFF + val j = (i ^ (i >>> 13)) & 0x7FFFFFFF + if (j==0) 0x41081989 else j } } diff --git a/test/junit/scala/collection/mutable/AnyRefMapTest.scala b/test/junit/scala/collection/mutable/AnyRefMapTest.scala new file mode 100644 index 000000000000..6c12296950c3 --- /dev/null +++ b/test/junit/scala/collection/mutable/AnyRefMapTest.scala @@ -0,0 +1,24 @@ +package scala.collection.mutable + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import org.junit.Assert.assertTrue + +import scala.collection.mutable.AnyRefMap + +/* Test for scala/bug#10540 */ +@RunWith(classOf[JUnit4]) +class AnyRefMapTest { + @Test + def test10540: Unit = { + val badHashCode = -2105619938 + val reported = "K00278:18:H7C2NBBXX:7:1111:7791:21465" + val equivalent = "JK1C=H" + val sameHashCode = java.lang.Integer.valueOf(badHashCode) + assertTrue(AnyRefMap(reported -> 1) contains reported) + assertTrue(AnyRefMap(equivalent -> 1) contains equivalent) + assertTrue(AnyRefMap(sameHashCode -> 1) contains sameHashCode) + assertTrue(sameHashCode.hashCode == badHashCode) // Make sure test works + } +} From 19ce707d9bed3036670f160d46253d27084d7e58 Mon Sep 17 00:00:00 2001 From: Rex Kerr Date: Sat, 7 Oct 2017 04:51:47 -0700 Subject: [PATCH 1088/2793] [backport] AnyRefMap dropped entries with one hash code. Changed the internal hash code calculation to not produce zero values. 
(cherry picked from commit 42e9a64e60c38bc77e67a4517ece097cf6a06cdf) Backport of fix for for scala/bug#10540 --- .../scala/collection/mutable/AnyRefMap.scala | 5 ++-- .../collection/mutable/AnyRefMapTest.scala | 24 +++++++++++++++++++ 2 files changed, 27 insertions(+), 2 deletions(-) create mode 100644 test/junit/scala/collection/mutable/AnyRefMapTest.scala diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala index 369d596ec35b..34a4b63aaf37 100644 --- a/src/library/scala/collection/mutable/AnyRefMap.scala +++ b/src/library/scala/collection/mutable/AnyRefMap.scala @@ -79,13 +79,14 @@ extends AbstractMap[K, V] (_size + _vacant) > 0.5*mask || _vacant > _size private def hashOf(key: K): Int = { + // Note: this method must not return 0 or Int.MinValue, as these indicate no element if (key eq null) 0x41081989 else { val h = key.hashCode // Part of the MurmurHash3 32 bit finalizer val i = (h ^ (h >>> 16)) * 0x85EBCA6B - val j = (i ^ (i >>> 13)) - if (j==0) 0x41081989 else j & 0x7FFFFFFF + val j = (i ^ (i >>> 13)) & 0x7FFFFFFF + if (j==0) 0x41081989 else j } } diff --git a/test/junit/scala/collection/mutable/AnyRefMapTest.scala b/test/junit/scala/collection/mutable/AnyRefMapTest.scala new file mode 100644 index 000000000000..6c12296950c3 --- /dev/null +++ b/test/junit/scala/collection/mutable/AnyRefMapTest.scala @@ -0,0 +1,24 @@ +package scala.collection.mutable + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test +import org.junit.Assert.assertTrue + +import scala.collection.mutable.AnyRefMap + +/* Test for scala/bug#10540 */ +@RunWith(classOf[JUnit4]) +class AnyRefMapTest { + @Test + def test10540: Unit = { + val badHashCode = -2105619938 + val reported = "K00278:18:H7C2NBBXX:7:1111:7791:21465" + val equivalent = "JK1C=H" + val sameHashCode = java.lang.Integer.valueOf(badHashCode) + assertTrue(AnyRefMap(reported -> 1) contains reported) + assertTrue(AnyRefMap(equivalent -> 1) contains equivalent) + assertTrue(AnyRefMap(sameHashCode -> 1) contains sameHashCode) + assertTrue(sameHashCode.hashCode == badHashCode) // Make sure test works + } +} From cd54e2b0aa6a7069ec3aa29bbf5499d6d8003770 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Mon, 9 Oct 2017 13:02:39 -0700 Subject: [PATCH 1089/2793] Move compilation daemon portfile under `~/.scalac/` Store the compilation daemon's administrativia (port file, redirection) under `~/.scalac/`, instead of the less standard `/tmp/scala-devel/${USER:shared}/scalac-compile-server-port`. On creation, remove group- and other-permissions from these private files, ditto for the repl's history file. 
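As background, a minimal standalone sketch of the owner-only locking that the new NIO-based `OwnerOnlyChmod` performs, with its POSIX/ACL split. This is not the patch's own code; the object name and the path are made up for illustration:

```scala
import java.nio.file.{Files, Paths}
import java.nio.file.attribute.{PosixFileAttributeView, PosixFilePermission}
import java.util.EnumSet

object OwnerOnlySketch {
  def main(args: Array[String]): Unit = {
    val path = Paths.get("/tmp/example-portfile") // illustrative path only
    if (!Files.exists(path)) Files.createFile(path)
    if (Files.getFileStore(path).supportsFileAttributeView(classOf[PosixFileAttributeView])) {
      // POSIX stores: keep only owner read/write, clearing group/other bits.
      Files.setPosixFilePermissions(
        path,
        EnumSet.of(PosixFilePermission.OWNER_READ, PosixFilePermission.OWNER_WRITE))
    } else {
      // Non-POSIX stores (e.g. NTFS) need the AclFileAttributeView route instead,
      // which is what the patch's OwnerOnlyChmod falls back to.
      println("POSIX permissions unsupported here; an ACL-based fallback is required")
    }
  }
}
```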
Based on b64ad85 --- .../scala/tools/nsc/CompileServer.scala | 29 ++++---- .../scala/tools/nsc/CompileSocket.scala | 71 ++++++++++--------- .../tools/nsc/GenericRunnerSettings.scala | 6 +- src/compiler/scala/tools/nsc/Properties.scala | 5 ++ .../scala/tools/nsc/ScriptRunner.scala | 7 +- .../internal/util/OwnerOnlyChmod.scala | 59 +++++++++++++++ .../interpreter/jline/FileBackedHistory.scala | 32 +++++++-- 7 files changed, 153 insertions(+), 56 deletions(-) create mode 100644 src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala index ffe95ba9dc80..3cd9ce61f969 100644 --- a/src/compiler/scala/tools/nsc/CompileServer.scala +++ b/src/compiler/scala/tools/nsc/CompileServer.scala @@ -6,11 +6,12 @@ package scala.tools.nsc import java.io.PrintStream -import io.Directory -import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} + import scala.reflect.internal.util.FakePos +import scala.tools.nsc.io.Directory +import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} +import scala.tools.nsc.settings.FscSettings import scala.tools.util.SocketServer -import settings.FscSettings /** * The server part of the fsc offline compiler. It awaits compilation @@ -33,7 +34,7 @@ class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) { val MaxCharge = 0.8 private val runtime = Runtime.getRuntime() - import runtime.{ totalMemory, freeMemory, maxMemory } + import runtime.{freeMemory, maxMemory, totalMemory} /** Create a new compiler instance */ def newGlobal(settings: Settings, reporter: Reporter) = @@ -178,14 +179,15 @@ object CompileServer { execute(() => (), args) /** - * Used for internal testing. The callback is called upon - * server start, notifying the caller that the server is - * ready to run. WARNING: the callback runs in the - * server's thread, blocking the server from doing any work - * until the callback is finished. Callbacks should be kept - * simple and clients should not try to interact with the - * server while the callback is processing. - */ + * The server's main loop. + * + * `startupCallback` is used for internal testing; it's called upon server start, + * notifying the caller that the server is ready to run. + * + * WARNING: the callback runs in the server's thread, blocking the server from doing any work + * until the callback is finished. Callbacks should be kept simple and clients should not try to + * interact with the server while the callback is processing. + */ def execute(startupCallback : () => Unit, args: Array[String]) { val debug = args contains "-v" var port = 0 @@ -199,8 +201,7 @@ object CompileServer { // Create instance rather than extend to pass a port parameter. 
val server = new StandardCompileServer(port) - val redirectDir = (server.compileSocket.tmpDir / "output-redirects").createDirectory() - + val redirectDir = server.compileSocket.mkDaemonDir("fsc_redirects") if (debug) { server.echo("Starting CompileServer on port " + server.port) server.echo("Redirect dir is " + redirectDir) diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala index 01c7d72d4f72..d0083059fcac 100644 --- a/src/compiler/scala/tools/nsc/CompileSocket.scala +++ b/src/compiler/scala/tools/nsc/CompileSocket.scala @@ -5,12 +5,17 @@ package scala.tools.nsc -import java.io.FileNotFoundException +import java.math.BigInteger import java.security.SecureRandom -import io.{ File, Path, Socket } -import scala.tools.util.CompileOutputCommon + +import scala.io.Codec +import scala.reflect.internal.util.OwnerOnlyChmod import scala.reflect.internal.util.StringOps.splitWhere import scala.sys.process._ +import scala.tools.nsc.Properties.scalacDir +import scala.tools.nsc.io.{File, Socket} +import scala.tools.util.CompileOutputCommon +import scala.util.control.NonFatal trait HasCompileSocket { def compileSocket: CompileSocket @@ -46,14 +51,10 @@ trait HasCompileSocket { class CompileSocket extends CompileOutputCommon { protected lazy val compileClient: StandardCompileClient = CompileClient def verbose = compileClient.verbose - + def verbose_=(v: Boolean) = compileClient.verbose = v /* Fixes the port where to start the server, 0 yields some free port */ var fixPort = 0 - /** The prefix of the port identification file, which is followed - * by the port number. - */ - protected lazy val dirName = "scalac-compile-server-port" protected def cmdName = Properties.scalaCmd /** The vm part of the command to start a new scala compile server */ @@ -69,20 +70,8 @@ class CompileSocket extends CompileOutputCommon { protected val serverClass = "scala.tools.nsc.CompileServer" protected def serverClassArgs = (if (verbose) List("-v") else Nil) ::: (if (fixPort > 0) List("-p", fixPort.toString) else Nil) - /** A temporary directory to use */ - val tmpDir = { - val udir = Option(Properties.userName) getOrElse "shared" - val f = (Path(Properties.tmpDir) / ("scala-devel" + udir)).createDirectory() - - if (f.isDirectory && f.canWrite) { - info("[Temp directory: " + f + "]") - f - } - else fatal("Could not find a directory for temporary files") - } - /* A directory holding port identification files */ - val portsDir = (tmpDir / dirName).createDirectory() + private lazy val portsDir = mkDaemonDir("fsc_port") /** The command which starts the compile server, given vm arguments. * @@ -104,7 +93,7 @@ class CompileSocket extends CompileOutputCommon { } /** The port identification file */ - def portFile(port: Int) = portsDir / File(port.toString) + def portFile(port: Int): File = portsDir / File(port.toString) /** Poll for a server port number; return -1 if none exists yet */ private def pollPort(): Int = if (fixPort > 0) { @@ -138,19 +127,19 @@ class CompileSocket extends CompileOutputCommon { } info("[Port number: " + port + "]") if (port < 0) - fatal("Could not connect to compilation daemon after " + attempts + " attempts.") + fatal(s"Could not connect to compilation daemon after $attempts attempts. 
To run without it, use `-nocompdaemon` or `-nc`.") port } /** Set the port number to which a scala compile server is connected */ - def setPort(port: Int) { - val file = portFile(port) - val secret = new SecureRandom().nextInt.toString - - try file writeAll secret catch { - case e @ (_: FileNotFoundException | _: SecurityException) => - fatal("Cannot create file: %s".format(file.path)) - } + def setPort(port: Int): Unit = { + val file = portFile(port) + // 128 bits of delicious randomness, suitable for printing with println over a socket, + // and storage in a file -- see getPassword + val secretDigits = new BigInteger(128, new SecureRandom()).toString.getBytes("UTF-8") + + try OwnerOnlyChmod.chmodFileAndWrite(file.jfile.toPath, secretDigits) + catch chmodFailHandler(s"Cannot create file: ${file}") } /** Delete the port number to which a scala compile server was connected */ @@ -208,7 +197,7 @@ class CompileSocket extends CompileOutputCommon { def getPassword(port: Int): String = { val ff = portFile(port) - val f = ff.bufferedReader() + val f = ff.bufferedReader(Codec.UTF8) // allow some time for the server to start up def check = { @@ -223,6 +212,24 @@ class CompileSocket extends CompileOutputCommon { f.close() result } + + private def chmodFailHandler(msg: String): PartialFunction[Throwable, Unit] = { + case NonFatal(e) => + if (verbose) e.printStackTrace() + fatal(msg) + } + + def mkDaemonDir(name: String) = { + val dir = (scalacDir / name).createDirectory() + + if (dir.isDirectory && dir.canWrite) info(s"[Temp directory: $dir]") + else fatal(s"Could not create compilation daemon directory $dir") + + try OwnerOnlyChmod.chmod(dir.jfile.toPath) + catch chmodFailHandler(s"Failed to change permissions on $dir. The compilation daemon requires a secure directory; use -nc to disable the daemon.") + dir + } + } diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala index 726640feb54a..332467fce2db 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala @@ -45,5 +45,9 @@ class GenericRunnerSettings(error: String => Unit) extends Settings(error) { val nc = BooleanSetting( "-nc", - "do not use the fsc compilation daemon") withAbbreviation "-nocompdaemon" + "do not use the fsc compilation daemon") withAbbreviation "-nocompdaemon" withPostSetHook((x: BooleanSetting) => {_useCompDaemon = !x.value }) + + + private[this] var _useCompDaemon = true + def useCompDaemon: Boolean = _useCompDaemon } diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala index cb523edfe593..334158982bbb 100644 --- a/src/compiler/scala/tools/nsc/Properties.scala +++ b/src/compiler/scala/tools/nsc/Properties.scala @@ -5,6 +5,8 @@ package scala.tools.nsc +import scala.tools.nsc.io.Path + /** Loads `compiler.properties` from the jar archive file. 
*/ object Properties extends scala.util.PropertiesTrait { @@ -28,4 +30,7 @@ object Properties extends scala.util.PropertiesTrait { // derived values def isEmacsShell = propOrEmpty("env.emacs") != "" + + // Where we keep fsc's state (ports/redirection) + lazy val scalacDir = (Path(Properties.userHome) / ".scalac").createDirectory(force = false) } diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index 2cd9e6cbbe4f..1f1953803ea0 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -65,7 +65,10 @@ class ScriptRunner extends HasCompileSocket { val coreCompArgs = compSettings flatMap (_.unparse) val compArgs = coreCompArgs ++ List("-Xscript", scriptMain(settings), scriptFile) - CompileSocket getOrCreateSocket "" match { + // TODO: untangle this mess of top-level objects with their own little view of the mutable world of settings + compileSocket.verbose = settings.verbose.value + + compileSocket getOrCreateSocket "" match { case Some(sock) => compileOnServer(sock, compArgs) case _ => false } @@ -97,7 +100,7 @@ class ScriptRunner extends HasCompileSocket { settings.outdir.value = compiledPath.path - if (settings.nc) { + if (!settings.useCompDaemon) { /* Setting settings.script.value informs the compiler this is not a * self contained compilation unit. */ diff --git a/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala new file mode 100644 index 000000000000..7c7950d93233 --- /dev/null +++ b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala @@ -0,0 +1,59 @@ +/* NSC -- new Scala compiler + * Copyright 2017 LAMP/EPFL + * @author Martin Odersky + */ +package scala.reflect.internal.util + +import java.nio.ByteBuffer +import java.nio.file.StandardOpenOption.{CREATE, TRUNCATE_EXISTING, WRITE} +import java.nio.file.attribute.PosixFilePermission.{OWNER_EXECUTE, OWNER_READ, OWNER_WRITE} +import java.nio.file.attribute.PosixFilePermissions.asFileAttribute +import java.nio.file.attribute._ +import java.nio.file.{Files, Path} +import java.util.EnumSet + + +object OwnerOnlyChmod { + private def canPosix(path: Path) = + Files.getFileStore(path).supportsFileAttributeView(classOf[PosixFileAttributeView]) + + private val posixDir = EnumSet.of(OWNER_READ, OWNER_WRITE, OWNER_EXECUTE) + private val posixFile = EnumSet.of(OWNER_READ, OWNER_WRITE) + private def fileAttributes(path: Path) = + if (canPosix(path)) Array(asFileAttribute(posixFile)) else Array.empty[FileAttribute[_]] + + /** Remove group/other permissions for `file`, it if exists, and if the runtime environment supports modifying permissions. 
*/ + def chmod(path: Path): Unit = { + if (canPosix(path)) Files.setPosixFilePermissions(path, if (Files.isDirectory(path)) posixDir else posixFile) + else { + // if getting this view fails, we fail + val view = Files.getFileAttributeView(path, classOf[AclFileAttributeView]) + if (view == null) throw new UnsupportedOperationException(s"Cannot get file attribute view for $path") + + val acls = { + val builder = AclEntry.newBuilder + builder.setPrincipal(view.getOwner) + builder.setPermissions(AclEntryPermission.values(): _*) + builder.setType(AclEntryType.ALLOW) + val entry = builder.build + java.util.Collections.singletonList(entry) + } + + view.setAcl(acls) + } + } + + def chmodFileOrCreateEmpty(path: Path): Unit = { + // Create new file if none existed, with appropriate permissions via the fileAttributes attributes (if supported). + Files.newByteChannel(path, EnumSet.of(WRITE, CREATE), fileAttributes(path): _*).close() + // Change (if needed -- either because the file already existed, or the FS needs a separate call to set the ACL) + chmod(path) + } + + def chmodFileAndWrite(path: Path, contents: Array[Byte]): Unit = { + val sbc = Files.newByteChannel(path, EnumSet.of(WRITE, TRUNCATE_EXISTING), fileAttributes(path): _*) + try sbc.write(ByteBuffer.wrap(contents)) finally sbc.close() + chmod(path) // for acl-based FS + } +} + diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala index 53a06ca97258..3dc6f01c0a69 100644 --- a/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala +++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala @@ -8,15 +8,37 @@ package scala.tools.nsc.interpreter.jline import _root_.jline.console.history.PersistentHistory import scala.tools.nsc.interpreter -import scala.reflect.io.{ File, Path } -import scala.tools.nsc.Properties.{ propOrNone, userHome } +import scala.reflect.io.{File, Path} +import scala.tools.nsc.Properties.{propOrNone, userHome} +import scala.reflect.internal.util.OwnerOnlyChmod +import scala.util.control.NonFatal /** TODO: file locking. */ trait FileBackedHistory extends JLineHistory with PersistentHistory { def maxSize: Int - protected lazy val historyFile: File = FileBackedHistory.defaultFile + // For a history file in the standard location, always try to restrict permission, + // creating an empty file if none exists. + // For a user-specified location, only lock down permissions if we're the ones + // creating it, otherwise responsibility for permissions is up to the caller. 
+ protected lazy val historyFile: File = File { + propOrNone("scala.shell.histfile").map(Path.apply) match { + case Some(p) => if (!p.exists) secure(p) else p + case None => secure(Path(userHome) / FileBackedHistory.defaultFileName) + } + } + + private def secure(p: Path): Path = { + try OwnerOnlyChmod.chmodFileOrCreateEmpty(p.jfile.toPath) + catch { case NonFatal(e) => + if (interpreter.isReplDebug) e.printStackTrace() + interpreter.replinfo(s"Warning: history file ${p}'s permissions could not be restricted to owner-only.") + } + + p + } + private var isPersistent = true locally { @@ -86,8 +108,4 @@ object FileBackedHistory { // val ContinuationNL: String = Array('\003', '\n').mkString final val defaultFileName = ".scala_history" - - def defaultFile: File = File( - propOrNone("scala.shell.histfile") map (Path.apply) getOrElse (Path(userHome) / defaultFileName) - ) } From c0bcc296dff008c0815b98e4f64452f63810a62f Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 10 Oct 2017 14:43:08 -0700 Subject: [PATCH 1090/2793] Unbreak compilation daemon portfile writing canPosix fails for non-existent files We can just do the chmod after writing the data, since the file is in a secure directory, there is no potential for a race condition. --- src/compiler/scala/tools/util/SocketServer.scala | 14 ++++++++++---- .../reflect/internal/util/OwnerOnlyChmod.scala | 11 ++++------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala index 7858bf06581b..acf406c676cf 100644 --- a/src/compiler/scala/tools/util/SocketServer.scala +++ b/src/compiler/scala/tools/util/SocketServer.scala @@ -9,17 +9,23 @@ package scala package tools.util -import java.net.{ ServerSocket, SocketException, SocketTimeoutException } -import java.io.{ PrintWriter, BufferedReader } +import java.net.{ServerSocket, SocketException, SocketTimeoutException} +import java.io.{BufferedReader, PrintStream, PrintWriter} + import scala.tools.nsc.io.Socket trait CompileOutputCommon { def verbose: Boolean def info(msg: String) = if (verbose) echo(msg) - def echo(msg: String) = {Console println msg; Console.flush()} - def warn(msg: String) = {Console.err println msg; Console.flush()} + def echo(msg: String) = printlnFlush(msg, Console.out) + def warn(msg: String) = printlnFlush(msg, Console.err) def fatal(msg: String) = { warn(msg) ; sys.exit(1) } + + private def printlnFlush(msg: String, out: PrintStream) = { + out.println(msg) + out.flush() + } } /** The abstract class SocketServer implements the server diff --git a/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala index 7c7950d93233..ece34966a44b 100644 --- a/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala +++ b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala @@ -14,13 +14,12 @@ import java.util.EnumSet object OwnerOnlyChmod { + // @requires Files.exists(path) private def canPosix(path: Path) = Files.getFileStore(path).supportsFileAttributeView(classOf[PosixFileAttributeView]) private val posixDir = EnumSet.of(OWNER_READ, OWNER_WRITE, OWNER_EXECUTE) private val posixFile = EnumSet.of(OWNER_READ, OWNER_WRITE) - private def fileAttributes(path: Path) = - if (canPosix(path)) Array(asFileAttribute(posixFile)) else Array.empty[FileAttribute[_]] /** Remove group/other permissions for `file`, it if exists, and if the runtime environment supports modifying permissions. 
*/ def chmod(path: Path): Unit = { @@ -44,16 +43,14 @@ object OwnerOnlyChmod { } def chmodFileOrCreateEmpty(path: Path): Unit = { - // Create new file if none existed, with appropriate permissions via the fileAttributes attributes (if supported). - Files.newByteChannel(path, EnumSet.of(WRITE, CREATE), fileAttributes(path): _*).close() - // Change (if needed -- either because the file already existed, or the FS needs a separate call to set the ACL) + Files.newByteChannel(path, EnumSet.of(WRITE, CREATE)).close() // make sure it exists chmod(path) } def chmodFileAndWrite(path: Path, contents: Array[Byte]): Unit = { - val sbc = Files.newByteChannel(path, EnumSet.of(WRITE, TRUNCATE_EXISTING), fileAttributes(path): _*) + val sbc = Files.newByteChannel(path, EnumSet.of(WRITE, CREATE, TRUNCATE_EXISTING)) try sbc.write(ByteBuffer.wrap(contents)) finally sbc.close() - chmod(path) // for acl-based FS + chmod(path) } } From c2a5883891a68180b143eb462c8b0cebc8d3b021 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 10 Oct 2017 15:05:11 -0700 Subject: [PATCH 1091/2793] Upgrade to jline 2.14.5 Fixes a regression in newline printing --- versions.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/versions.properties b/versions.properties index 25032ac4d124..f17bf2255d27 100644 --- a/versions.properties +++ b/versions.properties @@ -24,4 +24,4 @@ scala-parser-combinators.version.number=1.0.6 scala-swing.version.number=2.0.0 partest.version.number=1.1.1 scala-asm.version=5.2.0-scala-2 -jline.version=2.14.4 +jline.version=2.14.5 From e50d0691bfb3be4bf0f24d55cc26a79853a1f780 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 12 Oct 2017 15:17:40 -0700 Subject: [PATCH 1092/2793] Bump build number to 2.10.7 --- build.number | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.number b/build.number index ddb01678e5e4..c040e1d2948e 100644 --- a/build.number +++ b/build.number @@ -1,7 +1,7 @@ #Tue Sep 11 19:21:09 CEST 2007 version.major=2 version.minor=10 -version.patch=6 +version.patch=7 # This is the -N part of a version. if it's 0, it's dropped from maven versions. version.bnum=0 From cee6d9d3be8260376e1ba5062076ac7e2861ca53 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 12 Oct 2017 11:28:30 -0700 Subject: [PATCH 1093/2793] Use https in pull-binary-libs Mysterious failures in the http:// urls. --- tools/binary-repo-lib.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh index 2f5d481e39ed..ebf72c282f8a 100755 --- a/tools/binary-repo-lib.sh +++ b/tools/binary-repo-lib.sh @@ -3,8 +3,8 @@ # Library to push and pull binary artifacts from a remote repository using CURL. 
-remote_urlget="http://repo.typesafe.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap" -remote_urlpush="http://private-repo.typesafe.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap" +remote_urlget="https://repo.typesafe.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap" +remote_urlpush="https://private-repo.typesafe.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap" libraryJar="$(pwd)/lib/scala-library.jar" desired_ext=".desired.sha1" push_jar="$(pwd)/tools/push.jar" From 04bee52459bd60b87e752a4d2bb3bf1d0ecd64ec Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Thu, 28 Jan 2016 19:23:08 +0100 Subject: [PATCH 1094/2793] Document when the `scala` command starts/uses a compilation daemon Cherry picked from dcc455a --- src/compiler/scala/tools/nsc/GenericRunnerCommand.scala | 5 ++++- src/manual/scala/man1/scala.scala | 9 +++++++++ 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala index c8fd5985c651..029ade1e4d3e 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala @@ -87,7 +87,10 @@ self-contained compilation units (classes and objects) and exactly one runnable main method. In that case the file will be compiled and the main method invoked. This provides a bridge between scripts and standard scala source. - """) + "\n" + +When running a script or using -e, an already running compilation daemon +(fsc) is used, or a new one started on demand. The -nc option can be +used to prevent this.%n""") } object GenericRunnerCommand { diff --git a/src/manual/scala/man1/scala.scala b/src/manual/scala/man1/scala.scala index dbd4ea55a2bf..c08b9ec2d734 100644 --- a/src/manual/scala/man1/scala.scala +++ b/src/manual/scala/man1/scala.scala @@ -64,6 +64,10 @@ object scala extends Command { CmdOption("nocompdaemon"), "Do not use the " & MBold("fsc") & " offline compiler."), + Definition( + CmdOption("nc"), + "Same as " & Mono("-nocompdaemon") & "."), + Definition( CmdOptionBound("D", "property=value"), "Set a Java system property. If no value is specified, " & @@ -135,6 +139,11 @@ object scala extends Command { "line. Headers can be used to make stand-alone script files, as shown " & "in the examples below.", + "When running a script or using " & Mono("-e") & ", an already running " & + "compilation daemon (fsc) is used, or a new one started on demand. The " & + Mono("-nocompdaemon") & " or " & Mono("-nc") & " option can be used to " & + "prevent this.", + "If " & Mono("scala") & " is run from an sbaz(1) directory, " & "then it will add to its classpath any jars installed in the " & "lib directory of the sbaz directory. Additionally, if no " & From caa9ebc482969a884da5f9c9c246470811b8599d Mon Sep 17 00:00:00 2001 From: Teemu Lehtinen Date: Wed, 20 Aug 2014 13:20:41 +0300 Subject: [PATCH 1095/2793] Add option -port to fsc Option "port" limits compile server lookup and start to given port. Normally fsc will start a compile server in a random port if no server is yet running. This can be problematic with firewalls and/or remote compile servers. Option "port" should not be confused with option "server" which looks for a compile server in given host and port and fails if such server is not found. Automatic tests for command line user interface do not exist at all. Thus, adding a test for one new option would require designing a whole new testing method. 
Cherry picked from 7daecd8 --- .../scala/tools/nsc/CompileClient.scala | 4 +- .../scala/tools/nsc/CompileServer.scala | 56 ++++++++++++------- .../scala/tools/nsc/CompileSocket.scala | 37 +++++++----- .../tools/nsc/settings/FscSettings.scala | 4 +- .../scala/tools/util/SocketServer.scala | 4 +- 5 files changed, 65 insertions(+), 40 deletions(-) diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala index 731f6926f003..842d6ac535be 100644 --- a/src/compiler/scala/tools/nsc/CompileClient.scala +++ b/src/compiler/scala/tools/nsc/CompileClient.scala @@ -43,8 +43,8 @@ class StandardCompileClient extends HasCompileSocket with CompileOutputCommon { info(vmArgs.mkString("[VM arguments: ", " ", "]")) val socket = - if (settings.server.value == "") compileSocket.getOrCreateSocket(vmArgs mkString " ", !shutdown) - else Some(compileSocket.getSocket(settings.server.value)) + if (settings.server.value == "") compileSocket.getOrCreateSocket(vmArgs mkString " ", !shutdown, settings.port.value) + else compileSocket.getSocket(settings.server.value) socket match { case Some(sock) => compileOnServer(sock, fscArgs) diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala index 7a0a072bb8d0..6352d75686a0 100644 --- a/src/compiler/scala/tools/nsc/CompileServer.scala +++ b/src/compiler/scala/tools/nsc/CompileServer.scala @@ -5,11 +5,13 @@ package scala.tools.nsc -import java.io.{ BufferedOutputStream, FileOutputStream, PrintStream } -import scala.tools.nsc.reporters.{Reporter, ConsoleReporter} -import scala.reflect.internal.util.FakePos //Position +import java.io.PrintStream + +import scala.reflect.internal.util.FakePos +import scala.tools.nsc.io.Directory +import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} +import scala.tools.nsc.settings.FscSettings import scala.tools.util.SocketServer -import settings.FscSettings /** * The server part of the fsc offline compiler. It awaits compilation @@ -19,7 +21,7 @@ import settings.FscSettings * @author Martin Odersky * @version 1.0 */ -class StandardCompileServer extends SocketServer { +class StandardCompileServer(fixPort: Int = 0) extends SocketServer(fixPort) { lazy val compileSocket: CompileSocket = CompileSocket private var compiler: Global = null @@ -34,7 +36,7 @@ class StandardCompileServer extends SocketServer { val MaxCharge = 0.8 private val runtime = Runtime.getRuntime() - import runtime.{ totalMemory, freeMemory, maxMemory } + import runtime.{freeMemory, maxMemory, totalMemory} /** Create a new compiler instance */ def newGlobal(settings: Settings, reporter: Reporter) = @@ -170,16 +172,16 @@ class StandardCompileServer extends SocketServer { } -object CompileServer extends StandardCompileServer { +object CompileServer { /** A directory holding redirected output */ - private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory() + //private lazy val redirectDir = (compileSocket.tmpDir / "output-redirects").createDirectory() - private def createRedirect(filename: String) = - new PrintStream((redirectDir / filename).createFile().bufferedOutput()) + private def createRedirect(dir: Directory, filename: String) = + new PrintStream((dir / filename).createFile().bufferedOutput()) - def main(args: Array[String]) = + def main(args: Array[String]) = execute(() => (), args) - + /** * Used for internal testing. 
The callback is called upon * server start, notifying the caller that the server is @@ -191,21 +193,33 @@ object CompileServer extends StandardCompileServer { */ def execute(startupCallback : () => Unit, args: Array[String]) { val debug = args contains "-v" + var port = 0 + + val i = args.indexOf("-p") + if (i >= 0 && args.length > i + 1) { + scala.util.control.Exception.ignoring(classOf[NumberFormatException]) { + port = args(i + 1).toInt + } + } + + // Create instance rather than extend to pass a port parameter. + val server = new StandardCompileServer(port) + val redirectDir = (server.compileSocket.tmpDir / "output-redirects").createDirectory() if (debug) { - echo("Starting CompileServer on port " + port) - echo("Redirect dir is " + redirectDir) + server.echo("Starting CompileServer on port " + server.port) + server.echo("Redirect dir is " + redirectDir) } - Console.withErr(createRedirect("scala-compile-server-err.log")) { - Console.withOut(createRedirect("scala-compile-server-out.log")) { - Console.err.println("...starting server on socket "+port+"...") + Console.withErr(createRedirect(redirectDir, "scala-compile-server-err.log")) { + Console.withOut(createRedirect(redirectDir, "scala-compile-server-out.log")) { + Console.err.println("...starting server on socket "+server.port+"...") Console.err.flush() - compileSocket setPort port + server.compileSocket setPort server.port startupCallback() - run() - - compileSocket deletePort port + server.run() + + server.compileSocket deletePort server.port } } } diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala index 4051bda9144d..f5039b8303f4 100644 --- a/src/compiler/scala/tools/nsc/CompileSocket.scala +++ b/src/compiler/scala/tools/nsc/CompileSocket.scala @@ -5,16 +5,13 @@ package scala.tools.nsc -import java.io.{ IOException, FileNotFoundException, PrintWriter, FileOutputStream } -import java.io.{ BufferedReader, FileReader } -import java.util.regex.Pattern -import java.net._ +import java.io.FileNotFoundException import java.security.SecureRandom -import io.{ File, Path, Directory, Socket } -import scala.util.control.Exception.catching -import scala.tools.util.CompileOutputCommon + import scala.reflect.internal.util.StringOps.splitWhere import scala.sys.process._ +import scala.tools.nsc.io.{File, Path, Socket} +import scala.tools.util.CompileOutputCommon trait HasCompileSocket { def compileSocket: CompileSocket @@ -50,6 +47,9 @@ class CompileSocket extends CompileOutputCommon { protected lazy val compileClient: StandardCompileClient = CompileClient def verbose = compileClient.verbose + /* Fixes the port where to start the server, 0 yields some free port */ + var fixPort = 0 + /** The prefix of the port identification file, which is followed * by the port number. 
*/ @@ -67,7 +67,7 @@ class CompileSocket extends CompileOutputCommon { /** The class name of the scala compile server */ protected val serverClass = "scala.tools.nsc.CompileServer" - protected def serverClassArgs = if (verbose) List("-v") else Nil // debug + protected def serverClassArgs = (if (verbose) List("-v") else Nil) ::: (if (fixPort > 0) List("-p", fixPort.toString) else Nil) /** A temporary directory to use */ val tmpDir = { @@ -107,9 +107,14 @@ class CompileSocket extends CompileOutputCommon { def portFile(port: Int) = portsDir / File(port.toString) /** Poll for a server port number; return -1 if none exists yet */ - private def pollPort(): Int = portsDir.list.toList match { + private def pollPort(): Int = if (fixPort > 0) { + if (portsDir.list.toList.exists(_.name == fixPort.toString)) fixPort else -1 + } else portsDir.list.toList match { case Nil => -1 - case x :: xs => try x.name.toInt finally xs foreach (_.delete()) + case x :: xs => try x.name.toInt catch { + case e: Exception => x.delete() + throw e + } } /** Get the port number to which a scala compile server is connected; @@ -155,7 +160,8 @@ class CompileSocket extends CompileOutputCommon { * create a new daemon if necessary. Returns None if the connection * cannot be established. */ - def getOrCreateSocket(vmArgs: String, create: Boolean = true): Option[Socket] = { + def getOrCreateSocket(vmArgs: String, create: Boolean = true, fixedPort: Int = 0): Option[Socket] = { + fixPort = fixedPort val maxMillis = 10 * 1000 // try for 10 seconds val retryDelay = 50 val maxAttempts = maxMillis / retryDelay @@ -189,13 +195,16 @@ class CompileSocket extends CompileOutputCommon { try { Some(x.toInt) } catch { case _: NumberFormatException => None } - def getSocket(serverAdr: String): Socket = ( + def getSocket(serverAdr: String): Option[Socket] = ( for ((name, portStr) <- splitWhere(serverAdr, _ == ':', true) ; port <- parseInt(portStr)) yield getSocket(name, port) ) getOrElse fatal("Malformed server address: %s; exiting" format serverAdr) - def getSocket(hostName: String, port: Int): Socket = - Socket(hostName, port).opt getOrElse fatal("Unable to establish connection to server %s:%d; exiting".format(hostName, port)) + def getSocket(hostName: String, port: Int): Option[Socket] = { + val sock = Socket(hostName, port).opt + if (sock.isEmpty) warn("Unable to establish connection to server %s:%d".format(hostName, port)) + sock + } def getPassword(port: Int): String = { val ff = portFile(port) diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala index 5c852ae07c1e..f5f971d697ee 100644 --- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala @@ -22,13 +22,15 @@ class FscSettings(error: String => Unit) extends Settings(error) { val reset = BooleanSetting("-reset", "Reset compile server caches") val shutdown = BooleanSetting("-shutdown", "Shutdown compile server") val server = StringSetting ("-server", "hostname:portnumber", "Specify compile server socket", "") + val port = IntSetting ("-port", "Search and start compile server in given port only", + 0, Some((0, Int.MaxValue)), (_: String) => None) val preferIPv4 = BooleanSetting("-ipv4", "Use IPv4 rather than IPv6 for the server socket") val idleMins = IntSetting ("-max-idle", "Set idle timeout in minutes for fsc (use 0 for no timeout)", 30, Some((0, Int.MaxValue)), (_: String) => None) // For improved help output, separating fsc options from the 
others. def fscSpecific = Set[Settings#Setting]( - currentDir, reset, shutdown, server, preferIPv4, idleMins + currentDir, reset, shutdown, server, port, preferIPv4, idleMins ) val isFscSpecific: String => Boolean = fscSpecific map (_.name) diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala index 1b06ce2ff2ed..edbc7ecc554d 100644 --- a/src/compiler/scala/tools/util/SocketServer.scala +++ b/src/compiler/scala/tools/util/SocketServer.scala @@ -27,12 +27,12 @@ trait CompileOutputCommon { * @author Martin Odersky * @version 1.0 */ -abstract class SocketServer extends CompileOutputCommon { +abstract class SocketServer(fixPort: Int = 0) extends CompileOutputCommon { def shutdown: Boolean def session(): Unit def timeout(): Unit = () // called after a timeout is detected for subclasses to cleanup // a hook for subclasses - protected def createServerSocket(): ServerSocket = new ServerSocket(0) + protected def createServerSocket(): ServerSocket = new ServerSocket(fixPort) var in: BufferedReader = _ var out: PrintWriter = _ From 67e1437e55df6789d0883cb8846d12071de75c63 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 2 Oct 2017 10:06:55 +1000 Subject: [PATCH 1096/2793] Move compilation daemon portfile under `~/.scalac/` Store the compilation daemon's administrativia (port file, redirection) under `~/.scalac/`, instead of the less standard `/tmp/scala-devel/${USER:shared}/scalac-compile-server-port`. On creation, remove group- and other-permissions from these private files, ditto for the repl's history file. On Java 6 on Windows, opt in to compilation daemon using `-nc:false`. Cherry picked from b64ad85, aa133c9, 2ceb09c --- .../scala/tools/nsc/CompileServer.scala | 22 ++-- .../scala/tools/nsc/CompileSocket.scala | 68 ++++++----- .../tools/nsc/GenericRunnerSettings.scala | 5 +- src/compiler/scala/tools/nsc/Properties.scala | 5 + .../scala/tools/nsc/ScriptRunner.scala | 20 +++- .../session/FileBackedHistory.scala | 32 +++++- .../tools/nsc/util/ScalaClassLoader.scala | 27 ++--- .../internal/util/OwnerOnlyChmod.scala | 107 ++++++++++++++++++ 8 files changed, 221 insertions(+), 65 deletions(-) create mode 100644 src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala index 6352d75686a0..c454ba8b62be 100644 --- a/src/compiler/scala/tools/nsc/CompileServer.scala +++ b/src/compiler/scala/tools/nsc/CompileServer.scala @@ -183,14 +183,15 @@ object CompileServer { execute(() => (), args) /** - * Used for internal testing. The callback is called upon - * server start, notifying the caller that the server is - * ready to run. WARNING: the callback runs in the - * server's thread, blocking the server from doing any work - * until the callback is finished. Callbacks should be kept - * simple and clients should not try to interact with the - * server while the callback is processing. - */ + * The server's main loop. + * + * `startupCallback` is used for internal testing; it's called upon server start, + * notifying the caller that the server is ready to run. + * + * WARNING: the callback runs in the server's thread, blocking the server from doing any work + * until the callback is finished. Callbacks should be kept simple and clients should not try to + * interact with the server while the callback is processing. 
+ */ def execute(startupCallback : () => Unit, args: Array[String]) { val debug = args contains "-v" var port = 0 @@ -198,14 +199,13 @@ object CompileServer { val i = args.indexOf("-p") if (i >= 0 && args.length > i + 1) { scala.util.control.Exception.ignoring(classOf[NumberFormatException]) { - port = args(i + 1).toInt + port = args(i + 1).toInt } } // Create instance rather than extend to pass a port parameter. val server = new StandardCompileServer(port) - val redirectDir = (server.compileSocket.tmpDir / "output-redirects").createDirectory() - + val redirectDir = server.compileSocket.mkDaemonDir("fsc_redirects") if (debug) { server.echo("Starting CompileServer on port " + server.port) server.echo("Redirect dir is " + redirectDir) diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala index f5039b8303f4..b73d251e9cc6 100644 --- a/src/compiler/scala/tools/nsc/CompileSocket.scala +++ b/src/compiler/scala/tools/nsc/CompileSocket.scala @@ -5,13 +5,17 @@ package scala.tools.nsc -import java.io.FileNotFoundException +import java.math.BigInteger import java.security.SecureRandom +import scala.io.Codec +import scala.reflect.internal.util.OwnerOnlyChmod import scala.reflect.internal.util.StringOps.splitWhere import scala.sys.process._ -import scala.tools.nsc.io.{File, Path, Socket} +import scala.tools.nsc.Properties.scalacDir +import scala.tools.nsc.io.{File, Socket} import scala.tools.util.CompileOutputCommon +import scala.util.control.NonFatal trait HasCompileSocket { def compileSocket: CompileSocket @@ -46,14 +50,10 @@ trait HasCompileSocket { class CompileSocket extends CompileOutputCommon { protected lazy val compileClient: StandardCompileClient = CompileClient def verbose = compileClient.verbose - + def verbose_=(v: Boolean) = compileClient.verbose = v /* Fixes the port where to start the server, 0 yields some free port */ var fixPort = 0 - /** The prefix of the port identification file, which is followed - * by the port number. - */ - protected lazy val dirName = "scalac-compile-server-port" protected def cmdName = Properties.scalaCmd /** The vm part of the command to start a new scala compile server */ @@ -69,20 +69,8 @@ class CompileSocket extends CompileOutputCommon { protected val serverClass = "scala.tools.nsc.CompileServer" protected def serverClassArgs = (if (verbose) List("-v") else Nil) ::: (if (fixPort > 0) List("-p", fixPort.toString) else Nil) - /** A temporary directory to use */ - val tmpDir = { - val udir = Option(Properties.userName) getOrElse "shared" - val f = (Path(Properties.tmpDir) / ("scala-devel" + udir)).createDirectory() - - if (f.isDirectory && f.canWrite) { - info("[Temp directory: " + f + "]") - f - } - else fatal("Could not find a directory for temporary files") - } - /* A directory holding port identification files */ - val portsDir = (tmpDir / dirName).createDirectory() + private lazy val portsDir = mkDaemonDir("fsc_port") /** The command which starts the compile server, given vm arguments. 
* @@ -104,7 +92,7 @@ class CompileSocket extends CompileOutputCommon { } /** The port identification file */ - def portFile(port: Int) = portsDir / File(port.toString) + def portFile(port: Int): File = portsDir / File(port.toString) /** Poll for a server port number; return -1 if none exists yet */ private def pollPort(): Int = if (fixPort > 0) { @@ -138,19 +126,19 @@ class CompileSocket extends CompileOutputCommon { } info("[Port number: " + port + "]") if (port < 0) - fatal("Could not connect to compilation daemon after " + attempts + " attempts.") + fatal(s"Could not connect to compilation daemon after $attempts attempts. To run without it, use `-nocompdaemon` or `-nc`.") port } /** Set the port number to which a scala compile server is connected */ - def setPort(port: Int) { - val file = portFile(port) - val secret = new SecureRandom().nextInt.toString - - try file writeAll secret catch { - case e @ (_: FileNotFoundException | _: SecurityException) => - fatal("Cannot create file: %s".format(file.path)) - } + def setPort(port: Int): Unit = { + val file = portFile(port) + // 128 bits of delicious randomness, suitable for printing with println over a socket, + // and storage in a file -- see getPassword + val secretDigits = new BigInteger(128, new SecureRandom()).toString.getBytes("UTF-8") + + try OwnerOnlyChmod().chmodAndWrite(file.jfile, secretDigits) + catch chmodFailHandler(s"Cannot create file: ${file}") } /** Delete the port number to which a scala compile server was connected */ @@ -208,7 +196,7 @@ class CompileSocket extends CompileOutputCommon { def getPassword(port: Int): String = { val ff = portFile(port) - val f = ff.bufferedReader() + val f = ff.bufferedReader(Codec.UTF8) // allow some time for the server to start up def check = { @@ -223,6 +211,24 @@ class CompileSocket extends CompileOutputCommon { f.close() result } + + private def chmodFailHandler(msg: String): PartialFunction[Throwable, Unit] = { + case NonFatal(e) => + if (verbose) e.printStackTrace() + fatal(msg) + } + + def mkDaemonDir(name: String) = { + val dir = (scalacDir / name).createDirectory() + + if (dir.isDirectory && dir.canWrite) info(s"[Temp directory: $dir]") + else fatal(s"Could not create compilation daemon directory $dir") + + try OwnerOnlyChmod().chmod(dir.jfile) + catch chmodFailHandler(s"Failed to change permissions on $dir. 
The compilation daemon requires a secure directory; use -nc to disable the daemon.") + dir + } + } diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala index 9c2db11a56ea..edfc095c7f75 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala @@ -38,8 +38,11 @@ class GenericRunnerSettings(error: String => Unit) extends Settings(error) { val nc = BooleanSetting( "-nc", - "do not use the fsc compilation daemon") withAbbreviation "-nocompdaemon" + "do not use the fsc compilation daemon") withAbbreviation "-nocompdaemon" withPostSetHook((x: BooleanSetting) => {_useCompDaemon = !x.value }) @deprecated("Use `nc` instead", "2.9.0") def nocompdaemon = nc @deprecated("Use `save` instead", "2.9.0") def savecompiled = save + + private[this] var _useCompDaemon = true + def useCompDaemon: Boolean = _useCompDaemon } diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala index 55fd19671640..8b314ba0b82a 100644 --- a/src/compiler/scala/tools/nsc/Properties.scala +++ b/src/compiler/scala/tools/nsc/Properties.scala @@ -5,6 +5,8 @@ package scala.tools.nsc +import scala.tools.nsc.io.Path + /** Loads `compiler.properties` from the jar archive file. */ object Properties extends scala.util.PropertiesTrait { @@ -22,4 +24,7 @@ object Properties extends scala.util.PropertiesTrait { // derived values def isEmacsShell = propOrEmpty("env.emacs") != "" def fileEndings = fileEndingString.split("""\|""").toList + + // Where we keep fsc's state (ports/redirection) + lazy val scalacDir = (Path(Properties.userHome) / ".scalac").createDirectory(force = false) } diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index 107c4b3df3dd..9af0079ffd6c 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -77,7 +77,10 @@ class ScriptRunner extends HasCompileSocket { val coreCompArgs = compSettings flatMap (_.unparse) val compArgs = coreCompArgs ++ List("-Xscript", scriptMain(settings), scriptFile) - CompileSocket getOrCreateSocket "" match { + // TODO: untangle this mess of top-level objects with their own little view of the mutable world of settings + compileSocket.verbose = settings.verbose.value + + compileSocket getOrCreateSocket "" match { case Some(sock) => compileOnServer(sock, compArgs) case _ => false } @@ -109,14 +112,23 @@ class ScriptRunner extends HasCompileSocket { settings.outdir.value = compiledPath.path - if (settings.nc.value) { - /** Setting settings.script.value informs the compiler this is not a - * self contained compilation unit. + // can't reliably lock down permissions on the portfile in this environment => disable by default. + // not the cleanest to do this here, but I don't see where else to decide this and emit the warning below + val cantLockdown = !settings.nc.isSetByUser && scala.util.Properties.isWin && !scala.util.Properties.isJavaAtLeast("7") + + if (cantLockdown) settings.nc.value = true + + if (!settings.useCompDaemon) { + /* Setting settings.script.value informs the compiler this is not a + * self contained compilation unit. */ settings.script.value = mainClass val reporter = new ConsoleReporter(settings) val compiler = newGlobal(settings, reporter) + if (cantLockdown) + reporter.echo("[info] The compilation daemon is disabled by default on this platform. 
To force its usage, use `-nocompdaemon:false`.") + new compiler.Run compile List(scriptFile) if (reporter.hasErrors) None else Some(compiledPath) } diff --git a/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala b/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala index dddfb1b8f646..5467c0a61ef7 100644 --- a/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala +++ b/src/compiler/scala/tools/nsc/interpreter/session/FileBackedHistory.scala @@ -7,14 +7,37 @@ package scala.tools.nsc package interpreter package session -import scala.tools.nsc.io._ -import FileBackedHistory._ +import scala.reflect.internal.util.OwnerOnlyChmod +import scala.reflect.io.{File, Path} +import scala.tools.nsc.Properties.{propOrNone, userHome} +import scala.util.control.NonFatal /** TODO: file locking. */ trait FileBackedHistory extends JLineHistory with JPersistentHistory { def maxSize: Int - protected lazy val historyFile: File = defaultFile + + // For a history file in the standard location, always try to restrict permission, + // creating an empty file if none exists. + // For a user-specified location, only lock down permissions if we're the ones + // creating it, otherwise responsibility for permissions is up to the caller. + protected lazy val historyFile: File = File { + propOrNone("scala.shell.histfile").map(Path.apply) match { + case Some(p) => if (!p.exists) secure(p) else p + case None => secure(Path(userHome) / FileBackedHistory.defaultFileName) + } + } + + private def secure(p: Path): Path = { + try OwnerOnlyChmod().chmodOrCreateEmpty(p.jfile) + catch { case NonFatal(e) => + if (interpreter.isReplDebug) e.printStackTrace() + interpreter.replinfo(s"Warning: history file ${p}'s permissions could not be restricted to owner-only.") + } + + p + } + private var isPersistent = true locally { @@ -79,6 +102,5 @@ object FileBackedHistory { // val ContinuationNL: String = Array('\003', '\n').mkString import Properties.userHome - def defaultFileName = ".scala_history" - def defaultFile: File = File(Path(userHome) / defaultFileName) + final val defaultFileName = ".scala_history" } diff --git a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala index 1f6fa68f5726..0673fa1f758f 100644 --- a/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala +++ b/src/compiler/scala/tools/nsc/util/ScalaClassLoader.scala @@ -3,19 +3,18 @@ * @author Paul Phillips */ -package scala.tools.nsc -package util - -import java.lang.{ ClassLoader => JClassLoader } -import java.lang.reflect.{ Constructor, Modifier, Method } -import java.io.{ File => JFile } -import java.net.{ URLClassLoader => JURLClassLoader } -import java.net.URL -import scala.reflect.runtime.ReflectionUtils.unwrapHandler -import ScalaClassLoader._ -import scala.util.control.Exception.{ catching } +package scala.tools.nsc.util + +import java.io.{File => JFile} +import java.lang.reflect.{Constructor, Modifier} +import java.lang.{ClassLoader => JClassLoader} +import java.net.{URL, URLClassLoader => JURLClassLoader} + import scala.language.implicitConversions -import scala.reflect.{ ClassTag, classTag } +import scala.reflect.runtime.ReflectionUtils.unwrapHandler +import scala.reflect.{ClassTag, classTag} +import scala.tools.nsc.io.Streamable +import scala.util.control.Exception.catching trait HasClassPath { def classPathURLs: Seq[URL] @@ -25,6 +24,8 @@ trait HasClassPath { * of java reflection. 
*/ trait ScalaClassLoader extends JClassLoader { + import ScalaClassLoader._ + /** Executing an action with this classloader as context classloader */ def asContext[T](action: => T): T = { val saved = contextLoader @@ -52,7 +53,7 @@ trait ScalaClassLoader extends JClassLoader { /** The actual bytes for a class file, or an empty array if it can't be found. */ def classBytes(className: String): Array[Byte] = classAsStream(className) match { case null => Array() - case stream => io.Streamable.bytes(stream) + case stream => Streamable.bytes(stream) } /** An InputStream representing the given class name, or null if not found. */ diff --git a/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala new file mode 100644 index 000000000000..c0da65db3873 --- /dev/null +++ b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala @@ -0,0 +1,107 @@ +/* NSC -- new Scala compiler + * Copyright 2017 LAMP/EPFL + * @author Martin Odersky + */ +package scala.reflect.internal.util + +import java.io.{File, FileOutputStream, IOException} + + +trait OwnerOnlyChmod { + /** Remove group/other permissions for `file`, it if exists */ + def chmod(file: java.io.File): Unit + + /** Delete `file` if it exists, recreate it with no group/other permissions, and write `contents` */ + final def chmodAndWrite(file: File, contents: Array[Byte]): Unit = { + file.delete() + val fos = new FileOutputStream(file) + fos.close() + chmod(file) + val fos2 = new FileOutputStream(file) + try { + fos2.write(contents) + } finally { + fos2.close() + } + } + + // TODO: use appropriate NIO call instead of two-step exists?/create! + final def chmodOrCreateEmpty(file: File): Unit = + if (!file.exists()) chmodAndWrite(file, Array[Byte]()) else chmod(file) + +} + +object OwnerOnlyChmod { + def apply(): OwnerOnlyChmod = { + if (!util.Properties.isWin) Java6UnixChmod + else if (util.Properties.isJavaAtLeast("7")) new NioAclChmodReflective + else NoOpOwnerOnlyChmod + } +} + +object NoOpOwnerOnlyChmod extends OwnerOnlyChmod { + override def chmod(file: File): Unit = () +} + + +/** Adjust permissions with `File.{setReadable, setWritable}` */ +object Java6UnixChmod extends OwnerOnlyChmod { + + def chmod(file: File): Unit = if (file.exists()) { + def clearAndSetOwnerOnly(f: (Boolean, Boolean) => Boolean): Unit = { + def fail() = throw new IOException("Unable to modify permissions of " + file) + // attribute = false, ownerOnly = false + if (!f(false, false)) fail() + // attribute = true, ownerOnly = true + if (!f(true, true)) fail() + } + if (file.isDirectory) { + clearAndSetOwnerOnly(file.setExecutable) + } + clearAndSetOwnerOnly(file.setReadable) + clearAndSetOwnerOnly(file.setWritable) + } +} + + +object NioAclChmodReflective { + val file_toPath = classOf[java.io.File].getMethod("toPath") + val files = Class.forName("java.nio.file.Files") + val path_class = Class.forName("java.nio.file.Path") + val getFileAttributeView = files.getMethod("getFileAttributeView", path_class, classOf[Class[_]], Class.forName("[Ljava.nio.file.LinkOption;")) + val linkOptionEmptyArray = java.lang.reflect.Array.newInstance(Class.forName("java.nio.file.LinkOption"), 0) + val aclFileAttributeView_class = Class.forName("java.nio.file.attribute.AclFileAttributeView") + val aclEntry_class = Class.forName("java.nio.file.attribute.AclEntry") + val aclEntryBuilder_class = Class.forName("java.nio.file.attribute.AclEntry$Builder") + val newBuilder = aclEntry_class.getMethod("newBuilder") + val 
aclEntryBuilder_build = aclEntryBuilder_class.getMethod("build") + val userPrinciple_class = Class.forName("java.nio.file.attribute.UserPrincipal") + val setPrincipal = aclEntryBuilder_class.getMethod("setPrincipal", userPrinciple_class) + val setPermissions = aclEntryBuilder_class.getMethod("setPermissions", Class.forName("[Ljava.nio.file.attribute.AclEntryPermission;")) + val aclEntryType_class = Class.forName("java.nio.file.attribute.AclEntryType") + val setType = aclEntryBuilder_class.getMethod("setType", aclEntryType_class) + val aclEntryPermission_class = Class.forName("java.nio.file.attribute.AclEntryPermission") + val aclEntryPermissionValues = aclEntryPermission_class.getDeclaredMethod("values") + val aclEntryType_ALLOW = aclEntryType_class.getDeclaredField("ALLOW") +} + +/** Reflective version of `NioAclChmod` */ +final class NioAclChmodReflective extends OwnerOnlyChmod { + import NioAclChmodReflective._ + def chmod(file: java.io.File): Unit = { + val path = file_toPath.invoke(file) + val view = getFileAttributeView.invoke(null, path, aclFileAttributeView_class, linkOptionEmptyArray) + val setAcl = aclFileAttributeView_class.getMethod("setAcl", classOf[java.util.List[_]]) + val getOwner = aclFileAttributeView_class.getMethod("getOwner") + val owner = getOwner.invoke(view) + setAcl.invoke(view, acls(owner)) + } + + private def acls(owner: Object) = { + val builder = newBuilder.invoke(null) + setPrincipal.invoke(builder, owner) + setPermissions.invoke(builder, aclEntryPermissionValues.invoke(null)) + setType.invoke(builder, aclEntryType_ALLOW.get(null)) + java.util.Collections.singletonList(aclEntryBuilder_build.invoke(builder)) + } +} From d8b6c9985156c132dece3164498fbaf0b1393264 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Fri, 13 Oct 2017 08:10:14 -0700 Subject: [PATCH 1097/2793] Typesafe -> Lightbend, 2013 -> 2017 for 2.10.7 --- CONTRIBUTING.md | 4 ++-- README.rst | 2 +- build.xml | 2 +- docs/LICENSE | 4 ++-- project/Versions.scala | 2 +- src/build/maven/continuations-plugin-pom.xml | 4 ++-- src/build/maven/jline-pom.xml | 4 ++-- src/build/maven/scala-actors-pom.xml | 4 ++-- src/build/maven/scala-compiler-pom.xml | 4 ++-- src/build/maven/scala-library-pom.xml | 4 ++-- src/build/maven/scala-partest-pom.xml | 4 ++-- src/build/maven/scala-reflect-pom.xml | 4 ++-- src/build/maven/scala-swing-pom.xml | 4 ++-- src/build/maven/scalap-pom.xml | 4 ++-- src/compiler/scala/tools/nsc/doc/Settings.scala | 2 +- src/compiler/scala/tools/nsc/doc/html/page/Template.scala | 2 +- 16 files changed, 27 insertions(+), 27 deletions(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 53d245331470..3aa9dc17669f 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,4 +1,4 @@ # No future Scala 2.10.x releases are planned. -[Scala 2.10.5](https://github.com/scala/scala/releases/v2.10.5) concluded this series. +[Scala 2.10.6](https://github.com/scala/scala/releases/v2.10.6) concluded this series. -We encourage you to target 2.11.x or 2.12.x instead. If you're feeling nostalgic, check out the [the 2.10.x contribution guidelines](https://github.com/scala/scala/blob/v2.10.5/CONTRIBUTING.md)! +We encourage you to target 2.12.x or 2.13.x instead. If you're feeling nostalgic, check out the [the 2.10.x contribution guidelines](https://github.com/scala/scala/blob/v2.10.5/CONTRIBUTING.md)! 
diff --git a/README.rst b/README.rst index 4ed283dd29c0..70415a52d166 100644 --- a/README.rst +++ b/README.rst @@ -191,7 +191,7 @@ In detail: http://github.com/scala/scala If you are interested in contributing code, we ask you to sign the -[Scala Contributor License Agreement](http://typesafe.com/contribute/cla/scala), +[Scala Contributor License Agreement](https://www.lightbend.com/contribute/cla/scala), which allows us to ensure that all code submitted to the project is unencumbered by copyrights or patents. diff --git a/build.xml b/build.xml index a54b033b019a..95c748834db2 100644 --- a/build.xml +++ b/build.xml @@ -150,7 +150,7 @@ TODO: - + ${appliedType(samTyCon, targs)} by ${fun.tpe} <:< $samInfoWithTVars --> $targs for $tparams") + debuglog(s"sam infer: $pt --> ${appliedType(samTyCon, targs)} by ${fun.tpe} <:< $samInfoWithTVars --> $targs for $tparams") - val ptFullyDefined = appliedType(samTyCon, targs) - if (ptFullyDefined <:< pt && fullyDefinedMeetsExpectedFunTp(ptFullyDefined)) { - debuglog(s"sam fully defined expected type: $ptFullyDefined from $pt for ${fun.tpe}") - ptFullyDefined - } else { - debuglog(s"Could not define type $pt using ${fun.tpe} <:< ${pt memberInfo sam} (for $sam)") - NoType + val ptFullyDefined = appliedType(samTyCon, targs) + if (ptFullyDefined <:< pt && fullyDefinedMeetsExpectedFunTp(ptFullyDefined)) { + debuglog(s"sam fully defined expected type: $ptFullyDefined from $pt for ${fun.tpe}") + ptFullyDefined + } else { + debuglog(s"Could not define type $pt using ${fun.tpe} <:< ${pt memberInfo sam} (for $sam)") + NoType + } + } catch { + case e@(_: NoInstance | _: TypeError) => + debuglog(s"Error during SAM synthesis: could not define type $pt using ${fun.tpe} <:< ${pt memberInfo sam} (for $sam)\n$e") + NoType + } + + if (samTp eq NoType) false + else { + /* Make a synthetic class symbol to represent the synthetic class that + * will be spun up by LMF for this function. This is necessary because + * it's possible that the SAM method might need bridges, and they have + * to go somewhere. Erasure knows to compute bridges for these classes + * just as if they were real templates extending the SAM type. */ + val synthCls = fun.symbol.owner.newClassWithInfo( + name = tpnme.ANON_CLASS_NAME, + parents = ObjectTpe :: samTp :: Nil, + scope = newScope, + pos = sam.pos, + newFlags = SYNTHETIC | ARTIFACT + ) + + synthCls.info.decls.enter { + val newFlags = (sam.flags & ~DEFERRED) | SYNTHETIC + sam.cloneSymbol(synthCls, newFlags).setInfo(samTp memberInfo sam) + } + + fun.setType(samTp) + + /* Arguably I should do `fun.setSymbol(samCls)` rather than leaning + * on an attachment, but doing that confounds lambdalift's free var + * analysis in a way which does not seem to be trivially reparable. */ + fun.updateAttachment(SAMFunction(samTp, sam, synthCls)) + + true } - } catch { - case e@(_: NoInstance | _: TypeError) => - debuglog(s"Error during SAM synthesis: could not define type $pt using ${fun.tpe} <:< ${pt memberInfo sam} (for $sam)\n$e") - NoType - }, sam) + } + case _ => false } /** Type check a function literal. 
diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala index dfca57970743..76e64ccda983 100644 --- a/src/reflect/scala/reflect/internal/StdAttachments.scala +++ b/src/reflect/scala/reflect/internal/StdAttachments.scala @@ -51,10 +51,11 @@ trait StdAttachments { * * @param samTp the expected type that triggered sam conversion (may be a subtype of the type corresponding to sam's owner) * @param sam the single abstract method implemented by the Function we're attaching this to + * @param synthCls the (synthetic) class representing the eventual implementation class (spun at runtime by LMF on the JVM) * * @since 2.12.0-M4 */ - case class SAMFunction(samTp: Type, sam: Symbol) extends PlainAttachment + case class SAMFunction(samTp: Type, sam: Symbol, synthCls: Symbol) extends PlainAttachment case object DelambdafyTarget extends PlainAttachment diff --git a/test/files/jvm/t10512a.flags b/test/files/jvm/t10512a.flags new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/files/jvm/t10512a.scala b/test/files/jvm/t10512a.scala new file mode 100644 index 000000000000..a91eee80e6d7 --- /dev/null +++ b/test/files/jvm/t10512a.scala @@ -0,0 +1,43 @@ +trait JsonValue +class JsonObject extends JsonValue +class JsonString extends JsonValue + +trait JsonEncoder[A] { + def encode(value: A): JsonValue +} + +trait JsonObjectEncoder[A] extends JsonEncoder[A] { + def encode(value: A): JsonObject +} + +object JsonEncoderInstances { + + val seWorks: JsonEncoder[String] = + new JsonEncoder[String] { + def encode(value: String) = new JsonString + } + + implicit val stringEncoder: JsonEncoder[String] = + s => new JsonString + //new JsonEncoder[String] { + // def encode(value: String) = new JsonString + //} + + def leWorks[A](implicit encoder: JsonEncoder[A]): JsonObjectEncoder[List[A]] = + new JsonObjectEncoder[List[A]] { + def encode(value: List[A]) = new JsonObject + } + + implicit def listEncoder[A](implicit encoder: JsonEncoder[A]): JsonObjectEncoder[List[A]] = + l => new JsonObject +// new JsonObjectEncoder[List[A]] { +// def encode(value: List[A]) = new JsonObject +// } + +} + +object Test extends App { + import JsonEncoderInstances._ + + implicitly[JsonEncoder[List[String]]].encode("" :: Nil) +} \ No newline at end of file diff --git a/test/files/jvm/t10512b.scala b/test/files/jvm/t10512b.scala new file mode 100644 index 000000000000..6429ce2d80a7 --- /dev/null +++ b/test/files/jvm/t10512b.scala @@ -0,0 +1,54 @@ +trait A +trait B extends A +trait C extends B +object it extends C + +/* try as many weird diamondy things as I can think of */ +trait SAM_A { def apply(): A } +trait SAM_A1 extends SAM_A { def apply(): A } +trait SAM_B extends SAM_A1 { def apply(): B } +trait SAM_B1 extends SAM_A1 { def apply(): B } +trait SAM_B2 extends SAM_B with SAM_B1 +trait SAM_C extends SAM_B2 { def apply(): C } + +trait SAM_F extends (() => A) with SAM_C +trait SAM_F1 extends (() => C) with SAM_F + + +object Test extends App { + + val s1: SAM_A = () => it + val s2: SAM_A1 = () => it + val s3: SAM_B = () => it + val s4: SAM_B1 = () => it + val s5: SAM_B2 = () => it + val s6: SAM_C = () => it + val s7: SAM_F = () => it + val s8: SAM_F1 = () => it + + (s1(): A) + + (s2(): A) + + (s3(): B) + (s3(): A) + + (s4(): B) + (s4(): A) + + (s5(): B) + (s5(): A) + + (s6(): C) + (s6(): B) + (s6(): A) + + (s7(): C) + (s7(): B) + (s7(): A) + + (s8(): C) + (s8(): B) + (s8(): A) + +} From 8238f983891860bc3403ca5ba7897b58578cce2e Mon Sep 17 00:00:00 2001 From: 
Mike Skells Date: Wed, 29 Nov 2017 23:37:51 +0000 Subject: [PATCH 1150/2793] initialise ClassBType info field as part of construction ban external mutation of info provide memory barriers for late initialised info field --- .../scala/tools/nsc/backend/jvm/BTypes.scala | 41 ++++++++--- .../nsc/backend/jvm/BTypesFromClassfile.scala | 26 +++---- .../nsc/backend/jvm/BTypesFromSymbols.scala | 68 +++++++++---------- 3 files changed, 76 insertions(+), 59 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index f0ceed826448..18930ec247c6 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -607,7 +607,7 @@ abstract class BTypes { * a missing info. In order not to crash the compiler unnecessarily, the inliner does not force * infos using `get`, but it reports inliner warnings for missing infos that prevent inlining. */ - final case class ClassBType(internalName: InternalName)(cache: mutable.Map[InternalName, ClassBType]) extends RefBType { + final class ClassBType private (val internalName: InternalName) extends RefBType { /** * Write-once variable allows initializing a cyclic graph of infos. This is required for * nested classes. Example: for the definition `class A { class B }` we have @@ -615,21 +615,20 @@ abstract class BTypes { * B.info.nestedInfo.outerClass == A * A.info.nestedClasses contains B */ - private var _info: Either[NoClassBTypeInfo, ClassInfo] = null + // volatile is required to ensure no early initialisation in apply + // like classic double checked lock in java + @volatile private var _info: Either[NoClassBTypeInfo, ClassInfo] = null def info: Either[NoClassBTypeInfo, ClassInfo] = { + if (_info eq null) + // synchronization required to ensure the apply is finished + // which populates info. ClassBType doesnt escape apart from via the map + // and the object mutex is locked prior to insertion. See apply + this.synchronized() assert(_info != null, s"ClassBType.info not yet assigned: $this") _info } - def info_=(i: Either[NoClassBTypeInfo, ClassInfo]): Unit = { - assert(_info == null, s"Cannot set ClassBType.info multiple times: $this") - _info = i - checkInfoConsistency() - } - - cache(internalName) = this - private def checkInfoConsistency(): Unit = { if (info.isLeft) return @@ -783,6 +782,15 @@ abstract class BTypes { } while (fcs == null) fcs } + + // equallity and hashcode is based on internalName + override def equals(obj: scala.Any): Boolean = obj match { + case o:ClassBType => internalName == o.internalName + case _ => false + } + + // equallity and hashcode is based on internalName + override def hashCode(): Int = internalName.hashCode } object ClassBType { @@ -804,6 +812,19 @@ abstract class BTypes { "scala/Null", "scala/Nothing" ) + def unapply(cr:ClassBType) = Some(cr.internalName) + + def apply(internalName: InternalName, cache: mutable.Map[InternalName, ClassBType])(init: (ClassBType) => Either[NoClassBTypeInfo, ClassInfo]) = { + val res = new ClassBType(internalName) + // synchronized s required to ensure proper initialisation if info. 
+ // see comment on def info + res.synchronized { + cache(internalName) = res + res._info = init(res) + res.checkInfoConsistency() + } + res + } } /** diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala index c120fbf62ce9..da27d29b62b6 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala @@ -46,25 +46,28 @@ abstract class BTypesFromClassfile { * be found in the `byteCodeRepository`, the `info` of the resulting ClassBType is undefined. */ def classBTypeFromParsedClassfile(internalName: InternalName): ClassBType = { - cachedClassBType(internalName).getOrElse({ - val res = ClassBType(internalName)(classBTypeCacheFromClassfile) - byteCodeRepository.classNode(internalName) match { - case Left(msg) => res.info = Left(NoClassBTypeInfoMissingBytecode(msg)); res - case Right(c) => setClassInfoFromClassNode(c, res) + cachedClassBType(internalName).getOrElse{ + ClassBType(internalName, classBTypeCacheFromClassfile){ res:ClassBType => + byteCodeRepository.classNode(internalName) match { + case Left(msg) => Left(NoClassBTypeInfoMissingBytecode(msg)) + case Right(c) => computeClassInfoFromClassNode(c, res) + } } - }) + } } /** * Construct the [[ClassBType]] for a parsed classfile. */ def classBTypeFromClassNode(classNode: ClassNode): ClassBType = { - cachedClassBType(classNode.name).getOrElse({ - setClassInfoFromClassNode(classNode, ClassBType(classNode.name)(classBTypeCacheFromClassfile)) - }) + cachedClassBType(classNode.name).getOrElse { + ClassBType(classNode.name, classBTypeCacheFromClassfile) { res: ClassBType => + computeClassInfoFromClassNode(classNode, res) + } + } } - private def setClassInfoFromClassNode(classNode: ClassNode, classBType: ClassBType): ClassBType = { + private def computeClassInfoFromClassNode(classNode: ClassNode, classBType: ClassBType): Right[Nothing, ClassInfo] = { val superClass = classNode.superName match { case null => assert(classNode.name == ObjectRef.internalName, s"class with missing super type: ${classNode.name}") @@ -119,8 +122,7 @@ abstract class BTypesFromClassfile { val interfaces: List[ClassBType] = classNode.interfaces.asScala.map(classBTypeFromParsedClassfile)(collection.breakOut) - classBType.info = Right(ClassInfo(superClass, interfaces, flags, Lazy.withoutLock(nestedClasses), Lazy.withoutLock(nestedInfo), inlineInfo)) - classBType + Right(ClassInfo(superClass, interfaces, flags, Lazy.withoutLock(nestedClasses), Lazy.withoutLock(nestedInfo), inlineInfo)) } /** diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 8f3500070def..3376d4253020 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -99,14 +99,12 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { assert(classBTypeCacheFromSymbol.contains(internalName), s"ClassBType for class being compiled was already created from a classfile: ${classSym.fullName}") bType case None => - // The new ClassBType is added to the map in its constructor, before we set its info. This + // The new ClassBType is added to the map via its apply, before we set its info. This // allows initializing cyclic dependencies, see the comment on variable ClassBType._info. 
- val res = ClassBType(internalName)(classBTypeCacheFromSymbol) - if (completeSilentlyAndCheckErroneous(classSym)) { - res.info = Left(NoClassBTypeInfoClassSymbolInfoFailedSI9111(classSym.fullName)) - res - } else { - setClassInfo(classSym, res) + ClassBType(internalName, classBTypeCacheFromSymbol) { res:ClassBType => + if (completeSilentlyAndCheckErroneous(classSym)) + Left(NoClassBTypeInfoClassSymbolInfoFailedSI9111(classSym.fullName)) + else computeClassInfo(classSym, res) } } } @@ -261,7 +259,7 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { r })(collection.breakOut) - private def setClassInfo(classSym: Symbol, classBType: ClassBType): ClassBType = { + private def computeClassInfo(classSym: Symbol, classBType: ClassBType): Right[Nothing, ClassInfo] = { /** * Reconstruct the classfile flags from a Java defined class symbol. * @@ -437,8 +435,7 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { val inlineInfo = buildInlineInfo(classSym, classBType.internalName) - classBType.info = Right(ClassInfo(superClass, interfaces, flags, nestedClasses, nestedInfo, inlineInfo)) - classBType + Right(ClassInfo(superClass, interfaces, flags, nestedClasses, nestedInfo, inlineInfo)) } private def isEmptyNestedInfo(innerClassSym: Symbol): Boolean = { assert(innerClassSym.isClass, s"Cannot build NestedInfo for non-class symbol $innerClassSym") @@ -626,37 +623,34 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { def mirrorClassClassBType(moduleClassSym: Symbol): ClassBType = { assert(isTopLevelModuleClass(moduleClassSym), s"not a top-level module class: $moduleClassSym") val internalName = moduleClassSym.javaBinaryNameString.stripSuffix(nme.MODULE_SUFFIX_STRING) - cachedClassBType(internalName).getOrElse({ - val c = ClassBType(internalName)(classBTypeCacheFromSymbol) - - val shouldBeLazy = moduleClassSym.isJavaDefined || !currentRun.compiles(moduleClassSym) - // class info consistent with BCodeHelpers.genMirrorClass - val nested = Lazy.withLockOrEager(shouldBeLazy, exitingPickler(memberClassesForInnerClassTable(moduleClassSym)) map classBTypeFromSymbol) - - c.info = Right(ClassInfo( - superClass = Some(ObjectRef), - interfaces = Nil, - flags = asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL, - nestedClasses = nested, - nestedInfo = Lazy.eagerNone, - inlineInfo = EmptyInlineInfo.copy(isEffectivelyFinal = true))) // no method inline infos needed, scala never invokes methods on the mirror class - c - }) + cachedClassBType(internalName).getOrElse { + ClassBType(internalName, classBTypeCacheFromSymbol) { c: ClassBType => + val shouldBeLazy = moduleClassSym.isJavaDefined || !currentRun.compiles(moduleClassSym) + val nested = Lazy.withLockOrEager(shouldBeLazy, exitingPickler(memberClassesForInnerClassTable(moduleClassSym)) map classBTypeFromSymbol) + Right(ClassInfo( + superClass = Some(ObjectRef), + interfaces = Nil, + flags = asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL, + nestedClasses = nested, + nestedInfo = Lazy.eagerNone, + inlineInfo = EmptyInlineInfo.copy(isEffectivelyFinal = true))) // no method inline infos needed, scala never invokes methods on the mirror class + } + } } def beanInfoClassClassBType(mainClass: Symbol): ClassBType = { val internalName = mainClass.javaBinaryNameString + "BeanInfo" - cachedClassBType(internalName).getOrElse({ - val c = ClassBType(internalName)(classBTypeCacheFromSymbol) - c.info = Right(ClassInfo( - superClass = 
Some(sbScalaBeanInfoRef), - interfaces = Nil, - flags = javaFlags(mainClass), - nestedClasses = Lazy.eagerNil, - nestedInfo = Lazy.eagerNone, - inlineInfo = EmptyInlineInfo)) - c - }) + cachedClassBType(internalName).getOrElse { + ClassBType(internalName, classBTypeCacheFromSymbol) { c: ClassBType => + Right(ClassInfo( + superClass = Some(sbScalaBeanInfoRef), + interfaces = Nil, + flags = javaFlags(mainClass), + nestedClasses = Lazy.eagerNil, + nestedInfo = Lazy.eagerNone, + inlineInfo = EmptyInlineInfo)) + } + } } /** From c879562419b127cb50c1a278632a56716bcc0ecf Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Wed, 29 Nov 2017 23:50:42 +0000 Subject: [PATCH 1151/2793] minor memory and inlining improvements --- src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala | 5 ++++- src/reflect/scala/reflect/internal/util/Statistics.scala | 2 +- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index f0ceed826448..0f3cc8e3f6a9 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -1034,7 +1034,10 @@ abstract class BTypes { } } - def reInitialize(): Unit = frontendSynch(isInit = false) + def reInitialize(): Unit = frontendSynch{ + v = null.asInstanceOf[T] + isInit = false + } } } diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index 6e09bbbb5e2d..df8f5e78065a 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -298,7 +298,7 @@ quant) } /** Helper for measuring the overhead of a concrete thunk `body`. */ - final def timed[T](timer: Timer)(body: => T): T = { + @inline final def timed[T](timer: Timer)(body: => T): T = { val start = startTimer(timer) try body finally stopTimer(timer, start) } From 1b5b88373e9615e92ecc374fa86044db56347a53 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Thu, 30 Nov 2017 11:50:22 -0500 Subject: [PATCH 1152/2793] Expand check for value-class-wrapping-value-class errors to include parents. Refinement types were getting a pass here because the typeSymbol check didn't look at parents, so a type like `Any with X`, which erases to `X`, wasn't caught. Fixes scala/bug#10530. 
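As an illustration (class names here are hypothetical, not taken from the patch), this is the shape that previously slipped through: the typeSymbol of a refinement such as `Any with Meters` is the synthetic refinement class, not the value class itself, so the old check saw nothing to reject.

```
class Meters(val n: Int) extends AnyVal

// Erases to Meters, but acc.tpe.typeSymbol is the refinement class for
// `Any with Meters`, so the old typeSymbol-only check never reported
// "value class may not wrap another user-defined value class".
class Box(val m: Any with Meters) extends AnyVal
```

Walking `acc.tpe.typeSymbol.baseClasses` instead finds `Meters` among the parents, so declarations like `Box` are now rejected, matching the cases exercised by the new t10530 neg test.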
--- .../scala/tools/nsc/typechecker/Typers.scala | 3 ++- test/files/neg/t10530.check | 25 +++++++++++++++++++ test/files/neg/t10530.scala | 12 +++++++++ 3 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/t10530.check create mode 100644 test/files/neg/t10530.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index d6d8e1e9bb45..a4f0acbded7c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1433,7 +1433,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case Some(acc) if acc.isProtectedLocal => context.error(paramAccessor.pos, "value class parameter must not be protected[this]") case Some(acc) => - if (acc.tpe.typeSymbol.isDerivedValueClass) + /* check all base classes, since derived value classes might lurk in refinement parents */ + if (acc.tpe.typeSymbol.baseClasses exists (_.isDerivedValueClass)) context.error(acc.pos, "value class may not wrap another user-defined value class") checkEphemeral(clazz, body filterNot (stat => stat.symbol != null && stat.symbol.accessedOrSelf == paramAccessor)) } diff --git a/test/files/neg/t10530.check b/test/files/neg/t10530.check new file mode 100644 index 000000000000..3bf79a71ee18 --- /dev/null +++ b/test/files/neg/t10530.check @@ -0,0 +1,25 @@ +t10530.scala:1: error: value class may not wrap another user-defined value class +class X(val u: Any with X) extends AnyVal + ^ +t10530.scala:2: error: value class may not wrap another user-defined value class +class Y(val u: Y with Y) extends AnyVal + ^ +t10530.scala:3: error: value class may not wrap another user-defined value class +class Z(val u: Z with String) extends AnyVal + ^ +t10530.scala:4: error: value class may not wrap another user-defined value class +class U(val u: U with Int) extends AnyVal + ^ +t10530.scala:6: error: value class may not wrap another user-defined value class +class W(val u: Z with U) extends AnyVal + ^ +t10530.scala:7: error: value class may not wrap another user-defined value class +class R(val u: Z {}) extends AnyVal + ^ +t10530.scala:9: error: value class may not wrap another user-defined value class +class Q(val u: AnyRef with X) extends AnyVal + ^ +t10530.scala:12: error: value class may not wrap another user-defined value class +class B[T <: A](val a: T) extends AnyVal + ^ +8 errors found diff --git a/test/files/neg/t10530.scala b/test/files/neg/t10530.scala new file mode 100644 index 000000000000..4c971c2d65ca --- /dev/null +++ b/test/files/neg/t10530.scala @@ -0,0 +1,12 @@ +class X(val u: Any with X) extends AnyVal +class Y(val u: Y with Y) extends AnyVal +class Z(val u: Z with String) extends AnyVal +class U(val u: U with Int) extends AnyVal + +class W(val u: Z with U) extends AnyVal +class R(val u: Z {}) extends AnyVal + +class Q(val u: AnyRef with X) extends AnyVal + +class A(val a: Int) extends AnyVal +class B[T <: A](val a: T) extends AnyVal \ No newline at end of file From 961041f260e2479f28de39e6acdbd4386ac2100b Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 17 Nov 2017 15:42:11 +0100 Subject: [PATCH 1153/2793] [backport] Update ASM to 6.0 (cherry picked from commit 9cadd50e5a787d84be95c0f85fb62231b2f30a40) --- test/files/run/t10594.scala | 132 ++++++++++++++++++++++++++++++++++++ versions.properties | 2 +- 2 files changed, 133 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t10594.scala diff --git 
a/test/files/run/t10594.scala b/test/files/run/t10594.scala new file mode 100644 index 000000000000..9c9ea0eb2713 --- /dev/null +++ b/test/files/run/t10594.scala @@ -0,0 +1,132 @@ +class C { + var x = 0 + + def m(): Unit = x += 1 + + def t(b: Boolean): Unit = { + if (b) { + m() + } + + if (b) { + // 10*100 invocations + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + 
m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + // 1k + + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + 
m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + // 2k + + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + 
m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + // 3k + + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + 
m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + // 4k + + 
m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + 
m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + // 5k + + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + 
m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + // 6k + + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + 
m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + // 7k + + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + 
m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + // 8k + + 
m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m();m() + // 8200 + m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m() + m();m();m();m();m();m();m();m();m();m() + // 8270 + m();m() + // 8272 + } + } +} + +object Test { + def main(args: Array[String]): Unit = { + val c = new C + c.t(true) + assert(c.x == 8273) + } +} diff --git a/versions.properties b/versions.properties index 2cb8aa78f40a..b1d884356274 100644 --- a/versions.properties +++ b/versions.properties @@ -33,7 +33,7 @@ scala-swing.version.number=1.0.2 akka-actor.version.number=2.3.16 actors-migration.version.number=1.1.0 jline.version=2.14.3 -scala-asm.version=5.2.0-scala-2 +scala-asm.version=6.0.0-scala-1 # external modules, used internally (not shipped) partest.version.number=1.0.16 From 99b4253de7c8a265a2a5a66aa56e3da1aacead72 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 8 Dec 2016 09:15:01 +1000 Subject: [PATCH 1154/2793] [backport] S-10098 Fix regression in Unix runner script with JAVA_HOME unset Rework bfa7ade0 to unconditionally set the system property with the contents of the bootclasspath, rather than trying to do this only for JVM 9+. The attempted JVM version detection code assumed JAVA_HOME was set, which isn't always the case. (cherry picked from commit 60ea98e412de0a2a9f631a5b4b048107e0b251f0) --- .../scala/tools/ant/templates/tool-unix.tmpl | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl index b5a238f7be6d..70ae9af444e0 100755 --- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl +++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl @@ -197,14 +197,11 @@ fi # to java to suppress "." from materializing. if [[ "$usebootcp" == "true" ]]; then classpath_args=("-Xbootclasspath/a:$TOOL_CLASSPATH" -classpath "\"\"") - # Note that the version numbers go 1.7, 1.8, 9, 10, ... - java_release="$(cat $JAVA_HOME/release | grep JAVA_VERSION)" - if [[ ! "$java_release" =~ JAVA_VERSION=\"1\. ]]; then - # Java 9 removed sun.boot.class.path, and the supposed replacement to at least see - # the appended boot classpath (jdk.boot.class.path.append) is not visible. - # So we have to pass a custom system property that PathResolver will find. - classpath_args+=("-Dscala.boot.class.path=$TOOL_CLASSPATH") - fi + # Java 9 removed sun.boot.class.path, and the supposed replacement to at least see + # the appended boot classpath (jdk.boot.class.path.append) is not visible. + # So we have to pass a custom system property that PathResolver will find. 
+ # We do this for all JVM versions, rather than getting into the business of JVM version detection. + classpath_args+=("-Dscala.boot.class.path=$TOOL_CLASSPATH") else classpath_args=(-classpath "$TOOL_CLASSPATH") fi From 0cddad7946b3f96aaa35e780ffb10e64350509ae Mon Sep 17 00:00:00 2001 From: Jasper Moeys Date: Tue, 25 Jul 2017 17:12:08 +0200 Subject: [PATCH 1155/2793] Deprecate PartialFunction.apply PartialFunction.apply causes confusion because at first glance it looks like a general purpose factory method for creating PartialFunctions, but it is only meant to convert ordinary to partial functions (with `pf.isDefinedAt(x) == true` for all x). When used in the wrong way it can have confusing semantics. --- src/library/scala/PartialFunction.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala index c1a413d516fa..c054e001d418 100644 --- a/src/library/scala/PartialFunction.scala +++ b/src/library/scala/PartialFunction.scala @@ -245,6 +245,7 @@ object PartialFunction { /** Converts ordinary function to partial one * @since 2.10 */ + @deprecated("""For converting an ordinary function f to a partial function pf, use `val pf: PartialFunction[A, B] = { case x => f(x) }`. For creating a new PartialFunction, use an explicit type annotation instead, like in `val pf: PartialFunction[Int, String] = { case 1 => "one" }`.""", "2.12.5") def apply[A, B](f: A => B): PartialFunction[A, B] = { case x => f(x) } private[this] val constFalse: Any => Boolean = { _ => false} From 3e28d97e676f0bf50514fc65d3b0cef7e885da80 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 1 Dec 2017 14:17:55 -0800 Subject: [PATCH 1156/2793] -Ywarn-unused ignores filter of refutable patterns The call has the form, `qual.withFilter(check$refutable => body)` where the body is `{ case mypat => true }` which must always spuriously warn about any pattern variables. 
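Sketching the desugaring (approximate and for illustration only, not verbatim compiler output), the pattern variable bound inside the synthetic refutability filter is never read, which is what triggered the spurious warning:

```
def f = for (i: Int <- List(42)) yield i

// expands roughly to
def fExpanded =
  List(42)
    .withFilter { case i: Int => true; case _ => false } // `i` is bound but never used here
    .map { case i: Int => i }
```

Under `-Ywarn-unused:patvars` the filter's `case i: Int => true` used to be reported as an unused pattern variable; the traverser now bails out of `withFilter` arguments whose parameter name carries the compiler's refutability-check prefix (`nme.CHECK_IF_REFUTABLE_STRING`), as in the new t10394 pos test.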
--- .../tools/nsc/typechecker/TypeDiagnostics.scala | 17 ++++++++++------- test/files/pos/t10394.flags | 1 + test/files/pos/t10394.scala | 4 ++++ 3 files changed, 15 insertions(+), 7 deletions(-) create mode 100644 test/files/pos/t10394.flags create mode 100644 test/files/pos/t10394.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 38fcdccdc15c..baf36e56b5b9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -501,6 +501,7 @@ trait TypeDiagnostics { override def traverse(t: Tree): Unit = { val sym = t.symbol + var bail = false t match { case m: MemberDef if qualifies(t.symbol) => defnTrees += m @@ -508,22 +509,24 @@ trait TypeDiagnostics { case DefDef(mods@_, name@_, tparams@_, vparamss, tpt@_, rhs@_) if !sym.isAbstract && !sym.isDeprecated && !sym.isMacro => if (sym.isPrimaryConstructor) for (cpa <- sym.owner.constrParamAccessors if cpa.isPrivateLocal) params += cpa - else if (sym.isSynthetic && sym.isImplicit) return + else if (sym.isSynthetic && sym.isImplicit) bail = true else if (!sym.isConstructor) for (vs <- vparamss) params ++= vs.map(_.symbol) case _ => } - case CaseDef(pat, guard@_, rhs@_) if settings.warnUnusedPatVars - => pat.foreach { - // TODO don't warn in isDefinedAt of $anonfun - case b @ Bind(n, _) if !atBounded(b) && n != nme.DEFAULT_CASE => patvars += b.symbol - case _ => - } + case CaseDef(pat, guard@_, rhs@_) if settings.warnUnusedPatVars => + pat.foreach { + case b @ Bind(n, _) if !atBounded(b) && n != nme.DEFAULT_CASE => patvars += b.symbol + case _ => + } case _: RefTree if sym ne null => targets += sym case Assign(lhs, _) if lhs.symbol != null => setVars += lhs.symbol case Bind(_, _) if atBounded(t) => atBounds += sym + case Apply(Select(_, nme.withFilter), Function(vparams, _) :: Nil) => + bail = vparams.exists(_.name startsWith nme.CHECK_IF_REFUTABLE_STRING) case _ => } + if (bail) return if (t.tpe ne null) { for (tp <- t.tpe if !treeTypes(tp)) { // Include references to private/local aliases (which might otherwise refer to an enclosing class) diff --git a/test/files/pos/t10394.flags b/test/files/pos/t10394.flags new file mode 100644 index 000000000000..437ae36b0ea3 --- /dev/null +++ b/test/files/pos/t10394.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Ywarn-unused:patvars diff --git a/test/files/pos/t10394.scala b/test/files/pos/t10394.scala new file mode 100644 index 000000000000..091fa5bc8d17 --- /dev/null +++ b/test/files/pos/t10394.scala @@ -0,0 +1,4 @@ + +trait T { + def f = for (i: Int <- List(42)) yield i +} From bf61c1bd7973556136043bb63594d34cc33bb11b Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 30 Nov 2017 00:56:01 -0800 Subject: [PATCH 1157/2793] Class literal is a usage Notice ConstantType and record it under -Ywarn-unused. 
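Mirroring the test case added below, the situation is a private class whose only reference is a class literal; the tree for `classOf[...]` carries a `ConstantType(Constant(tpe))`, and recording that `tpe` as a reference stops the spurious "never used" warning:

```
object ClassOfUsage {
  private class intrinsically              // was reported as never used before this change
  def f = classOf[intrinsically].toString  // classOf yields a ConstantType wrapping the type,
                                           // which the unused-privates traverser now records
}
```

The object and member names here simply mirror the `classof something` test added to warn-unused-privates.scala in this patch.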
--- .../scala/tools/nsc/typechecker/TypeDiagnostics.scala | 3 +++ test/files/neg/warn-unused-privates.scala | 5 +++++ 2 files changed, 8 insertions(+) diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index baf36e56b5b9..07f7271e9cba 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -540,6 +540,9 @@ trait TypeDiagnostics { case NullaryMethodType(_) => case MethodType(_, _) => case SingleType(_, _) => + case ConstantType(Constant(k: Type)) => + log(s"classOf $k referenced from $currentOwner") + treeTypes += k case _ => log(s"${if (isAlias) "alias " else ""}$tp referenced from $currentOwner") treeTypes += tp diff --git a/test/files/neg/warn-unused-privates.scala b/test/files/neg/warn-unused-privates.scala index a2b78d29d2d2..4640f80d365b 100644 --- a/test/files/neg/warn-unused-privates.scala +++ b/test/files/neg/warn-unused-privates.scala @@ -231,3 +231,8 @@ class `nonprivate alias is enclosing` { type C2 = C private class D extends C2 // warn } + +object `classof something` { + private class intrinsically + def f = classOf[intrinsically].toString() +} From 0d352b385e3dd11be3a57b44552655ac838bc848 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 1 Dec 2017 20:32:20 -0800 Subject: [PATCH 1158/2793] Simplify warning for unset private var Although vars have setters, it's more uniform to report that the private var is not updated. (It's not possible that the setter is overriding a synthetic setter for a var, so there can be no ambiguity.) --- .../scala/tools/nsc/typechecker/TypeDiagnostics.scala | 9 +++++---- test/files/neg/warn-unused-privates.check | 4 ++-- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 07f7271e9cba..905e0eed2015 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -574,7 +574,6 @@ trait TypeDiagnostics { && (m.isValueParameter || !ignoreNames(m.name.toTermName)) // serialization methods && !isConstantType(m.info.resultType) // subject to constant inlining && !treeTypes.exists(_ contains m) // e.g. 
val a = new Foo ; new a.Bar - //&& !(m.isVal && m.info.resultType =:= typeOf[Unit]) // Unit val is uninteresting ) def isUnusedParam(m: Symbol): Boolean = ( isUnusedTerm(m) @@ -632,6 +631,7 @@ trait TypeDiagnostics { unusedPrivates.traverse(body) if (settings.warnUnusedLocals || settings.warnUnusedPrivates) { + val valAdvice = "is never updated: consider using immutable val" for (defn: DefTree <- unusedPrivates.unusedTerms) { val sym = defn.symbol val pos = ( @@ -643,6 +643,7 @@ trait TypeDiagnostics { } ) val why = if (sym.isPrivate) "private" else "local" + var cond = "is never used" val what = ( if (sym.isDefaultGetter) "default argument" else if (sym.isConstructor) "constructor" @@ -655,15 +656,15 @@ trait TypeDiagnostics { || sym.isGetter && (sym.accessed.isVal || (sym.owner.isTrait && sym.hasFlag(STABLE))) || sym.isLazy ) s"val ${sym.name.decoded}" - else if (sym.isSetter) s"setter of ${sym.name.getterName.decoded}" + else if (sym.isSetter) { cond = valAdvice ; s"var ${sym.name.getterName.decoded}" } else if (sym.isMethod) s"method ${sym.name.decoded}" else if (sym.isModule) s"object ${sym.name.decoded}" else "term" ) - context.warning(pos, s"$why $what in ${sym.owner} is never used") + context.warning(pos, s"$why $what in ${sym.owner} $cond") } for (v <- unusedPrivates.unsetVars) { - context.warning(v.pos, s"local var ${v.name} in ${v.owner} is never set: consider using immutable val") + context.warning(v.pos, s"local var ${v.name} in ${v.owner} ${valAdvice}") } for (t <- unusedPrivates.unusedTypes) { val sym = t.symbol diff --git a/test/files/neg/warn-unused-privates.check b/test/files/neg/warn-unused-privates.check index 10c9c1664b23..e83cfdebdee9 100644 --- a/test/files/neg/warn-unused-privates.check +++ b/test/files/neg/warn-unused-privates.check @@ -28,7 +28,7 @@ warn-unused-privates.scala:45: warning: private var v3 in trait Accessors is nev warn-unused-privates.scala:56: warning: private var s1 in class StableAccessors is never used private var s1: Int = 0 // warn ^ -warn-unused-privates.scala:57: warning: private setter of s2 in class StableAccessors is never used +warn-unused-privates.scala:57: warning: private var s2 in class StableAccessors is never updated: consider using immutable val private var s2: Int = 0 // warn, never set ^ warn-unused-privates.scala:58: warning: private var s3 in class StableAccessors is never used @@ -79,7 +79,7 @@ warn-unused-privates.scala:166: warning: local val x in method v is never used warn-unused-privates.scala:170: warning: local val x in method w is never used val D(x @ _) = d // warn, fixme (valdef pos is different) ^ -warn-unused-privates.scala:97: warning: local var x in method f2 is never set: consider using immutable val +warn-unused-privates.scala:97: warning: local var x in method f2 is never updated: consider using immutable val var x = 100 // warn about it being a var ^ warn-unused-privates.scala:104: warning: private class Bar1 in object Types is never used From 44748ae2fcc504250d059e5400613e2b5f1977da Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Sun, 3 Dec 2017 23:50:04 +0000 Subject: [PATCH 1159/2793] combine classBTypeCacheFromSymbol and classBTypeCacheFromClassfile --- .../scala/tools/nsc/backend/jvm/BTypes.scala | 33 ++++++++++++------- .../nsc/backend/jvm/BTypesFromClassfile.scala | 4 +-- .../nsc/backend/jvm/BTypesFromSymbols.scala | 8 ++--- .../jvm/opt/BTypesFromClassfileTest.scala | 3 +- .../nsc/backend/jvm/opt/CallGraphTest.scala | 3 +- .../nsc/backend/jvm/opt/InlineInfoTest.scala | 3 +- 
.../jvm/opt/InlinerIllegalAccessTest.scala | 3 +- .../nsc/backend/jvm/opt/InlinerTest.scala | 3 +- 8 files changed, 33 insertions(+), 27 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index 18930ec247c6..f436920fbcb2 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -36,12 +36,12 @@ abstract class BTypes { * name. The method assumes that every class type that appears in the bytecode exists in the map */ def cachedClassBType(internalName: InternalName): Option[ClassBType] = - classBTypeCacheFromSymbol.get(internalName).orElse(classBTypeCacheFromClassfile.get(internalName)) + classBTypeCache.get(internalName) // Concurrent maps because stack map frames are computed when in the class writer, which // might run on multiple classes concurrently. - val classBTypeCacheFromSymbol: concurrent.Map[InternalName, ClassBType] = recordPerRunCache(FlatConcurrentHashMap.empty) - val classBTypeCacheFromClassfile: concurrent.Map[InternalName, ClassBType] = recordPerRunCache(FlatConcurrentHashMap.empty) + // Note usage should be private to this file, except for tests + val classBTypeCache: concurrent.Map[InternalName, ClassBType] = recordPerRunCache(FlatConcurrentHashMap.empty) /** * A BType is either a primitive type, a ClassBType, an ArrayBType of one of these, or a MethodType @@ -607,7 +607,8 @@ abstract class BTypes { * a missing info. In order not to crash the compiler unnecessarily, the inliner does not force * infos using `get`, but it reports inliner warnings for missing infos that prevent inlining. */ - final class ClassBType private (val internalName: InternalName) extends RefBType { + sealed abstract class ClassBType protected(val internalName: InternalName) extends RefBType { + def fromSymbol: Boolean /** * Write-once variable allows initializing a cyclic graph of infos. This is required for * nested classes. Example: for the definition `class A { class B }` we have @@ -814,18 +815,28 @@ abstract class BTypes { ) def unapply(cr:ClassBType) = Some(cr.internalName) - def apply(internalName: InternalName, cache: mutable.Map[InternalName, ClassBType])(init: (ClassBType) => Either[NoClassBTypeInfo, ClassInfo]) = { - val res = new ClassBType(internalName) + def apply(internalName: InternalName, fromSymbol: Boolean)(init: (ClassBType) => Either[NoClassBTypeInfo, ClassInfo]) = { + val newRes = if (fromSymbol) new ClassBTypeFromSymbol(internalName) else new ClassBTypeFromClassfile(internalName) // synchronized s required to ensure proper initialisation if info. // see comment on def info - res.synchronized { - cache(internalName) = res - res._info = init(res) - res.checkInfoConsistency() + newRes.synchronized { + classBTypeCache.putIfAbsent(internalName, newRes) match { + case None => + newRes._info = init(newRes) + newRes.checkInfoConsistency() + newRes + case Some(old) => + old + } } - res } } + private final class ClassBTypeFromSymbol(internalName: InternalName) extends ClassBType(internalName) { + override def fromSymbol: Boolean = true + } + private final class ClassBTypeFromClassfile(internalName: InternalName) extends ClassBType(internalName) { + override def fromSymbol: Boolean = false + } /** * The type info for a class. Used for symboltable-independent subtype checks in the backend. 
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala index da27d29b62b6..095e5911313a 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala @@ -47,7 +47,7 @@ abstract class BTypesFromClassfile { */ def classBTypeFromParsedClassfile(internalName: InternalName): ClassBType = { cachedClassBType(internalName).getOrElse{ - ClassBType(internalName, classBTypeCacheFromClassfile){ res:ClassBType => + ClassBType(internalName, false){ res:ClassBType => byteCodeRepository.classNode(internalName) match { case Left(msg) => Left(NoClassBTypeInfoMissingBytecode(msg)) case Right(c) => computeClassInfoFromClassNode(c, res) @@ -61,7 +61,7 @@ abstract class BTypesFromClassfile { */ def classBTypeFromClassNode(classNode: ClassNode): ClassBType = { cachedClassBType(classNode.name).getOrElse { - ClassBType(classNode.name, classBTypeCacheFromClassfile) { res: ClassBType => + ClassBType(classNode.name, false) { res: ClassBType => computeClassInfoFromClassNode(classNode, res) } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 3376d4253020..c919c81a346c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -96,12 +96,12 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { cachedClassBType(internalName) match { case Some(bType) => if (currentRun.compiles(classSym)) - assert(classBTypeCacheFromSymbol.contains(internalName), s"ClassBType for class being compiled was already created from a classfile: ${classSym.fullName}") + assert(bType fromSymbol, s"ClassBType for class being compiled was already created from a classfile: ${classSym.fullName}") bType case None => // The new ClassBType is added to the map via its apply, before we set its info. This // allows initializing cyclic dependencies, see the comment on variable ClassBType._info. 
- ClassBType(internalName, classBTypeCacheFromSymbol) { res:ClassBType => + ClassBType(internalName, true) { res:ClassBType => if (completeSilentlyAndCheckErroneous(classSym)) Left(NoClassBTypeInfoClassSymbolInfoFailedSI9111(classSym.fullName)) else computeClassInfo(classSym, res) @@ -624,7 +624,7 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { assert(isTopLevelModuleClass(moduleClassSym), s"not a top-level module class: $moduleClassSym") val internalName = moduleClassSym.javaBinaryNameString.stripSuffix(nme.MODULE_SUFFIX_STRING) cachedClassBType(internalName).getOrElse { - ClassBType(internalName, classBTypeCacheFromSymbol) { c: ClassBType => + ClassBType(internalName, true) { c: ClassBType => val shouldBeLazy = moduleClassSym.isJavaDefined || !currentRun.compiles(moduleClassSym) val nested = Lazy.withLockOrEager(shouldBeLazy, exitingPickler(memberClassesForInnerClassTable(moduleClassSym)) map classBTypeFromSymbol) Right(ClassInfo( @@ -641,7 +641,7 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { def beanInfoClassClassBType(mainClass: Symbol): ClassBType = { val internalName = mainClass.javaBinaryNameString + "BeanInfo" cachedClassBType(internalName).getOrElse { - ClassBType(internalName, classBTypeCacheFromSymbol) { c: ClassBType => + ClassBType(internalName, true) { c: ClassBType => Right(ClassInfo( superClass = Some(sbScalaBeanInfoRef), interfaces = Nil, diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala index bedc9c0ef364..c93d7792dc1e 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala @@ -30,8 +30,7 @@ class BTypesFromClassfileTest extends BytecodeTesting { } def clearCache() = { - bTypes.classBTypeCacheFromSymbol.clear() - bTypes.classBTypeCacheFromClassfile.clear() + bTypes.classBTypeCache.clear() } def sameBType(fromSym: ClassBType, fromClassfile: ClassBType, checked: Set[InternalName] = Set.empty): Set[InternalName] = { diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala index a7bbaab55f77..4af8b317a833 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala @@ -24,8 +24,7 @@ class CallGraphTest extends BytecodeTesting { compiler.keepPerRunCachesAfterRun(List( - bTypes.classBTypeCacheFromSymbol, - bTypes.classBTypeCacheFromClassfile, + bTypes.classBTypeCache, postProcessor.byteCodeRepository.compilingClasses, postProcessor.byteCodeRepository.parsedClasses, postProcessor.callGraph.callsites)) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala index d39804d2b9e0..1f1eace35073 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala @@ -20,8 +20,7 @@ class InlineInfoTest extends BytecodeTesting { override def compilerArgs = "-opt:l:inline -opt-inline-from:**" compiler.keepPerRunCachesAfterRun(List( - bTypes.classBTypeCacheFromSymbol, - bTypes.classBTypeCacheFromClassfile, + bTypes.classBTypeCache, postProcessor.byteCodeRepository.compilingClasses, postProcessor.byteCodeRepository.parsedClasses)) diff --git 
a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala index 76a5a3334bcf..f81ad5a4d40e 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerIllegalAccessTest.scala @@ -26,8 +26,7 @@ class InlinerIllegalAccessTest extends BytecodeTesting { throw new AssertionError(textify(i)) def clearClassBTypeCaches(): Unit = { - classBTypeCacheFromSymbol.clear() - classBTypeCacheFromClassfile.clear() + classBTypeCache.clear() } @Test diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index c46164a6de7f..3688c7aada1d 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -26,8 +26,7 @@ class InlinerTest extends BytecodeTesting { compiler.keepPerRunCachesAfterRun(List( - bTypes.classBTypeCacheFromSymbol, - bTypes.classBTypeCacheFromClassfile, + bTypes.classBTypeCache, postProcessor.byteCodeRepository.compilingClasses, postProcessor.byteCodeRepository.parsedClasses, postProcessor.callGraph.callsites)) From d1883fc3837fef17cb7e0d9a0fa137a9c8501340 Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Mon, 4 Dec 2017 00:04:44 +0000 Subject: [PATCH 1160/2793] allow perRunCache to support java maps and collections directly --- .../jvm/PostProcessorFrontendAccess.scala | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala index 32c85f9bf699..4266988ff9a7 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala @@ -5,6 +5,7 @@ import scala.collection.generic.Clearable import scala.reflect.internal.util.Position import scala.reflect.io.AbstractFile import scala.tools.nsc.backend.jvm.BTypes.InternalName +import java.util.{Map => JMap, Collection => JCollection} /** * Functionality needed in the post-processor whose implementation depends on the compiler @@ -29,6 +30,10 @@ sealed abstract class PostProcessorFrontendAccess { def javaDefinedClasses: Set[InternalName] def recordPerRunCache[T <: Clearable](cache: T): T + + def recordPerRunJavaMapCache[T <: JMap[_,_]](cache: T): T + + def recordPerRunJavaCache[T <: JCollection[_]](cache: T): T } object PostProcessorFrontendAccess { @@ -163,5 +168,18 @@ object PostProcessorFrontendAccess { def recordPerRunCache[T <: Clearable](cache: T): T = frontendSynch(perRunCaches.recordCache(cache)) + + def recordPerRunJavaMapCache[T <: JMap[_,_]](cache: T): T = { + recordPerRunJavaCache(cache.keySet()) + cache + } + + def recordPerRunJavaCache[T <: JCollection[_]](cache: T): T = { + recordPerRunCache(new JavaClearable(cache)) + cache + } + private class JavaClearable(data: JCollection[_]) extends Clearable { + override def clear(): Unit = data.clear + } } } \ No newline at end of file From b683c720f1478e563201de5f82edff2bc852c467 Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Sun, 19 Nov 2017 22:29:37 +0000 Subject: [PATCH 1161/2793] [backport] improve benchmarking of multi-threaded compilation - an enabler for GenBCode (and other future) optimisations --- .../scala/tools/nsc/profile/AsyncHelper.scala | 139 ++++++++++ 
.../scala/tools/nsc/profile/InPhase.scala | 84 ------ .../scala/tools/nsc/profile/Profiler.scala | 254 ++++++------------ 3 files changed, 226 insertions(+), 251 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/profile/AsyncHelper.scala delete mode 100644 src/compiler/scala/tools/nsc/profile/InPhase.scala diff --git a/src/compiler/scala/tools/nsc/profile/AsyncHelper.scala b/src/compiler/scala/tools/nsc/profile/AsyncHelper.scala new file mode 100644 index 000000000000..820b44949a71 --- /dev/null +++ b/src/compiler/scala/tools/nsc/profile/AsyncHelper.scala @@ -0,0 +1,139 @@ +package scala.tools.nsc.profile + +import java.util.Collections +import java.util.concurrent.ThreadPoolExecutor.AbortPolicy +import java.util.concurrent._ +import java.util.concurrent.atomic.{AtomicInteger, AtomicLong} + +import scala.tools.nsc.{Global, Phase} + +sealed trait AsyncHelper { + + def newUnboundedQueueFixedThreadPool + (nThreads: Int, + shortId: String, priority : Int = Thread.NORM_PRIORITY) : ThreadPoolExecutor + def newBoundedQueueFixedThreadPool + (nThreads: Int, maxQueueSize: Int, rejectHandler: RejectedExecutionHandler, + shortId: String, priority : Int = Thread.NORM_PRIORITY) : ThreadPoolExecutor + +} + +object AsyncHelper { + def apply(global: Global, phase: Phase): AsyncHelper = global.currentRun.profiler match { + case NoOpProfiler => new BasicAsyncHelper(global, phase) + case r: RealProfiler => new ProfilingAsyncHelper(global, phase, r) + } + + private abstract class BaseAsyncHelper(global: Global, phase: Phase) extends AsyncHelper { + val baseGroup = new ThreadGroup(s"scalac-${phase.name}") + private def childGroup(name: String) = new ThreadGroup(baseGroup, name) + + protected def wrapRunnable(r: Runnable): Runnable + + protected class CommonThreadFactory(shortId: String, + daemon: Boolean = true, + priority: Int) extends ThreadFactory { + private val group: ThreadGroup = childGroup(shortId) + private val threadNumber: AtomicInteger = new AtomicInteger(1) + private val namePrefix = s"${baseGroup.getName}-$shortId-" + + override def newThread(r: Runnable): Thread = { + val wrapped = wrapRunnable(r) + val t: Thread = new Thread(group, wrapped, namePrefix + threadNumber.getAndIncrement, 0) + if (t.isDaemon != daemon) t.setDaemon(daemon) + if (t.getPriority != priority) t.setPriority(priority) + t + } + } + } + + private final class BasicAsyncHelper(global: Global, phase: Phase) extends BaseAsyncHelper(global, phase) { + + override def newUnboundedQueueFixedThreadPool(nThreads: Int, shortId: String, priority: Int): ThreadPoolExecutor = { + val threadFactory = new CommonThreadFactory(shortId, priority = priority) + //like Executors.newFixedThreadPool + new ThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue[Runnable], threadFactory) + } + + override def newBoundedQueueFixedThreadPool(nThreads: Int, maxQueueSize: Int, rejectHandler: RejectedExecutionHandler, shortId: String, priority: Int): ThreadPoolExecutor = { + val threadFactory = new CommonThreadFactory(shortId, priority = priority) + //like Executors.newFixedThreadPool + new ThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue[Runnable](maxQueueSize), threadFactory, rejectHandler) + } + + override protected def wrapRunnable(r: Runnable): Runnable = r + } + + private class ProfilingAsyncHelper(global: Global, phase: Phase, private val profiler: RealProfiler) extends BaseAsyncHelper(global, phase) { + + override def newUnboundedQueueFixedThreadPool(nThreads: Int, 
shortId: String, priority: Int): ThreadPoolExecutor = { + val threadFactory = new CommonThreadFactory(shortId, priority = priority) + //like Executors.newFixedThreadPool + new SinglePhaseInstrumentedThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue[Runnable], threadFactory, new AbortPolicy) + } + + override def newBoundedQueueFixedThreadPool(nThreads: Int, maxQueueSize: Int, rejectHandler: RejectedExecutionHandler, shortId: String, priority: Int): ThreadPoolExecutor = { + val threadFactory = new CommonThreadFactory(shortId, priority = priority) + //like Executors.newFixedThreadPool + new SinglePhaseInstrumentedThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue[Runnable](maxQueueSize), threadFactory, rejectHandler) + } + + override protected def wrapRunnable(r: Runnable): Runnable = () => { + val data = new ThreadProfileData + localData.set(data) + + val profileStart = Profiler.emptySnap + try r.run finally { + val snap = profiler.snapThread() + val threadRange = ProfileRange(profileStart, snap, phase, 0, "", Thread.currentThread()) + profiler.completeBackground(threadRange) + } + } + + /** + * data for thread run. Not threadsafe, only written from a single thread + */ + final class ThreadProfileData { + var firstStartNs = 0L + var taskCount = 0 + + var idleNs = 0L + var runningNs = 0L + + var lastStartNs = 0L + var lastEndNs = 0L + } + + val localData = new ThreadLocal[ThreadProfileData] + + private class SinglePhaseInstrumentedThreadPoolExecutor + ( corePoolSize: Int, maximumPoolSize: Int, keepAliveTime: Long, unit: TimeUnit, + workQueue: BlockingQueue[Runnable], threadFactory: ThreadFactory, handler: RejectedExecutionHandler + ) extends ThreadPoolExecutor(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, handler) { + + override def beforeExecute(t: Thread, r: Runnable): Unit = { + val data = localData.get + data.taskCount += 1 + val now = System.nanoTime() + + if (data.firstStartNs == 0) data.firstStartNs = now + else data.idleNs += now - data.lastEndNs + + data.lastStartNs = now + + super.beforeExecute(t, r) + } + + override def afterExecute(r: Runnable, t: Throwable): Unit = { + val now = System.nanoTime() + val data = localData.get + + data.lastEndNs = now + data.runningNs += now - data.lastStartNs + + super.afterExecute(r, t) + } + + } + } +} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/profile/InPhase.scala b/src/compiler/scala/tools/nsc/profile/InPhase.scala deleted file mode 100644 index 53e113c62e34..000000000000 --- a/src/compiler/scala/tools/nsc/profile/InPhase.scala +++ /dev/null @@ -1,84 +0,0 @@ -package scala.tools.nsc.profile - -import java.util.concurrent.atomic.AtomicInteger - -import scala.concurrent.duration.Duration -import scala.concurrent.{Await, ExecutionContext, Future} -import scala.tools.nsc.{Global, Phase} -object InPhase { - val idGen = new AtomicInteger -} -/** - * A wrapper to allow actions to be associated to a Phase. This aids profiling, particularly where a actions occur in - * multiple threads, or out of order - * - * When you are running a compilation task that involved some activity on a background thread - * (not the one running [[Global.compileUnits]]) the profiler is not aware of that thread and so cannot account - * for the activity. 
- * - * By wrapping the activity in this class or one of it children the profiler (if enabled) is informed - * and the statistics can be gathered - * - * No InPhase should run concurrently with another InPhase on the same thread - the statistics dont cope with nesting - */ -sealed abstract class InPhase(global: Global, val phase:Phase, val comment:String) { - - private[profile] final val id = InPhase.idGen.incrementAndGet() - private[profile] final val profiler = global.currentRun.profiler - private[profile] final var idleNs = 0L - profiler.registerInPhase(this) - - @inline protected [profile] def doAction[T] (fn : => T) : T = { - val before = profiler.beforeInPhase(this) - try fn - finally profiler.afterInPhase(this, before, idleNs) - } - - /** - * If the compilation activity has some idle time waiting on a future, then this can be recorded by - * using this method to perform the wait for you. This allow the profiler to distinguish idle time (waiting for some - * related activity to complete), from for example waiting on I/O - * @param future the future that you are waiting on - * @param duration the maximum duration to wait - */ - def idle(future: Future[_], duration:Duration = Duration.Inf): Unit = { - if (!future.isCompleted) { - val start = System.nanoTime() - try Await.ready(future, duration) - finally idleNs += (System.nanoTime() - start) - } - } - -} -/** - * an InPhase for Runnables - * - * By enclosing the activity in the doRun method of this class the profiler (if enabled) is informed - * and the statistics can be gathered - */ - -object RunnableInPhase { - def apply(global: Global, phase:Phase, comment:String)(fn: => Unit)(implicit executionContext: ExecutionContext) = { - new RunnableInPhase(global, phase, comment)(fn) - } -} -class RunnableInPhase(global: Global, phase:Phase, comment:String)(fn: => Unit) extends InPhase(global, phase, comment) with Runnable { - final def run(): Unit = doAction(fn) -} - -/** - * an InPhase for Futures - * - * By enclosing the activity in this wrapper the profiler (if enabled) is informed - * and the statistics can be gathered - */ -object FutureInPhase { - def apply[T](global: Global, phase:Phase, comment:String)(fn: => T)(implicit executionContext: ExecutionContext) = { - val inPhase = new FutureInPhase(global, phase, comment)(fn) - Future(inPhase.exec()) - } -} - -class FutureInPhase[T](global: Global, phase:Phase, comment:String)(fn: => T) extends InPhase(global, phase, comment) { - final def exec() = doAction(fn) -} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/profile/Profiler.scala b/src/compiler/scala/tools/nsc/profile/Profiler.scala index 93d3e27890ce..02732ca43df5 100644 --- a/src/compiler/scala/tools/nsc/profile/Profiler.scala +++ b/src/compiler/scala/tools/nsc/profile/Profiler.scala @@ -2,11 +2,11 @@ package scala.tools.nsc.profile import java.io.{FileWriter, PrintWriter} import java.lang.management.ManagementFactory +import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicInteger import javax.management.openmbean.CompositeData import javax.management.{Notification, NotificationEmitter, NotificationListener} -import scala.collection.mutable import scala.tools.nsc.{Phase, Settings} object Profiler { @@ -18,92 +18,61 @@ object Profiler { else ConsoleProfileReporter new RealProfiler(reporter, settings) } + + private[profile] val emptySnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0) } -case class GcEventData(pool:String, gcStartMillis:Long, gcEndMillis:Long) -//TODO separate the main thread wall 
clock time from the background threads times -case class ProfileCounters(wallClockTimeNanos : Long, - idleTimeNanos:Long, cpuTimeNanos: Long, userTimeNanos: Long, - allocatedBytes:Long, retainedHeapBytes:Long, gcTimeMillis:Long) { - def +(that: ProfileCounters) = { - ProfileCounters( - wallClockTimeNanos = this.wallClockTimeNanos + that.wallClockTimeNanos, - idleTimeNanos = this.idleTimeNanos + that.idleTimeNanos, - cpuTimeNanos = this.cpuTimeNanos + that.cpuTimeNanos, - userTimeNanos = this.userTimeNanos + that.userTimeNanos, - allocatedBytes = this.allocatedBytes + that.allocatedBytes, - retainedHeapBytes = this.retainedHeapBytes + that.retainedHeapBytes, - gcTimeMillis = this.gcTimeMillis + that.gcTimeMillis) - } +case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, name:String, action:String, cause:String, threads:Long) - def -(that: ProfileCounters) = { - ProfileCounters( - wallClockTimeNanos = this.wallClockTimeNanos - that.wallClockTimeNanos, - idleTimeNanos = this.idleTimeNanos - that.idleTimeNanos, - cpuTimeNanos = this.cpuTimeNanos - that.cpuTimeNanos, - userTimeNanos = this.userTimeNanos - that.userTimeNanos, - allocatedBytes = this.allocatedBytes - that.allocatedBytes, - retainedHeapBytes = this.retainedHeapBytes - that.retainedHeapBytes, - gcTimeMillis = this.gcTimeMillis - that.gcTimeMillis) +case class ProfileSnap(threadId: Long, threadName: String, snapTimeNanos : Long, + idleTimeNanos:Long, cpuTimeNanos: Long, userTimeNanos: Long, + allocatedBytes:Long, heapBytes:Long) { + def updateHeap(heapBytes:Long) = { + copy(heapBytes = heapBytes) } +} +case class ProfileRange(start: ProfileSnap, end:ProfileSnap, phase:Phase, id:Int, purpose:String, thread:Thread) { + def allocatedBytes = end.allocatedBytes - start.allocatedBytes + + def userNs = end.userTimeNanos - start.userTimeNanos + + def cpuNs = end.cpuTimeNanos - start.cpuTimeNanos + + def idleNs = end.idleTimeNanos - start.idleTimeNanos + + def runNs = end.snapTimeNanos - start.snapTimeNanos - def updateHeap(heapDetails: ProfileCounters) = { - copy(retainedHeapBytes = heapDetails.retainedHeapBytes) - } private def toMillis(ns: Long) = ns / 1000000.0D private def toMegaBytes(bytes: Long) = bytes / 1000000.0D - def wallClockTimeMillis = toMillis(wallClockTimeNanos) - def idleTimeMillis = toMillis(idleTimeNanos) + def wallClockTimeMillis = toMillis(end.snapTimeNanos - start.snapTimeNanos) - def cpuTimeMillis = toMillis(cpuTimeNanos) + def idleTimeMillis = toMillis(end.idleTimeNanos - start.idleTimeNanos) - def userTimeMillis = toMillis(userTimeNanos) + def cpuTimeMillis = toMillis(end.cpuTimeNanos - start.cpuTimeNanos) - def allocatedMB = toMegaBytes(allocatedBytes) + def userTimeMillis = toMillis(end.userTimeNanos - start.userTimeNanos) - def retainedHeapMB = toMegaBytes(retainedHeapBytes) + def allocatedMB = toMegaBytes(end.allocatedBytes - start.allocatedBytes) + def retainedHeapMB = toMegaBytes(end.heapBytes - start.heapBytes) } sealed trait Profiler { - /** Register an action. The action may be in the main thread or more typically in a background thread. - * registration may occur in a different thread to execution - */ - private[profile] def registerInPhase(action: InPhase): Unit - - /** Start to record an action. 
The action may be in the main thread or more typically in a background thread - */ - private[profile] def beforeInPhase(action: InPhase): ProfileCounters - - /** Called after an action completes work - */ - private[profile] def afterInPhase(action: InPhase, counterBefore: ProfileCounters, idleNs: Long): Unit def finished(): Unit - def beforePhase(phase: Phase): ProfileCounters - - def afterPhase(phase: Phase, profileBefore: ProfileCounters): Unit + def beforePhase(phase: Phase): ProfileSnap - protected val emptySnap = ProfileCounters(0, 0, 0, 0, 0, 0, 0) + def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit } private [profile] object NoOpProfiler extends Profiler { - private[profile] override def registerInPhase(action: InPhase): Unit = () - /** Start to record an action. The action may be in the main thread or more typically in a background thread - */ - private[profile] override def beforeInPhase(action: InPhase): ProfileCounters = emptySnap - - /** Called after an action completes work - */ - private[profile] override def afterInPhase(action: InPhase, counterBefore: ProfileCounters, idleNs: Long): Unit = () + override def beforePhase(phase: Phase): ProfileSnap = Profiler.emptySnap - override def beforePhase(phase: Phase): ProfileCounters = emptySnap - - override def afterPhase(phase: Phase, profileBefore: ProfileCounters): Unit = () + override def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit = () override def finished(): Unit = () } @@ -120,6 +89,10 @@ private [profile] object RealProfiler { } private [profile] class RealProfiler(reporter : ProfileReporter, val settings: Settings) extends Profiler with NotificationListener { + def completeBackground(threadRange: ProfileRange): Unit = { + reporter.reportBackground(this, threadRange) + } + def outDir = settings.outputDirs.getSingleOutput.getOrElse(settings.outputDirs.outputs.head._2.file).toString val id = RealProfiler.idGen.incrementAndGet() @@ -130,32 +103,22 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S private val mainThread = Thread.currentThread() - private def snap: ProfileCounters = { + private[profile] def snapThread(): ProfileSnap = { import RealProfiler._ - ProfileCounters( - wallClockTimeNanos = System.nanoTime(), - idleTimeNanos = 0L, - cpuTimeNanos = threadMx.getCurrentThreadCpuTime, - userTimeNanos = threadMx.getCurrentThreadUserTime, - allocatedBytes = threadMx.getThreadAllocatedBytes(Thread.currentThread().getId), - retainedHeapBytes = memoryMx.getHeapMemoryUsage.getUsed, - gcTimeMillis = gcMx.foldLeft(0L) { case (sum, bean) => bean.getCollectionTime + sum } - ) - } + val current = Thread.currentThread() - private def snapBackground(idleNs:Long): ProfileCounters = { - import RealProfiler._ - ProfileCounters( - wallClockTimeNanos = System.nanoTime(), - idleTimeNanos = idleNs, + ProfileSnap( + threadId = current.getId, + threadName = current.getName, + snapTimeNanos = System.nanoTime(), + idleTimeNanos = 0, cpuTimeNanos = threadMx.getCurrentThreadCpuTime, userTimeNanos = threadMx.getCurrentThreadUserTime, allocatedBytes = threadMx.getThreadAllocatedBytes(Thread.currentThread().getId), - retainedHeapBytes = 0L, - gcTimeMillis = 0L - + heapBytes = readHeapUsage() ) } + private def readHeapUsage() = RealProfiler.memoryMx.getHeapMemoryUsage.getUsed private def doGC: Unit = { System.gc() @@ -176,6 +139,8 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S override def handleNotification(notification: Notification, handback: scala.Any): 
Unit = { import java.lang.{Long => jLong} + import java.lang.{Integer => jInt} + val reportNs = System.nanoTime() val data = notification.getUserData val seq = notification.getSequenceNumber val message = notification.getMessage @@ -183,40 +148,34 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S val time= notification.getTimeStamp data match { case cd: CompositeData if tpe == "com.sun.management.gc.notification" => -// val name = cd.get("gcName").toString -// val action = cd.get("gcAction").toString -// val cause = cd.get("gcCause").toString + val name = cd.get("gcName").toString + val action = cd.get("gcAction").toString + val cause = cd.get("gcCause").toString val info = cd.get("gcInfo").asInstanceOf[CompositeData] -// val duration = info.get("duration").asInstanceOf[jLong].longValue() + val duration = info.get("duration").asInstanceOf[jLong].longValue() val startTime = info.get("startTime").asInstanceOf[jLong].longValue() val endTime = info.get("endTime").asInstanceOf[jLong].longValue() -// val threads = info.get("GcThreadCount").asInstanceOf[jLong].longValue() - reporter.reportGc(new GcEventData("", startTime, endTime)) + val threads = info.get("GcThreadCount").asInstanceOf[jInt].longValue() + reporter.reportGc(new GcEventData("", reportNs, startTime, endTime, name, action, cause, threads)) } - } - var total = emptySnap - - override def afterPhase(phase: Phase, profileBefore: ProfileCounters): Unit = { + override def afterPhase(phase: Phase, snapBefore: ProfileSnap): Unit = { assert(mainThread eq Thread.currentThread()) - val initialSnap = snap + val initialSnap = snapThread() if (settings.YprofileExternalTool.containsPhase(phase)) { println("Profile hook stop") ExternalToolHook.after() } val finalSnap = if (settings.YprofileRunGcBetweenPhases.containsPhase(phase)) { doGC - initialSnap.updateHeap(snap) + initialSnap.updateHeap(readHeapUsage()) } else initialSnap - val mainThreadUsage = finalSnap - profileBefore - threadInfo.synchronized { - total += mainThreadUsage - threadInfo(phase).afterPhase(mainThreadUsage) - } + + reporter.reportForeground(this, new ProfileRange(snapBefore, finalSnap, phase, id, "", Thread.currentThread)) } - override def beforePhase(phase: Phase): ProfileCounters = { + override def beforePhase(phase: Phase): ProfileSnap = { assert(mainThread eq Thread.currentThread()) if (settings.YprofileRunGcBetweenPhases.containsPhase(phase)) doGC @@ -224,89 +183,40 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S println("Profile hook start") ExternalToolHook.before() } - threadInfo(phase) = new ThreadInfo(phase) - snap - } - - private val threadInfo = mutable.Map[Phase, ThreadInfo]() - - /** called after an action completes work - */ - - override def registerInPhase(action: InPhase): Unit = threadInfo.synchronized{ - threadInfo.getOrElseUpdate(action.phase, new ThreadInfo(action.phase)).registerInPhase(action) + snapThread() } - override def beforeInPhase(action: InPhase) = snapBackground(0L) - - override def afterInPhase(action: InPhase, profileBefore: ProfileCounters, idleNs: Long): Unit = threadInfo.synchronized { - val inPhaseUsage = snapBackground(idleNs) - profileBefore - threadInfo(action.phase).afterInPhase(action, inPhaseUsage) - } - - class ThreadInfo(phase: Phase) { - private var otherThreadsTotalUsage = emptySnap - private var mainThreadUsage: ProfileCounters = _ - private var hasInPhase = false - private val pending = mutable.Set[Int]() - - def registerInPhase(action: InPhase): Unit = { - 
hasInPhase = true - pending += action.id - } - - def afterInPhase(action: InPhase, inPhaseUsage: ProfileCounters): Unit = { - pending -= action.id - if (mainThread != Thread.currentThread()) { - otherThreadsTotalUsage += inPhaseUsage - reporter.report(RealProfiler.this, phase, EventType.TASK, action.id, action.comment, inPhaseUsage) - if ((pending isEmpty) && (mainThreadUsage ne null)) { - reporter.report(RealProfiler.this, phase, EventType.TOTAL, -1, "--", mainThreadUsage + otherThreadsTotalUsage) - } - } else { - reporter.report(RealProfiler.this, phase, EventType.TASK, action.id, action.comment, inPhaseUsage) - } - } - - def afterPhase(mainThreadUsage: ProfileCounters): Unit = { - this.mainThreadUsage = mainThreadUsage - val eventType = if (hasInPhase) EventType.MAIN else EventType.SINGLE - reporter.report(RealProfiler.this, phase, eventType, -1, "--", mainThreadUsage) - - if (pending isEmpty) { - reporter.report(RealProfiler.this, phase, EventType.TOTAL, -1, "--", mainThreadUsage + otherThreadsTotalUsage) - total += otherThreadsTotalUsage - } else { - println("late reporting for " + phase) - } - } - } } object EventType extends Enumeration { - // only one report for a phase - val SINGLE = Value("single") + type value = Value //main thread with other tasks val MAIN = Value("main") //other task ( background thread) - val TASK = Value("task") - //total for phase - val TOTAL = Value("total") + val BACKGROUND = Value("background") //total for compile - val ALL = Value("all") + val GC = Value("GC") } + sealed trait ProfileReporter { - def reportGc(data: GcEventData): Unit + def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit + def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit - def report(profiler: RealProfiler, phase: Phase, eventType:EventType.Value, id:Int, desc:String, diff: ProfileCounters) : Unit + def reportGc(data: GcEventData): Unit def header(profiler: RealProfiler) :Unit def close(profiler: RealProfiler) :Unit } object ConsoleProfileReporter extends ProfileReporter { - override def report(profiler: RealProfiler, phase: Phase, eventType:EventType.Value, id:Int, desc:String, diff: ProfileCounters): Unit = - println(f"Profiler compile ${profiler.id} after phase ${phase.id}%2d:${phase.name}%20s ${eventType}%10s ${desc}%20s wallClockTime: ${diff.wallClockTimeMillis}%12.4fms, idleTime: ${diff.idleTimeMillis}%12.4fms, cpuTime ${diff.cpuTimeMillis}%12.4fms, userTime ${diff.userTimeMillis}%12.4fms, allocatedBytes ${diff.allocatedMB}%12.4fMB, retainedHeapBytes ${diff.retainedHeapMB}%12.4fMB, gcTime ${diff.gcTimeMillis}%6.0fms") + + + override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = + // TODO + ??? + override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = + // TODO + ??? 
override def close(profiler: RealProfiler): Unit = () @@ -322,14 +232,24 @@ object ConsoleProfileReporter extends ProfileReporter { class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { override def header(profiler: RealProfiler): Unit = { out.println(s"info, ${profiler.id}, ${profiler.outDir}") - out.println(s"header,id,phaseId,phaseName,type,id,comment,wallClockTimeMs,idleTimeMs,cpuTimeMs,userTimeMs,allocatedMB,retainedHeapMB,gcTimeMs") + out.println(s"header(main/background),startNs,endNs,runId,phaseId,phaseName,purpose,threadId,threadName,runNs,idleNs,cpuTimeNs,userTimeNs,allocatedByte,heapSize") + out.println(s"header(GC),startNs,endNs,startMs,endMs,name,action,cause,threads") + } + + override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = { + reportCommon(EventType.BACKGROUND, profiler, threadRange) + } + override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = { + reportCommon(EventType.MAIN, profiler, threadRange) } - override def report(profiler: RealProfiler, phase: Phase, eventType:EventType.Value, id:Int, desc:String, diff: ProfileCounters): Unit = { - out.println(s"data,${profiler.id},${phase.id},${phase.name},${eventType},$id,$desc, ${diff.wallClockTimeMillis},${diff.idleTimeMillis},${diff.cpuTimeMillis},${diff.userTimeMillis},${diff.allocatedMB},${diff.retainedHeapMB},${diff.gcTimeMillis}") + private def reportCommon(tpe:EventType.value, profiler: RealProfiler, threadRange: ProfileRange): Unit = { + out.println(s"$tpe,${threadRange.start.snapTimeNanos},${threadRange.end.snapTimeNanos},${profiler.id},${threadRange.phase.id},${threadRange.phase.name},${threadRange.purpose},${threadRange.thread.getId},${threadRange.thread.getName},${threadRange.runNs},${threadRange.idleNs},${threadRange.cpuNs},${threadRange.userNs},${threadRange.allocatedBytes},${if(tpe == EventType.MAIN) threadRange.end.heapBytes else ""}") } override def reportGc(data: GcEventData): Unit = { - out.println(s"GC,${data.gcStartMillis}, ${data.gcEndMillis}") + val duration = TimeUnit.MILLISECONDS.toNanos(data.gcEndMillis - data.gcStartMillis + 1) + val start = data.reportTimeNs - duration + out.println(s"${EventType.GC},$start,${data.reportTimeNs},${data.gcStartMillis}, ${data.gcEndMillis},${data.name},${data.action},${data.cause},${data.threads}") } From fe0165c6863a64accea4c6c87c2af5fc1c79d368 Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Wed, 29 Nov 2017 23:59:22 +0000 Subject: [PATCH 1162/2793] optimise use of indyLamdaMethods map use a java concurrent map for performance provide API to perform conditional operation based on presence --- .../tools/nsc/backend/jvm/PostProcessor.scala | 6 +- .../backend/jvm/analysis/BackendUtils.scala | 57 ++++++++++++------- 2 files changed, 38 insertions(+), 25 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index e14b0824072b..82f4f6348412 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -49,9 +49,9 @@ abstract class PostProcessor(statistics: Statistics with BackendStats) extends P val bytes = try { if (!isArtifact) { localOptimizations(classNode) - val lambdaImplMethods = backendUtils.getIndyLambdaImplMethods(classNode.name) - if (lambdaImplMethods.nonEmpty) - backendUtils.addLambdaDeserialize(classNode, lambdaImplMethods) + backendUtils.onIndyLambdaImplMethodIfPresent(classNode.name) { + methods 
=> if (methods.nonEmpty) backendUtils.addLambdaDeserialize(classNode, methods) + } } setInnerClasses(classNode) serializeClass(classNode) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index 8caf274b5bd7..8e33ddd56b78 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -7,6 +7,8 @@ import java.lang.invoke.LambdaMetafactory import scala.annotation.{switch, tailrec} import scala.collection.JavaConverters._ import scala.collection.mutable +import java.util.concurrent.ConcurrentHashMap + import scala.tools.asm import scala.tools.asm.Opcodes._ import scala.tools.asm.tree._ @@ -35,7 +37,7 @@ abstract class BackendUtils extends PerRunInit { import bTypes._ import callGraph.ClosureInstantiation import coreBTypes._ - import frontendAccess.{compilerSettings, recordPerRunCache} + import frontendAccess.{compilerSettings, recordPerRunJavaMapCache} /** * Classes with indyLambda closure instantiations where the SAM type is serializable (e.g. Scala's @@ -44,7 +46,9 @@ abstract class BackendUtils extends PerRunInit { * inlining: when inlining an indyLambda instruction into a class, we need to make sure the class * has the method. */ - val indyLambdaImplMethods: mutable.AnyRefMap[InternalName, mutable.LinkedHashSet[asm.Handle]] = recordPerRunCache(mutable.AnyRefMap()) + private val indyLambdaImplMethods: ConcurrentHashMap[InternalName, mutable.LinkedHashSet[asm.Handle]] = recordPerRunJavaMapCache{ + new ConcurrentHashMap[InternalName, mutable.LinkedHashSet[asm.Handle]] + } // unused objects created by these constructors are eliminated by pushPop private[this] lazy val sideEffectFreeConstructors: LazyVar[Set[(String, String)]] = perRunLazy(this) { @@ -364,38 +368,47 @@ abstract class BackendUtils extends PerRunInit { } } - /** + def onIndyLambdaImplMethodIfPresent(hostClass: InternalName) (action : mutable.LinkedHashSet[asm.Handle] => Unit): Unit = + indyLambdaImplMethods.get(hostClass) match { + case null => + case xs => xs.synchronized(action(xs)) + } + + def onIndyLambdaImplMethod[T](hostClass: InternalName) (action: mutable.LinkedHashSet[asm.Handle] => T): T ={ + val methods = indyLambdaImplMethods.computeIfAbsent(hostClass, (_) => mutable.LinkedHashSet[asm.Handle]()) + + methods.synchronized (action(methods)) + } + + /** * add methods * @return the added methods. 
Note the order is undefined */ def addIndyLambdaImplMethod(hostClass: InternalName, handle: Seq[asm.Handle]): Seq[asm.Handle] = { - if (handle.isEmpty) Nil else { - val set = indyLambdaImplMethods.getOrElseUpdate(hostClass, mutable.LinkedHashSet()) - if (set.isEmpty) { - set ++= handle - handle - } else { - var added = List.empty[asm.Handle] - handle foreach { h => if (set.add(h)) added ::= h} - added - } + if (handle.isEmpty) Nil else onIndyLambdaImplMethod(hostClass) { + case set => + if (set.isEmpty) { + set ++= handle + handle + } else { + var added = List.empty[asm.Handle] + handle foreach { h => if (set.add(h)) added ::= h } + added + } } } def addIndyLambdaImplMethod(hostClass: InternalName, handle: asm.Handle): Boolean = { - indyLambdaImplMethods.getOrElseUpdate(hostClass, mutable.LinkedHashSet()).add(handle) + onIndyLambdaImplMethod(hostClass) { + _ add handle + } } def removeIndyLambdaImplMethod(hostClass: InternalName, handle: Seq[asm.Handle]): Unit = { if (handle.nonEmpty) - indyLambdaImplMethods.get(hostClass).foreach(_ --= handle) - } - - def getIndyLambdaImplMethods(hostClass: InternalName): Iterable[asm.Handle] = { - indyLambdaImplMethods.getOrNull(hostClass) match { - case null => Nil - case xs => xs - } + onIndyLambdaImplMethodIfPresent(hostClass) { + _ --= handle + } } /** From b80987552849e4303e406239fffa6d85da19165c Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 6 Dec 2017 11:10:08 -0800 Subject: [PATCH 1163/2793] Backtick underscore in REPL Since underscore can be a member, it must be backticked in a path such as `X._`. --- src/repl/scala/tools/nsc/interpreter/IMain.scala | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index 2ae860fee6ea..060a6044defe 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -287,7 +287,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def backticked(s: String): String = ( (s split '.').toList map { - case "_" => "_" + case "_" => "`_`" case s if nme.keywords(newTermName(s)) => s"`$s`" case s => s } mkString "." @@ -314,8 +314,10 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends /** For class based repl mode we use an .INSTANCE accessor. */ val readInstanceName = if (isClassBased) ".INSTANCE" else "" def translateOriginalPath(p: String): String = { - val readName = java.util.regex.Matcher.quoteReplacement(sessionNames.read) - p.replaceFirst(readName, readName + readInstanceName) + if (isClassBased) { + val readName = java.util.regex.Matcher.quoteReplacement(sessionNames.read) + p.replaceFirst(readName, readName + readInstanceName) + } else p } def flatPath(sym: Symbol): String = flatOp shift sym.javaClassName From 6d1391233d8710d6bb5c8ba5b2be76c6fb437c96 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 8 Dec 2017 12:25:07 +0100 Subject: [PATCH 1164/2793] Revert "optimise use of indyLamdaMethods map" This reverts commit fe0165c6863a64accea4c6c87c2af5fc1c79d368. 
See scala/scala-dev#457 --- .../tools/nsc/backend/jvm/PostProcessor.scala | 6 +- .../backend/jvm/analysis/BackendUtils.scala | 57 +++++++------------ 2 files changed, 25 insertions(+), 38 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index 82f4f6348412..e14b0824072b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -49,9 +49,9 @@ abstract class PostProcessor(statistics: Statistics with BackendStats) extends P val bytes = try { if (!isArtifact) { localOptimizations(classNode) - backendUtils.onIndyLambdaImplMethodIfPresent(classNode.name) { - methods => if (methods.nonEmpty) backendUtils.addLambdaDeserialize(classNode, methods) - } + val lambdaImplMethods = backendUtils.getIndyLambdaImplMethods(classNode.name) + if (lambdaImplMethods.nonEmpty) + backendUtils.addLambdaDeserialize(classNode, lambdaImplMethods) } setInnerClasses(classNode) serializeClass(classNode) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index 8e33ddd56b78..8caf274b5bd7 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -7,8 +7,6 @@ import java.lang.invoke.LambdaMetafactory import scala.annotation.{switch, tailrec} import scala.collection.JavaConverters._ import scala.collection.mutable -import java.util.concurrent.ConcurrentHashMap - import scala.tools.asm import scala.tools.asm.Opcodes._ import scala.tools.asm.tree._ @@ -37,7 +35,7 @@ abstract class BackendUtils extends PerRunInit { import bTypes._ import callGraph.ClosureInstantiation import coreBTypes._ - import frontendAccess.{compilerSettings, recordPerRunJavaMapCache} + import frontendAccess.{compilerSettings, recordPerRunCache} /** * Classes with indyLambda closure instantiations where the SAM type is serializable (e.g. Scala's @@ -46,9 +44,7 @@ abstract class BackendUtils extends PerRunInit { * inlining: when inlining an indyLambda instruction into a class, we need to make sure the class * has the method. */ - private val indyLambdaImplMethods: ConcurrentHashMap[InternalName, mutable.LinkedHashSet[asm.Handle]] = recordPerRunJavaMapCache{ - new ConcurrentHashMap[InternalName, mutable.LinkedHashSet[asm.Handle]] - } + val indyLambdaImplMethods: mutable.AnyRefMap[InternalName, mutable.LinkedHashSet[asm.Handle]] = recordPerRunCache(mutable.AnyRefMap()) // unused objects created by these constructors are eliminated by pushPop private[this] lazy val sideEffectFreeConstructors: LazyVar[Set[(String, String)]] = perRunLazy(this) { @@ -368,47 +364,38 @@ abstract class BackendUtils extends PerRunInit { } } - def onIndyLambdaImplMethodIfPresent(hostClass: InternalName) (action : mutable.LinkedHashSet[asm.Handle] => Unit): Unit = - indyLambdaImplMethods.get(hostClass) match { - case null => - case xs => xs.synchronized(action(xs)) - } - - def onIndyLambdaImplMethod[T](hostClass: InternalName) (action: mutable.LinkedHashSet[asm.Handle] => T): T ={ - val methods = indyLambdaImplMethods.computeIfAbsent(hostClass, (_) => mutable.LinkedHashSet[asm.Handle]()) - - methods.synchronized (action(methods)) - } - - /** + /** * add methods * @return the added methods. 
Note the order is undefined */ def addIndyLambdaImplMethod(hostClass: InternalName, handle: Seq[asm.Handle]): Seq[asm.Handle] = { - if (handle.isEmpty) Nil else onIndyLambdaImplMethod(hostClass) { - case set => - if (set.isEmpty) { - set ++= handle - handle - } else { - var added = List.empty[asm.Handle] - handle foreach { h => if (set.add(h)) added ::= h } - added - } + if (handle.isEmpty) Nil else { + val set = indyLambdaImplMethods.getOrElseUpdate(hostClass, mutable.LinkedHashSet()) + if (set.isEmpty) { + set ++= handle + handle + } else { + var added = List.empty[asm.Handle] + handle foreach { h => if (set.add(h)) added ::= h} + added + } } } def addIndyLambdaImplMethod(hostClass: InternalName, handle: asm.Handle): Boolean = { - onIndyLambdaImplMethod(hostClass) { - _ add handle - } + indyLambdaImplMethods.getOrElseUpdate(hostClass, mutable.LinkedHashSet()).add(handle) } def removeIndyLambdaImplMethod(hostClass: InternalName, handle: Seq[asm.Handle]): Unit = { if (handle.nonEmpty) - onIndyLambdaImplMethodIfPresent(hostClass) { - _ --= handle - } + indyLambdaImplMethods.get(hostClass).foreach(_ --= handle) + } + + def getIndyLambdaImplMethods(hostClass: InternalName): Iterable[asm.Handle] = { + indyLambdaImplMethods.getOrNull(hostClass) match { + case null => Nil + case xs => xs + } } /** From 06347fc82d3d438d57988d93c665ac21aaef89bf Mon Sep 17 00:00:00 2001 From: ghik Date: Thu, 7 Dec 2017 21:12:10 +0100 Subject: [PATCH 1165/2793] emit parameter names for static forwarders --- .../scala/tools/nsc/backend/jvm/BCodeHelpers.scala | 1 + test/files/run/{t9437c.check => t10650.check} | 5 ----- .../run/{t9437b/Test_2.scala => t10650/Test.scala} | 10 ++++++---- test/files/run/t9437b/Foo_1.scala | 3 --- test/files/run/{t9437c => t9437b}/Test.scala | 0 5 files changed, 7 insertions(+), 12 deletions(-) rename test/files/run/{t9437c.check => t10650.check} (50%) rename test/files/run/{t9437b/Test_2.scala => t10650/Test.scala} (59%) delete mode 100644 test/files/run/t9437b/Foo_1.scala rename test/files/run/{t9437c => t9437b}/Test.scala (100%) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 49d2b3e67263..0f65f9e4c7cc 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -839,6 +839,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { mkArray(thrownExceptions) ) + emitParamNames(mirrorMethod, m.info.params) emitAnnotations(mirrorMethod, others) emitParamAnnotations(mirrorMethod, m.info.params.map(_.annotations)) diff --git a/test/files/run/t9437c.check b/test/files/run/t10650.check similarity index 50% rename from test/files/run/t9437c.check rename to test/files/run/t10650.check index 564213c5877b..f011cd849116 100644 --- a/test/files/run/t9437c.check +++ b/test/files/run/t10650.check @@ -3,8 +3,3 @@ name: _; isNamePresent: true; isSynthetic: false name: ***; isNamePresent: true; isSynthetic: false name: unary_!; isNamePresent: true; isSynthetic: false name: ABC; isNamePresent: true; isSynthetic: false -name: a; isNamePresent: true; isSynthetic: false -name: _; isNamePresent: true; isSynthetic: false -name: ***; isNamePresent: true; isSynthetic: false -name: unary_!; isNamePresent: true; isSynthetic: false -name: ABC; isNamePresent: true; isSynthetic: false diff --git a/test/files/run/t9437b/Test_2.scala b/test/files/run/t10650/Test.scala similarity index 59% rename from test/files/run/t9437b/Test_2.scala rename to 
test/files/run/t10650/Test.scala index 521f525f1dd9..a32e8d4df5a9 100644 --- a/test/files/run/t9437b/Test_2.scala +++ b/test/files/run/t10650/Test.scala @@ -1,5 +1,9 @@ +class Foo +object Foo { + def bar(a: Int, `_`: String, *** : Long, `unary_!` : Float, ABC: Double) = null +} + object Test extends App { - val constrParams = classOf[Foo].getConstructors.head.getParameters val methodParams = classOf[Foo].getDeclaredMethods.head.getParameters def printParams(params: Array[java.lang.reflect.Parameter]) = { @@ -8,9 +12,7 @@ object Test extends App { } } - printParams(constrParams) printParams(methodParams) - val foo = new Foo(a = 1, `_` = "2", *** = 3L, `unary_!` = 4.0f, ABC = 5.0) - foo.bar(a = 1, `_` = "2", *** = 3L, `unary_!` = 4.0f, ABC = 5.0) + Foo.bar(a = 1, `_` = "2", *** = 3L, `unary_!` = 4.0f, ABC = 5.0) } diff --git a/test/files/run/t9437b/Foo_1.scala b/test/files/run/t9437b/Foo_1.scala deleted file mode 100644 index ca6c9c6156af..000000000000 --- a/test/files/run/t9437b/Foo_1.scala +++ /dev/null @@ -1,3 +0,0 @@ -class Foo(a: Int, `_`: String, *** : Long, `unary_!` : Float, ABC: Double) { - def bar(a: Int, `_`: String, *** : Long, `unary_!` : Float, ABC: Double) = null -} diff --git a/test/files/run/t9437c/Test.scala b/test/files/run/t9437b/Test.scala similarity index 100% rename from test/files/run/t9437c/Test.scala rename to test/files/run/t9437b/Test.scala From 7de41a40ecb0c5fbb8450823dc08218d93f4dc81 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ha=CC=8Akon=20Hjelde=20Wold?= Date: Fri, 14 Jul 2017 11:59:49 +0200 Subject: [PATCH 1166/2793] Optimized tails in LinearSeqOptimized Fixes scala/bug/#9892 --- .../scala/collection/LinearSeqOptimized.scala | 3 +++ .../collection/LinearSeqOptimizedTest.scala | 16 ++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala index 68b85dcfe508..e545953b2558 100644 --- a/src/library/scala/collection/LinearSeqOptimized.scala +++ b/src/library/scala/collection/LinearSeqOptimized.scala @@ -315,4 +315,7 @@ trait LinearSeqOptimized[+A, +Repr <: LinearSeqOptimized[A, Repr]] extends Linea } last } + + override /*TraversableLike*/ + def tails: Iterator[Repr] = Iterator.iterate(repr)(_.tail).takeWhile(_.nonEmpty) ++ Iterator(newBuilder.result) } diff --git a/test/junit/scala/collection/LinearSeqOptimizedTest.scala b/test/junit/scala/collection/LinearSeqOptimizedTest.scala index 2e22705fb462..563eab36109a 100644 --- a/test/junit/scala/collection/LinearSeqOptimizedTest.scala +++ b/test/junit/scala/collection/LinearSeqOptimizedTest.scala @@ -16,4 +16,20 @@ class LinearSeqOptimizedTest { assertEquals(2, "abcde".toList.indexWhere(_ == 'c', -1)) assertEquals(2, "abcde".toList.indexWhere(_ == 'c', -2)) } + + @Test def test_efficientTails_list_SI9892: Unit = { + val tails = List(1,2,3,4).tails.toList + + assert(tails(0).tail eq tails(1)) + assert(tails(0).tail.tail eq tails(2)) + assert(tails(1).tail eq tails(2)) + assert(tails(3).tail eq tails(4)) + assert(tails(4) eq List()) + } + + @Test def test_efficientTails_stream_SI9892: Unit = { + val stream = Stream.from(1) + val tails = stream.tails.toStream + assert(tails.head eq stream) + } } From 434c4138fe93b162c78c6a16267434c07b192876 Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Tue, 12 Dec 2017 13:14:00 +0100 Subject: [PATCH 1167/2793] [backport] Use ":" as separator for sbt ScalaVersionSetting --- project/ScalaOptionParser.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/project/ScalaOptionParser.scala b/project/ScalaOptionParser.scala index 0208921959df..94a92a1acdca 100644 --- a/project/ScalaOptionParser.scala +++ b/project/ScalaOptionParser.scala @@ -37,7 +37,7 @@ object ScalaOptionParser { MultiChoiceSetting(name, phases) } def ScalaVersionSetting(name: String): Parser[String] = { - concat(concat(token(name ~ Space.string)) ~ token(StringBasic, TokenCompletions.displayOnly(""))) + concat(concat(token(name ~ ":")) ~ token(StringBasic, TokenCompletions.displayOnly(""))) } val Property: Parser[String] = { val PropName = concat(token("-D" ~ oneOrMore(NotSpaceClass & not('=', "not =")).string, TokenCompletions.displayOnly("-D"))) From 6282dd42e7a09b8a97086d2548376b05761377fa Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Fri, 15 Dec 2017 20:14:12 +0000 Subject: [PATCH 1168/2793] remove some redundant calls to Symbol.sourceFile SymbolSourceFile can walk a tree if not root so can be expensive --- .../tools/nsc/symtab/BrowsingLoaders.scala | 24 +++++++++++-------- .../tools/nsc/typechecker/Implicits.scala | 12 ++++++---- .../scala/tools/nsc/typechecker/Namers.scala | 12 ++++++---- .../interactive/tests/core/CoreTestDefs.scala | 7 +++--- .../tools/nsc/doc/model/ModelFactory.scala | 11 +++++---- 5 files changed, 39 insertions(+), 27 deletions(-) diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala index 3ac283b9a43a..1051dc7afbca 100644 --- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala @@ -36,18 +36,22 @@ abstract class BrowsingLoaders extends GlobalSymbolLoaders { if (existing == NoSymbol) { decls enter member member - } else if (existing.sourceFile == null) { - decls unlink existing - decls enter member - member } else { - if (member.sourceFile != null) { - if (existing.sourceFile != member.sourceFile) - error(member+"is defined twice,"+ - "\n in "+existing.sourceFile+ - "\n and also in "+member.sourceFile) + val existingSourceFile = existing.sourceFile + if (existingSourceFile == null) { + decls unlink existing + decls enter member + member + } else { + val memberSourceFile = member.sourceFile + if (memberSourceFile != null) { + if (existingSourceFile != memberSourceFile) + error(member+"is defined twice,"+ + "\n in "+existingSourceFile+ + "\n and also in "+memberSourceFile) + } + existing } - existing } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index b2e01aa203db..b8bd86a709ea 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -818,11 +818,13 @@ trait Implicits { } else !(owner hasTransOwner sym)) // faster than owner.ownerChain contains sym } - sym.isInitialized || - sym.sourceFile == null || - (sym.sourceFile ne context.unit.source.file) || - hasExplicitResultType(sym) || - comesBefore(sym, context.owner) + sym.isInitialized || { + val sourceFile = sym.sourceFile + sourceFile == null || + (sourceFile ne context.unit.source.file) || + hasExplicitResultType(sym) || + comesBefore(sym, context.owner) + } } /** Prune ImplicitInfos down to either all the eligible ones or the best one. 
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 78c9d2964e69..196f4156f32f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -358,13 +358,15 @@ trait Namers extends MethodSynthesis { } private def enterClassSymbol(tree: ClassDef, clazz: ClassSymbol): Symbol = { - if (clazz.sourceFile != null && clazz.sourceFile != contextFile) - devWarning(s"Source file mismatch in $clazz: ${clazz.sourceFile} vs. $contextFile") + var sourceFile = clazz.sourceFile + if (sourceFile != null && sourceFile != contextFile) + devWarning(s"Source file mismatch in $clazz: ${sourceFile} vs. $contextFile") clazz.associatedFile = contextFile - if (clazz.sourceFile != null) { - assert(currentRun.canRedefine(clazz) || clazz.sourceFile == currentRun.symSource(clazz), clazz.sourceFile) - currentRun.symSource(clazz) = clazz.sourceFile + sourceFile = clazz.sourceFile + if (sourceFile != null) { + assert(currentRun.canRedefine(clazz) || sourceFile == currentRun.symSource(clazz), sourceFile) + currentRun.symSource(clazz) = sourceFile } registerTopLevelSym(clazz) assert(clazz.name.toString.indexOf('(') < 0, clazz.name) // ) diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala index 343986a45dd9..f5cc0f65bc2f 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala @@ -100,10 +100,11 @@ private[tests] trait CoreTestDefs else { reporter.println("\naskHyperlinkPos for `" + tree.symbol.name + "` at " + format(pos) + " " + pos.source.file.name) val r = new Response[Position] + val sourceFile = tree.symbol.sourceFile // `tree.symbol.sourceFile` was discovered to be null when testing using virtpatmat on the akka presentation test, where a position had shifted to point to `Int` // askHyperlinkPos for `Int` at (73,19) pi.scala --> class Int in package scala has null sourceFile! - val treePath = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.path else null - val treeName = if (tree.symbol.sourceFile ne null) tree.symbol.sourceFile.name else null + val treePath = if (sourceFile ne null) sourceFile.path else null + val treeName = if (sourceFile ne null) sourceFile.name else null sourceFiles.find(_.path == treePath) match { case Some(source) => @@ -112,7 +113,7 @@ private[tests] trait CoreTestDefs case Left(pos) => val resolvedPos = if (tree.symbol.pos.isDefined) tree.symbol.pos else pos withResponseDelimiter { - reporter.println("[response] found askHyperlinkPos for `" + tree.symbol.name + "` at " + format(resolvedPos) + " " + tree.symbol.sourceFile.name) + reporter.println("[response] found askHyperlinkPos for `" + tree.symbol.name + "` at " + format(resolvedPos) + " " + sourceFile.name) } case Right(ex) => ex.printStackTrace() diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala index 1e3ec82bced2..918093f302e6 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala @@ -279,11 +279,13 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { protected def reprSymbol: Symbol = sym - def inSource = - if (reprSymbol.sourceFile != null && ! 
reprSymbol.isSynthetic) - Some((reprSymbol.sourceFile, reprSymbol.pos.line)) + def inSource = { + val sourceFile = reprSymbol.sourceFile + if (sourceFile != null && !reprSymbol.isSynthetic) + Some((sourceFile, reprSymbol.pos.line)) else None + } def sourceUrl = { def fixPath(s: String) = s.replaceAll("\\" + java.io.File.separator, "/") @@ -878,8 +880,9 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { override val name = newName def defaultValue = if (aSym.hasDefault) { + val sourceFile = aSym.sourceFile // units.filter should return only one element - (currentRun.units filter (_.source.file == aSym.sourceFile)).toList match { + (currentRun.units filter (_.source.file == sourceFile)).toList match { case List(unit) => // scala/bug#4922 `sym == aSym` is insufficient if `aSym` is a clone of symbol // of the parameter in the tree, as can happen with type parameterized methods. From e34ba609b1111b1f84cc346d0af0520aa4f8c769 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 17 Dec 2017 14:56:29 -0800 Subject: [PATCH 1169/2793] Extra caution in namer treating parents Parents have not been validated yet, so only use `addChild` when `isClass`. --- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 4 ++-- test/files/neg/t10661.check | 4 ++++ test/files/neg/t10661.scala | 4 ++++ 3 files changed, 10 insertions(+), 2 deletions(-) create mode 100644 test/files/neg/t10661.check create mode 100644 test/files/neg/t10661.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 78c9d2964e69..114149b3e6e9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1115,7 +1115,7 @@ trait Namers extends MethodSynthesis { val pending = mutable.ListBuffer[AbsTypeError]() parentTrees foreach { tpt => val ptpe = tpt.tpe - if(!ptpe.isError) { + if (!ptpe.isError) { val psym = ptpe.typeSymbol val sameSourceFile = context.unit.source.file == psym.sourceFile @@ -1124,7 +1124,7 @@ trait Namers extends MethodSynthesis { psym addChild context.owner else pending += ParentSealedInheritanceError(tpt, psym) - if (psym.isLocalToBlock && !phase.erasedTypes) + if (psym.isLocalToBlock && psym.isClass && !phase.erasedTypes) psym addChild context.owner } } diff --git a/test/files/neg/t10661.check b/test/files/neg/t10661.check new file mode 100644 index 000000000000..02e41b4c79bf --- /dev/null +++ b/test/files/neg/t10661.check @@ -0,0 +1,4 @@ +t10661.scala:3: error: class type required but A found + def f[A] = new C with A + ^ +one error found diff --git a/test/files/neg/t10661.scala b/test/files/neg/t10661.scala new file mode 100644 index 000000000000..fe1187331eb2 --- /dev/null +++ b/test/files/neg/t10661.scala @@ -0,0 +1,4 @@ + +class C { + def f[A] = new C with A +} From 9691d8dbb54185bce327cd4a095666b1f400277c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 19 Dec 2017 22:40:50 +1000 Subject: [PATCH 1170/2793] Reduce overhead of enabling -Ystatistics MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The implementation trick of using an AlmostFinalValue to have zero cost for the "isEnabled" check in the common case has a small flaw: the switchpoint is tripped _every_ time stats is enabled, rather than just on the first time. This discards a swathe of JIT compiled code each time a Global is started with `-Ystatistics`. This commit avoids tripping the switchpoint redundantly. 
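The shape of the fix is just an idempotence guard around the enable call. A rough sketch of the pattern in Scala (illustrative only; the real code is the Java class `StatisticsStatics` below, which uses `AlmostFinalValue` and a switchpoint rather than a plain flag):

```
// Minimal sketch, assuming a plain mutable flag stands in for the switchpoint-backed value.
// The point is to flip state only on an actual false -> true transition, so that enabling
// stats repeatedly does not repeatedly invalidate (and discard) JIT-compiled callers.
object StatsFlag {
  private[this] var enabled = false          // stand-in for COLD_STATS / HOT_STATS
  def areStatsEnabled: Boolean = enabled
  def enableStats(): Unit =
    if (!areStatsEnabled) enabled = true     // the guard makes enable idempotent
  def disableStats(): Unit = enabled = false
}
```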
Performance: ``` ⚡ for extra in "-Ystatistics:_" ""; do for v in 2.12.5-bin-91649d1-SNAPSHOT 2.12.4; do echo $v $extra; sbt 'set scalaVersion in compilation := "'$v'"' 'hot -psource=scalap -f1 -wi 5 -i 3 -pextraArgs='$extra | egrep 'HotScalacBenchmark.compile\s'; done; done 2.12.5-bin-91649d1-SNAPSHOT -Ystatistics:_ [info] HotScalacBenchmark.compile a8c43dc -Ystatistics:_ false scalap sample 33 973.523 ± 23.389 ms/op 2.12.4 -Ystatistics:_ [info] HotScalacBenchmark.compile a8c43dc -Ystatistics:_ false scalap sample 12 2921.333 ± 177.831 ms/op 2.12.5-bin-91649d1-SNAPSHOT [info] HotScalacBenchmark.compile a8c43dc false scalap sample 38 811.846 ± 13.436 ms/op 2.12.4 [info] HotScalacBenchmark.compile a8c43dc false scalap sample 38 820.814 ± 17.809 ms/op ``` There is still more overhead than I would like, and it might still make sense to move a few stats back into the "hot" category. --- .../scala/reflect/internal/util/StatisticsStatics.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java index a7a2e02f7144..3670af20588c 100644 --- a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java +++ b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java @@ -48,7 +48,8 @@ public static boolean areSomeHotStatsEnabled() { } public static void enableColdStats() { - COLD_STATS.setValue(new TrueContainer()); + if (!areSomeColdStatsEnabled()) + COLD_STATS.setValue(new TrueContainer()); } public static void disableColdStats() { @@ -56,7 +57,8 @@ public static void disableColdStats() { } public static void enableHotStats() { - HOT_STATS.setValue(new TrueContainer()); + if (!areSomeHotStatsEnabled()) + HOT_STATS.setValue(new TrueContainer()); } public static void disableHotStats() { From 627781b53aed6c6d2407b79d4f114aa3b89a4b7f Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Tue, 12 Dec 2017 12:36:12 -0500 Subject: [PATCH 1171/2793] Tail-recursive Tseitin model solver. Large (hopefully computer-generated) matches can lead to a search to become deep enough to send `findTseitinModelFor` into a `StackOverflowError`. The change here is a faithful reification of the call stack as a `List`. This fixes scala/bug#10387. Additionally, some `Set[Int]`s in which the elements won't be negative are changed to use `BitSet`s instead, to maybe help performance. 
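For reference, the general trick is independent of the solver details: replace recursive descent, which consumes JVM stack, with a tail-recursive loop whose pending work lives in an explicit `List`, so search depth is bounded by heap instead. A self-contained sketch on a toy tree (only an illustration of the technique, not the DPLL code in the diff below):

```
sealed trait Tree
case class Leaf(value: Int) extends Tree
case class Node(left: Tree, right: Tree) extends Tree

// Depth-first search for a leaf satisfying `p`, in constant stack space:
// the List of pending subtrees plays the role of the call stack.
def findLeaf(root: Tree)(p: Int => Boolean): Option[Int] = {
  @annotation.tailrec
  def loop(pending: List[Tree]): Option[Int] = pending match {
    case Nil                => None
    case Leaf(v) :: rest    => if (p(v)) Some(v) else loop(rest)
    case Node(l, r) :: rest => loop(l :: r :: rest) // push children to keep the search depth-first
  }
  loop(root :: Nil)
}
```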
My wholly unscientific benchmark against the attached test case yields: ===== BEFORE ===== time spent in patmat : 11 spans, 145843ms of which DPLL : 280 spans, 62026ms (42.5%) of which in CNF conversion : 0 spans, 0ms (0.0%) of which variable equality : 17 spans, 129ms (0.1%) of which in exhaustivity : 1 spans, 26361ms (18.1%) of which in unreachability : 8 spans, 101925ms (69.9%) time spent in patmat : 11 spans, 161592ms of which DPLL : 280 spans, 64320ms (39.8%) of which in CNF conversion : 0 spans, 0ms (0.0%) of which variable equality : 17 spans, 137ms (0.1%) of which in exhaustivity : 1 spans, 29818ms (18.5%) of which in unreachability : 8 spans, 110926ms (68.6%) time spent in patmat : 11 spans, 161660ms of which DPLL : 280 spans, 68797ms (42.6%) of which in CNF conversion : 0 spans, 0ms (0.0%) of which variable equality : 17 spans, 142ms (0.1%) of which in exhaustivity : 1 spans, 27751ms (17.2%) of which in unreachability : 8 spans, 114899ms (71.1%) time spent in patmat : 11 spans, 151320ms of which DPLL : 280 spans, 64325ms (42.5%) of which in CNF conversion : 0 spans, 0ms (0.0%) of which variable equality : 17 spans, 126ms (0.1%) of which in exhaustivity : 1 spans, 26645ms (17.6%) of which in unreachability : 8 spans, 106730ms (70.5%) time spent in patmat : 11 spans, 143872ms of which DPLL : 280 spans, 62331ms (43.3%) of which in CNF conversion : 0 spans, 0ms (0.0%) of which variable equality : 17 spans, 134ms (0.1%) of which in exhaustivity : 1 spans, 24667ms (17.1%) of which in unreachability : 8 spans, 102261ms (71.1%) ===== AFTER ===== time spent in patmat : 11 spans, 138693ms of which DPLL : 280 spans, 42176ms (30.4%) of which in CNF conversion : 0 spans, 0ms (0.0%) of which variable equality : 17 spans, 195ms (0.1%) of which in exhaustivity : 1 spans, 30043ms (21.7%) of which in unreachability : 8 spans, 85335ms (61.5%) time spent in patmat : 11 spans, 124888ms of which DPLL : 280 spans, 40456ms (32.4%) of which in CNF conversion : 0 spans, 0ms (0.0%) of which variable equality : 17 spans, 134ms (0.1%) of which in exhaustivity : 1 spans, 25034ms (20.0%) of which in unreachability : 8 spans, 82255ms (65.9%) time spent in patmat : 11 spans, 167081ms of which DPLL : 280 spans, 40552ms (24.3%) of which in CNF conversion : 0 spans, 0ms (0.0%) of which variable equality : 17 spans, 223ms (0.1%) of which in exhaustivity : 1 spans, 27742ms (16.6%) of which in unreachability : 8 spans, 119810ms (71.7%) time spent in patmat : 11 spans, 130727ms of which DPLL : 280 spans, 40632ms (31.1%) of which in CNF conversion : 0 spans, 0ms (0.0%) of which variable equality : 17 spans, 158ms (0.1%) of which in exhaustivity : 1 spans, 25878ms (19.8%) of which in unreachability : 8 spans, 86081ms (65.8%) time spent in patmat : 11 spans, 132853ms of which DPLL : 280 spans, 41660ms (31.4%) of which in CNF conversion : 0 spans, 0ms (0.0%) of which variable equality : 17 spans, 142ms (0.1%) of which in exhaustivity : 1 spans, 28138ms (21.2%) of which in unreachability : 8 spans, 86297ms (65.0%) --- .../tools/nsc/transform/patmat/Solving.scala | 126 +++++--- test/files/pos/t10387.flags | 1 + test/files/pos/t10387.scala | 269 ++++++++++++++++++ 3 files changed, 353 insertions(+), 43 deletions(-) create mode 100644 test/files/pos/t10387.flags create mode 100644 test/files/pos/t10387.scala diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala index 93b1c746af14..ecd2211441c8 100644 --- 
a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala @@ -1,14 +1,13 @@ /* NSC -- new Scala compiler * - * Copyright 2011-2013 LAMP/EPFL + * Copyright 2011-2017 LAMP/EPFL * @author Adriaan Moors */ package scala.tools.nsc.transform.patmat import scala.collection.mutable.ArrayBuffer -import scala.language.postfixOps -import scala.collection.mutable +import scala.collection.{immutable,mutable} import scala.reflect.internal.util.Collections._ import scala.reflect.internal.util.Position import scala.reflect.internal.util.StatisticsStatics @@ -57,7 +56,8 @@ trait Solving extends Logic { val symForVar: Map[Int, Sym] = variableForSymbol.map(_.swap) - val relevantVars: Set[Int] = symForVar.keySet.map(math.abs) + val relevantVars: immutable.BitSet = + symForVar.keySet.map(math.abs)(collection.breakOut) def lit(sym: Sym): Lit = Lit(variableForSymbol(sym)) @@ -186,7 +186,7 @@ trait Solving extends Logic { // (!o \/ op1) /\ (!o \/ op2) ... (!o \/ opx) /\ (!op1 \/ !op2 \/... \/ !opx \/ o) val new_bv = bv - constTrue // ignore `True` val o = newLiteral() // auxiliary Tseitin variable - new_bv.map(op => addClauseProcessed(clause(op, -o))) + new_bv.foreach(op => addClauseProcessed(clause(op, -o))) o } } @@ -374,7 +374,7 @@ trait Solving extends Logic { def cnfString(f: Array[Clause]): String = { val lits: Array[List[String]] = f map (_.map(_.toString).toList) - val xss: List[List[String]] = lits toList + val xss: List[List[String]] = lits.toList val aligned: String = alignAcrossRows(xss, "\\/", " /\\\n") aligned } @@ -401,7 +401,7 @@ trait Solving extends Logic { // we must take all vars from non simplified formula // otherwise if we get `T` as formula, we don't expand the variables // that are not in the formula... - val relevantVars: Set[Int] = solvable.symbolMapping.relevantVars + val relevantVars: immutable.BitSet = solvable.symbolMapping.relevantVars // debug.patmat("vars "+ vars) // the negation of a model -(S1=True/False /\ ... 
/\ SN=True/False) = clause(S1=False/True, ...., SN=False/True) @@ -455,7 +455,7 @@ trait Solving extends Logic { */ private def dropUnit(clauses: Array[Clause], unitLit: Lit): Array[Clause] = { val negated = -unitLit - val simplified = new ArrayBuffer[Clause](clauses.size) + val simplified = new ArrayBuffer[Clause](clauses.length) clauses foreach { case trivial if trivial contains unitLit => // drop case clause => simplified += clause - negated @@ -468,50 +468,90 @@ trait Solving extends Logic { } def findTseitinModelFor(clauses: Array[Clause]): TseitinModel = { - @inline def orElse(a: TseitinModel, b: => TseitinModel) = if (a ne NoTseitinModel) a else b - debug.patmat(s"DPLL\n${cnfString(clauses)}") val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.patmatAnaDPLL) else null - val satisfiableWithModel: TseitinModel = - if (clauses isEmpty) EmptyTseitinModel - else if (clauses exists (_.isEmpty)) NoTseitinModel - else clauses.find(_.size == 1) match { - case Some(unitClause) => - val unitLit = unitClause.head - withLit(findTseitinModelFor(dropUnit(clauses, unitLit)), unitLit) - case _ => - // partition symbols according to whether they appear in positive and/or negative literals - val pos = new mutable.HashSet[Int]() - val neg = new mutable.HashSet[Int]() - mforeach(clauses)(lit => if (lit.positive) pos += lit.variable else neg += lit.variable) - - // appearing in both positive and negative - val impures = pos intersect neg - // appearing only in either positive/negative positions - val pures = (pos ++ neg) -- impures - - if (pures nonEmpty) { - val pureVar = pures.head - // turn it back into a literal - // (since equality on literals is in terms of equality - // of the underlying symbol and its positivity, simply construct a new Lit) - val pureLit = Lit(if (neg(pureVar)) -pureVar else pureVar) - // debug.patmat("pure: "+ pureLit +" pures: "+ pures +" impures: "+ impures) - val simplified = clauses.filterNot(_.contains(pureLit)) - withLit(findTseitinModelFor(simplified), pureLit) - } else { - val split = clauses.head.head - // debug.patmat("split: "+ split) - orElse(findTseitinModelFor(clauses :+ clause(split)), findTseitinModelFor(clauses :+ clause(-split))) - } - } + val satisfiableWithModel = findTseitinModel0((clauses, Set.empty[Lit]) :: Nil) if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatAnaDPLL, start) satisfiableWithModel } + type TseitinSearch = List[(Array[Clause], Set[Lit])] + + /** An implementation of the DPLL algorithm for checking statisfiability + * of a Boolean formula in CNF (conjunctive normal form). + * + * This is a backtracking, depth-first algorithm, which searches a + * (conceptual) decision tree the nodes of which represent assignments + * of truth values to variables. The algorithm works like so: + * + * - If there are any empty clauses, the formula is unsatisifable. + * - If there are no clauses, the formula is trivially satisfiable. + * - If there is a clause with a single positive (rsp. negated) variable + * in it, any solution must assign it the value `true` (rsp. `false`). + * Therefore, assign it that value, and perform Boolean Constraint + * Propagation on the remaining clauses: + * - Any disjunction containing the variable in a positive (rsp. negative) + * usage is trivially true, and can be dropped. + * - Any disjunction containing the variable in a negative (rsp. positive) + * context will not be satisfied using that variable, so it can be + * removed from the disjunction. 
+ * - Otherwise, pick a variable: + * - If it always (rsp. never) appears negated (a pure variable), then + * any solution must assign the value `true` to it (rsp. `false`) + * - Otherwise, try to solve the formula assuming that the variable is + * `true`; if no model is found, try to solve assuming it is `false`. + * + * See also [[https://en.wikipedia.org/wiki/DPLL_algorithm]]. + * + * This implementation uses a `List` to reify the seach stack, thus making + * it run in constant stack space. The stack is composed of pairs of + * `(remaining clauses, variable assignments)`, and depth-first search + * is achieved by using a stack rather than a queue. + * + */ + @annotation.tailrec + private def findTseitinModel0(state: TseitinSearch): TseitinModel = { + state match { + case Nil => NoTseitinModel + case (clauses, assignments) :: rest => + if (clauses.isEmpty) assignments + else if (clauses exists (_.isEmpty)) findTseitinModel0(rest) + else clauses.find(_.size == 1) match { + case Some(unitClause) => + val unitLit = unitClause.head + findTseitinModel0((dropUnit(clauses, unitLit), assignments + unitLit) :: rest) + case _ => + // partition symbols according to whether they appear in positive and/or negative literals + val pos = new mutable.BitSet() + val neg = new mutable.BitSet() + mforeach(clauses)(lit => if (lit.positive) pos += lit.variable else neg += lit.variable) + + // appearing only in either positive/negative positions + val pures = pos ^ neg + + if (pures.nonEmpty) { + val pureVar = pures.head + // turn it back into a literal + // (since equality on literals is in terms of equality + // of the underlying symbol and its positivity, simply construct a new Lit) + val pureLit = Lit(if (neg(pureVar)) -pureVar else pureVar) + // debug.patmat("pure: "+ pureLit +" pures: "+ pures) + val simplified = clauses.filterNot(_.contains(pureLit)) + findTseitinModel0((simplified, assignments + pureLit) :: rest) + } else { + val split = clauses.head.head + // debug.patmat("split: "+ split) + val pos = (clauses :+ clause(split), assignments) + val neg = (clauses :+ clause(-split), assignments) + findTseitinModel0(pos :: neg :: rest) + } + } + } + } + private def projectToModel(model: TseitinModel, symForVar: Map[Int, Sym]): Model = if (model == NoTseitinModel) NoModel else if (model == EmptyTseitinModel) EmptyModel diff --git a/test/files/pos/t10387.flags b/test/files/pos/t10387.flags new file mode 100644 index 000000000000..2ae3d24b9ccc --- /dev/null +++ b/test/files/pos/t10387.flags @@ -0,0 +1 @@ +-Ystop-after:patmat diff --git a/test/files/pos/t10387.scala b/test/files/pos/t10387.scala new file mode 100644 index 000000000000..0268a14c889b --- /dev/null +++ b/test/files/pos/t10387.scala @@ -0,0 +1,269 @@ +object foo { + abstract sealed class num + final case class One() extends num + final case class Bit0(a: num) extends num + final case class Bit1(a: num) extends num + + abstract sealed class char + final case class zero_char() extends char + final case class Char(a: num) extends char + + def integer_of_char(x0: char): BigInt = x0 match { + case Char(Bit1(Bit1(Bit1(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(255) + case Char(Bit0(Bit1(Bit1(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(254) + case Char(Bit1(Bit0(Bit1(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(253) + case Char(Bit0(Bit0(Bit1(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(252) + case Char(Bit1(Bit1(Bit0(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(251) + case Char(Bit0(Bit1(Bit0(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(250) + case 
Char(Bit1(Bit0(Bit0(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(249) + case Char(Bit0(Bit0(Bit0(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(248) + case Char(Bit1(Bit1(Bit1(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(247) + case Char(Bit0(Bit1(Bit1(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(246) + case Char(Bit1(Bit0(Bit1(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(245) + case Char(Bit0(Bit0(Bit1(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(244) + case Char(Bit1(Bit1(Bit0(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(243) + case Char(Bit0(Bit1(Bit0(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(242) + case Char(Bit1(Bit0(Bit0(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(241) + case Char(Bit0(Bit0(Bit0(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(240) + case Char(Bit1(Bit1(Bit1(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(239) + case Char(Bit0(Bit1(Bit1(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(238) + case Char(Bit1(Bit0(Bit1(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(237) + case Char(Bit0(Bit0(Bit1(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(236) + case Char(Bit1(Bit1(Bit0(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(235) + case Char(Bit0(Bit1(Bit0(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(234) + case Char(Bit1(Bit0(Bit0(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(233) + case Char(Bit0(Bit0(Bit0(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(232) + case Char(Bit1(Bit1(Bit1(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(231) + case Char(Bit0(Bit1(Bit1(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(230) + case Char(Bit1(Bit0(Bit1(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(229) + case Char(Bit0(Bit0(Bit1(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(228) + case Char(Bit1(Bit1(Bit0(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(227) + case Char(Bit0(Bit1(Bit0(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(226) + case Char(Bit1(Bit0(Bit0(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(225) + case Char(Bit0(Bit0(Bit0(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(224) + case Char(Bit1(Bit1(Bit1(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(223) + case Char(Bit0(Bit1(Bit1(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(222) + case Char(Bit1(Bit0(Bit1(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(221) + case Char(Bit0(Bit0(Bit1(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(220) + case Char(Bit1(Bit1(Bit0(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(219) + case Char(Bit0(Bit1(Bit0(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(218) + case Char(Bit1(Bit0(Bit0(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(217) + case Char(Bit0(Bit0(Bit0(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(216) + case Char(Bit1(Bit1(Bit1(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(215) + case Char(Bit0(Bit1(Bit1(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(214) + case Char(Bit1(Bit0(Bit1(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(213) + case Char(Bit0(Bit0(Bit1(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(212) + case Char(Bit1(Bit1(Bit0(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(211) + case Char(Bit0(Bit1(Bit0(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(210) + case Char(Bit1(Bit0(Bit0(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(209) + case Char(Bit0(Bit0(Bit0(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(208) + case Char(Bit1(Bit1(Bit1(Bit1(Bit0(Bit0(Bit1(One())))))))) => BigInt(207) + case Char(Bit0(Bit1(Bit1(Bit1(Bit0(Bit0(Bit1(One())))))))) => BigInt(206) + case Char(Bit1(Bit0(Bit1(Bit1(Bit0(Bit0(Bit1(One())))))))) => BigInt(205) + case Char(Bit0(Bit0(Bit1(Bit1(Bit0(Bit0(Bit1(One())))))))) => BigInt(204) + case Char(Bit1(Bit1(Bit0(Bit1(Bit0(Bit0(Bit1(One())))))))) => 
BigInt(203) + case Char(Bit0(Bit1(Bit0(Bit1(Bit0(Bit0(Bit1(One())))))))) => BigInt(202) + case Char(Bit1(Bit0(Bit0(Bit1(Bit0(Bit0(Bit1(One())))))))) => BigInt(201) + case Char(Bit0(Bit0(Bit0(Bit1(Bit0(Bit0(Bit1(One())))))))) => BigInt(200) + case Char(Bit1(Bit1(Bit1(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(199) + case Char(Bit0(Bit1(Bit1(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(198) + case Char(Bit1(Bit0(Bit1(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(197) + case Char(Bit0(Bit0(Bit1(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(196) + case Char(Bit1(Bit1(Bit0(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(195) + case Char(Bit0(Bit1(Bit0(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(194) + case Char(Bit1(Bit0(Bit0(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(193) + case Char(Bit0(Bit0(Bit0(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(192) + case Char(Bit1(Bit1(Bit1(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(191) + case Char(Bit0(Bit1(Bit1(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(190) + case Char(Bit1(Bit0(Bit1(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(189) + case Char(Bit0(Bit0(Bit1(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(188) + case Char(Bit1(Bit1(Bit0(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(187) + case Char(Bit0(Bit1(Bit0(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(186) + case Char(Bit1(Bit0(Bit0(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(185) + case Char(Bit0(Bit0(Bit0(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(184) + case Char(Bit1(Bit1(Bit1(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(183) + case Char(Bit0(Bit1(Bit1(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(182) + case Char(Bit1(Bit0(Bit1(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(181) + case Char(Bit0(Bit0(Bit1(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(180) + case Char(Bit1(Bit1(Bit0(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(179) + case Char(Bit0(Bit1(Bit0(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(178) + case Char(Bit1(Bit0(Bit0(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(177) + case Char(Bit0(Bit0(Bit0(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(176) + case Char(Bit1(Bit1(Bit1(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(175) + case Char(Bit0(Bit1(Bit1(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(174) + case Char(Bit1(Bit0(Bit1(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(173) + case Char(Bit0(Bit0(Bit1(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(172) + case Char(Bit1(Bit1(Bit0(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(171) + case Char(Bit0(Bit1(Bit0(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(170) + case Char(Bit1(Bit0(Bit0(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(169) + case Char(Bit0(Bit0(Bit0(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(168) + case Char(Bit1(Bit1(Bit1(Bit0(Bit0(Bit1(Bit0(One())))))))) => BigInt(167) + case Char(Bit0(Bit1(Bit1(Bit0(Bit0(Bit1(Bit0(One())))))))) => BigInt(166) + case Char(Bit1(Bit0(Bit1(Bit0(Bit0(Bit1(Bit0(One())))))))) => BigInt(165) + case Char(Bit0(Bit0(Bit1(Bit0(Bit0(Bit1(Bit0(One())))))))) => BigInt(164) + case Char(Bit1(Bit1(Bit0(Bit0(Bit0(Bit1(Bit0(One())))))))) => BigInt(163) + case Char(Bit0(Bit1(Bit0(Bit0(Bit0(Bit1(Bit0(One())))))))) => BigInt(162) + case Char(Bit1(Bit0(Bit0(Bit0(Bit0(Bit1(Bit0(One())))))))) => BigInt(161) + case Char(Bit0(Bit0(Bit0(Bit0(Bit0(Bit1(Bit0(One())))))))) => BigInt(160) + case Char(Bit1(Bit1(Bit1(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(159) + case Char(Bit0(Bit1(Bit1(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(158) + case Char(Bit1(Bit0(Bit1(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(157) + case 
Char(Bit0(Bit0(Bit1(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(156) + case Char(Bit1(Bit1(Bit0(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(155) + case Char(Bit0(Bit1(Bit0(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(154) + case Char(Bit1(Bit0(Bit0(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(153) + case Char(Bit0(Bit0(Bit0(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(152) + case Char(Bit1(Bit1(Bit1(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(151) + case Char(Bit0(Bit1(Bit1(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(150) + case Char(Bit1(Bit0(Bit1(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(149) + case Char(Bit0(Bit0(Bit1(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(148) + case Char(Bit1(Bit1(Bit0(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(147) + case Char(Bit0(Bit1(Bit0(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(146) + case Char(Bit1(Bit0(Bit0(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(145) + case Char(Bit0(Bit0(Bit0(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(144) + case Char(Bit1(Bit1(Bit1(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(143) + case Char(Bit0(Bit1(Bit1(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(142) + case Char(Bit1(Bit0(Bit1(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(141) + case Char(Bit0(Bit0(Bit1(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(140) + case Char(Bit1(Bit1(Bit0(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(139) + case Char(Bit0(Bit1(Bit0(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(138) + case Char(Bit1(Bit0(Bit0(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(137) + case Char(Bit0(Bit0(Bit0(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(136) + case Char(Bit1(Bit1(Bit1(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(135) + case Char(Bit0(Bit1(Bit1(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(134) + case Char(Bit1(Bit0(Bit1(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(133) + case Char(Bit0(Bit0(Bit1(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(132) + case Char(Bit1(Bit1(Bit0(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(131) + case Char(Bit0(Bit1(Bit0(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(130) + case Char(Bit1(Bit0(Bit0(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(129) + case Char(Bit0(Bit0(Bit0(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(128) + case Char(Bit1(Bit1(Bit1(Bit1(Bit1(Bit1(One()))))))) => BigInt(127) + case Char(Bit0(Bit1(Bit1(Bit1(Bit1(Bit1(One()))))))) => BigInt(126) + case Char(Bit1(Bit0(Bit1(Bit1(Bit1(Bit1(One()))))))) => BigInt(125) + case Char(Bit0(Bit0(Bit1(Bit1(Bit1(Bit1(One()))))))) => BigInt(124) + case Char(Bit1(Bit1(Bit0(Bit1(Bit1(Bit1(One()))))))) => BigInt(123) + case Char(Bit0(Bit1(Bit0(Bit1(Bit1(Bit1(One()))))))) => BigInt(122) + case Char(Bit1(Bit0(Bit0(Bit1(Bit1(Bit1(One()))))))) => BigInt(121) + case Char(Bit0(Bit0(Bit0(Bit1(Bit1(Bit1(One()))))))) => BigInt(120) + case Char(Bit1(Bit1(Bit1(Bit0(Bit1(Bit1(One()))))))) => BigInt(119) + case Char(Bit0(Bit1(Bit1(Bit0(Bit1(Bit1(One()))))))) => BigInt(118) + case Char(Bit1(Bit0(Bit1(Bit0(Bit1(Bit1(One()))))))) => BigInt(117) + case Char(Bit0(Bit0(Bit1(Bit0(Bit1(Bit1(One()))))))) => BigInt(116) + case Char(Bit1(Bit1(Bit0(Bit0(Bit1(Bit1(One()))))))) => BigInt(115) + case Char(Bit0(Bit1(Bit0(Bit0(Bit1(Bit1(One()))))))) => BigInt(114) + case Char(Bit1(Bit0(Bit0(Bit0(Bit1(Bit1(One()))))))) => BigInt(113) + case Char(Bit0(Bit0(Bit0(Bit0(Bit1(Bit1(One()))))))) => BigInt(112) + case Char(Bit1(Bit1(Bit1(Bit1(Bit0(Bit1(One()))))))) => BigInt(111) + case Char(Bit0(Bit1(Bit1(Bit1(Bit0(Bit1(One()))))))) => BigInt(110) + case Char(Bit1(Bit0(Bit1(Bit1(Bit0(Bit1(One()))))))) => BigInt(109) + case 
Char(Bit0(Bit0(Bit1(Bit1(Bit0(Bit1(One()))))))) => BigInt(108) + case Char(Bit1(Bit1(Bit0(Bit1(Bit0(Bit1(One()))))))) => BigInt(107) + case Char(Bit0(Bit1(Bit0(Bit1(Bit0(Bit1(One()))))))) => BigInt(106) + case Char(Bit1(Bit0(Bit0(Bit1(Bit0(Bit1(One()))))))) => BigInt(105) + case Char(Bit0(Bit0(Bit0(Bit1(Bit0(Bit1(One()))))))) => BigInt(104) + case Char(Bit1(Bit1(Bit1(Bit0(Bit0(Bit1(One()))))))) => BigInt(103) + case Char(Bit0(Bit1(Bit1(Bit0(Bit0(Bit1(One()))))))) => BigInt(102) + case Char(Bit1(Bit0(Bit1(Bit0(Bit0(Bit1(One()))))))) => BigInt(101) + case Char(Bit0(Bit0(Bit1(Bit0(Bit0(Bit1(One()))))))) => BigInt(100) + case Char(Bit1(Bit1(Bit0(Bit0(Bit0(Bit1(One()))))))) => BigInt(99) + case Char(Bit0(Bit1(Bit0(Bit0(Bit0(Bit1(One()))))))) => BigInt(98) + case Char(Bit1(Bit0(Bit0(Bit0(Bit0(Bit1(One()))))))) => BigInt(97) + case Char(Bit0(Bit0(Bit0(Bit0(Bit0(Bit1(One()))))))) => BigInt(96) + case Char(Bit1(Bit1(Bit1(Bit1(Bit1(Bit0(One()))))))) => BigInt(95) + case Char(Bit0(Bit1(Bit1(Bit1(Bit1(Bit0(One()))))))) => BigInt(94) + case Char(Bit1(Bit0(Bit1(Bit1(Bit1(Bit0(One()))))))) => BigInt(93) + case Char(Bit0(Bit0(Bit1(Bit1(Bit1(Bit0(One()))))))) => BigInt(92) + case Char(Bit1(Bit1(Bit0(Bit1(Bit1(Bit0(One()))))))) => BigInt(91) + case Char(Bit0(Bit1(Bit0(Bit1(Bit1(Bit0(One()))))))) => BigInt(90) + case Char(Bit1(Bit0(Bit0(Bit1(Bit1(Bit0(One()))))))) => BigInt(89) + case Char(Bit0(Bit0(Bit0(Bit1(Bit1(Bit0(One()))))))) => BigInt(88) + case Char(Bit1(Bit1(Bit1(Bit0(Bit1(Bit0(One()))))))) => BigInt(87) + case Char(Bit0(Bit1(Bit1(Bit0(Bit1(Bit0(One()))))))) => BigInt(86) + case Char(Bit1(Bit0(Bit1(Bit0(Bit1(Bit0(One()))))))) => BigInt(85) + case Char(Bit0(Bit0(Bit1(Bit0(Bit1(Bit0(One()))))))) => BigInt(84) + case Char(Bit1(Bit1(Bit0(Bit0(Bit1(Bit0(One()))))))) => BigInt(83) + case Char(Bit0(Bit1(Bit0(Bit0(Bit1(Bit0(One()))))))) => BigInt(82) + case Char(Bit1(Bit0(Bit0(Bit0(Bit1(Bit0(One()))))))) => BigInt(81) + case Char(Bit0(Bit0(Bit0(Bit0(Bit1(Bit0(One()))))))) => BigInt(80) + case Char(Bit1(Bit1(Bit1(Bit1(Bit0(Bit0(One()))))))) => BigInt(79) + case Char(Bit0(Bit1(Bit1(Bit1(Bit0(Bit0(One()))))))) => BigInt(78) + case Char(Bit1(Bit0(Bit1(Bit1(Bit0(Bit0(One()))))))) => BigInt(77) + case Char(Bit0(Bit0(Bit1(Bit1(Bit0(Bit0(One()))))))) => BigInt(76) + case Char(Bit1(Bit1(Bit0(Bit1(Bit0(Bit0(One()))))))) => BigInt(75) + case Char(Bit0(Bit1(Bit0(Bit1(Bit0(Bit0(One()))))))) => BigInt(74) + case Char(Bit1(Bit0(Bit0(Bit1(Bit0(Bit0(One()))))))) => BigInt(73) + case Char(Bit0(Bit0(Bit0(Bit1(Bit0(Bit0(One()))))))) => BigInt(72) + case Char(Bit1(Bit1(Bit1(Bit0(Bit0(Bit0(One()))))))) => BigInt(71) + case Char(Bit0(Bit1(Bit1(Bit0(Bit0(Bit0(One()))))))) => BigInt(70) + case Char(Bit1(Bit0(Bit1(Bit0(Bit0(Bit0(One()))))))) => BigInt(69) + case Char(Bit0(Bit0(Bit1(Bit0(Bit0(Bit0(One()))))))) => BigInt(68) + case Char(Bit1(Bit1(Bit0(Bit0(Bit0(Bit0(One()))))))) => BigInt(67) + case Char(Bit0(Bit1(Bit0(Bit0(Bit0(Bit0(One()))))))) => BigInt(66) + case Char(Bit1(Bit0(Bit0(Bit0(Bit0(Bit0(One()))))))) => BigInt(65) + case Char(Bit0(Bit0(Bit0(Bit0(Bit0(Bit0(One()))))))) => BigInt(64) + case Char(Bit1(Bit1(Bit1(Bit1(Bit1(One())))))) => BigInt(63) + case Char(Bit0(Bit1(Bit1(Bit1(Bit1(One())))))) => BigInt(62) + case Char(Bit1(Bit0(Bit1(Bit1(Bit1(One())))))) => BigInt(61) + case Char(Bit0(Bit0(Bit1(Bit1(Bit1(One())))))) => BigInt(60) + case Char(Bit1(Bit1(Bit0(Bit1(Bit1(One())))))) => BigInt(59) + case Char(Bit0(Bit1(Bit0(Bit1(Bit1(One())))))) => BigInt(58) + case Char(Bit1(Bit0(Bit0(Bit1(Bit1(One())))))) => BigInt(57) + case 
Char(Bit0(Bit0(Bit0(Bit1(Bit1(One())))))) => BigInt(56) + case Char(Bit1(Bit1(Bit1(Bit0(Bit1(One())))))) => BigInt(55) + case Char(Bit0(Bit1(Bit1(Bit0(Bit1(One())))))) => BigInt(54) + case Char(Bit1(Bit0(Bit1(Bit0(Bit1(One())))))) => BigInt(53) + case Char(Bit0(Bit0(Bit1(Bit0(Bit1(One())))))) => BigInt(52) + case Char(Bit1(Bit1(Bit0(Bit0(Bit1(One())))))) => BigInt(51) + case Char(Bit0(Bit1(Bit0(Bit0(Bit1(One())))))) => BigInt(50) + case Char(Bit1(Bit0(Bit0(Bit0(Bit1(One())))))) => BigInt(49) + case Char(Bit0(Bit0(Bit0(Bit0(Bit1(One())))))) => BigInt(48) + case Char(Bit1(Bit1(Bit1(Bit1(Bit0(One())))))) => BigInt(47) + case Char(Bit0(Bit1(Bit1(Bit1(Bit0(One())))))) => BigInt(46) + case Char(Bit1(Bit0(Bit1(Bit1(Bit0(One())))))) => BigInt(45) + case Char(Bit0(Bit0(Bit1(Bit1(Bit0(One())))))) => BigInt(44) + case Char(Bit1(Bit1(Bit0(Bit1(Bit0(One())))))) => BigInt(43) + case Char(Bit0(Bit1(Bit0(Bit1(Bit0(One())))))) => BigInt(42) + case Char(Bit1(Bit0(Bit0(Bit1(Bit0(One())))))) => BigInt(41) + case Char(Bit0(Bit0(Bit0(Bit1(Bit0(One())))))) => BigInt(40) + case Char(Bit1(Bit1(Bit1(Bit0(Bit0(One())))))) => BigInt(39) + case Char(Bit0(Bit1(Bit1(Bit0(Bit0(One())))))) => BigInt(38) + case Char(Bit1(Bit0(Bit1(Bit0(Bit0(One())))))) => BigInt(37) + case Char(Bit0(Bit0(Bit1(Bit0(Bit0(One())))))) => BigInt(36) + case Char(Bit1(Bit1(Bit0(Bit0(Bit0(One())))))) => BigInt(35) + case Char(Bit0(Bit1(Bit0(Bit0(Bit0(One())))))) => BigInt(34) + case Char(Bit1(Bit0(Bit0(Bit0(Bit0(One())))))) => BigInt(33) + case Char(Bit0(Bit0(Bit0(Bit0(Bit0(One())))))) => BigInt(32) + case Char(Bit1(Bit1(Bit1(Bit1(One()))))) => BigInt(31) + case Char(Bit0(Bit1(Bit1(Bit1(One()))))) => BigInt(30) + case Char(Bit1(Bit0(Bit1(Bit1(One()))))) => BigInt(29) + case Char(Bit0(Bit0(Bit1(Bit1(One()))))) => BigInt(28) + case Char(Bit1(Bit1(Bit0(Bit1(One()))))) => BigInt(27) + case Char(Bit0(Bit1(Bit0(Bit1(One()))))) => BigInt(26) + case Char(Bit1(Bit0(Bit0(Bit1(One()))))) => BigInt(25) + case Char(Bit0(Bit0(Bit0(Bit1(One()))))) => BigInt(24) + case Char(Bit1(Bit1(Bit1(Bit0(One()))))) => BigInt(23) + case Char(Bit0(Bit1(Bit1(Bit0(One()))))) => BigInt(22) + case Char(Bit1(Bit0(Bit1(Bit0(One()))))) => BigInt(21) + case Char(Bit0(Bit0(Bit1(Bit0(One()))))) => BigInt(20) + case Char(Bit1(Bit1(Bit0(Bit0(One()))))) => BigInt(19) + case Char(Bit0(Bit1(Bit0(Bit0(One()))))) => BigInt(18) + case Char(Bit1(Bit0(Bit0(Bit0(One()))))) => BigInt(17) + case Char(Bit0(Bit0(Bit0(Bit0(One()))))) => BigInt(16) + case Char(Bit1(Bit1(Bit1(One())))) => BigInt(15) + case Char(Bit0(Bit1(Bit1(One())))) => BigInt(14) + case Char(Bit1(Bit0(Bit1(One())))) => BigInt(13) + case Char(Bit0(Bit0(Bit1(One())))) => BigInt(12) + case Char(Bit1(Bit1(Bit0(One())))) => BigInt(11) + case Char(Bit0(Bit1(Bit0(One())))) => BigInt(10) + case Char(Bit1(Bit0(Bit0(One())))) => BigInt(9) + case Char(Bit0(Bit0(Bit0(One())))) => BigInt(8) + case Char(Bit1(Bit1(One()))) => BigInt(7) + case Char(Bit0(Bit1(One()))) => BigInt(6) + case Char(Bit1(Bit0(One()))) => BigInt(5) + case Char(Bit0(Bit0(One()))) => BigInt(4) + case Char(Bit1(One())) => BigInt(3) + case Char(Bit0(One())) => BigInt(2) + case Char(One()) => BigInt(1) + case zero_char() => BigInt(0) + } +} From abef11199b9e140e7241fa17b3f5e08ea117c453 Mon Sep 17 00:00:00 2001 From: mkeskells Date: Fri, 22 Dec 2017 23:31:02 +0000 Subject: [PATCH 1172/2793] allow per-run init to be used with java collections add tests for per-run init --- .../jvm/PostProcessorFrontendAccess.scala | 11 +- .../scala/reflect/internal/SymbolTable.scala | 20 ++- 
.../reflect/internal/util/JavaClearable.scala | 38 ++++++ .../nsc/backend/jvm/PerRunInitTest.scala | 127 ++++++++++++++++++ 4 files changed, 186 insertions(+), 10 deletions(-) create mode 100644 src/reflect/scala/reflect/internal/util/JavaClearable.scala create mode 100644 test/junit/scala/tools/nsc/backend/jvm/PerRunInitTest.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala index 4266988ff9a7..077c18630b36 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala @@ -2,10 +2,10 @@ package scala.tools.nsc package backend.jvm import scala.collection.generic.Clearable -import scala.reflect.internal.util.Position +import scala.reflect.internal.util.{JavaClearable, Position} import scala.reflect.io.AbstractFile import scala.tools.nsc.backend.jvm.BTypes.InternalName -import java.util.{Map => JMap, Collection => JCollection} +import java.util.{Collection => JCollection, Map => JMap} /** * Functionality needed in the post-processor whose implementation depends on the compiler @@ -170,16 +170,13 @@ object PostProcessorFrontendAccess { def recordPerRunCache[T <: Clearable](cache: T): T = frontendSynch(perRunCaches.recordCache(cache)) def recordPerRunJavaMapCache[T <: JMap[_,_]](cache: T): T = { - recordPerRunJavaCache(cache.keySet()) + recordPerRunCache(JavaClearable.forMap(cache)) cache } def recordPerRunJavaCache[T <: JCollection[_]](cache: T): T = { - recordPerRunCache(new JavaClearable(cache)) + recordPerRunCache(JavaClearable.forCollection(cache)) cache } - private class JavaClearable(data: JCollection[_]) extends Clearable { - override def clear(): Unit = data.clear - } } } \ No newline at end of file diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 3e78a60a8ce8..01df81a59498 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -365,9 +365,15 @@ abstract class SymbolTable extends macros.Universe // letting us know when a cache is really out of commission. import java.lang.ref.WeakReference private var caches = List[WeakReference[Clearable]]() + private var javaCaches = List[JavaClearable[_]]() def recordCache[T <: Clearable](cache: T): T = { - caches ::= new WeakReference(cache) + cache match { + case jc: JavaClearable[_] => + javaCaches ::= jc + case _ => + caches ::= new WeakReference(cache) + } cache } @@ -376,13 +382,21 @@ abstract class SymbolTable extends macros.Universe * compiler and then inspect the state of a cache. 
*/ def unrecordCache[T <: Clearable](cache: T): Unit = { - caches = caches.filterNot(_.get eq cache) + cache match { + case jc: JavaClearable[_] => + javaCaches = javaCaches.filterNot(cache == _) + case _ => + caches = caches.filterNot(_.get eq cache) + } } def clearAll() = { - debuglog("Clearing " + caches.size + " caches.") + debuglog("Clearing " + (caches.size + javaCaches.size) + " caches.") caches foreach (ref => Option(ref.get).foreach(_.clear)) caches = caches.filterNot(_.get == null) + + javaCaches foreach (_.clear) + javaCaches = javaCaches.filter(_.isValid) } def newWeakMap[K, V]() = recordCache(mutable.WeakHashMap[K, V]()) diff --git a/src/reflect/scala/reflect/internal/util/JavaClearable.scala b/src/reflect/scala/reflect/internal/util/JavaClearable.scala new file mode 100644 index 000000000000..10de913c8f24 --- /dev/null +++ b/src/reflect/scala/reflect/internal/util/JavaClearable.scala @@ -0,0 +1,38 @@ +package scala.reflect.internal.util + +import java.lang.ref.WeakReference +import java.util.{Collection => JCollection, Map => JMap} + +import scala.collection.generic.Clearable + +object JavaClearable { + def forCollection[T <: JCollection[_]](data: T): JavaClearable[T] = new JavaClearableCollection(new WeakReference(data)) + def forMap[T <: JMap[_,_]](data: T): JavaClearable[T] = new JavaClearableMap(new WeakReference(data)) + + private final class JavaClearableMap[T <: JMap[_,_]](dataRef:WeakReference[T]) extends JavaClearable(dataRef) { + override def clear: Unit = Option(dataRef.get) foreach (_.clear()) + } + private final class JavaClearableCollection[T <: JCollection[_]](dataRef:WeakReference[T]) extends JavaClearable(dataRef) { + override def clear: Unit = Option(dataRef.get) foreach (_.clear()) + } +} +sealed abstract class JavaClearable[T <: AnyRef] protected (protected val dataRef: WeakReference[T]) extends Clearable { + + //just maintained hashCode to be consistent with equals + override val hashCode = System.identityHashCode(dataRef.get()) + override def equals(obj: scala.Any) = obj match { + case that: JavaClearable[_] => { + if (this eq that) true + else { + val thisData = this.dataRef.get + val thatData = that.dataRef.get + (thisData eq thatData) && (thisData ne null) + } + } + case _ => false + } + + def clear : Unit + + def isValid = dataRef.get() ne null +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/PerRunInitTest.scala b/test/junit/scala/tools/nsc/backend/jvm/PerRunInitTest.scala new file mode 100644 index 000000000000..55ca5d57d136 --- /dev/null +++ b/test/junit/scala/tools/nsc/backend/jvm/PerRunInitTest.scala @@ -0,0 +1,127 @@ +package scala.tools.nsc.backend.jvm +import java.util + +import org.junit._ +import org.junit.Assert._ + +import scala.collection.mutable +import scala.ref.WeakReference +import scala.reflect.internal.util.JavaClearable +import scala.tools.nsc.{Global, Settings} +import scala.tools.nsc.backend.jvm.PostProcessorFrontendAccess.PostProcessorFrontendAccessImpl +import scala.tools.nsc.reporters.StoreReporter + +class PerRunInitTestMap extends PerRunInitTest { + type Data = mutable.Map[String, String] + override def newData(): Data = underTest.recordPerRunCache(mutable.Map.empty) + override def dontClear(data: Data): Unit = underTest.global.perRunCaches.unrecordCache(data) + + override def add(id: Int, data: Data): Unit = data.put(s"key $id", s"value $id") + + override def sizeOf(data: Data): Int = data.size + +} +class PerRunInitTestSet extends PerRunInitTest { + type Data = mutable.Set[String] + override def newData(): Data = 
underTest.recordPerRunCache(mutable.Set.empty) + override def dontClear(data: Data): Unit = underTest.global.perRunCaches.unrecordCache(data) + + override def add(id: Int, data: Data): Unit = data += s"value $id" + + override def sizeOf(data: Data): Int = data.size +} +class PerRunInitTestJMap extends PerRunInitTest { + type Data = java.util.Map[String, String] + override def newData(): Data = underTest.recordPerRunJavaMapCache(new util.HashMap[String,String]()) + override def dontClear(data: Data): Unit = underTest.global.perRunCaches.unrecordCache(JavaClearable.forMap(data)) + + override def add(id: Int, data: Data): Unit = data.put(s"key $id", s"value $id") + + override def sizeOf(data: Data): Int = data.size +} +class PerRunInitTestJSet extends PerRunInitTest { + type Data = java.util.Set[String] + override def newData(): Data = underTest.recordPerRunJavaCache(new util.HashSet[String]()) + override def dontClear(data: Data): Unit = underTest.global.perRunCaches.unrecordCache(JavaClearable.forCollection(data)) + + override def add(id: Int, data: Data): Unit = data.add(s"value $id") + + override def sizeOf(data: Data): Int = data.size +} +class PerRunInitTestJCMap extends PerRunInitTestJMap { + override def newData(): Data = underTest.recordPerRunJavaMapCache(new java.util.concurrent.ConcurrentHashMap[String,String]()) +} +abstract class PerRunInitTest { + type Data >: Null <: AnyRef + var underTest : PostProcessorFrontendAccessImpl = _ + @Before def init() = { + def global = { + def showError(s: String) = throw new Exception(s) + + val settings = new Settings(showError) + + new Global(settings, new StoreReporter) + } + underTest = new PostProcessorFrontendAccessImpl(global) + } + @After def clear() = { + underTest = null + } + + def newData(): Data + def dontClear(data:Data): Unit + + def add(id: Int, data: Data): Unit + + def sizeOf(data: Data): Int + + def clearCaches() = underTest.global.perRunCaches.clearAll() + + def doGc() = { + System.gc() + System.runFinalization() + } + + @Test + def clearedWhenExpired: Unit = { + val data = newData() + + add(1, data) + + assertEquals(s"$data", 1, sizeOf(data)) + doGc() + assertEquals(s"$data", 1, sizeOf(data)) + + clearCaches() + assertEquals(s"$data", 0, sizeOf(data)) + } + + @Test + def clearedWeakOnly: Unit = { + var data = newData() + val ref = WeakReference(data) + + assertTrue(ref.get.isDefined) + data = null + doGc() + assertFalse(ref.get.isDefined) + //to check that dereference doesn't cause a problem + clearCaches() + } + + @Test + def notClearedIfRequested: Unit = { + val data = newData() + dontClear(data) + + add(1, data) + assertEquals(s"$data", 1, sizeOf(data)) + doGc() + assertEquals(s"$data", 1, sizeOf(data)) + clearCaches() + assertEquals(s"$data", 1, sizeOf(data)) + } + + + +} From ccf34454df1d39b786446d12dcd99eb79cb9d2e5 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 22 Dec 2017 15:15:28 -0800 Subject: [PATCH 1173/2793] Patternly apply reduces apparent parens When typing an apply in pattern mode, if a method type results, use the method result directly. This adjustment was previously applied in typedCase, but not in typedBind. Now it should happen in any pattern context. This occurs when a case class has more than two parameter lists. 
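Concretely (this mirrors the `t10667.scala` test added below): for a case class with more than two parameter lists, the pattern `C(_)` is initially typed as `C(_)()`, a method type like `()C`, and must be reduced to its final result type `C` in every pattern position, including a bind outside of a `case`:

```
case class C(i: Int)(j: Int)(s: String)

val v = C(42)(17)("hello")
val c @ C(_) = v   // a Bind pattern outside a match; previously only `case c @ C(_) =>` was handled
```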
--- .../scala/tools/nsc/typechecker/Typers.scala | 13 +++++++------ test/files/pos/t10667.scala | 17 +++++++++++++++++ 2 files changed, 24 insertions(+), 6 deletions(-) create mode 100644 test/files/pos/t10667.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index d6d8e1e9bb45..92ebc167dd8c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2497,12 +2497,6 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // This adjustment is awfully specific to continuations, but AFAICS the // whole AnnotationChecker framework is. val pat1 = typedPattern(cdef.pat, pattpe.withoutAnnotations) - // When case classes have more than two parameter lists, the pattern ends - // up typed as a method. We only pattern match on the first parameter - // list, so substitute the final result type of the method, i.e. the type - // of the case class. - if (pat1.tpe.paramSectionCount > 0) - pat1 modifyType (_.finalResultType) for (bind @ Bind(name, _) <- cdef.pat) { val sym = bind.symbol @@ -4758,6 +4752,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper normalTypedApply(tree, fun, args) match { case ArrayInstantiation(tree1) => if (tree1.isErrorTyped) tree1 else typed(tree1, mode, pt) case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => TooManyArgumentListsForConstructor(tree) //scala/bug#5696 + case tree1 if mode.inPatternMode && tree1.tpe.paramSectionCount > 0 => + // For a case class C with more than two parameter lists, + // C(_) is typed as C(_)() which is a method type like ()C. + // In a pattern, just use the final result type, C in this case. + // The enclosing context may be case c @ C(_) => or val c @ C(_) = v. + tree1 modifyType (_.finalResultType) + tree1 case tree1 => tree1 } } diff --git a/test/files/pos/t10667.scala b/test/files/pos/t10667.scala new file mode 100644 index 000000000000..408a9d514f36 --- /dev/null +++ b/test/files/pos/t10667.scala @@ -0,0 +1,17 @@ + +case class C(i: Int)(j: Int)(s: String) +case class D(i: Int)(j: Int)(implicit s: String) + +trait T { + val v = C(42)(17)("hello") + def f: C = v match { + case c @ C(_) => c + case C(_) if true => v + } + + val c @ C(_) = v + + def g = D(42)(17)("hello") match { + case d @ D(_) => "OK" + } +} From 16379739e9efa2e48eac09d046e591355fa9eb2d Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Sat, 30 Dec 2017 20:48:57 -0500 Subject: [PATCH 1174/2793] Enable issue navigation for IntelliJ. - Cmd-click (Ctrl-click) on scala/bug#1234 goes to that issue page on GitHub. - SD-1234 ticket references changed to scala/scala-dev#1234. 9acab45aee normalized all bug references to this form. 
--- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 2 +- .../tools/nsc/backend/jvm/BCodeSkelBuilder.scala | 11 ++++++----- .../tools/nsc/backend/jvm/BCodeSyncAndTry.scala | 2 +- .../tools/nsc/backend/jvm/opt/CallGraph.scala | 2 +- .../tools/nsc/typechecker/SuperAccessors.scala | 15 ++++++++------- src/intellij/scala.ipr.SAMPLE | 10 ++++++++++ test/junit/scala/lang/traits/BytecodeTest.scala | 2 +- 7 files changed, 28 insertions(+), 16 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 0d0aedc3c58d..0ba7dad971d3 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -351,7 +351,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { case mt @ MethodType(params, res) => copyMethodType(mt, selfParamSym :: params, res) }) val selfParam = ValDef(selfParamSym) - val rhs = orig.rhs.substituteThis(newSym.owner, gen.mkAttributedIdent(selfParamSym)) // SD-186 intentionally leaving Ident($this) is unpositioned + val rhs = orig.rhs.substituteThis(newSym.owner, gen.mkAttributedIdent(selfParamSym)) // scala/scala-dev#186 intentionally leaving Ident($this) is unpositioned .substituteSymbols(origParams, newSym.info.params.drop(1)).changeOwner(origSym -> newSym) treeCopy.DefDef(orig, orig.mods, orig.name, orig.tparams, (selfParam :: orig.vparamss.head) :: Nil, orig.tpt, rhs).setSymbol(newSym) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index 5e033f6c17a6..65c1dd46f360 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -179,11 +179,12 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { */ private def addModuleInstanceField() { // TODO confirm whether we really don't want ACC_SYNTHETIC nor ACC_DEPRECATED - // SD-194 This can't be FINAL on JVM 1.9+ because we assign it from within the - // instance constructor, not from directly. Assignment from , - // after the constructor has completely finished, seems like the principled - // thing to do, but it would change behaviour when "benign" cyclic references - // between modules exist. + // scala/scala-dev#194: + // This can't be FINAL on JVM 1.9+ because we assign it from within the + // instance constructor, not from directly. Assignment from , + // after the constructor has completely finished, seems like the principled + // thing to do, but it would change behaviour when "benign" cyclic references + // between modules exist. val mods = GenBCode.PublicStatic val fv = cnode.visitField(mods, diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala index eace87eb9e82..65129d5d9647 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala @@ -74,7 +74,7 @@ abstract class BCodeSyncAndTry extends BCodeBodyBuilder { * Reached upon abrupt termination of (2). * Protected by whatever protects the whole synchronized expression. * null => "any" exception in bytecode, like we emit for finally. - * Important not to use j/l/Throwable which dooms the method to a life of interpretation! (SD-233) + * Important not to use j/l/Throwable which dooms the method to a life of interpretation! 
(scala/scala-dev#233) * ------ */ protect(startProtected, endProtected, currProgramPoint(), null) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala index f8bb26b57339..a19495fcf1a5 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala @@ -313,7 +313,7 @@ abstract class CallGraph { // TODO: type analysis can render more calls statically resolved. Example: // new A.f // can be inlined, the receiver type is known to be exactly A. val isStaticallyResolved: Boolean = { - isNonVirtualCall(call) || // SD-86: super calls (invokespecial) can be inlined -- TODO: check if that's still needed, and if it's correct: scala-dev#143 + isNonVirtualCall(call) || // scala/scala-dev#86: super calls (invokespecial) can be inlined -- TODO: check if that's still needed, and if it's correct: scala-dev#143 methodInlineInfo.effectivelyFinal || receiverType.info.orThrow.inlineInfo.isEffectivelyFinal // (1) } diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index 5667c4a76192..edd95007c604 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -152,13 +152,14 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT } } - // SD-143: a call super[T].m that resolves to A.m cannot be translated to correct bytecode if - // - A is a class (not a trait / interface), but not the direct superclass. Invokespecial - // would select an overriding method in the direct superclass, rather than A.m. - // We allow this if there are statically no intervening overrides. - // https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.invokespecial - // - A is a java-defined interface and not listed as direct parent of the class. In this - // case, `invokespecial A.m` would be invalid. + // scala/scala-dev#143: + // a call super[T].m that resolves to A.m cannot be translated to correct bytecode if + // - A is a class (not a trait / interface), but not the direct superclass. Invokespecial + // would select an overriding method in the direct superclass, rather than A.m. + // We allow this if there are statically no intervening overrides. + // https://docs.oracle.com/javase/specs/jvms/se8/html/jvms-6.html#jvms-6.5.invokespecial + // - A is a java-defined interface and not listed as direct parent of the class. In this + // case, `invokespecial A.m` would be invalid. 
def hasClassOverride(member: Symbol, subclass: Symbol): Boolean = { if (subclass == ObjectClass || subclass == member.owner) false else if (member.overridingSymbol(subclass) != NoSymbol) true diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index 1f631202ea9c..6384b4863f94 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -22,6 +22,16 @@ + + + diff --git a/test/junit/scala/lang/traits/BytecodeTest.scala b/test/junit/scala/lang/traits/BytecodeTest.scala index 5eb2dd357bb1..5f0c4b1854cf 100644 --- a/test/junit/scala/lang/traits/BytecodeTest.scala +++ b/test/junit/scala/lang/traits/BytecodeTest.scala @@ -321,7 +321,7 @@ class BytecodeTest extends BytecodeTesting { val jCode = List("interface A { default int m() { return 1; } }" -> "A.java") - // used to crash in the backend (SD-210) under `-Xmixin-force-forwarders:true` + // used to crash in the backend (scala/scala-dev#210) under `-Xmixin-force-forwarders:true` val code1 = """trait B1 extends A // called "B1" not "B" due to scala-dev#214 |class C extends B1 From 38813bf5a3ac98ac89dc62104a4291793ed68a30 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 31 Dec 2017 11:38:27 -0800 Subject: [PATCH 1175/2793] Allow Nothing sequence argument There's nothing wrong with `List(??? : _*)` pun intended. --- .../scala/tools/nsc/typechecker/PatternTypers.scala | 10 ++++++---- test/files/pos/t8343.scala | 4 ++++ 2 files changed, 10 insertions(+), 4 deletions(-) create mode 100644 test/files/pos/t8343.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index 3ff22a4117d8..100480a6d29f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -133,8 +133,9 @@ trait PatternTypers { val Typed(expr, tpt) = tree val exprTyped = typed(expr, mode) val baseClass = exprTyped.tpe.typeSymbol match { - case ArrayClass => ArrayClass - case _ => SeqClass + case ArrayClass => ArrayClass + case NothingClass => NothingClass + case _ => SeqClass } val starType = baseClass match { case ArrayClass if isPrimitiveValueType(pt) || !isFullyDefined(pt) => arrayType(pt) @@ -143,8 +144,9 @@ trait PatternTypers { } val exprAdapted = adapt(exprTyped, mode, starType) exprAdapted.tpe baseType baseClass match { - case TypeRef(_, _, elemtp :: Nil) => treeCopy.Typed(tree, exprAdapted, tpt setType elemtp) setType elemtp - case _ => setError(tree) + case TypeRef(_, _, elemtp :: Nil) => treeCopy.Typed(tree, exprAdapted, tpt setType elemtp) setType elemtp + case _ if baseClass eq NothingClass => exprAdapted + case _ => setError(tree) } } diff --git a/test/files/pos/t8343.scala b/test/files/pos/t8343.scala new file mode 100644 index 000000000000..f27d15ff5a69 --- /dev/null +++ b/test/files/pos/t8343.scala @@ -0,0 +1,4 @@ + +trait T { + def f = List[Int](??? 
: _*) +} From 261dde55d1b3b4ac87bfa948cd6960feba698bec Mon Sep 17 00:00:00 2001 From: mkeskells Date: Tue, 2 Jan 2018 19:23:14 +0000 Subject: [PATCH 1176/2793] remove ClassBType hashCode and equals, and adjust test cases --- src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala | 9 --------- .../nsc/backend/jvm/opt/BTypesFromClassfileTest.scala | 2 +- 2 files changed, 1 insertion(+), 10 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index f436920fbcb2..d5f765907be4 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -783,15 +783,6 @@ abstract class BTypes { } while (fcs == null) fcs } - - // equallity and hashcode is based on internalName - override def equals(obj: scala.Any): Boolean = obj match { - case o:ClassBType => internalName == o.internalName - case _ => false - } - - // equallity and hashcode is based on internalName - override def hashCode(): Int = internalName.hashCode } object ClassBType { diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala index c93d7792dc1e..0f2acc3328e9 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/BTypesFromClassfileTest.scala @@ -36,7 +36,7 @@ class BTypesFromClassfileTest extends BytecodeTesting { def sameBType(fromSym: ClassBType, fromClassfile: ClassBType, checked: Set[InternalName] = Set.empty): Set[InternalName] = { if (checked(fromSym.internalName)) checked else { - assert(fromSym == fromClassfile, s"$fromSym != $fromClassfile") + assert(fromSym.internalName == fromClassfile.internalName, s"${fromSym.internalName} != ${fromClassfile.internalName}") sameInfo(fromSym.info.get, fromClassfile.info.get, checked + fromSym.internalName) } } From 7fd1ceab0152bf46b87d98eb617fc24dba47d16a Mon Sep 17 00:00:00 2001 From: howtonotwin Date: Wed, 3 Jan 2018 22:15:28 -0500 Subject: [PATCH 1177/2793] Make updateDynamic work with context bounds Avoid inferring any type parameters to updateDynamic too early, and allow them to be inferred from the RHS. 
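To illustrate, a minimal sketch of the kind of assignment this enables (the names `Show`, `Dyn` and `foo` are only illustrative; the test added below exercises the same pattern):

```
import scala.language.dynamics

trait Show[T]
class Value
object Value { implicit object show extends Show[Value] }

class Dyn extends Dynamic {
  // The context bound means the type parameter T can only be determined
  // from the right-hand side of the assignment.
  def updateDynamic[T: Show](field: String)(value: T): Unit =
    println(s"$field := $value")
}

object Demo extends App {
  // Rewritten to `new Dyn().updateDynamic("foo")(new Value)`; T is now
  // inferred from the RHS instead of being fixed too early, so this
  // assignment (which previously failed to typecheck) compiles.
  new Dyn().foo = new Value
}
```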
Fixes scala/bug#10406 --- .../scala/tools/nsc/typechecker/Typers.scala | 7 +++---- test/files/pos/t10406.scala | 13 +++++++++++++ 2 files changed, 16 insertions(+), 4 deletions(-) create mode 100644 test/files/pos/t10406.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index d6d8e1e9bb45..1f9b9277b929 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4341,7 +4341,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedAssign(lhs: Tree, rhs: Tree): Tree = { // see scala/bug#7617 for an explanation of why macro expansion is suppressed - def typedLhs(lhs: Tree) = typed(lhs, EXPRmode | LHSmode) + def typedLhs(lhs: Tree) = typed(lhs, EXPRmode | LHSmode | POLYmode) val lhs1 = unsuppressMacroExpansion(typedLhs(suppressMacroExpansion(lhs))) val varsym = lhs1.symbol @@ -4371,9 +4371,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitTpe } else if(dyna.isDynamicallyUpdatable(lhs1)) { - val rhs1 = typedByValueExpr(rhs) - val t = atPos(lhs1.pos.withEnd(rhs1.pos.end)) { - Apply(lhs1, List(rhs1)) + val t = atPos(lhs1.pos.withEnd(rhs.pos.end)) { + Apply(lhs1, List(rhs)) } dyna.wrapErrors(t, _.typed1(t, mode, pt)) } diff --git a/test/files/pos/t10406.scala b/test/files/pos/t10406.scala new file mode 100644 index 000000000000..af1e3232bc97 --- /dev/null +++ b/test/files/pos/t10406.scala @@ -0,0 +1,13 @@ +import language.dynamics + +trait Typeclass[T] +class TCInstance +object TCInstance { + implicit object instance extends Typeclass[TCInstance] +} +class Dyn extends Dynamic { + def updateDynamic[T: Typeclass](f: String)(t: T) = println(s"$f: $t") +} +object Dyn { + new Dyn().foo = new TCInstance +} From 930df6d35aaa6fa29f04b056c5500aad3526e27a Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 4 Jan 2018 14:52:17 -0800 Subject: [PATCH 1178/2793] Deprecate subtype notation for trait extends --- .../scala/tools/nsc/ast/parser/Parsers.scala | 15 +++++++++------ test/files/neg/t10678.check | 11 +++++++++++ test/files/neg/t10678.flags | 1 + test/files/neg/t10678.scala | 10 ++++++++++ 4 files changed, 31 insertions(+), 6 deletions(-) create mode 100644 test/files/neg/t10678.check create mode 100644 test/files/neg/t10678.flags create mode 100644 test/files/neg/t10678.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 0084c21e684d..51714826b5c2 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2886,9 +2886,8 @@ self => val name = ident() val tstart = in.offset atPos(start, if (name == nme.ERROR) start else nameOffset) { - val mods1 = if (in.token == SUBTYPE) mods | Flags.DEFERRED else mods - val template = templateOpt(mods1, name, NoMods, Nil, tstart) - ModuleDef(mods1, name.toTermName, template) + val template = templateOpt(mods, name, NoMods, Nil, tstart) + ModuleDef(mods, name.toTermName, template) } } @@ -2990,13 +2989,17 @@ self => /** {{{ * ClassTemplateOpt ::= `extends' ClassTemplate | [[`extends'] TemplateBody] - * TraitTemplateOpt ::= TraitExtends TraitTemplate | [[`extends'] TemplateBody] | `<:' TemplateBody - * TraitExtends ::= `extends' | `<:' + * TraitTemplateOpt ::= TraitExtends TraitTemplate | [[TraitExtends] TemplateBody] + * TraitExtends ::= 
`extends' | `<:' (deprecated) * }}} */ def templateOpt(mods: Modifiers, name: Name, constrMods: Modifiers, vparamss: List[List[ValDef]], tstart: Offset): Template = { + def deprecatedUsage(): Boolean = { + deprecationWarning(in.offset, "Using `<:` for `extends` is deprecated", since = "2.12.5") + true + } val (parents, self, body) = ( - if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait) { + if (in.token == EXTENDS || in.token == SUBTYPE && mods.isTrait && deprecatedUsage()) { in.nextToken() template() } diff --git a/test/files/neg/t10678.check b/test/files/neg/t10678.check new file mode 100644 index 000000000000..a5f3f473db35 --- /dev/null +++ b/test/files/neg/t10678.check @@ -0,0 +1,11 @@ +t10678.scala:4: warning: Using `<:` for `extends` is deprecated +trait U <: T + ^ +t10678.scala:6: error: ';' expected but '<:' found. +class C <: T { + ^ +t10678.scala:9: error: ';' expected but '<:' found. +object O <: T { + ^ +one warning found +two errors found diff --git a/test/files/neg/t10678.flags b/test/files/neg/t10678.flags new file mode 100644 index 000000000000..c6bfaf1f64a4 --- /dev/null +++ b/test/files/neg/t10678.flags @@ -0,0 +1 @@ +-deprecation -Xfatal-warnings diff --git a/test/files/neg/t10678.scala b/test/files/neg/t10678.scala new file mode 100644 index 000000000000..3c5ede025536 --- /dev/null +++ b/test/files/neg/t10678.scala @@ -0,0 +1,10 @@ + +trait T + +trait U <: T + +class C <: T { +} + +object O <: T { +} From 6abf29a565f1dda5ce7abf1cd6d3d2c3928cf515 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Fri, 13 Oct 2017 15:04:12 -0400 Subject: [PATCH 1179/2793] Don't assume that class literals of value classes wrap TypeRefs. When erasure comes around to munge on a TypeRef to a derived value class, it swaps it out for an ErasedValueType that will get posterased to the underlying type. This behavior is wholly inappropriate for class literals, which one would prefer remain referencing the derived value class itself, and so such TypeRefs are rightfully exempted from the specialScalaErasure. However, TypeRefs aren't the only way to reference derived value classes: the type may be an ExistentialType or some other wrapper of a TypeRef to a derived value class. The easy way to check is to call typeSymbol.isDerivedValueClass, and use the javaErasure (which erases derived value class TypeRefs the same as normal TypeRefs). 
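To illustrate the symptom, a minimal sketch in the spirit of the tests added below (the class name `Foo` is only illustrative):

```
final class Foo[A](val a: A) extends AnyVal

object Demo extends App {
  // A plain TypeRef to the value class was already exempted from
  // specialScalaErasure, so this class literal refers to Foo itself.
  println(classOf[Foo[Int]])

  // An existential reference went through specialScalaErasure and ended up
  // referring to the erased underlying type (java.lang.Object here); with
  // this change it also refers to Foo.
  println(classOf[Foo[_]])
}
```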
There are three cases where this matters: - erasing classOf during erasure (scala/bug#10551) - erasing the element type of ArrayValues during erasure (scala/bug#10646) - erasing classOf inside Java annotations during jvm fixes scala/bug#10551; fixes scala/bug#10646 (not like github honors this these days) --- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 7 +-- .../scala/tools/nsc/transform/Erasure.scala | 9 ++-- .../reflect/internal/transform/Erasure.scala | 23 +++++++-- .../jvm/value-class-in-jannotation.check | 1 + .../jvm/value-class-in-jannotation.scala | 10 ++++ test/files/run/t10551.check | 21 ++++++++ test/files/run/t10551.scala | 48 +++++++++++++++++++ test/files/run/t10646.scala | 13 +++++ .../valueclasses-classtag-existential.check | 2 +- 9 files changed, 119 insertions(+), 15 deletions(-) create mode 100644 test/files/jvm/value-class-in-jannotation.check create mode 100644 test/files/jvm/value-class-in-jannotation.scala create mode 100644 test/files/run/t10551.check create mode 100644 test/files/run/t10551.scala create mode 100644 test/files/run/t10646.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 0f65f9e4c7cc..6535ff29db1a 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -453,12 +453,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { * non-erased existential type. */ def erasedType(tp: Type): Type = enteringErasure { - // make sure we don't erase value class references to the type that the value class boxes - // this is basically the same logic as in erasure's preTransform, case Literal(classTag). - tp.dealiasWiden match { - case tr @ TypeRef(_, clazz, _) if clazz.isDerivedValueClass => erasure.scalaErasure.eraseNormalClassRef(tr) - case tpe => erasure.erasure(tpe.typeSymbol)(tpe) - } + erasure.erasure(tp.typeSymbol).applyInArray(tp) } def descriptorForErasedType(tp: Type): String = typeToBType(erasedType(tp)).descriptor diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 09c18fd113ef..4e775bb786a8 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -1216,14 +1216,13 @@ abstract class Erasure extends InfoTransform Match(Typed(selector, TypeTree(selector.tpe)), cases) case Literal(ct) => - // We remove the original tree attachments in pre-easure to free up memory + // We remove the original tree attachments in pre-erasure to free up memory val cleanLiteral = tree.removeAttachment[OriginalTreeAttachment] if (ct.tag == ClazzTag && ct.typeValue.typeSymbol != definitions.UnitClass) { - val erased = ct.typeValue.dealiasWiden match { - case tr @ TypeRef(_, clazz, _) if clazz.isDerivedValueClass => scalaErasure.eraseNormalClassRef(tr) - case tpe => specialScalaErasure(tpe) - } + val typeValue = ct.typeValue.dealiasWiden + val erased = erasure(typeValue.typeSymbol) applyInArray typeValue + treeCopy.Literal(cleanLiteral, Constant(erased)) } else cleanLiteral diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index 99d76c334077..fff3ef59ae92 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -170,9 +170,26 @@ trait Erasure { mapOver(tp) } - def applyInArray(tp: Type): Type = 
tp match { - case tref @ TypeRef(_, sym, _) if sym.isDerivedValueClass => eraseNormalClassRef(tref) - case _ => apply(tp) + /* scala/bug#10551, scala/bug#10646: + * + * There are a few contexts in which it's important to erase types referencing + * derived value classes to the value class itself, not the underlying. As + * of right now, those are: + * - inside of `classOf` + * - the element type of an `ArrayValue` + * In those cases, the value class needs to be detected and erased using + * `javaErasure`, which treats refs to value classes the same as any other + * `TypeRef`. This used to be done by matching on `tr@TypeRef(_,sym,_)`, and + * checking whether `sym.isDerivedValueClass`, but there are more types with + * `typeSymbol.isDerivedValueClass` than just `TypeRef`s (`ExistentialType` + * is one of the easiest to bump into, e.g. `classOf[VC[_]]`). + * + * tl;dr if you're trying to erase a value class ref to the value class itself + * and not going through this method, you're inviting trouble into your life. + */ + def applyInArray(tp: Type): Type = { + if (tp.typeSymbol.isDerivedValueClass) javaErasure(tp) + else apply(tp) } } diff --git a/test/files/jvm/value-class-in-jannotation.check b/test/files/jvm/value-class-in-jannotation.check new file mode 100644 index 000000000000..c389887ee5aa --- /dev/null +++ b/test/files/jvm/value-class-in-jannotation.check @@ -0,0 +1 @@ +class Foo diff --git a/test/files/jvm/value-class-in-jannotation.scala b/test/files/jvm/value-class-in-jannotation.scala new file mode 100644 index 000000000000..bc466ce510f7 --- /dev/null +++ b/test/files/jvm/value-class-in-jannotation.scala @@ -0,0 +1,10 @@ +import javax.annotation.{Resource => R} + +final class Foo[T](val t: T) extends AnyVal + +@R(`type` = classOf[Foo[_]]) +class It + +object Test extends App { + println(classOf[It].getAnnotation(classOf[R]).`type`) +} \ No newline at end of file diff --git a/test/files/run/t10551.check b/test/files/run/t10551.check new file mode 100644 index 000000000000..8f5739d8d5c9 --- /dev/null +++ b/test/files/run/t10551.check @@ -0,0 +1,21 @@ +class test.NotNoPrefix$Id +class test.NotNoPrefix$Id +class test.NotNoPrefix$Ids +class test.NotNoPrefix$Ids +class test.NotNoPrefix$Bid +class test.NotNoPrefix$Bid +class test.NotNoPrefix$Bid +class test.NotNoPrefix$Bid +class test.NotNoPrefix$Id +class test.NotNoPrefix$Id +class test.NotNoPrefix$Ids +class test.NotNoPrefix$Ids +class test.NotNoPrefix$Bid +class test.NotNoPrefix$Bid +class test.NotNoPrefix$Bid +class test.NotNoPrefix$Bid +class test.NotNoPrefix$Bid +class test.NotNoPrefix$Bid +class test.NotNoPrefix$Bid +class test.NotNoPrefix$Bid +class test.NotNoPrefix$Bid diff --git a/test/files/run/t10551.scala b/test/files/run/t10551.scala new file mode 100644 index 000000000000..4c635860cc28 --- /dev/null +++ b/test/files/run/t10551.scala @@ -0,0 +1,48 @@ +package test { + object NotNoPrefix { + final class Id[A](val a: A) extends AnyVal + final class Ids[A](val as: Seq[A]) extends AnyVal + final class Bid[A, B](val ab: Map[A, B]) extends AnyVal + } +} + +object Test extends App { + import test.NotNoPrefix._ + + println(classOf[Id[Int]]) + println(classOf[Id[_]]) + + println(classOf[Ids[Int]]) + println(classOf[Ids[_]]) + + println(classOf[Bid[Int, Int]]) + println(classOf[Bid[Int, _]]) + println(classOf[Bid[_, Int]]) + println(classOf[Bid[_, _]]) + + type Iddy[A] = Id[A] + type Idsy[A] = Ids[A] + type Biddy[A, B] = Bid[A, B] + type Biddouble[A] = Bid[A, Double] + type Bixt[L] = Biddouble[_] + type Bixty = Bixt[_] + + 
println(classOf[Iddy[Int]]) + println(classOf[Iddy[_]]) + + println(classOf[Idsy[Int]]) + println(classOf[Idsy[_]]) + + println(classOf[Biddy[Int, Int]]) + println(classOf[Biddy[Int, _]]) + println(classOf[Biddy[_, Int]]) + println(classOf[Biddy[_, _]]) + + println(classOf[Biddouble[Int]]) + println(classOf[Biddouble[_]]) + + println(classOf[Bixt[Int]]) + println(classOf[Bixt[_]]) + + println(classOf[Bixty]) +} \ No newline at end of file diff --git a/test/files/run/t10646.scala b/test/files/run/t10646.scala new file mode 100644 index 000000000000..fd63afe4b382 --- /dev/null +++ b/test/files/run/t10646.scala @@ -0,0 +1,13 @@ +case class A[X](val a: X) extends AnyVal +case class B[X <: Serializable](val b: X) extends AnyVal + +object Test extends App { + val it = Array(A(1), A("foo")) + it(0) = A(123) + it.head + it.last + + val that = Array(A("baz"), A('fff)) + that.head + that.last +} diff --git a/test/files/run/valueclasses-classtag-existential.check b/test/files/run/valueclasses-classtag-existential.check index 9e2b9e1da7a4..bc56c4d89448 100644 --- a/test/files/run/valueclasses-classtag-existential.check +++ b/test/files/run/valueclasses-classtag-existential.check @@ -1 +1 @@ -Object +Foo From 566ed7164a35b05785acf52167b4f67041bc6043 Mon Sep 17 00:00:00 2001 From: howtonotwin Date: Sat, 6 Jan 2018 13:42:06 -0500 Subject: [PATCH 1180/2793] Make Typer#dyna.mkInvoke more accurate When searching the context for the tree in question, make sure to actually check that the tree under scrutiny matches the one we're looking for. This means that the check for varargs won't give a false positive if the context is too large. Fixes scala/bug#7420. Adjust the "vararg not supported" error so it doesn't hide other errors. --- .../tools/nsc/typechecker/ContextErrors.scala | 6 ++- .../scala/tools/nsc/typechecker/Typers.scala | 45 +++++++++---------- test/files/neg/applydynamic_sip.check | 17 ++++++- test/files/pos/t7420.scala | 13 ++++++ 4 files changed, 55 insertions(+), 26 deletions(-) create mode 100644 test/files/pos/t7420.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 27c62cde6006..7e3e954387fc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -622,8 +622,10 @@ trait ContextErrors { NormalTypeError(tree, fun.tpe+" does not take parameters") // Dynamic - def DynamicVarArgUnsupported(tree: Tree, name: Name) = - issueNormalTypeError(tree, name+ " does not support passing a vararg parameter") + def DynamicVarArgUnsupported(tree: Tree, name: Name) = { + issueNormalTypeError(tree, name + " does not support passing a vararg parameter") + setError(tree) + } def DynamicRewriteError(tree: Tree, err: AbsTypeError) = { issueTypeError(PosAndMsgTypeError(err.errPos, err.errMsg + diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index d6d8e1e9bb45..604f80ecd5d5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4105,14 +4105,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else None def isDynamicallyUpdatable(tree: Tree) = tree match { - case DynamicUpdate(qual, name) => - // if the qualifier is a Dynamic, that's all we need to know - acceptsApplyDynamic(qual.tpe) + // if the qualifier is a Dynamic, that's all we need to know + case 
DynamicUpdate(qual, name) => acceptsApplyDynamic(qual.tpe) case _ => false } def isApplyDynamicNamed(fun: Tree): Boolean = fun match { - case DynamicApplicationNamed(qual, _) if acceptsApplyDynamic(qual.tpe.widen) => true + case DynamicApplicationNamed(qual, _) => acceptsApplyDynamic(qual.tpe.widen) case _ => false // look deeper? // val treeInfo.Applied(methPart, _, _) = fun @@ -4169,10 +4168,6 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // If tp == NoType, pass only explicit type arguments to applyXXX. Not used at all // here - it is for scala-virtualized, where tp will be passed as an argument (for // selection on a staged Struct) - def hasNamed(args: List[Tree]): Boolean = args exists (_.isInstanceOf[AssignOrNamedArg]) - // not supported: foo.bar(a1,..., an: _*) - def hasStar(args: List[Tree]) = treeInfo.isWildcardStarArgList(args) - def applyOp(args: List[Tree]) = if (hasNamed(args)) nme.applyDynamicNamed else nme.applyDynamic def matches(t: Tree) = isDesugaredApply || treeInfo.dissectApplied(t).core == treeSelection /* Note that the trees which arrive here are potentially some distance from @@ -4184,22 +4179,26 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * See scala/bug#6731 among others. */ def findSelection(t: Tree): Option[(TermName, Tree)] = t match { - case Apply(fn, args) if hasStar(args) => DynamicVarArgUnsupported(tree, applyOp(args)) ; None - case Apply(fn, args) if matches(fn) => Some((applyOp(args), fn)) - case Assign(lhs, _) if matches(lhs) => Some((nme.updateDynamic, lhs)) - case _ if matches(t) => Some((nme.selectDynamic, t)) - case _ => (t.children flatMap findSelection).headOption + case Apply(fn, args) if matches(fn) => + val op = if(args.exists(_.isInstanceOf[AssignOrNamedArg])) nme.applyDynamicNamed else nme.applyDynamic + // not supported: foo.bar(a1,..., an: _*) + val fn1 = if(treeInfo.isWildcardStarArgList(args)) DynamicVarArgUnsupported(fn, op) else fn + Some((op, fn)) + case Assign(lhs, _) if matches(lhs) => Some((nme.updateDynamic, lhs)) + case _ if matches(t) => Some((nme.selectDynamic, t)) + case _ => t.children.flatMap(findSelection).headOption } - findSelection(cxTree) match { - case Some((opName, treeInfo.Applied(_, targs, _))) => - val fun = gen.mkTypeApply(Select(qual, opName), targs) - if (opName == nme.updateDynamic) suppressMacroExpansion(fun) // scala/bug#7617 - val nameStringLit = atPos(treeSelection.pos.withStart(treeSelection.pos.point).makeTransparent) { - Literal(Constant(name.decode)) - } - markDynamicRewrite(atPos(qual.pos)(Apply(fun, List(nameStringLit)))) - case _ => - setError(tree) + findSelection(cxTree) map { case (opName, treeInfo.Applied(_, targs, _)) => + val fun = gen.mkTypeApply(Select(qual, opName), targs) + if (opName == nme.updateDynamic) suppressMacroExpansion(fun) // scala/bug#7617 + val nameStringLit = atPos(treeSelection.pos.withStart(treeSelection.pos.point).makeTransparent) { + Literal(Constant(name.decode)) + } + markDynamicRewrite(atPos(qual.pos)(Apply(fun, List(nameStringLit)))) + } getOrElse { + // While there may be an error in the found tree itself, it should not be possible to *not find* it at all. 
+ devWarning(s"Tree $tree not found in the context $cxTree while trying to do a dynamic application") + setError(tree) } } } diff --git a/test/files/neg/applydynamic_sip.check b/test/files/neg/applydynamic_sip.check index 2cb2e7f095ee..43602a126c11 100644 --- a/test/files/neg/applydynamic_sip.check +++ b/test/files/neg/applydynamic_sip.check @@ -1,15 +1,30 @@ applydynamic_sip.scala:7: error: applyDynamic does not support passing a vararg parameter qual.sel(a, a2: _*) ^ +applydynamic_sip.scala:7: error: value applyDynamic is not a member of Dynamic +error after rewriting to Test.this.qual.("sel") +possible cause: maybe a wrong Dynamic method signature? + qual.sel(a, a2: _*) + ^ applydynamic_sip.scala:8: error: applyDynamicNamed does not support passing a vararg parameter qual.sel(arg = a, a2: _*) ^ +applydynamic_sip.scala:8: error: value applyDynamicNamed is not a member of Dynamic +error after rewriting to Test.this.qual.("sel") +possible cause: maybe a wrong Dynamic method signature? + qual.sel(arg = a, a2: _*) + ^ applydynamic_sip.scala:8: error: not found: value arg qual.sel(arg = a, a2: _*) ^ applydynamic_sip.scala:9: error: applyDynamicNamed does not support passing a vararg parameter qual.sel(arg, arg2 = "a2", a2: _*) ^ +applydynamic_sip.scala:9: error: value applyDynamicNamed is not a member of Dynamic +error after rewriting to Test.this.qual.("sel") +possible cause: maybe a wrong Dynamic method signature? + qual.sel(arg, arg2 = "a2", a2: _*) + ^ applydynamic_sip.scala:9: error: not found: value arg qual.sel(arg, arg2 = "a2", a2: _*) ^ @@ -70,4 +85,4 @@ error after rewriting to Test.this.bad2.updateDynamic("sel") possible cause: maybe a wrong Dynamic method signature? bad2.sel = 1 ^ -16 errors found +19 errors found diff --git a/test/files/pos/t7420.scala b/test/files/pos/t7420.scala new file mode 100644 index 000000000000..ad03e839688d --- /dev/null +++ b/test/files/pos/t7420.scala @@ -0,0 +1,13 @@ +import language.dynamics + +case class ArtifactGroup(org: String, pre: String, rev: String) extends Dynamic { + def selectDynamic(name: String) = s"$org:$pre-$name:$rev" +} + +object Test { + val library = ArtifactGroup("org.scala", "amazing-library", "7.2.4") + + def a = Seq(library.core, library.mail) + def b = Seq(a: _*) + def c = Seq(Seq(library.core, library.mail): _*) +} From ea65e04beef05708f0d11377338605377c86d3b6 Mon Sep 17 00:00:00 2001 From: "Aaron S. 
Hawley" Date: Wed, 27 Dec 2017 21:10:13 -0500 Subject: [PATCH 1181/2793] Improve scaladoc link refs and syntax --- src/library/scala/DelayedInit.scala | 2 +- src/library/scala/Symbol.scala | 2 +- src/library/scala/annotation/ClassfileAnnotation.scala | 2 +- src/library/scala/collection/TraversableLike.scala | 2 +- src/library/scala/collection/concurrent/TrieMap.scala | 2 +- src/library/scala/collection/generic/BitOperations.scala | 2 +- src/library/scala/collection/immutable/BitSet.scala | 2 +- src/library/scala/collection/immutable/HashMap.scala | 2 +- src/library/scala/collection/immutable/ListMap.scala | 2 +- src/library/scala/collection/immutable/LongMap.scala | 2 +- src/library/scala/collection/immutable/Queue.scala | 2 +- src/library/scala/collection/immutable/RedBlackTree.scala | 4 ++-- src/library/scala/collection/immutable/Stack.scala | 2 +- src/library/scala/collection/immutable/TreeMap.scala | 2 +- src/library/scala/collection/immutable/TreeSet.scala | 2 +- src/library/scala/collection/mutable/ArrayBuffer.scala | 2 +- src/library/scala/collection/mutable/ArraySeq.scala | 2 +- src/library/scala/collection/mutable/ArrayStack.scala | 2 +- src/library/scala/collection/mutable/BitSet.scala | 2 +- .../scala/collection/mutable/DoubleLinkedList.scala | 2 +- src/library/scala/collection/mutable/HashMap.scala | 2 +- src/library/scala/collection/mutable/HashSet.scala | 2 +- src/library/scala/collection/mutable/LinearSeq.scala | 2 +- src/library/scala/collection/mutable/LinkedList.scala | 2 +- src/library/scala/collection/mutable/ListBuffer.scala | 2 +- src/library/scala/collection/mutable/MutableList.scala | 2 +- src/library/scala/collection/mutable/StringBuilder.scala | 2 +- src/library/scala/collection/mutable/WeakHashMap.scala | 2 +- .../scala/collection/parallel/immutable/ParHashMap.scala | 2 +- .../scala/collection/parallel/immutable/ParHashSet.scala | 2 +- .../scala/collection/parallel/immutable/ParRange.scala | 2 +- .../scala/collection/parallel/immutable/ParVector.scala | 2 +- .../scala/collection/parallel/mutable/ParArray.scala | 2 +- .../scala/collection/parallel/mutable/ParHashMap.scala | 2 +- .../scala/collection/parallel/mutable/ParHashSet.scala | 2 +- .../scala/collection/parallel/mutable/ParTrieMap.scala | 2 +- src/library/scala/concurrent/ExecutionContext.scala | 8 ++++---- src/library/scala/concurrent/SyncVar.scala | 2 +- src/library/scala/concurrent/impl/Promise.scala | 8 ++++---- src/library/scala/io/Codec.scala | 4 ++-- src/library/scala/math/PartialOrdering.scala | 2 +- src/library/scala/runtime/ScalaRunTime.scala | 2 +- src/library/scala/sys/process/Process.scala | 2 +- src/library/scala/util/MurmurHash.scala | 2 +- src/library/scala/util/control/TailCalls.scala | 2 +- test/files/neg/delayed-init-ref.check | 2 +- 46 files changed, 54 insertions(+), 54 deletions(-) diff --git a/src/library/scala/DelayedInit.scala b/src/library/scala/DelayedInit.scala index 8dc841a7e38c..c1d2f28637bd 100644 --- a/src/library/scala/DelayedInit.scala +++ b/src/library/scala/DelayedInit.scala @@ -43,7 +43,7 @@ package scala * * @author Martin Odersky */ -@deprecated("DelayedInit semantics can be surprising. Support for `App` will continue. See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0-RC1", "2.11.0") +@deprecated("DelayedInit semantics can be surprising. Support for `App` will continue. 
See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0", "2.11.0") trait DelayedInit { def delayedInit(x: => Unit): Unit } diff --git a/src/library/scala/Symbol.scala b/src/library/scala/Symbol.scala index e1efe20c8b3b..a10da86da7f9 100644 --- a/src/library/scala/Symbol.scala +++ b/src/library/scala/Symbol.scala @@ -13,7 +13,7 @@ package scala * Instances of `Symbol` can be created easily with Scala's built-in quote * mechanism. * - * For instance, the [[http://scala-lang.org/#_top Scala]] term `'mysym` will + * For instance, the Scala term `'mysym` will * invoke the constructor of the `Symbol` class in the following way: * `Symbol("mysym")`. * diff --git a/src/library/scala/annotation/ClassfileAnnotation.scala b/src/library/scala/annotation/ClassfileAnnotation.scala index e32b93a5df97..bf9cf8ba8f5d 100644 --- a/src/library/scala/annotation/ClassfileAnnotation.scala +++ b/src/library/scala/annotation/ClassfileAnnotation.scala @@ -9,7 +9,7 @@ package scala.annotation /** A base class for classfile annotations. These are stored as - * [[http://docs.oracle.com/javase/7/docs/technotes/guides/language/annotations.html#_top Java annotations]]] + * [[http://docs.oracle.com/javase/8/docs/technotes/guides/language/annotations.html Java annotations]]] * in classfiles. * * @author Martin Odersky diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala index 12f2a7822d84..bf6c9401374d 100644 --- a/src/library/scala/collection/TraversableLike.scala +++ b/src/library/scala/collection/TraversableLike.scala @@ -415,7 +415,7 @@ trait TraversableLike[+A, +Repr] extends Any * $orderDependent * @return a $coll consisting of all elements of this $coll * except the first one. - * @throws `UnsupportedOperationException` if the $coll is empty. + * @throws java.lang.UnsupportedOperationException if the $coll is empty. */ override def tail: Repr = { if (isEmpty) throw new UnsupportedOperationException("empty.tail") diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index db3263888dbd..c1ef1ff3bf36 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -622,7 +622,7 @@ private[concurrent] case class RDCSS_Descriptor[K, V](old: INode[K, V], expected * iterator and clear operations. The cost of evaluating the (lazy) snapshot is * distributed across subsequent updates, thus making snapshot evaluation horizontally scalable. * - * For details, see: http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf + * For details, see: [[http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf]] * * @author Aleksandar Prokopec * @since 2.10 diff --git a/src/library/scala/collection/generic/BitOperations.scala b/src/library/scala/collection/generic/BitOperations.scala index 2f460eee1fcf..6686dbff2fdd 100644 --- a/src/library/scala/collection/generic/BitOperations.scala +++ b/src/library/scala/collection/generic/BitOperations.scala @@ -12,7 +12,7 @@ package generic /** Some bit operations. * - * See http://www.drmaciver.com/2008/08/unsigned-comparison-in-javascala/ for + * See [[http://www.drmaciver.com/2008/08/unsigned-comparison-in-javascala/]] for * an explanation of unsignedCompare. 
*/ private[collection] object BitOperations { diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala index ecf3326c7f95..244b1fc15a61 100644 --- a/src/library/scala/collection/immutable/BitSet.scala +++ b/src/library/scala/collection/immutable/BitSet.scala @@ -18,7 +18,7 @@ import mutable.Builder /** A class for immutable bitsets. * $bitsetinfo - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable_bitsets "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable-bitsets"Scala's Collection Library overview"]] * section on `Immutable BitSets` for more information. * * @define Coll `immutable.BitSet` diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 627f723cb062..dad24c172c69 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -25,7 +25,7 @@ import parallel.immutable.ParHashMap * @author Tiark Rompf * @version 2.8 * @since 2.3 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#hash_tries "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#hash-tries "Scala's Collection Library overview"]] * section on `Hash Tries` for more information. * @define Coll `immutable.HashMap` * @define coll immutable hash map diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala index 589f8bbba94f..ffad47878516 100644 --- a/src/library/scala/collection/immutable/ListMap.scala +++ b/src/library/scala/collection/immutable/ListMap.scala @@ -20,7 +20,7 @@ import scala.annotation.tailrec * n elements will take O(n^2^) time. This makes the builder suitable only for a small number of * elements. * - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#list_maps "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#list-maps "Scala's Collection Library overview"]] * section on `List Maps` for more information. * @since 1 * @define Coll ListMap diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala index 930e6fe4b0cb..e67f9e69b555 100644 --- a/src/library/scala/collection/immutable/LongMap.scala +++ b/src/library/scala/collection/immutable/LongMap.scala @@ -137,7 +137,7 @@ private[immutable] class LongMapKeyIterator[V](it: LongMap[V]) extends LongMapIt /** * Specialised immutable map structure for long keys, based on - * Fast Mergeable Long Maps + * [[http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.37.5452 Fast Mergeable Long Maps]] * by Okasaki and Gill. Essentially a trie based on binary digits of the integers. * * Note: This class is as of 2.8 largely superseded by HashMap. 
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala index 876066bb2d76..aae80cf148e7 100644 --- a/src/library/scala/collection/immutable/Queue.scala +++ b/src/library/scala/collection/immutable/Queue.scala @@ -27,7 +27,7 @@ import mutable.{ Builder, ListBuffer } * @author Erik Stenman * @version 1.0, 08/07/2003 * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable_queues "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable-queues "Scala's Collection Library overview"]] * section on `Immutable Queues` for more information. * * @define Coll `immutable.Queue` diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index 4f2e9115fe67..cbc8a28ef790 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -168,8 +168,8 @@ object RedBlackTree { } /* Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees - * Constructing Red-Black Trees, Ralf Hinze: http://www.cs.ox.ac.uk/ralf.hinze/publications/WAAAPL99b.ps.gz - * Red-Black Trees in a Functional Setting, Chris Okasaki: https://wiki.rice.edu/confluence/download/attachments/2761212/Okasaki-Red-Black.pdf */ + * Constructing Red-Black Trees, Ralf Hinze: [[http://www.cs.ox.ac.uk/ralf.hinze/publications/WAAAPL99b.ps.gz]] + * Red-Black Trees in a Functional Setting, Chris Okasaki: [[https://wiki.rice.edu/confluence/download/attachments/2761212/Okasaki-Red-Black.pdf]] */ private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) { if (isRedTree(tr)) { diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala index 02bdadb5dd11..a4f75ea41911 100644 --- a/src/library/scala/collection/immutable/Stack.scala +++ b/src/library/scala/collection/immutable/Stack.scala @@ -35,7 +35,7 @@ object Stack extends SeqFactory[Stack] { * @author Matthias Zenger * @version 1.0, 10/07/2003 * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable_stacks "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable-stacks "Scala's Collection Library overview"]] * section on `Immutable stacks` for more information. * * @define Coll `immutable.Stack` diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 2d1bf0f6b1d0..05e04bb514f1 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -34,7 +34,7 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] { * @author Matthias Zenger * @version 1.1, 03/05/2004 * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#redblack_trees "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] * section on `Red-Black Trees` for more information. 
* * @define Coll immutable.TreeMap diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 2cdf3b352113..af3deb50a29c 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -38,7 +38,7 @@ object TreeSet extends ImmutableSortedSetFactory[TreeSet] { * @author Martin Odersky * @version 2.0, 02/01/2007 * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#redblack_trees "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] * section on `Red-Black Trees` for more information. * * @define Coll `immutable.TreeSet` diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala index 23d386f729d0..2e7feaa37e9b 100644 --- a/src/library/scala/collection/mutable/ArrayBuffer.scala +++ b/src/library/scala/collection/mutable/ArrayBuffer.scala @@ -24,7 +24,7 @@ import parallel.mutable.ParArray * @author Martin Odersky * @version 2.8 * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#array_buffers "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#array-buffers "Scala's Collection Library overview"]] * section on `Array Buffers` for more information. * diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala index 1e82096bafcd..22c98cd3c339 100644 --- a/src/library/scala/collection/mutable/ArraySeq.scala +++ b/src/library/scala/collection/mutable/ArraySeq.scala @@ -22,7 +22,7 @@ import parallel.mutable.ParArray * @author Martin Odersky * @version 2.8 * @since 2.8 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#array_sequences "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#array-sequences "Scala's Collection Library overview"]] * section on `Array Sequences` for more information. * * @tparam A type of the elements contained in this array sequence. diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala index 951a90b084e4..9b52d9898c0a 100644 --- a/src/library/scala/collection/mutable/ArrayStack.scala +++ b/src/library/scala/collection/mutable/ArrayStack.scala @@ -48,7 +48,7 @@ object ArrayStack extends SeqFactory[ArrayStack] { * * @author David MacIver * @since 2.7 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#array_stacks "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#array-stacks "Scala's Collection Library overview"]] * section on `Array Stacks` for more information. * * @tparam T type of the elements contained in this array stack. 
diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala index a714cce8816c..93d5ad76e34c 100644 --- a/src/library/scala/collection/mutable/BitSet.scala +++ b/src/library/scala/collection/mutable/BitSet.scala @@ -19,7 +19,7 @@ import BitSetLike.{LogWL, MaxSize} * * $bitsetinfo * - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_bitsets "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable-bitsets "Scala's Collection Library overview"]] * section on `Mutable Bitsets` for more information. * * @define Coll `BitSet` diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala index 698d47e101ae..141468e17a49 100644 --- a/src/library/scala/collection/mutable/DoubleLinkedList.scala +++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala @@ -21,7 +21,7 @@ import generic._ * @author Martin Odersky * @version 2.8 * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#double_linked_lists "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#double-linked-lists "Scala's Collection Library overview"]] * section on `Double Linked Lists` for more information. * diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala index de61ebb796df..2391080658e3 100644 --- a/src/library/scala/collection/mutable/HashMap.scala +++ b/src/library/scala/collection/mutable/HashMap.scala @@ -16,7 +16,7 @@ import scala.collection.parallel.mutable.ParHashMap /** This class implements mutable maps using a hashtable. * * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#hash_tables "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] * section on `Hash Tables` for more information. * * @tparam A the type of the keys contained in this hash map. diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala index 3a16e4efa595..05f078098adb 100644 --- a/src/library/scala/collection/mutable/HashSet.scala +++ b/src/library/scala/collection/mutable/HashSet.scala @@ -21,7 +21,7 @@ import scala.collection.parallel.mutable.ParHashSet * @author Martin Odersky * @version 2.0, 31/12/2006 * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#hash_tables "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] * section on `Hash Tables` for more information. 
* * @define Coll `mutable.HashSet` diff --git a/src/library/scala/collection/mutable/LinearSeq.scala b/src/library/scala/collection/mutable/LinearSeq.scala index 3fa10042effd..77e56b971647 100644 --- a/src/library/scala/collection/mutable/LinearSeq.scala +++ b/src/library/scala/collection/mutable/LinearSeq.scala @@ -20,7 +20,7 @@ import generic._ * * @define Coll `LinearSeq` * @define coll linear sequence - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_lists "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable-lists "Scala's Collection Library overview"]] * section on `Mutable Lists` for more information. */ trait LinearSeq[A] extends Seq[A] diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala index 5d03cd44102e..d21a7a5446af 100644 --- a/src/library/scala/collection/mutable/LinkedList.scala +++ b/src/library/scala/collection/mutable/LinkedList.scala @@ -34,7 +34,7 @@ import generic._ * @author Martin Odersky * @version 2.8 * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#linked_lists "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#linked-lists "Scala's Collection Library overview"]] * section on `Linked Lists` for more information. * * @tparam A the type of the elements contained in this linked list. diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala index aa79e972d560..3f7b7ab16e37 100644 --- a/src/library/scala/collection/mutable/ListBuffer.scala +++ b/src/library/scala/collection/mutable/ListBuffer.scala @@ -21,7 +21,7 @@ import java.io.{ObjectOutputStream, ObjectInputStream} * @author Martin Odersky * @version 2.8 * @since 1 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#list_buffers "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#list-buffers "Scala's Collection Library overview"]] * section on `List Buffers` for more information. * * @tparam A the type of this list buffer's elements. diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala index a333eedb1a59..384b7c3eedae 100644 --- a/src/library/scala/collection/mutable/MutableList.scala +++ b/src/library/scala/collection/mutable/MutableList.scala @@ -23,7 +23,7 @@ import immutable.List * @since 1 * @define Coll `mutable.MutableList` * @define coll mutable list - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable_lists "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#mutable-lists "Scala's Collection Library overview"]] * section on `Mutable Lists` for more information. 
*/ @SerialVersionUID(5938451523372603072L) diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala index b5b949837417..d60ae47a5d41 100644 --- a/src/library/scala/collection/mutable/StringBuilder.scala +++ b/src/library/scala/collection/mutable/StringBuilder.scala @@ -24,7 +24,7 @@ import immutable.StringLike * @since 2.7 * @define Coll `mutable.IndexedSeq` * @define coll string builder - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html# "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#stringbuilders "Scala's Collection Library overview"]] * section on `StringBuilders` for more information. */ @SerialVersionUID(0 - 8525408645367278351L) diff --git a/src/library/scala/collection/mutable/WeakHashMap.scala b/src/library/scala/collection/mutable/WeakHashMap.scala index 433d054bfcf5..4d1b3397c408 100644 --- a/src/library/scala/collection/mutable/WeakHashMap.scala +++ b/src/library/scala/collection/mutable/WeakHashMap.scala @@ -21,7 +21,7 @@ import convert.Wrappers._ * @tparam B type of values associated with the keys * * @since 2.8 - * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#weak_hash_maps "Scala's Collection Library overview"]] + * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#weak-hash-maps "Scala's Collection Library overview"]] * section on `Weak Hash Maps` for more information. * * @define Coll `WeakHashMap` diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala index 06455ba00621..f50718343c70 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala @@ -33,7 +33,7 @@ import scala.collection.parallel.Task * * @author Aleksandar Prokopec * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tries Scala's Parallel Collections Library overview]] + * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-hash-tries Scala's Parallel Collections Library overview]] * section on Parallel Hash Tries for more information. * * @define Coll `immutable.ParHashMap` diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala index 3a1ec7fff82f..44f2b30a399f 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala @@ -36,7 +36,7 @@ import scala.collection.parallel.Task * * @author Aleksandar Prokopec * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tries Scala's Parallel Collections Library overview]] + * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-hash-tries Scala's Parallel Collections Library overview]] * section on Parallel Hash Tries for more information. 
* * @define Coll `immutable.ParHashSet` diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala index de2b53a6c0c0..56e587ae00db 100644 --- a/src/library/scala/collection/parallel/immutable/ParRange.scala +++ b/src/library/scala/collection/parallel/immutable/ParRange.scala @@ -24,7 +24,7 @@ import scala.collection.Iterator * * @author Aleksandar Prokopec * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_range Scala's Parallel Collections Library overview]] + * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-range Scala's Parallel Collections Library overview]] * section on `ParRange` for more information. * * @define Coll `immutable.ParRange` diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala index c2c1d042e1d9..44f0371fe7ca 100644 --- a/src/library/scala/collection/parallel/immutable/ParVector.scala +++ b/src/library/scala/collection/parallel/immutable/ParVector.scala @@ -29,7 +29,7 @@ import immutable.VectorIterator * * @author Aleksandar Prokopec * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_vector Scala's Parallel Collections Library overview]] + * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-vector Scala's Parallel Collections Library overview]] * section on `ParVector` for more information. * * @define Coll `immutable.ParVector` diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala index 8a2cf2716aa1..6b55da698ef3 100644 --- a/src/library/scala/collection/parallel/mutable/ParArray.scala +++ b/src/library/scala/collection/parallel/mutable/ParArray.scala @@ -44,7 +44,7 @@ import scala.reflect.ClassTag * * @author Aleksandar Prokopec * @since 2.9 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_array Scala's Parallel Collections Library overview]] + * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-array Scala's Parallel Collections Library overview]] * section on `ParArray` for more information. * * @define Coll `ParArray` diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala index bb3737f18e96..1d1ca0d1751e 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala @@ -29,7 +29,7 @@ import scala.collection.parallel.Task * @define coll parallel hash map * * @author Aleksandar Prokopec - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tables Scala's Parallel Collections Library overview]] + * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-hash-tables Scala's Parallel Collections Library overview]] * section on Parallel Hash Tables for more information. 
*/ @SerialVersionUID(1L) diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala index 1e3d57e0e532..d9f79d5873d9 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala @@ -30,7 +30,7 @@ import scala.collection.parallel.Task * @define coll parallel hash set * * @author Aleksandar Prokopec - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_hash_tables Scala's Parallel Collections Library overview]] + * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-hash-tables Scala's Parallel Collections Library overview]] * section on Parallel Hash Tables for more information. */ @SerialVersionUID(1L) diff --git a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala index 2faf223b999d..c72e4ae3aa79 100644 --- a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala @@ -30,7 +30,7 @@ import scala.collection.concurrent.TrieMapIterator * * @author Aleksandar Prokopec * @since 2.10 - * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel_concurrent_tries Scala's Parallel Collections Library overview]] + * @see [[http://docs.scala-lang.org/overviews/parallel-collections/concrete-parallel-collections.html#parallel-concurrent-tries Scala's Parallel Collections Library overview]] * section on `ParTrieMap` for more information. */ final class ParTrieMap[K, V] private[collection] (private val ctrie: TrieMap[K, V]) diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala index f46f29438767..5075f6466ae3 100644 --- a/src/library/scala/concurrent/ExecutionContext.scala +++ b/src/library/scala/concurrent/ExecutionContext.scala @@ -141,7 +141,7 @@ object ExecutionContext { /** Creates an `ExecutionContext` from the given `ExecutorService`. * - * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]]. + * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[scala.concurrent.ExecutionContext$.global default configuration]]. * @param reporter a function for error reporting * @return the `ExecutionContext` using the given `ExecutorService` */ @@ -158,14 +158,14 @@ object ExecutionContext { * val ec = ExecutionContext.fromExecutorService(Executors.newSingleThreadExecutor()) * }}} * - * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]]. + * @param e the `ExecutorService` to use. If `null`, a new `ExecutorService` is created with [[scala.concurrent.ExecutionContext$.global default configuration]]. * @return the `ExecutionContext` using the given `ExecutorService` */ def fromExecutorService(e: ExecutorService): ExecutionContextExecutorService = fromExecutorService(e, defaultReporter) /** Creates an `ExecutionContext` from the given `Executor`. 
* - * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]]. + * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[scala.concurrent.ExecutionContext$.global default configuration]]. * @param reporter a function for error reporting * @return the `ExecutionContext` using the given `Executor` */ @@ -174,7 +174,7 @@ object ExecutionContext { /** Creates an `ExecutionContext` from the given `Executor` with the [[scala.concurrent.ExecutionContext$.defaultReporter default reporter]]. * - * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[http://www.scala-lang.org/api/current/index.html#scala.concurrent.ExecutionContext$@global:scala.concurrent.ExecutionContextExecutor default configuration]]. + * @param e the `Executor` to use. If `null`, a new `Executor` is created with [[scala.concurrent.ExecutionContext$.global default configuration]]. * @return the `ExecutionContext` using the given `Executor` */ def fromExecutor(e: Executor): ExecutionContextExecutor = fromExecutor(e, defaultReporter) diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala index 77bfa951198a..4b42582c0898 100644 --- a/src/library/scala/concurrent/SyncVar.scala +++ b/src/library/scala/concurrent/SyncVar.scala @@ -40,7 +40,7 @@ class SyncVar[A] { wait(timeout) val elapsed = System.nanoTime() - start // nanoTime should be monotonic, but it's not possible to rely on that. - // See http://bugs.java.com/bugdatabase/view_bug.do?bug_id=6458294. + // See http://bugs.java.com/view_bug.do?bug_id=6458294 if (elapsed < 0) 0 else TimeUnit.NANOSECONDS.toMillis(elapsed) } diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala index 7fcc8c9f2dd6..f5e0df261aeb 100644 --- a/src/library/scala/concurrent/impl/Promise.scala +++ b/src/library/scala/concurrent/impl/Promise.scala @@ -87,10 +87,10 @@ private[concurrent] object Promise { /** * Latch used to implement waiting on a DefaultPromise's result. * - * Inspired by: http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/main/java/util/concurrent/locks/AbstractQueuedSynchronizer.java + * Inspired by: [[http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/main/java/util/concurrent/locks/AbstractQueuedSynchronizer.java]] * Written by Doug Lea with assistance from members of JCP JSR-166 * Expert Group and released to the public domain, as explained at - * http://creativecommons.org/publicdomain/zero/1.0/ + * [[http://creativecommons.org/publicdomain/zero/1.0/]] */ private final class CompletionLatch[T] extends AbstractQueuedSynchronizer with (Try[T] => Unit) { override protected def tryAcquireShared(ignored: Int): Int = if (getState != 0) 1 else -1 @@ -150,8 +150,8 @@ private[concurrent] object Promise { * To make the chains flattenable, the concept of linking promises together * needed to become an explicit feature of the DefaultPromise implementation, * so that the implementation to navigate and rewire links as needed. The idea - * of linking promises is based on the [[Twitter promise implementation - * https://github.com/twitter/util/blob/master/util-core/src/main/scala/com/twitter/util/Promise.scala]]. 
+ * of linking promises is based on the [[https://github.com/twitter/util/blob/master/util-core/src/main/scala/com/twitter/util/Promise.scala + * Twitter promise implementation]]. * * In practice, flattening the chain cannot always be done perfectly. When a * promise is added to the end of the chain, it scans the chain and links diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala index 7cb7858b36f4..0de79a679127 100644 --- a/src/library/scala/io/Codec.scala +++ b/src/library/scala/io/Codec.scala @@ -21,8 +21,8 @@ import scala.language.implicitConversions // XML: optional encoding parameter. // // -// MacRoman vs. UTF-8: see http://osdir.com/ml/lang-jruby-devel/2009-04/msg00071.html -// -Dfile.encoding: see http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4375816 +// MacRoman vs. UTF-8: see https://groups.google.com/d/msg/jruby-developers/-qtwRhoE1WM/whSPVpTNV28J +// -Dfile.encoding: see https://bugs.java.com/view_bug.do?bug_id=4375816 /** A class for character encoding/decoding preferences. * diff --git a/src/library/scala/math/PartialOrdering.scala b/src/library/scala/math/PartialOrdering.scala index 8d7fc3253550..d8ab265f7c7b 100644 --- a/src/library/scala/math/PartialOrdering.scala +++ b/src/library/scala/math/PartialOrdering.scala @@ -14,7 +14,7 @@ package math * of partial ordering on some type. This trait is for representing the * latter. * - * A [[http://en.wikipedia.org/wiki/Partial_order partial ordering]] is a + * A [[http://en.wikipedia.org/wiki/Partially_ordered_set partial ordering]] is a * binary relation on a type `T`, exposed as the `lteq` method of this trait. * This relation must be: * diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala index ca9f98fa9fb0..b90d6f43e42a 100644 --- a/src/library/scala/runtime/ScalaRunTime.scala +++ b/src/library/scala/runtime/ScalaRunTime.scala @@ -135,7 +135,7 @@ object ScalaRunTime { arr } - // Java bug: http://bugs.java.com/bugdatabase/view_bug.do?bug_id=4071957 + // Java bug: https://bugs.java.com/view_bug.do?bug_id=4071957 // More background at ticket #2318. def ensureAccessible(m: JMethod): JMethod = scala.reflect.ensureAccessible(m) diff --git a/src/library/scala/sys/process/Process.scala b/src/library/scala/sys/process/Process.scala index 0ec749e78a2e..9e0ea6e71a04 100644 --- a/src/library/scala/sys/process/Process.scala +++ b/src/library/scala/sys/process/Process.scala @@ -156,7 +156,7 @@ trait ProcessCreation { * import java.io.File * * val spde = new URL("http://technically.us/spde.html") - * val dispatch = new URL("http://dispatch.databinder.net/Dispatch.html") + * val dispatch = new URL("https://dispatchhttp.org/Dispatch.html") * val build = new File("project/build.properties") * cat(spde, dispatch, build) #| "grep -i scala" ! * }}} diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala index cdc5c821fa6a..b8df29ef7679 100644 --- a/src/library/scala/util/MurmurHash.scala +++ b/src/library/scala/util/MurmurHash.scala @@ -10,7 +10,7 @@ package scala package util /** An implementation of Austin Appleby's MurmurHash 3.0 algorithm - * (32 bit version); reference: https://github.com/aappleby/smhasher + * (32 bit version); reference: [[https://github.com/aappleby/smhasher]] * * This is the hash used by collections and case classes (including * tuples). 
diff --git a/src/library/scala/util/control/TailCalls.scala b/src/library/scala/util/control/TailCalls.scala index c7fefb1ebadb..fe8866ec3fb2 100644 --- a/src/library/scala/util/control/TailCalls.scala +++ b/src/library/scala/util/control/TailCalls.scala @@ -15,7 +15,7 @@ package util.control * of evaluating a tailcalling function can be retrieved from a `Tailrec` * value using method `result`. * Implemented as described in "Stackless Scala with Free Monads" - * http://blog.higher-order.com/assets/trampolines.pdf + * [[http://blog.higher-order.com/assets/trampolines.pdf]] * * Here's a usage example: * {{{ diff --git a/test/files/neg/delayed-init-ref.check b/test/files/neg/delayed-init-ref.check index 2913b1858f72..854427466998 100644 --- a/test/files/neg/delayed-init-ref.check +++ b/test/files/neg/delayed-init-ref.check @@ -4,7 +4,7 @@ delayed-init-ref.scala:17: warning: Selecting value vall from object O, which ex delayed-init-ref.scala:19: warning: Selecting value vall from object O, which extends scala.DelayedInit, is likely to yield an uninitialized value println(vall) // warn ^ -delayed-init-ref.scala:28: warning: trait DelayedInit in package scala is deprecated (since 2.11.0): DelayedInit semantics can be surprising. Support for `App` will continue. See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0-RC1 +delayed-init-ref.scala:28: warning: trait DelayedInit in package scala is deprecated (since 2.11.0): DelayedInit semantics can be surprising. Support for `App` will continue. See the release notes for more details: https://github.com/scala/scala/releases/tag/v2.11.0 trait Before extends DelayedInit { ^ delayed-init-ref.scala:40: warning: Selecting value foo from trait UserContext, which extends scala.DelayedInit, is likely to yield an uninitialized value From 92e7a9fae82ae6e5c552128169d0cf5f3e79ee22 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 28 Dec 2017 18:15:19 -0800 Subject: [PATCH 1182/2793] Enforce maxerrs more robustly `-Xmaxerrs` is currently advisory and depends on the good will of the reporter. The standard console reporter is well-advised, but others are not, such as sbt, which uses an sbt setting to control logging. This commit adds a simple filter in front of any reporter deemed untrustworthy. Comments added as reminder that `reporters.Reporter` is obsolete and implementations will use `internal.Reporter`. 
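For illustration only, a rough sketch (not part of this patch) of how the new `Global.reporter` setter is expected to behave once `-Xmaxerrs` is set below its default and the supplied reporter ignores the limit. The `MaxerrsSketch` object and the use of `StoreReporter` as the "untrusted" reporter are stand-ins chosen for the example, not anything added by this commit:

```scala
import scala.tools.nsc.{Global, Settings}
import scala.tools.nsc.reporters.StoreReporter

object MaxerrsSketch {
  def main(args: Array[String]): Unit = {
    val settings = new Settings
    settings.usejavacp.value = true
    settings.maxerrs.value = 3          // stricter than the default, and now marked as set by the user
    val untrusted = new StoreReporter   // stores every report, pays no attention to -Xmaxerrs
    val global = new Global(settings, untrusted)
    // With this change, the setter should wrap `untrusted` in a LimitingReporter,
    // so at most 3 errors ever reach `untrusted.infos`.
    println(global.reporter.getClass.getName)
  }
}
```

The new `run/maxerrs.scala` test below checks the same end-to-end behaviour through `DirectTest`.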
--- src/compiler/scala/tools/nsc/Global.scala | 13 +++- .../nsc/reporters/AbstractReporter.scala | 2 + .../tools/nsc/reporters/ConsoleReporter.scala | 1 + .../nsc/reporters/LimitingReporter.scala | 18 +++++ .../tools/nsc/reporters/NoReporter.scala | 9 ++- .../tools/nsc/reporters/StoreReporter.scala | 7 +- .../scala/reflect/internal/Reporting.scala | 49 ++++++++++++ test/files/run/maxerrs.scala | 34 +++++++++ .../nsc/reporters/ConsoleReporterTest.scala | 75 +++++++++++++------ 9 files changed, 177 insertions(+), 31 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala create mode 100644 test/files/run/maxerrs.scala diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 295b174f66a0..3e849bec1caa 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -33,7 +33,7 @@ import scala.tools.nsc.ast.{TreeGen => AstTreeGen} import scala.tools.nsc.classpath._ import scala.tools.nsc.profile.Profiler -class Global(var currentSettings: Settings, var reporter: Reporter) +class Global(var currentSettings: Settings, reporter0: Reporter) extends SymbolTable with CompilationUnits with Plugins @@ -75,6 +75,17 @@ class Global(var currentSettings: Settings, var reporter: Reporter) override def settings = currentSettings + private[this] var currentReporter: Reporter = { reporter = reporter0 ; currentReporter } + + def reporter: Reporter = currentReporter + def reporter_=(newReporter: Reporter): Unit = + currentReporter = newReporter match { + case _: reporters.ConsoleReporter | _: reporters.LimitingReporter => newReporter + case _ if settings.maxerrs.isSetByUser && settings.maxerrs.value < settings.maxerrs.default => + new reporters.LimitingReporter(settings, newReporter) + case _ => newReporter + } + /** Switch to turn on detailed type logs */ var printTypings = settings.Ytyperdebug.value diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala index 5e4914fa832e..75afd057afb1 100644 --- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala @@ -9,6 +9,8 @@ package reporters import scala.collection.mutable import scala.tools.nsc.Settings import scala.reflect.internal.util.Position +// TODO +//import scala.reflect.internal.Reporter /** * This reporter implements filtering. 
diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala index 224de9773459..c2cbaf81ac52 100644 --- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala @@ -62,6 +62,7 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr for (k <- List(WARNING, ERROR) if k.count > 0) printMessage(s"${countAs(k.count, label(k))} found") def display(pos: Position, msg: String, severity: Severity): Unit = { + // the count includes the current message val ok = severity match { case ERROR => ERROR.count <= settings.maxerrs.value case WARNING => WARNING.count <= settings.maxwarns.value diff --git a/src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala b/src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala new file mode 100644 index 000000000000..1eedc4fff6a1 --- /dev/null +++ b/src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala @@ -0,0 +1,18 @@ +package scala.tools.nsc +package reporters + +// TODO +//import scala.reflect.internal.Reporter +import scala.reflect.internal.{Reporter => InternalReporter, FilteringReporter} +import scala.reflect.internal.util.Position + +/** A `Filter` that respects `-Xmaxerrs` and `-Xmaxwarns`. + */ +class LimitingReporter(settings: Settings, override protected val delegate: InternalReporter) extends Reporter with FilteringReporter { + override protected def filter(pos: Position, msg: String, severity: Severity) = + severity match { + case ERROR => errorCount < settings.maxerrs.value + case WARNING => warningCount < settings.maxwarns.value + case _ => true + } +} diff --git a/src/compiler/scala/tools/nsc/reporters/NoReporter.scala b/src/compiler/scala/tools/nsc/reporters/NoReporter.scala index 477aacd1cb4e..6aa9b4315612 100644 --- a/src/compiler/scala/tools/nsc/reporters/NoReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/NoReporter.scala @@ -1,9 +1,12 @@ package scala.tools.nsc.reporters + import scala.reflect.internal.util.Position +// TODO +//import scala.reflect.internal.Reporter /** - * A reporter that ignores reports + * A reporter that ignores reports. */ -object NoReporter extends Reporter{ +object NoReporter extends Reporter { override protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = () -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala index 9f8e9623a7e7..ce1912c72c0d 100644 --- a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala @@ -8,22 +8,23 @@ package reporters import scala.collection.mutable import scala.reflect.internal.util.Position +// TODO +//import scala.reflect.internal.Reporter /** * This class implements a Reporter that stores its reports in the set `infos`. 
*/ class StoreReporter extends Reporter { case class Info(pos: Position, msg: String, severity: Severity) { - override def toString() = "pos: " + pos + " " + msg + " " + severity + override def toString() = s"pos: $pos $msg $severity" } val infos = new mutable.LinkedHashSet[Info] - protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean) { + override protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = { if (!force) { infos += Info(pos, msg, severity) severity.count += 1 } } - override def reset() { super.reset() infos.clear() diff --git a/src/reflect/scala/reflect/internal/Reporting.scala b/src/reflect/scala/reflect/internal/Reporting.scala index c1f0140479b6..8238327cc730 100644 --- a/src/reflect/scala/reflect/internal/Reporting.scala +++ b/src/reflect/scala/reflect/internal/Reporting.scala @@ -116,6 +116,55 @@ abstract class Reporter { } } +/** A `Reporter` that forwards all methods to a delegate. + * + * Concrete subclasses must implement the abstract `delegate` member. + */ +trait ForwardingReporter extends Reporter { + + /* Receiver of all forwarded calls. */ + protected val delegate: Reporter + + /* Always throws `UnsupportedOperationException`. */ + protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Nothing = + throw new UnsupportedOperationException(s"$msg ($pos)") + + override def echo(pos: Position, msg: String) = delegate.echo(pos, msg) + override def warning(pos: Position, msg: String) = delegate.warning(pos, msg) + override def error(pos: Position, msg: String) = delegate.error(pos, msg) + + private def other(severity: Severity): delegate.Severity = severity match { + case ERROR => delegate.ERROR + case WARNING => delegate.WARNING + case _ => delegate.INFO + } + override def count(severity: Severity) = delegate.count(other(severity)) + override def resetCount(severity: Severity) = delegate.resetCount(other(severity)) + + override def errorCount = delegate.errorCount + override def warningCount = delegate.warningCount + override def hasErrors = delegate.hasErrors + override def hasWarnings = delegate.hasWarnings + override def reset() = delegate.reset() + override def flush() = delegate.flush() + override def finish() = delegate.finish() + override def rerunWithDetails(setting: MutableSettings#Setting, name: String) = + delegate.rerunWithDetails(setting, name) +} + +/** A `ForwardingReporter` that filters events before delegating. + * + * Concrete subclasses should implement just the abstract `filter` method. + */ +trait FilteringReporter extends ForwardingReporter { + /* True to permit the message. 
*/ + protected def filter(pos: Position, msg: String, severity: Severity): Boolean + + override def echo(pos: Position, msg: String) = if (filter(pos, msg, INFO)) delegate.echo(pos, msg) + override def warning(pos: Position, msg: String) = if (filter(pos, msg, WARNING)) delegate.warning(pos, msg) + override def error(pos: Position, msg: String) = if (filter(pos, msg, ERROR)) delegate.error(pos, msg) +} + // TODO: move into superclass once partest cuts tie on Severity abstract class ReporterImpl extends Reporter { class Severity(val id: Int)(name: String) { var count: Int = 0 ; override def toString = name} diff --git a/test/files/run/maxerrs.scala b/test/files/run/maxerrs.scala new file mode 100644 index 000000000000..3edd5c4d0717 --- /dev/null +++ b/test/files/run/maxerrs.scala @@ -0,0 +1,34 @@ + +import scala.tools.partest._ +import scala.tools.nsc._ +import scala.tools.nsc.{Global, Settings} +import scala.tools.nsc.reporters.StoreReporter + +object Test extends DirectTest { + + override def code = """ + class C { + def f(vs: Int*) = vs.sum + + def g = f("","","","","","","","","","") + } + """.trim + + override def extraSettings = "-usejavacp" + + // a reporter that ignores all limits + lazy val store = new StoreReporter + + final val limit = 3 + + override def show(): Unit = { + compile() + assert(store.infos.size == limit) + } + override def newSettings(args: List[String]) = { + val s = super.newSettings(args) + s.maxerrs.value = limit + s + } + override def reporter(s: Settings) = store +} diff --git a/test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala b/test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala index f24e11c9e28b..de907fb9db51 100644 --- a/test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala +++ b/test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala @@ -10,7 +10,6 @@ import org.junit.runners.JUnit4 import scala.reflect.internal.util._ - @RunWith(classOf[JUnit4]) class ConsoleReporterTest { val source = "Test_ConsoleReporter" @@ -20,34 +19,30 @@ class ConsoleReporterTest { val writerOut = new ByteArrayOutputStream() val echoWriterOut = new ByteArrayOutputStream() - def createConsoleReporter(inputForReader: String, errOut: ByteArrayOutputStream, echoOut: ByteArrayOutputStream = null): ConsoleReporter = { val reader = new BufferedReader(new StringReader(inputForReader)) - /** Create reporter with the same writer and echoWriter if echoOut is null */ + // Create reporter with the same writer and echoWriter if echoOut is null echoOut match { - case null => new ConsoleReporter(new Settings(), reader, new PrintWriter(errOut)) - case _ => new ConsoleReporter(new Settings(), reader, new PrintWriter(errOut), new PrintWriter(echoWriterOut)) + case null => new ConsoleReporter(new Settings, reader, new PrintWriter(errOut)) + case _ => new ConsoleReporter(new Settings, reader, new PrintWriter(errOut), new PrintWriter(echoWriterOut)) } } - - def testHelper(pos: Position = NoPosition, msg: String, severity: String = "")(test: Position => Unit) = { - test(pos) - if (msg.isEmpty && severity.isEmpty) assertTrue(writerOut.toString.isEmpty) - else { - if (!pos.isDefined) assertEquals(severity + msg, writerOut.toString.lines.next) + def testHelper(pos: Position = NoPosition, msg: String, severity: String = "")(test: Position => Unit) = + try { + test(pos) + val buf = writerOut.toString + if (msg.isEmpty && severity.isEmpty) assertTrue(buf.isEmpty) + else if (!pos.isDefined) assertEquals(severity + msg, buf.lines.next) else { - val it = 
writerOut.toString.lines + val it = buf.lines assertEquals(source + ":1: " + severity + msg, it.next) assertEquals(content, it.next) assertEquals(" ^", it.next) } - } - writerOut.reset - } + } finally writerOut.reset - @Test def printMessageTest(): Unit = { val reporter = createConsoleReporter("r", writerOut) @@ -56,7 +51,6 @@ class ConsoleReporterTest { testHelper(posWithSource, "Testing with Defined Position")(reporter.printMessage(_, "Testing with Defined Position")) } - @Test def echoTest(): Unit = { val reporter = createConsoleReporter("r", writerOut, echoWriterOut) @@ -68,7 +62,6 @@ class ConsoleReporterTest { testHelper(msg = "Hello World!")(_ => reporter2.echo("Hello World!")) } - @Test def printTest(): Unit = { val reporter = createConsoleReporter("r", writerOut) @@ -80,7 +73,6 @@ class ConsoleReporterTest { testHelper(posWithSource, msg = "test", severity = "error: ")(reporter.print(_, "test", reporter.ERROR)) } - @Test def printColumnMarkerTest(): Unit = { val reporter = createConsoleReporter("r", writerOut) @@ -91,12 +83,11 @@ class ConsoleReporterTest { writerOut.reset } - @Test def displayTest(): Unit = { val reporter = createConsoleReporter("r", writerOut) - /** Change maxerrs and maxwarns from default */ + // Change maxerrs and maxwarns from default reporter.settings.maxerrs.value = 1 reporter.settings.maxwarns.value = 1 @@ -121,7 +112,6 @@ class ConsoleReporterTest { testHelper(msg = "")(reporter.display(_, "Testing display for maxwarns to fail", reporter.WARNING)) } - @Test def finishTest(): Unit = { val reporter = createConsoleReporter("r", writerOut) @@ -139,7 +129,6 @@ class ConsoleReporterTest { writerOut.reset } - @Test def displayPromptTest(): Unit = { val output = "a)bort, s)tack, r)esume: " @@ -151,7 +140,7 @@ class ConsoleReporterTest { assertTrue(it.next.isEmpty) assertEquals(output + "java.lang.Throwable", it.next) assertTrue(it.hasNext) - + /** Check for no stack trace */ val writerOut2 = new ByteArrayOutputStream() val reporter2 = createConsoleReporter("w", writerOut2) @@ -170,4 +159,42 @@ class ConsoleReporterTest { assertEquals(output, it3.next) assertFalse(it3.hasNext) } + + @Test + def filterTest(): Unit = { + val reporter = createConsoleReporter("r", writerOut) + val filter = { + // Change maxerrs and maxwarns from default on filter only + val settings = new Settings + settings.maxerrs.value = 1 + settings.maxwarns.value = 1 + + new LimitingReporter(settings, reporter) + } + + // pass one message + testHelper(msg = "Testing display")(filter.echo(_, "Testing display")) + testHelper(msg = "Testing display", severity = "warning: ")(filter.warning(_, "Testing display")) + testHelper(msg = "Testing display", severity = "error: ")(filter.error(_, "Testing display")) + filter.reset() + + testHelper(posWithSource, msg = "Testing display")(filter.echo(_, "Testing display")) + testHelper(posWithSource, msg = "Testing display", severity = "warning: ")(filter.warning(_, "Testing display")) + testHelper(posWithSource, msg = "Testing display", severity = "error: ")(filter.error(_, "Testing display")) + filter.reset() + + // either reset after each test or emit warn before error so that both are output by AbstractReporter + assertEquals(0, filter.errorCount) + assertEquals(0, reporter.errorCount) + assertEquals(0, filter.warningCount) + assertEquals(0, reporter.warningCount) + + // try to pass two messages + // warn first; would be nice to flush too + testHelper(posWithSource, msg = "Testing display for maxwarns to pass", severity = "warning: ")(filter.warning(_, 
"Testing display for maxwarns to pass")) + testHelper(msg = "")(filter.warning(_, "Testing display for maxwarns to fail")) + + testHelper(posWithSource, msg = "Testing display for maxerrs to pass", severity = "error: ")(filter.error(_, "Testing display for maxerrs to pass")) + testHelper(msg = "")(filter.error(_, "Testing display for maxerrs to fail")) + } } From 166e9cfc602485d160ccece9610489b23d8379a3 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 8 Jan 2018 15:15:00 -0800 Subject: [PATCH 1183/2793] Case also ends block stats in special cases --- .../scala/tools/nsc/ast/parser/Parsers.scala | 7 ++++--- test/files/pos/t10684.scala | 13 +++++++++++++ 2 files changed, 17 insertions(+), 3 deletions(-) create mode 100644 test/files/pos/t10684.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 0084c21e684d..8baf47ef4211 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -3217,11 +3217,12 @@ self => * }}} */ def blockStatSeq(): List[Tree] = checkNoEscapingPlaceholders { + def acceptStatSepOptOrEndCase() = if (!isCaseDefEnd) acceptStatSepOpt() val stats = new ListBuffer[Tree] while (!isStatSeqEnd && !isCaseDefEnd) { if (in.token == IMPORT) { stats ++= importClause() - acceptStatSepOpt() + acceptStatSepOptOrEndCase() } else if (isDefIntro || isLocalModifier || isAnnotation) { if (in.token == IMPLICIT) { @@ -3231,11 +3232,11 @@ self => } else { stats ++= localDef(0) } - acceptStatSepOpt() + acceptStatSepOptOrEndCase() } else if (isExprIntro) { stats += statement(InBlock) - if (!isCaseDefEnd) acceptStatSep() + acceptStatSepOptOrEndCase() } else if (isStatSep) { in.nextToken() diff --git a/test/files/pos/t10684.scala b/test/files/pos/t10684.scala new file mode 100644 index 000000000000..aae0b872babb --- /dev/null +++ b/test/files/pos/t10684.scala @@ -0,0 +1,13 @@ + + +trait T { + + def f = List(1) map { case i if i > 0 => implicit j: Int => i + implicitly[Int] case _ => implicit j: Int => 42 } + + def g = List(1) map { case i if i > 0 => import concurrent._ case _ => implicit j: Int => 42 } + + def h = List(1) map { case i if i > 0 => val x = 42 case _ => implicit j: Int => () } + + // separator is optional + def k = List(1) map { case i if i > 0 => implicit j: Int => i + implicitly[Int] ; case _ => implicit j: Int => 42 } +} From 38d425c562f7dfee7199bb093541deec7d4ad723 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 9 Jan 2018 14:36:14 -0800 Subject: [PATCH 1184/2793] bump copyright year to 2018 --- build.xml | 2 +- doc/LICENSE.md | 4 ++-- doc/License.rtf | 4 ++-- project/VersionUtil.scala | 2 +- src/library/scala/util/Properties.scala | 2 +- src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala | 2 +- src/scalap/decoder.properties | 2 +- 7 files changed, 9 insertions(+), 9 deletions(-) diff --git a/build.xml b/build.xml index f8c0380f41a1..1470c666141b 100644 --- a/build.xml +++ b/build.xml @@ -184,7 +184,7 @@ TODO: - + diff --git a/doc/LICENSE.md b/doc/LICENSE.md index ce29d7e7d4b9..904677e0dec3 100644 --- a/doc/LICENSE.md +++ b/doc/LICENSE.md @@ -2,9 +2,9 @@ Scala is licensed under the [BSD 3-Clause License](http://opensource.org/license ## Scala License -Copyright (c) 2002-2017 EPFL +Copyright (c) 2002-2018 EPFL -Copyright (c) 2011-2017 Lightbend, Inc. +Copyright (c) 2011-2018 Lightbend, Inc. All rights reserved. 
diff --git a/doc/License.rtf b/doc/License.rtf index adc7dfdcb812..5a328f7a6df3 100644 --- a/doc/License.rtf +++ b/doc/License.rtf @@ -10,8 +10,8 @@ \fs48 Scala License \fs40 \ -\fs26 Copyright (c) 2002-2017 EPFL\ -Copyright (c) 2011-2017 Lightbend, Inc.\ +\fs26 Copyright (c) 2002-2018 EPFL\ +Copyright (c) 2011-2018 Lightbend, Inc.\ All rights reserved.\ \ Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:\ diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala index 3f9b727ef097..e02bc09e996e 100644 --- a/project/VersionUtil.scala +++ b/project/VersionUtil.scala @@ -18,7 +18,7 @@ object VersionUtil { ) lazy val generatePropertiesFileSettings = Seq[Setting[_]]( - copyrightString := "Copyright 2002-2017, LAMP/EPFL and Lightbend, Inc.", + copyrightString := "Copyright 2002-2018, LAMP/EPFL and Lightbend, Inc.", resourceGenerators in Compile += generateVersionPropertiesFile.map(file => Seq(file)).taskValue, generateVersionPropertiesFile := generateVersionPropertiesFileImpl.value ) diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala index a4ecd102642e..4c41138e54b9 100644 --- a/src/library/scala/util/Properties.scala +++ b/src/library/scala/util/Properties.scala @@ -105,7 +105,7 @@ private[scala] trait PropertiesTrait { * or "version (unknown)" if it cannot be determined. */ val versionString = "version " + scalaPropOrElse("version.number", "(unknown)") - val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2017, LAMP/EPFL and Lightbend, Inc.") + val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2018, LAMP/EPFL and Lightbend, Inc.") /** This is the encoding to use reading in source files, overridden with -encoding. * Note that it uses "prop" i.e. looks in the scala jar, not the system properties. 
diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala index 08d3508a78bb..fa705fb4a3ce 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Template.scala @@ -280,7 +280,7 @@ class Template(universe: doc.Universe, generator: DiagramGenerator, tpl: DocTemp { if (Set("epfl", "EPFL").contains(tpl.universe.settings.docfooter.value)) - + else } diff --git a/src/scalap/decoder.properties b/src/scalap/decoder.properties index 0bff4c81d4b2..44dcaeabb105 100644 --- a/src/scalap/decoder.properties +++ b/src/scalap/decoder.properties @@ -1,2 +1,2 @@ version.number=2.0.1 -copyright.string=(c) 2002-2017 LAMP/EPFL +copyright.string=(c) 2002-2018 LAMP/EPFL From 48f6713fcd4146375ccb803f2aeca24fc91abd93 Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Wed, 29 Nov 2017 23:59:22 +0000 Subject: [PATCH 1185/2793] optimise use of indyLamdaMethods map use a java concurrent map for performance provide API to perform conditional operation based on presence --- .../tools/nsc/backend/jvm/PostProcessor.scala | 6 +- .../backend/jvm/analysis/BackendUtils.scala | 57 ++++++++++++------- 2 files changed, 38 insertions(+), 25 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index e14b0824072b..82f4f6348412 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -49,9 +49,9 @@ abstract class PostProcessor(statistics: Statistics with BackendStats) extends P val bytes = try { if (!isArtifact) { localOptimizations(classNode) - val lambdaImplMethods = backendUtils.getIndyLambdaImplMethods(classNode.name) - if (lambdaImplMethods.nonEmpty) - backendUtils.addLambdaDeserialize(classNode, lambdaImplMethods) + backendUtils.onIndyLambdaImplMethodIfPresent(classNode.name) { + methods => if (methods.nonEmpty) backendUtils.addLambdaDeserialize(classNode, methods) + } } setInnerClasses(classNode) serializeClass(classNode) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index 8caf274b5bd7..8e33ddd56b78 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -7,6 +7,8 @@ import java.lang.invoke.LambdaMetafactory import scala.annotation.{switch, tailrec} import scala.collection.JavaConverters._ import scala.collection.mutable +import java.util.concurrent.ConcurrentHashMap + import scala.tools.asm import scala.tools.asm.Opcodes._ import scala.tools.asm.tree._ @@ -35,7 +37,7 @@ abstract class BackendUtils extends PerRunInit { import bTypes._ import callGraph.ClosureInstantiation import coreBTypes._ - import frontendAccess.{compilerSettings, recordPerRunCache} + import frontendAccess.{compilerSettings, recordPerRunJavaMapCache} /** * Classes with indyLambda closure instantiations where the SAM type is serializable (e.g. Scala's @@ -44,7 +46,9 @@ abstract class BackendUtils extends PerRunInit { * inlining: when inlining an indyLambda instruction into a class, we need to make sure the class * has the method. 
*/ - val indyLambdaImplMethods: mutable.AnyRefMap[InternalName, mutable.LinkedHashSet[asm.Handle]] = recordPerRunCache(mutable.AnyRefMap()) + private val indyLambdaImplMethods: ConcurrentHashMap[InternalName, mutable.LinkedHashSet[asm.Handle]] = recordPerRunJavaMapCache{ + new ConcurrentHashMap[InternalName, mutable.LinkedHashSet[asm.Handle]] + } // unused objects created by these constructors are eliminated by pushPop private[this] lazy val sideEffectFreeConstructors: LazyVar[Set[(String, String)]] = perRunLazy(this) { @@ -364,38 +368,47 @@ abstract class BackendUtils extends PerRunInit { } } - /** + def onIndyLambdaImplMethodIfPresent(hostClass: InternalName) (action : mutable.LinkedHashSet[asm.Handle] => Unit): Unit = + indyLambdaImplMethods.get(hostClass) match { + case null => + case xs => xs.synchronized(action(xs)) + } + + def onIndyLambdaImplMethod[T](hostClass: InternalName) (action: mutable.LinkedHashSet[asm.Handle] => T): T ={ + val methods = indyLambdaImplMethods.computeIfAbsent(hostClass, (_) => mutable.LinkedHashSet[asm.Handle]()) + + methods.synchronized (action(methods)) + } + + /** * add methods * @return the added methods. Note the order is undefined */ def addIndyLambdaImplMethod(hostClass: InternalName, handle: Seq[asm.Handle]): Seq[asm.Handle] = { - if (handle.isEmpty) Nil else { - val set = indyLambdaImplMethods.getOrElseUpdate(hostClass, mutable.LinkedHashSet()) - if (set.isEmpty) { - set ++= handle - handle - } else { - var added = List.empty[asm.Handle] - handle foreach { h => if (set.add(h)) added ::= h} - added - } + if (handle.isEmpty) Nil else onIndyLambdaImplMethod(hostClass) { + case set => + if (set.isEmpty) { + set ++= handle + handle + } else { + var added = List.empty[asm.Handle] + handle foreach { h => if (set.add(h)) added ::= h } + added + } } } def addIndyLambdaImplMethod(hostClass: InternalName, handle: asm.Handle): Boolean = { - indyLambdaImplMethods.getOrElseUpdate(hostClass, mutable.LinkedHashSet()).add(handle) + onIndyLambdaImplMethod(hostClass) { + _ add handle + } } def removeIndyLambdaImplMethod(hostClass: InternalName, handle: Seq[asm.Handle]): Unit = { if (handle.nonEmpty) - indyLambdaImplMethods.get(hostClass).foreach(_ --= handle) - } - - def getIndyLambdaImplMethods(hostClass: InternalName): Iterable[asm.Handle] = { - indyLambdaImplMethods.getOrNull(hostClass) match { - case null => Nil - case xs => xs - } + onIndyLambdaImplMethodIfPresent(hostClass) { + _ --= handle + } } /** From 72aa381976b99546cb0de5d9d03535b5a7f98d09 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 13 Oct 2017 09:10:44 -0700 Subject: [PATCH 1186/2793] Improve error on absent import selector Put the caret where the dot is expected, and not on a subsequent line. Also, don't report a found newline as semi. 
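For example, given the bare `import collection` in the new `badimport.scala` test below:

```scala
import collection
import concurrent.Future
```

the parser now reports `. expected` against line 2, with the caret at the end of `import collection`, instead of flagging the following line and describing the newline it found as a semicolon.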
Fixes scala/bug#10550 --- .../scala/tools/nsc/ast/parser/Parsers.scala | 13 ++++++++++--- test/files/neg/badimport.check | 4 ++++ test/files/neg/badimport.scala | 5 +++++ test/files/neg/macro-deprecate-idents.check | 2 +- test/files/neg/t6810.check | 8 ++++---- test/files/neg/t6810.scala | 2 ++ 6 files changed, 26 insertions(+), 8 deletions(-) create mode 100644 test/files/neg/badimport.check create mode 100644 test/files/neg/badimport.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 0084c21e684d..b9dc617a2c6e 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -591,7 +591,11 @@ self => } def expectedMsgTemplate(exp: String, fnd: String) = s"$exp expected but $fnd found." - def expectedMsg(token: Token): String = expectedMsgTemplate(token2string(token), token2string(in.token)) + def expectedMsg(token: Token): String = + in.token match { + case NEWLINE | NEWLINES => s"${token2string(token)} expected." + case actual => expectedMsgTemplate(token2string(token), token2string(actual)) + } /** Consume one token of the specified type, or signal an error if it is not there. */ def accept(token: Token): Offset = { @@ -1144,7 +1148,7 @@ self => def identOrMacro(): Name = if (isMacro) rawIdent() else ident() def selector(t: Tree): Tree = { - val point = if(isIdent) in.offset else in.lastOffset //scala/bug#8459 + val point = if (isIdent) in.offset else in.lastOffset //scala/bug#8459 //assert(t.pos.isDefined, t) if (t != EmptyTree) Select(t, ident(skipIt = false)) setPos r2p(t.pos.start, point, in.lastOffset) @@ -2509,7 +2513,10 @@ self => case THIS => thisDotted(tpnme.EMPTY) case _ => val id = atPos(start)(Ident(ident())) - accept(DOT) + + if (in.token == DOT || !isStatSep) accept(DOT) + else syntaxError(in.lastOffset, s". expected", skipIt = false) + if (in.token == THIS) thisDotted(id.name.toTypeName) else id }) diff --git a/test/files/neg/badimport.check b/test/files/neg/badimport.check new file mode 100644 index 000000000000..d58b64ff7cc6 --- /dev/null +++ b/test/files/neg/badimport.check @@ -0,0 +1,4 @@ +badimport.scala:2: error: . expected +import collection + ^ +one error found diff --git a/test/files/neg/badimport.scala b/test/files/neg/badimport.scala new file mode 100644 index 000000000000..bef09d3fa2a9 --- /dev/null +++ b/test/files/neg/badimport.scala @@ -0,0 +1,5 @@ + +import collection +import concurrent.Future + +trait T diff --git a/test/files/neg/macro-deprecate-idents.check b/test/files/neg/macro-deprecate-idents.check index c5902aeea6ef..795b90e9b46e 100644 --- a/test/files/neg/macro-deprecate-idents.check +++ b/test/files/neg/macro-deprecate-idents.check @@ -55,7 +55,7 @@ macro-deprecate-idents.scala:3: error: '=' expected but '}' found. macro-deprecate-idents.scala:7: error: '=' expected but '}' found. } ^ -macro-deprecate-idents.scala:42: error: '{' expected but ';' found. +macro-deprecate-idents.scala:42: error: '{' expected. package foo { ^ macro-deprecate-idents.scala:45: error: '{' expected but '}' found. 
diff --git a/test/files/neg/t6810.check b/test/files/neg/t6810.check index 497ef3507070..b9a362666796 100644 --- a/test/files/neg/t6810.check +++ b/test/files/neg/t6810.check @@ -16,13 +16,13 @@ t6810.scala:20: error: unclosed quoted identifier t6810.scala:21: error: unclosed quoted identifier ` = EOL // not raw string literals aka triple-quoted, multiline strings ^ -t6810.scala:24: error: unclosed character literal +t6810.scala:26: error: unclosed character literal val b = ' ^ -t6810.scala:25: error: unclosed character literal +t6810.scala:27: error: unclosed character literal ' // CR seen as EOL by scanner ^ -t6810.scala:24: error: '=' expected but ';' found. - val b = ' +t6810.scala:25: error: '=' expected. + val a = '\u000D' // similar treatment of CR ^ 9 errors found diff --git a/test/files/neg/t6810.scala b/test/files/neg/t6810.scala index 50c305d70cb6..e7a1f032bf72 100644 --- a/test/files/neg/t6810.scala +++ b/test/files/neg/t6810.scala @@ -20,6 +20,8 @@ trait t6810 { val ` ` = EOL // not raw string literals aka triple-quoted, multiline strings + val firebreak = 42 // help parser recovery, could also use rbrace + val a = '\u000D' // similar treatment of CR val b = ' ' // CR seen as EOL by scanner val c = '\r' // traditionally From 9f0c857d7b2c9c260dc5203b917f8aa20e87186e Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 13 Oct 2017 09:11:01 -0700 Subject: [PATCH 1187/2793] Use decoded name in error message Also don't use error name in helpful example. --- .../scala/tools/nsc/ast/parser/Parsers.scala | 14 ++++++++------ test/files/neg/t10097.check | 11 ++++++++++- test/files/neg/t10097.scala | 5 +++++ 3 files changed, 23 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index b9dc617a2c6e..022a2aba2578 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -2276,16 +2276,18 @@ self => newLineOptWhenFollowedBy(LPAREN) } if (ofCaseClass) { + def name = { + val s = owner.decodedName.toString + if (s != nme.ERROR.decodedName.toString) s else "C" + } + def elliptical = vds.map(_ => "(...)").mkString if (vds.isEmpty) - syntaxError(start, s"case classes must have a parameter list; try 'case class ${owner.encoded - }()' or 'case object ${owner.encoded}'") + syntaxError(start, s"case classes must have a parameter list; try 'case class $name()' or 'case object $name'") else if (vds.head.nonEmpty && vds.head.head.mods.isImplicit) { if (settings.isScala213) - syntaxError(start, s"case classes must have a non-implicit parameter list; try 'case class ${ - owner.encoded}()${ vds.map(vs => "(...)").mkString }'") + syntaxError(start, s"case classes must have a non-implicit parameter list; try 'case class $name()$elliptical'") else { - deprecationWarning(start, s"case classes should have a non-implicit parameter list; adapting to 'case class ${ - owner.encoded}()${ vds.map(vs => "(...)").mkString }'", "2.12.2") + deprecationWarning(start, s"case classes should have a non-implicit parameter list; adapting to 'case class $name()$elliptical'", "2.12.2") vds.insert(0, List.empty[ValDef]) vds(1) = vds(1).map(vd => copyValDef(vd)(mods = vd.mods & ~Flags.CASEACCESSOR)) if (implicitSection != -1) implicitSection += 1 diff --git a/test/files/neg/t10097.check b/test/files/neg/t10097.check index 1f70546b573d..89f1493adf5e 100644 --- a/test/files/neg/t10097.check +++ b/test/files/neg/t10097.check @@ -7,4 +7,13 @@ case class D(implicit 
c: Int)(s: String) t10097.scala:4: error: an implicit parameter section must be last case class D(implicit c: Int)(s: String) ^ -three errors found +t10097.scala:6: error: case classes must have a non-implicit parameter list; try 'case class *()(...)' +case class *(implicit c: Int) + ^ +t10097.scala:9: error: identifier expected but 'import' found. +import collection._ +^ +t10097.scala:9: error: case classes must have a parameter list; try 'case class C()' or 'case object C' +import collection._ + ^ +6 errors found diff --git a/test/files/neg/t10097.scala b/test/files/neg/t10097.scala index b2f05e2972c1..4c14f420ac4f 100644 --- a/test/files/neg/t10097.scala +++ b/test/files/neg/t10097.scala @@ -2,3 +2,8 @@ case class C(implicit val c: Int) case class D(implicit c: Int)(s: String) + +case class *(implicit c: Int) + +case class +import collection._ From dc52818c9e0ebd3258237c9cc4ff3cbb2cafff22 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Thu, 11 Jan 2018 18:17:37 -0500 Subject: [PATCH 1188/2793] Fix overzealous protected access check for Java static members. Both Java and Scala require that, to access a protected member of a class `C`, the access must occur inside a class `S` which extends `C`. Moreover, the type of the qualifier needs to be a subclass of `S`. However, when a Java `static` member is being selected, Java doesn't care about the prefix (there is no such concept in Java-land). In Scala, however, the selection occurs from a fictional companion module made to house all the static members, and it's not likely to have any subclass relationship with the classes that we care about. Therefore, when selecting from Java-defined modules, ignore the prefix check. This worked before 01c3bbb9c, which tightened up the restriction a little too far. Fixes scala/bug#10568, and fixes scala/bug#10597, but does not fix my dignity. 
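A minimal way to see the problem (essentially the `parallel-classloader.scala` test added below): `registerAsParallelCapable()` is a protected static member of `java.lang.ClassLoader`, and in Scala it is selected through the fictional companion module, so the prefix check rejected the access even though the enclosing class extends `ClassLoader`:

```scala
// Rejected before this fix because the prefix (the module holding the statics) has no
// subclass relationship with the enclosing class; accepted afterwards, since the
// prefix is ignored when selecting from a Java-defined module.
class Loader extends ClassLoader {
  ClassLoader.registerAsParallelCapable()
}
```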
--- .../scala/tools/nsc/typechecker/Contexts.scala | 17 ++++++++++------- test/files/pos/parallel-classloader.scala | 3 +++ test/files/pos/t10568/Converter.java | 8 ++++++++ test/files/pos/t10568/Impl.scala | 9 +++++++++ 4 files changed, 30 insertions(+), 7 deletions(-) create mode 100644 test/files/pos/parallel-classloader.scala create mode 100644 test/files/pos/t10568/Converter.java create mode 100644 test/files/pos/t10568/Impl.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index b0f66d185cca..0351d2807f0b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -711,25 +711,28 @@ trait Contexts { self: Analyzer => /* Is protected access to target symbol permitted */ def isProtectedAccessOK(target: Symbol) = { val c = enclosingSubClassContext(sym.owner) + val preSym = pre.widen.typeSymbol if (c == NoContext) lastAccessCheckDetails = sm""" | Access to protected $target not permitted because - | enclosing ${this.enclClass.owner}${this.enclClass.owner.locationString} is not a subclass of - | ${sym.owner}${sym.owner.locationString} where target is defined""" + | enclosing ${enclClass.owner.fullLocationString} is not a subclass of + | ${sym.owner.fullLocationString} where target is defined""" c != NoContext && { - target.isType || { // allow accesses to types from arbitrary subclasses fixes #4737 + target.isType || { // allow accesses to types from arbitrary subclasses fixes scala/bug#4737 val res = - isSubClassOrCompanion(pre.widen.typeSymbol, c.owner) || - c.owner.isModuleClass && - isSubClassOrCompanion(pre.widen.typeSymbol, c.owner.linkedClassOfClass) + isSubClassOrCompanion(preSym, c.owner) || + (c.owner.isModuleClass + && isSubClassOrCompanion(preSym, c.owner.linkedClassOfClass)) || + (preSym.isJava + && preSym.isModuleClass) // java static members don't care about prefix for accessibility if (!res) lastAccessCheckDetails = sm""" | Access to protected $target not permitted because | prefix type ${pre.widen} does not conform to - | ${c.owner}${c.owner.locationString} where the access takes place""" + | ${c.owner.fullLocationString} where the access takes place""" res } } diff --git a/test/files/pos/parallel-classloader.scala b/test/files/pos/parallel-classloader.scala new file mode 100644 index 000000000000..0a4751b56e11 --- /dev/null +++ b/test/files/pos/parallel-classloader.scala @@ -0,0 +1,3 @@ +class Loader extends ClassLoader { + ClassLoader.registerAsParallelCapable() +} \ No newline at end of file diff --git a/test/files/pos/t10568/Converter.java b/test/files/pos/t10568/Converter.java new file mode 100644 index 000000000000..2f3a26635dad --- /dev/null +++ b/test/files/pos/t10568/Converter.java @@ -0,0 +1,8 @@ +package x; + +public interface Converter { + static final String STRING = "STRING"; + abstract class FactoryFactory { + protected static String getString() { return "string"; } + } +} \ No newline at end of file diff --git a/test/files/pos/t10568/Impl.scala b/test/files/pos/t10568/Impl.scala new file mode 100644 index 000000000000..09c0c8bb52c8 --- /dev/null +++ b/test/files/pos/t10568/Impl.scala @@ -0,0 +1,9 @@ +package y + +import x._ + +class Impl extends Converter.FactoryFactory { + import Converter.FactoryFactory._ + def method: String = + getString + Converter.STRING +} \ No newline at end of file From cdf74190c442ff60dc6b4ed7c7567fb58448a90e Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Mon, 15 Jan 
2018 17:57:37 -0500 Subject: [PATCH 1189/2793] Use `gen.mkClassOf` in `reifyRuntimeType` To be squashed with the previous commit. --- src/compiler/scala/reflect/reify/package.scala | 7 ++++--- test/files/run/t7375b.check | 8 ++++---- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala index 82a3add92d9f..591b76727165 100644 --- a/src/compiler/scala/reflect/reify/package.scala +++ b/src/compiler/scala/reflect/reify/package.scala @@ -51,6 +51,9 @@ package object reify { import definitions._ import analyzer.enclosingMacroPosition + if (global.phase.id < global.currentRun.erasurePhase.id) + devWarning(enclosingMacroPosition, s"reify Class[$tpe0] during ${global.phase.name}") + // scala/bug#7375 val tpe = tpe0.dealiasWiden @@ -65,9 +68,7 @@ package object reify { val componentErasure = reifyRuntimeClass(global)(typer0, componentTpe, concrete) gen.mkMethodCall(currentRun.runDefinitions.arrayClassMethod, List(componentErasure)) case _ => - var erasure = tpe.erasure - if (tpe.typeSymbol.isDerivedValueClass && global.phase.id < global.currentRun.erasurePhase.id) erasure = tpe - gen.mkNullaryCall(currentRun.runDefinitions.Predef_classOf, List(erasure)) + gen.mkClassOf(tpe) } } diff --git a/test/files/run/t7375b.check b/test/files/run/t7375b.check index 0993cceca246..69d8146446bc 100644 --- a/test/files/run/t7375b.check +++ b/test/files/run/t7375b.check @@ -1,4 +1,4 @@ -scala.Predef.classOf[C1] -scala.Predef.classOf[C2] -scala.Predef.classOf[C1] -scala.Predef.classOf[C2] +classOf[C1] +classOf[C2] +classOf[C1] +classOf[C2] From 98f622b784e1c2f9e56bcfc3c925108cd81057a7 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 15 Jan 2018 14:45:09 +1000 Subject: [PATCH 1190/2793] Expand test for classOf over value class to class tags --- test/files/run/t10551.scala | 47 ++++++++++++++++++++----------------- 1 file changed, 26 insertions(+), 21 deletions(-) diff --git a/test/files/run/t10551.scala b/test/files/run/t10551.scala index 4c635860cc28..4ae52c6e207b 100644 --- a/test/files/run/t10551.scala +++ b/test/files/run/t10551.scala @@ -8,17 +8,22 @@ package test { object Test extends App { import test.NotNoPrefix._ + + def check[A](cls: Class[A])(implicit tag: reflect.ClassTag[A]): Unit = { + val suffix = if (cls != tag.runtimeClass) " != " + tag.runtimeClass else "" + println(cls + suffix) + } - println(classOf[Id[Int]]) - println(classOf[Id[_]]) + check(classOf[Id[Int]]) + check(classOf[Id[_]]) - println(classOf[Ids[Int]]) - println(classOf[Ids[_]]) + check(classOf[Ids[Int]]) + check(classOf[Ids[_]]) - println(classOf[Bid[Int, Int]]) - println(classOf[Bid[Int, _]]) - println(classOf[Bid[_, Int]]) - println(classOf[Bid[_, _]]) + check(classOf[Bid[Int, Int]]) + check(classOf[Bid[Int, _]]) + check(classOf[Bid[_, Int]]) + check(classOf[Bid[_, _]]) type Iddy[A] = Id[A] type Idsy[A] = Ids[A] @@ -27,22 +32,22 @@ object Test extends App { type Bixt[L] = Biddouble[_] type Bixty = Bixt[_] - println(classOf[Iddy[Int]]) - println(classOf[Iddy[_]]) + check(classOf[Iddy[Int]]) + check(classOf[Iddy[_]]) - println(classOf[Idsy[Int]]) - println(classOf[Idsy[_]]) + check(classOf[Idsy[Int]]) + check(classOf[Idsy[_]]) - println(classOf[Biddy[Int, Int]]) - println(classOf[Biddy[Int, _]]) - println(classOf[Biddy[_, Int]]) - println(classOf[Biddy[_, _]]) + check(classOf[Biddy[Int, Int]]) + check(classOf[Biddy[Int, _]]) + check(classOf[Biddy[_, Int]]) + check(classOf[Biddy[_, _]]) - println(classOf[Biddouble[Int]]) - 
println(classOf[Biddouble[_]]) + check(classOf[Biddouble[Int]]) + check(classOf[Biddouble[_]]) - println(classOf[Bixt[Int]]) - println(classOf[Bixt[_]]) + check(classOf[Bixt[Int]]) + check(classOf[Bixt[_]]) - println(classOf[Bixty]) + check(classOf[Bixty]) } \ No newline at end of file From 990a49c3bc82e4c66ac0dd9f77a07de94b894b4f Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Mon, 15 Jan 2018 23:02:51 +0000 Subject: [PATCH 1191/2793] capture more info on background threads update csv version to 2 include idle time --- .../scala/tools/nsc/profile/AsyncHelper.scala | 14 ++++++------- .../scala/tools/nsc/profile/Profiler.scala | 20 +++++++++---------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/src/compiler/scala/tools/nsc/profile/AsyncHelper.scala b/src/compiler/scala/tools/nsc/profile/AsyncHelper.scala index 820b44949a71..2258d1fe43e4 100644 --- a/src/compiler/scala/tools/nsc/profile/AsyncHelper.scala +++ b/src/compiler/scala/tools/nsc/profile/AsyncHelper.scala @@ -28,7 +28,7 @@ object AsyncHelper { val baseGroup = new ThreadGroup(s"scalac-${phase.name}") private def childGroup(name: String) = new ThreadGroup(baseGroup, name) - protected def wrapRunnable(r: Runnable): Runnable + protected def wrapRunnable(r: Runnable, shortId:String): Runnable protected class CommonThreadFactory(shortId: String, daemon: Boolean = true, @@ -38,7 +38,7 @@ object AsyncHelper { private val namePrefix = s"${baseGroup.getName}-$shortId-" override def newThread(r: Runnable): Thread = { - val wrapped = wrapRunnable(r) + val wrapped = wrapRunnable(r, shortId) val t: Thread = new Thread(group, wrapped, namePrefix + threadNumber.getAndIncrement, 0) if (t.isDaemon != daemon) t.setDaemon(daemon) if (t.getPriority != priority) t.setPriority(priority) @@ -61,7 +61,7 @@ object AsyncHelper { new ThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue[Runnable](maxQueueSize), threadFactory, rejectHandler) } - override protected def wrapRunnable(r: Runnable): Runnable = r + override protected def wrapRunnable(r: Runnable, shortId:String): Runnable = r } private class ProfilingAsyncHelper(global: Global, phase: Phase, private val profiler: RealProfiler) extends BaseAsyncHelper(global, phase) { @@ -78,14 +78,14 @@ object AsyncHelper { new SinglePhaseInstrumentedThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue[Runnable](maxQueueSize), threadFactory, rejectHandler) } - override protected def wrapRunnable(r: Runnable): Runnable = () => { + override protected def wrapRunnable(r: Runnable, shortId:String): Runnable = () => { val data = new ThreadProfileData localData.set(data) - val profileStart = Profiler.emptySnap + val profileStart = profiler.snapThread(0) try r.run finally { - val snap = profiler.snapThread() - val threadRange = ProfileRange(profileStart, snap, phase, 0, "", Thread.currentThread()) + val snap = profiler.snapThread(data.idleNs) + val threadRange = ProfileRange(profileStart, snap, phase, shortId, data.taskCount, Thread.currentThread()) profiler.completeBackground(threadRange) } } diff --git a/src/compiler/scala/tools/nsc/profile/Profiler.scala b/src/compiler/scala/tools/nsc/profile/Profiler.scala index 02732ca43df5..7048fc4006e2 100644 --- a/src/compiler/scala/tools/nsc/profile/Profiler.scala +++ b/src/compiler/scala/tools/nsc/profile/Profiler.scala @@ -30,7 +30,7 @@ case class ProfileSnap(threadId: Long, threadName: String, snapTimeNanos : Long, copy(heapBytes = heapBytes) } } -case class ProfileRange(start: 
ProfileSnap, end:ProfileSnap, phase:Phase, id:Int, purpose:String, thread:Thread) { +case class ProfileRange(start: ProfileSnap, end:ProfileSnap, phase:Phase, purpose:String, taskCount:Int, thread:Thread) { def allocatedBytes = end.allocatedBytes - start.allocatedBytes def userNs = end.userTimeNanos - start.userTimeNanos @@ -103,7 +103,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S private val mainThread = Thread.currentThread() - private[profile] def snapThread(): ProfileSnap = { + private[profile] def snapThread( idleTimeNanos:Long): ProfileSnap = { import RealProfiler._ val current = Thread.currentThread() @@ -111,7 +111,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S threadId = current.getId, threadName = current.getName, snapTimeNanos = System.nanoTime(), - idleTimeNanos = 0, + idleTimeNanos = idleTimeNanos, cpuTimeNanos = threadMx.getCurrentThreadCpuTime, userTimeNanos = threadMx.getCurrentThreadUserTime, allocatedBytes = threadMx.getThreadAllocatedBytes(Thread.currentThread().getId), @@ -156,13 +156,13 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S val startTime = info.get("startTime").asInstanceOf[jLong].longValue() val endTime = info.get("endTime").asInstanceOf[jLong].longValue() val threads = info.get("GcThreadCount").asInstanceOf[jInt].longValue() - reporter.reportGc(new GcEventData("", reportNs, startTime, endTime, name, action, cause, threads)) + reporter.reportGc(GcEventData("", reportNs, startTime, endTime, name, action, cause, threads)) } } override def afterPhase(phase: Phase, snapBefore: ProfileSnap): Unit = { assert(mainThread eq Thread.currentThread()) - val initialSnap = snapThread() + val initialSnap = snapThread(0) if (settings.YprofileExternalTool.containsPhase(phase)) { println("Profile hook stop") ExternalToolHook.after() @@ -172,7 +172,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S initialSnap.updateHeap(readHeapUsage()) } else initialSnap - reporter.reportForeground(this, new ProfileRange(snapBefore, finalSnap, phase, id, "", Thread.currentThread)) + reporter.reportForeground(this, ProfileRange(snapBefore, finalSnap, phase, "", 0, Thread.currentThread)) } override def beforePhase(phase: Phase): ProfileSnap = { @@ -183,7 +183,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S println("Profile hook start") ExternalToolHook.before() } - snapThread() + snapThread(0) } } @@ -231,8 +231,8 @@ object ConsoleProfileReporter extends ProfileReporter { class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { override def header(profiler: RealProfiler): Unit = { - out.println(s"info, ${profiler.id}, ${profiler.outDir}") - out.println(s"header(main/background),startNs,endNs,runId,phaseId,phaseName,purpose,threadId,threadName,runNs,idleNs,cpuTimeNs,userTimeNs,allocatedByte,heapSize") + out.println(s"info, ${profiler.id}, version, 2, output, ${profiler.outDir}") + out.println(s"header(main/background),startNs,endNs,runId,phaseId,phaseName,purpose,task-count,threadId,threadName,runNs,idleNs,cpuTimeNs,userTimeNs,allocatedByte,heapSize") out.println(s"header(GC),startNs,endNs,startMs,endMs,name,action,cause,threads") } @@ -243,7 +243,7 @@ class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { reportCommon(EventType.MAIN, profiler, threadRange) } private def reportCommon(tpe:EventType.value, profiler: RealProfiler, threadRange: ProfileRange): Unit = { - 
out.println(s"$tpe,${threadRange.start.snapTimeNanos},${threadRange.end.snapTimeNanos},${profiler.id},${threadRange.phase.id},${threadRange.phase.name},${threadRange.purpose},${threadRange.thread.getId},${threadRange.thread.getName},${threadRange.runNs},${threadRange.idleNs},${threadRange.cpuNs},${threadRange.userNs},${threadRange.allocatedBytes},${if(tpe == EventType.MAIN) threadRange.end.heapBytes else ""}") + out.println(s"$tpe,${threadRange.start.snapTimeNanos},${threadRange.end.snapTimeNanos},${profiler.id},${threadRange.phase.id},${threadRange.phase.name},${threadRange.purpose},${threadRange.taskCount},${threadRange.thread.getId},${threadRange.thread.getName},${threadRange.runNs},${threadRange.idleNs},${threadRange.cpuNs},${threadRange.userNs},${threadRange.allocatedBytes},${threadRange.end.heapBytes} ") } override def reportGc(data: GcEventData): Unit = { From 232fb7a898081600946e3c1364f952a763e47f3f Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 16 Jan 2018 09:50:25 +1000 Subject: [PATCH 1192/2793] Comment on problem in isOverridingSymbol and subtlety of initOwner capture. --- src/reflect/scala/reflect/internal/Symbols.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 5e5885951e68..edfc6b7600aa 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -219,7 +219,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => private def isAprioriThreadsafe = isThreadsafe(AllOps) if (!(isCompilerUniverse || isSynchronized || isAprioriThreadsafe)) - throw new AssertionError(s"unsafe symbol $initName (child of $initOwner) in runtime reflection universe") + throw new AssertionError(s"unsafe symbol $initName (child of $initOwner) in runtime reflection universe") // Not an assert to avoid retention of `initOwner` as a field! type AccessBoundaryType = Symbol type AnnotationType = AnnotationInfo @@ -2395,6 +2395,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => ) final def isOverridingSymbol: Boolean = { val curRunId = currentRunId + // TODO this cache can lead to incorrect answers if the overrider/overridee relationship changes + // with the passage of compiler phases. Details: https://github.com/scala/scala/pull/6197#discussion_r161427280 + // When fixing this problem (e.g. 
by ignoring the cache after erasure?), be mindful of performance if (isOverridingSymbolCache == curRunId) true else if (isOverridingSymbolCache == -curRunId) false else { From 8525c63028ffb0a244039360327fdc6d4e1089c0 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 17 Jan 2018 14:39:41 +0100 Subject: [PATCH 1193/2793] Don't emit a checkinit test for fields inherited from traits Fixes scala/bug#10692 --- .../scala/tools/nsc/transform/Fields.scala | 2 +- test/files/run/t10692.flags | 1 + test/files/run/t10692.scala | 26 +++++++++++++++++++ 3 files changed, 28 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t10692.flags create mode 100644 test/files/run/t10692.scala diff --git a/src/compiler/scala/tools/nsc/transform/Fields.scala b/src/compiler/scala/tools/nsc/transform/Fields.scala index 6ea592ae5b49..c07d6b954db0 100644 --- a/src/compiler/scala/tools/nsc/transform/Fields.scala +++ b/src/compiler/scala/tools/nsc/transform/Fields.scala @@ -117,7 +117,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor private def setFieldFlags(accessor: Symbol, fieldInSubclass: TermSymbol): Unit = fieldInSubclass setFlag (NEEDS_TREES | PrivateLocal - | (accessor getFlag MUTABLE | LAZY) + | (accessor getFlag MUTABLE | LAZY | DEFAULTINIT) | (if (accessor hasFlag STABLE) 0 else MUTABLE) ) diff --git a/test/files/run/t10692.flags b/test/files/run/t10692.flags new file mode 100644 index 000000000000..3d1ee4760af6 --- /dev/null +++ b/test/files/run/t10692.flags @@ -0,0 +1 @@ +-Xcheckinit diff --git a/test/files/run/t10692.scala b/test/files/run/t10692.scala new file mode 100644 index 000000000000..a52d078ba3a8 --- /dev/null +++ b/test/files/run/t10692.scala @@ -0,0 +1,26 @@ +trait T { + private var s: String = _ + def getS: String = { + if (s == null) { + s = "" + } + s + } +} + +class C { + private var f: String = _ + def getF: String = { + if (f == null) { + f = "" + } + f + } +} + +object Test extends C with T { + def main(args: Array[String]): Unit = { + assert(getS == "") + assert(getF == "") + } +} From f65d0b3754c94c46a4d8da93080d6f37729b7f6e Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Sun, 14 Jan 2018 07:23:44 -0500 Subject: [PATCH 1194/2793] Speed up creation of [Abs]TypeErrors. `Throwable#fillInStackTrace` can be expensive, so minimize the number of calls we make to it. - `AbsTypeError` and its subclasses are never thrown, so they don't need to extend `Throwable` at all. - `TypeError`s are thrown, but if the user sees them it's a compiler bug anyhow, so only populate the stack trace if we're in `-Ydebug` mode. This also adds a minute bit of clarity to the distinction between `TypeError` and `AbsTypeError`: you can't throw the latter. Contribution note: retronym did this independently last October, because all good ideas are already had. Found this out when asking for permission on gitter rather than forgiveness on github. Still committing, though, because I've noticed that smaller changes tend to get merged sooner. 
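For readers unfamiliar with the trick: this is the standard JVM idiom of suppressing stack-trace capture. A minimal, self-contained sketch (the names `CheapError`/`CheapErrorDemo` are invented for illustration and are not part of this patch):

```
// Not compiler code: just the idiom of making Throwable construction cheap.
// Throwable's constructor calls fillInStackTrace(), which walks the whole
// call stack; returning `this` instead skips that work.
class CheapError(msg: String) extends RuntimeException(msg) {
  final override def fillInStackTrace(): Throwable = this
}

object CheapErrorDemo extends App {
  val e = new CheapError("no trace captured")
  println(e.getStackTrace.length) // prints 0: no frames were recorded
}
```

`TypeError` below does the same, except that it still delegates to `super.fillInStackTrace()` under `-Ydebug`.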
--- src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 2 +- src/reflect/scala/reflect/internal/Symbols.scala | 2 +- src/reflect/scala/reflect/internal/Types.scala | 3 +++ 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 27c62cde6006..582a8e1a183b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -21,7 +21,7 @@ trait ContextErrors { import global._ import definitions._ - sealed abstract class AbsTypeError extends Throwable { + sealed abstract class AbsTypeError { def errPos: Position def errMsg: String override def toString() = "[Type error at:" + errPos + "] " + errMsg diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index bc331168d9a9..59797a8bb89f 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -3693,7 +3693,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** An exception for cyclic references of symbol definitions */ case class CyclicReference(sym: Symbol, info: Type) extends TypeError("illegal cyclic reference involving " + sym) { - if (settings.debug.value) printStackTrace() + if (settings.debug) printStackTrace() } /** A class for type histories */ diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index f90f78ee01d7..0f38ec46091e 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -4629,6 +4629,9 @@ trait Types /** A throwable signalling a type error */ class TypeError(var pos: Position, val msg: String) extends Throwable(msg) { def this(msg: String) = this(NoPosition, msg) + + final override def fillInStackTrace() = + if (settings.debug) super.fillInStackTrace() else this } // TODO: RecoverableCyclicReference should be separated from TypeError, From c3ffa1b24ba67b248ca9a71e911ebd5980e66770 Mon Sep 17 00:00:00 2001 From: jvican Date: Fri, 19 Jan 2018 16:30:10 +0100 Subject: [PATCH 1195/2793] Generalize `FileBasedCache` to accept `Seq[Path]` Let's generalize `FileBasedCache` to reuse it for caching plugins' classloaders (which can have several jar entries instead of one). 
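A rough usage sketch of the generalized API (illustration only, not part of the change; it assumes `FileBasedCache` is importable from `scala.tools.nsc.classpath`, and uses temp files in place of real jar entries): the cache key is the whole `Seq[Path]`, and the value is reused as long as every path's `(lastModified, fileKey)` stamp is unchanged.

```
import java.nio.file.Files
import scala.tools.nsc.classpath.FileBasedCache

object CacheDemo extends App {
  val paths = Seq(Files.createTempFile("a", ".jar"), Files.createTempFile("b", ".jar"))
  val cache = new FileBasedCache[Int]
  var creations = 0
  def create(): Int = { creations += 1; creations }
  cache.getOrCreate(paths, () => create())
  cache.getOrCreate(paths, () => create()) // same paths, unchanged stamps: cache hit
  println(s"create() ran $creations time(s)") // 1
}
```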
--- .../ZipAndJarFileLookupFactory.scala | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 65a7e0f5ae26..4f4b8ace77ca 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -30,7 +30,7 @@ sealed trait ZipAndJarFileLookupFactory { protected def createForZipFile(zipFile: AbstractFile): ClassPath private def createUsingCache(zipFile: AbstractFile, settings: Settings): ClassPath = { - cache.getOrCreate(zipFile.file.toPath, () => createForZipFile(zipFile)) + cache.getOrCreate(List(zipFile.file.toPath), () => createForZipFile(zipFile)) } } @@ -177,20 +177,24 @@ object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { } final class FileBasedCache[T] { + import java.nio.file.Path private case class Stamp(lastModified: FileTime, fileKey: Object) - private val cache = collection.mutable.Map.empty[java.nio.file.Path, (Stamp, T)] - - def getOrCreate(path: java.nio.file.Path, create: () => T): T = cache.synchronized { - val attrs = Files.readAttributes(path, classOf[BasicFileAttributes]) - val lastModified = attrs.lastModifiedTime() - // only null on some platforms, but that's okay, we just use the last modified timestamp as our stamp - val fileKey = attrs.fileKey() - val stamp = Stamp(lastModified, fileKey) - cache.get(path) match { - case Some((cachedStamp, cached)) if cachedStamp == stamp => cached + private val cache = collection.mutable.Map.empty[Seq[Path], (Seq[Stamp], T)] + + def getOrCreate(paths: Seq[Path], create: () => T): T = cache.synchronized { + val stamps = paths.map { path => + val attrs = Files.readAttributes(path, classOf[BasicFileAttributes]) + val lastModified = attrs.lastModifiedTime() + // only null on some platforms, but that's okay, we just use the last modified timestamp as our stamp + val fileKey = attrs.fileKey() + Stamp(lastModified, fileKey) + } + + cache.get(paths) match { + case Some((cachedStamps, cached)) if cachedStamps == stamps => cached case _ => val value = create() - cache.put(path, (stamp, value)) + cache.put(paths, (stamps, value)) value } } From 0417fcf13393341fcfd938874ef4b7e4f1880ccf Mon Sep 17 00:00:00 2001 From: jvican Date: Fri, 19 Jan 2018 17:08:25 +0100 Subject: [PATCH 1196/2793] Cache classloaders for compiler plugins When users compile their sources with external compiler plugins whose classes are not found in the compiler's classpath, the compiler needs to classload them every time they want to be used. This process can slow down compilation severely, as shown in https://github.com/scala/scala-dev/issues/458. This cost is due to the JVM JIT-compiling the recently loaded classes, and the JIT deoptimizing because new classes violate the optimization assumptions taken via Class Hierarchy Analysis (CHA). The cost of dynamic classloading can be mitigated by caching the classloaders for compiler plugins based on their file stamps, in a similar way to how the compiler currently caches classpaths. If the file stamps change, the compiler plugin will be loaded again. If they don't (which is by far the most common scenario), the classloaders will be reused. Fixes scala/scala-dev#458. Thanks to Jason for suggesting the fix. 
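The shape of the mitigation, reduced to a self-contained sketch (this is not the compiler's implementation, which goes through `FileBasedCache` and `ScalaClassLoader`, and it omits the stamp-based invalidation described above): keep one classloader per set of plugin jars, so repeated compiles in the same JVM stop reloading, and re-JIT-compiling, the plugin classes.

```
// Illustration only: a per-jar-set classloader cache with no invalidation.
import java.net.URLClassLoader
import java.nio.file.Path

object PluginLoaderCache {
  private val cache = collection.mutable.Map.empty[Seq[Path], URLClassLoader]

  def loaderFor(jars: Seq[Path]): URLClassLoader = cache.synchronized {
    cache.getOrElseUpdate(jars, {
      val urls = jars.map(_.toUri.toURL).toArray
      new URLClassLoader(urls, getClass.getClassLoader)
    })
  }
}
```

The real change below additionally bypasses the cache when `-YdisablePluginsClassLoaderCaching` is set or when a plugin location is not a jar/zip file.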
--- .../scala/tools/nsc/plugins/Plugin.scala | 31 +++++++++++++------ .../scala/tools/nsc/plugins/Plugins.scala | 2 +- .../tools/nsc/settings/ScalaSettings.scala | 1 + 3 files changed, 24 insertions(+), 10 deletions(-) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index f5cb88bb9231..5b0b77dffb26 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -8,11 +8,12 @@ package plugins import scala.tools.nsc.io.Jar import scala.reflect.internal.util.ScalaClassLoader -import scala.reflect.io.{ Directory, File, Path } +import scala.reflect.io.{Directory, File, Path} import java.io.InputStream import scala.collection.mutable -import scala.util.{ Try, Success, Failure } +import scala.tools.nsc.classpath.FileBasedCache +import scala.util.{Failure, Success, Try} /** Information about a plugin loaded from a jar file. * @@ -85,14 +86,25 @@ object Plugin { private val PluginXML = "scalac-plugin.xml" + private val pluginClassLoadersCache = new FileBasedCache[ScalaClassLoader]() + /** Create a class loader with the specified locations plus * the loader that loaded the Scala compiler. + * + * If the class loader has already been created before and the + * file stamps are the same, the previous loader is returned to + * mitigate the cost of dynamic classloading as it has been + * measured in https://github.com/scala/scala-dev/issues/458. */ - private def loaderFor(locations: Seq[Path]): ScalaClassLoader = { - val compilerLoader = classOf[Plugin].getClassLoader - val urls = locations map (_.toURL) + private def loaderFor(locations: Seq[Path], disableCache: Boolean): ScalaClassLoader = { + def newLoader = () => { + val compilerLoader = classOf[Plugin].getClassLoader + val urls = locations map (_.toURL) + ScalaClassLoader fromURLs (urls, compilerLoader) + } - ScalaClassLoader fromURLs (urls, compilerLoader) + if (disableCache || locations.exists(!Jar.isJarOrZip(_))) newLoader() + else pluginClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader) } /** Try to load a plugin description from the specified location. 
@@ -135,7 +147,8 @@ object Plugin { def loadAllFrom( paths: List[List[Path]], dirs: List[Path], - ignoring: List[String]): List[Try[AnyClass]] = + ignoring: List[String], + disableClassLoaderCache: Boolean): List[Try[AnyClass]] = { // List[(jar, Try(descriptor))] in dir def scan(d: Directory) = @@ -146,7 +159,7 @@ object Plugin { // scan plugin dirs for jars containing plugins, ignoring dirs with none and other jars val fromDirs: PDResults = dirs filter (_.isDirectory) flatMap { d => scan(d.toDirectory) collect { - case (j, Success(pd)) => Success((pd, loaderFor(Seq(j)))) + case (j, Success(pd)) => Success((pd, loaderFor(Seq(j), disableClassLoaderCache))) } } @@ -163,7 +176,7 @@ object Plugin { loop(ps) } val fromPaths: PDResults = paths map (p => (p, findDescriptor(p))) map { - case (p, Success(pd)) => Success((pd, loaderFor(p))) + case (p, Success(pd)) => Success((pd, loaderFor(p, disableClassLoaderCache))) case (_, Failure(e)) => Failure(e) } diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 4b1805479d83..7e82dbe04710 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -31,7 +31,7 @@ trait Plugins { global: Global => def injectDefault(s: String) = if (s.isEmpty) Defaults.scalaPluginPath else s asPath(settings.pluginsDir.value) map injectDefault map Path.apply } - val maybes = Plugin.loadAllFrom(paths, dirs, settings.disable.value) + val maybes = Plugin.loadAllFrom(paths, dirs, settings.disable.value, settings.YdisablePluginsClassLoaderCaching.value) val (goods, errors) = maybes partition (_.isSuccess) // Explicit parameterization of recover to avoid -Xlint warning about inferred Any errors foreach (_.recover[Any] { diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index de79ac93152b..70a4e0e8ca81 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -219,6 +219,7 @@ trait ScalaSettings extends AbsScalaSettings val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "") val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.") val YdisableFlatCpCaching = BooleanSetting ("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.") + val YdisablePluginsClassLoaderCaching = BooleanSetting ("-YdisablePluginsClassLoaderCaching", "Do not cache classloaders for compiler plugins that are dynamically loaded.") val YpartialUnification = BooleanSetting ("-Ypartial-unification", "Enable partial unification in type constructor inference") val Yvirtpatmat = BooleanSetting ("-Yvirtpatmat", "Enable pattern matcher virtualization") From b16b2a9a033cd87dbc08f18266f8cbdedde21213 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 17 Jan 2018 23:57:23 -0800 Subject: [PATCH 1197/2793] Avoid position tests when linting Incremental compilation while erroring can result in unpositioned trees. Parser preserves the escape hatch attachment under patdef transform. Both casedef and valdef can test immediately if escape hatch was requested. Add attachment for valdefs resulting from patvardefs. 
When checking for redundant unused setters, try to compare using accessed, otherwise compare names. --- .../nsc/typechecker/TypeDiagnostics.scala | 30 +++++++++------- .../scala/tools/nsc/typechecker/Typers.scala | 11 +++--- .../reflect/internal/StdAttachments.scala | 4 +++ .../scala/reflect/internal/TreeGen.scala | 24 +++++++------ .../reflect/runtime/JavaUniverseForce.scala | 1 + test/files/neg/warn-unused-patvars.check | 8 +---- test/files/neg/warn-unused-patvars.scala | 4 +-- test/files/neg/warn-unused-privates.check | 35 +++++++++---------- test/files/neg/warn-unused-privates.scala | 4 +-- 9 files changed, 62 insertions(+), 59 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 905e0eed2015..dbfcfb1b031c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -79,8 +79,12 @@ trait TypeDiagnostics { prefix + name.decode } + // Bind of pattern var was `x @ _` private def atBounded(t: Tree) = t.hasAttachment[AtBoundIdentifierAttachment.type] + // ValDef was a PatVarDef `val P(x) = ???` + private def wasPatVarDef(t: Tree) = t.hasAttachment[PatVarDefAttachment.type] + /** Does the positioned line assigned to t1 precede that of t2? */ def posPrecedes(p1: Position, p2: Position) = p1.isDefined && p2.isDefined && p1.line < p2.line @@ -478,7 +482,6 @@ trait TypeDiagnostics { val targets = mutable.Set[Symbol]() val setVars = mutable.Set[Symbol]() val treeTypes = mutable.Set[Type]() - val atBounds = mutable.Set[Symbol]() val params = mutable.Set[Symbol]() val patvars = mutable.Set[Symbol]() @@ -503,16 +506,19 @@ trait TypeDiagnostics { val sym = t.symbol var bail = false t match { - case m: MemberDef if qualifies(t.symbol) => - defnTrees += m + case m: MemberDef if qualifies(sym) => t match { + case ValDef(mods@_, name@_, tpt@_, rhs@_) if wasPatVarDef(t) => + if (!atBounded(t)) patvars += sym case DefDef(mods@_, name@_, tparams@_, vparamss, tpt@_, rhs@_) if !sym.isAbstract && !sym.isDeprecated && !sym.isMacro => if (sym.isPrimaryConstructor) for (cpa <- sym.owner.constrParamAccessors if cpa.isPrivateLocal) params += cpa else if (sym.isSynthetic && sym.isImplicit) bail = true else if (!sym.isConstructor) for (vs <- vparamss) params ++= vs.map(_.symbol) + defnTrees += m case _ => + defnTrees += m } case CaseDef(pat, guard@_, rhs@_) if settings.warnUnusedPatVars => pat.foreach { @@ -521,7 +527,6 @@ trait TypeDiagnostics { } case _: RefTree if sym ne null => targets += sym case Assign(lhs, _) if lhs.symbol != null => setVars += lhs.symbol - case Bind(_, _) if atBounded(t) => atBounds += sym case Apply(Select(_, nme.withFilter), Function(vparams, _) :: Nil) => bail = vparams.exists(_.name startsWith nme.CHECK_IF_REFUTABLE_STRING) case _ => @@ -562,9 +567,8 @@ trait TypeDiagnostics { && !(treeTypes.exists(_.exists(_.typeSymbolDirect == m))) ) def isSyntheticWarnable(sym: Symbol) = ( - sym.isDefaultGetter + sym.isDefaultGetter ) - def isUnusedTerm(m: Symbol): Boolean = ( m.isTerm && (!m.isSynthetic || isSyntheticWarnable(m)) @@ -594,12 +598,14 @@ trait TypeDiagnostics { def unusedTerms = { val all = defnTrees.toList.filter(v => isUnusedTerm(v.symbol)) - // filter out setters if already warning for getter, indicated by position. - // also documentary names in patterns. 
- all.filterNot(v => - v.symbol.isSetter && all.exists(g => g.symbol.isGetter && g.symbol.pos.point == v.symbol.pos.point) - || atBounds.exists(x => v.symbol.pos.point == x.pos.point) - ).sortBy(treepos) + // is this a getter-setter pair? and why is this a difficult question for traits? + def sameReference(g: Symbol, s: Symbol) = + if (g.accessed.exists && s.accessed.exists) g.accessed == s.accessed + else g.owner == s.owner && g.setterName == s.name //sympos(g) == sympos(s) + + // filter out setters if already warning for getter. + val clean = all.filterNot(v => v.symbol.isSetter && all.exists(g => g.symbol.isGetter && sameReference(g.symbol, v.symbol))) + clean.sortBy(treepos) } // local vars which are never set, except those already returned in unused def unsetVars = localVars.filter(v => !setVars(v) && !isUnusedTerm(v)).sortBy(sympos) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 95c58faed2db..8dedbd773f00 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4277,11 +4277,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } - def typedBind(tree: Bind) = { - val name = tree.name - val body = tree.body - name match { - case name: TypeName => + def typedBind(tree: Bind) = + tree match { + case Bind(name: TypeName, body) => assert(body == EmptyTree, s"${context.unit} typedBind: ${name.debugString} ${body} ${body.getClass}") val sym = if (tree.symbol != NoSymbol) tree.symbol @@ -4297,7 +4295,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper tree setSymbol sym setType sym.tpeHK - case name: TermName => + case Bind(name: TermName, body) => val sym = if (tree.symbol != NoSymbol) tree.symbol else context.owner.newValue(name, tree.pos) @@ -4327,7 +4325,6 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper tree setSymbol sym treeCopy.Bind(tree, name, body1) setSymbol sym setType body1.tpe } - } def typedArrayValue(tree: ArrayValue) = { val elemtpt1 = typedType(tree.elemtpt, mode) diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala index dfca57970743..f170a091e83d 100644 --- a/src/reflect/scala/reflect/internal/StdAttachments.scala +++ b/src/reflect/scala/reflect/internal/StdAttachments.scala @@ -68,6 +68,10 @@ trait StdAttachments { */ case object AtBoundIdentifierAttachment extends PlainAttachment + /** Indicates that a `ValDef` was synthesized from a pattern definition, `val P(x)`. + */ + case object PatVarDefAttachment extends PlainAttachment + /** Identifies trees are either result or intermediate value of for loop desugaring. 
*/ case object ForAttachment extends PlainAttachment diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index d312582dcbdc..25dfe73b0037 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -732,11 +732,16 @@ abstract class TreeGen { def mkPatDef(pat: Tree, rhs: Tree)(implicit fresh: FreshNameCreator): List[ValDef] = mkPatDef(Modifiers(0), pat, rhs) + private def cpAtBoundAttachment(from: Tree, to: ValDef): to.type = + if (from.hasAttachment[AtBoundIdentifierAttachment.type]) to.updateAttachment(AtBoundIdentifierAttachment) else to + private def cpPatVarDefAttachments(from: Tree, to: ValDef): to.type = + cpAtBoundAttachment(from, to).updateAttachment(PatVarDefAttachment) + /** Create tree for pattern definition */ def mkPatDef(mods: Modifiers, pat: Tree, rhs: Tree)(implicit fresh: FreshNameCreator): List[ValDef] = matchVarPattern(pat) match { case Some((name, tpt)) => List(atPos(pat.pos union rhs.pos) { - ValDef(mods, name.toTermName, tpt, rhs) + cpAtBoundAttachment(pat, ValDef(mods, name.toTermName, tpt, rhs)) }) case None => @@ -778,9 +783,9 @@ abstract class TreeGen { )) } vars match { - case List((vname, tpt, pos)) => + case List((vname, tpt, pos, original)) => List(atPos(pat.pos union pos union rhs.pos) { - ValDef(mods, vname.toTermName, tpt, matchExpr) + cpPatVarDefAttachments(original, ValDef(mods, vname.toTermName, tpt, matchExpr)) }) case _ => val tmp = freshTermName() @@ -790,9 +795,9 @@ abstract class TreeGen { tmp, TypeTree(), matchExpr) } var cnt = 0 - val restDefs = for ((vname, tpt, pos) <- vars) yield atPos(pos) { + val restDefs = for ((vname, tpt, pos, original) <- vars) yield atPos(pos) { cnt += 1 - ValDef(mods, vname.toTermName, tpt, Select(Ident(tmp), newTermName("_" + cnt))) + cpPatVarDefAttachments(original, ValDef(mods, vname.toTermName, tpt, Select(Ident(tmp), newTermName("_" + cnt)))) } firstDef :: restDefs } @@ -845,7 +850,7 @@ abstract class TreeGen { * synthetic for all nodes that contain a variable position. */ class GetVarTraverser extends Traverser { - val buf = new ListBuffer[(Name, Tree, Position)] + val buf = new ListBuffer[(Name, Tree, Position, Tree)] def namePos(tree: Tree, name: Name): Position = if (!tree.pos.isRange || name.containsName(nme.raw.DOLLAR)) tree.pos.focus @@ -857,7 +862,7 @@ abstract class TreeGen { override def traverse(tree: Tree): Unit = { def seenName(name: Name) = buf exists (_._1 == name) - def add(name: Name, t: Tree) = if (!seenName(name)) buf += ((name, t, namePos(tree, name))) + def add(name: Name, t: Tree) = if (!seenName(name)) buf += ((name, t, namePos(tree, name), tree)) val bl = buf.length tree match { @@ -888,10 +893,9 @@ abstract class TreeGen { } /** Returns list of all pattern variables, possibly with their types, - * without duplicates + * without duplicates, plus position and original tree. 
*/ - private def getVariables(tree: Tree): List[(Name, Tree, Position)] = - new GetVarTraverser apply tree + private def getVariables(tree: Tree): List[(Name, Tree, Position, Tree)] = (new GetVarTraverser)(tree) /** Convert all occurrences of (lower-case) variables in a pattern as follows: * x becomes x @ _ diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index e7e57d556c87..bc5e259678c1 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -41,6 +41,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.DelambdafyTarget this.BackquotedIdentifierAttachment this.AtBoundIdentifierAttachment + this.PatVarDefAttachment this.ForAttachment this.SyntheticUnitAttachment this.SubpatternsAttachment diff --git a/test/files/neg/warn-unused-patvars.check b/test/files/neg/warn-unused-patvars.check index 2665126a36d5..9f89a001cd1a 100644 --- a/test/files/neg/warn-unused-patvars.check +++ b/test/files/neg/warn-unused-patvars.check @@ -1,12 +1,6 @@ warn-unused-patvars.scala:9: warning: private val x in trait Boundings is never used private val x = 42 // warn, sanity check ^ -warn-unused-patvars.scala:28: warning: local val x in method v is never used - val D(x) = d // warn, fixme - ^ -warn-unused-patvars.scala:32: warning: local val x in method w is never used - val D(x @ _) = d // warn, fixme (valdef pos is different) - ^ error: No warnings can be incurred under -Xfatal-warnings. -three warnings found +one warning found one error found diff --git a/test/files/neg/warn-unused-patvars.scala b/test/files/neg/warn-unused-patvars.scala index 3d35dfedd69a..c6130fdeea8a 100644 --- a/test/files/neg/warn-unused-patvars.scala +++ b/test/files/neg/warn-unused-patvars.scala @@ -25,11 +25,11 @@ trait Boundings { } def v() = { - val D(x) = d // warn, fixme + val D(x) = d // no warn 17 } def w() = { - val D(x @ _) = d // warn, fixme (valdef pos is different) + val D(x @ _) = d // no warn 17 } diff --git a/test/files/neg/warn-unused-privates.check b/test/files/neg/warn-unused-privates.check index e83cfdebdee9..8ed83c76d37f 100644 --- a/test/files/neg/warn-unused-privates.check +++ b/test/files/neg/warn-unused-privates.check @@ -61,24 +61,6 @@ warn-unused-privates.scala:137: warning: private method x in class OtherNames is warn-unused-privates.scala:138: warning: private method y_= in class OtherNames is never used private def y_=(i: Int): Unit = ??? ^ -warn-unused-privates.scala:153: warning: local val x in method f is never used - val C(x, y, Some(z)) = c // warn - ^ -warn-unused-privates.scala:153: warning: local val y in method f is never used - val C(x, y, Some(z)) = c // warn - ^ -warn-unused-privates.scala:153: warning: local val z in method f is never used - val C(x, y, Some(z)) = c // warn - ^ -warn-unused-privates.scala:161: warning: local val z in method h is never used - val C(x @ _, y @ _, z @ Some(_)) = c // warn for z? 
- ^ -warn-unused-privates.scala:166: warning: local val x in method v is never used - val D(x) = d // warn - ^ -warn-unused-privates.scala:170: warning: local val x in method w is never used - val D(x @ _) = d // warn, fixme (valdef pos is different) - ^ warn-unused-privates.scala:97: warning: local var x in method f2 is never updated: consider using immutable val var x = 100 // warn about it being a var ^ @@ -103,6 +85,21 @@ warn-unused-privates.scala:216: warning: private class for your eyes only in obj warn-unused-privates.scala:232: warning: private class D in class nonprivate alias is enclosing is never used private class D extends C2 // warn ^ +warn-unused-privates.scala:153: warning: pattern var x in method f is never used; `x@_' suppresses this warning + val C(x, y, Some(z)) = c // warn + ^ +warn-unused-privates.scala:153: warning: pattern var y in method f is never used; `y@_' suppresses this warning + val C(x, y, Some(z)) = c // warn + ^ +warn-unused-privates.scala:153: warning: pattern var z in method f is never used; `z@_' suppresses this warning + val C(x, y, Some(z)) = c // warn + ^ +warn-unused-privates.scala:161: warning: pattern var z in method h is never used; `z@_' suppresses this warning + val C(x @ _, y @ _, z @ Some(_)) = c // warn for z? + ^ +warn-unused-privates.scala:166: warning: pattern var x in method v is never used; `x@_' suppresses this warning + val D(x) = d // warn + ^ warn-unused-privates.scala:201: warning: pattern var z in method f is never used; `z@_' suppresses this warning case z => "warn" ^ @@ -119,5 +116,5 @@ warn-unused-privates.scala:138: warning: parameter value i in method y_= is neve private def y_=(i: Int): Unit = ??? ^ error: No warnings can be incurred under -Xfatal-warnings. -40 warnings found +39 warnings found one error found diff --git a/test/files/neg/warn-unused-privates.scala b/test/files/neg/warn-unused-privates.scala index 4640f80d365b..7df4dfcfa785 100644 --- a/test/files/neg/warn-unused-privates.scala +++ b/test/files/neg/warn-unused-privates.scala @@ -167,7 +167,7 @@ trait Boundings { 17 } def w() = { - val D(x @ _) = d // warn, fixme (valdef pos is different) + val D(x @ _) = d // no warn 17 } @@ -185,7 +185,7 @@ trait Forever { val t = Option((17, 42)) for { ns <- t - (i, j) = ns // warn, fixme + (i, j) = ns // no warn } yield 42 // val emitted only if needed, hence nothing unused } } From f18e3c59fdbb1a412c37d4d85def6f766f11cfa2 Mon Sep 17 00:00:00 2001 From: jvican Date: Sat, 20 Jan 2018 22:04:48 +0100 Subject: [PATCH 1198/2793] Add classloaders cache for macros Macros are also prey of dynamic classloading (all macro implementations have to be loaded in a classloader before being executed). Such process produces the same disadvantages described in the previous commit. This commit mitigates the cost of classloading by caching classloaders. 
--- .../tools/nsc/settings/ScalaSettings.scala | 1 + .../scala/tools/nsc/typechecker/Macros.scala | 23 +++++++++++++++++-- 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 70a4e0e8ca81..eb5b82084bda 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -220,6 +220,7 @@ trait ScalaSettings extends AbsScalaSettings val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.") val YdisableFlatCpCaching = BooleanSetting ("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.") val YdisablePluginsClassLoaderCaching = BooleanSetting ("-YdisablePluginsClassLoaderCaching", "Do not cache classloaders for compiler plugins that are dynamically loaded.") + val YdisableMacrosClassLoaderCaching = BooleanSetting ("-YdisableMacrosClassLoaderCaching", "Do not cache classloaders for macros that are dynamically loaded.") val YpartialUnification = BooleanSetting ("-Ypartial-unification", "Enable partial unification in type constructor inference") val Yvirtpatmat = BooleanSetting ("-Yvirtpatmat", "Enable pattern matcher virtualization") diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 637864c92c85..3d645278494e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -51,6 +51,9 @@ trait Macros extends MacroRuntimes with Traces with Helpers { def globalSettings = global.settings + private final val macroClassLoadersCache = + new scala.tools.nsc.classpath.FileBasedCache[ScalaClassLoader]() + /** Obtains a `ClassLoader` instance used for macro expansion. 
* * By default a new `ScalaClassLoader` is created using the classpath @@ -60,8 +63,24 @@ trait Macros extends MacroRuntimes with Traces with Helpers { */ protected def findMacroClassLoader(): ClassLoader = { val classpath = global.classPath.asURLs - macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) - ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader) + def newLoader = () => { + macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) + ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader) + } + + import scala.tools.nsc.io.Jar + import scala.reflect.io.{AbstractFile, Path} + val locations = classpath.map(u => Path(AbstractFile.getURL(u).file)) + val disableCache = settings.YdisableMacrosClassLoaderCaching.value + if (disableCache || locations.exists(!Jar.isJarOrZip(_))) { + if (disableCache) macroLogVerbose("macro classloader: caching is disabled by the user.") + else { + val offenders = locations.filterNot(!Jar.isJarOrZip(_)) + macroLogVerbose(s"macro classloader: caching is disabled because the following paths are not supported: ${offenders.mkString(",")}.") + } + + newLoader() + } else macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader) } /** `MacroImplBinding` and its companion module are responsible for From 8479c998b0d98db2486dd626e7931fe23646327c Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Tue, 23 Jan 2018 21:06:37 +0000 Subject: [PATCH 1199/2793] provide hook for external profiler --- .../scala/tools/nsc/profile/Profiler.scala | 7 ++++ .../tools/nsc/profile/ProfilerPlugin.scala | 35 +++++++++++++++++++ 2 files changed, 42 insertions(+) create mode 100644 src/compiler/scala/tools/nsc/profile/ProfilerPlugin.scala diff --git a/src/compiler/scala/tools/nsc/profile/Profiler.scala b/src/compiler/scala/tools/nsc/profile/Profiler.scala index 7048fc4006e2..d0931071b3a1 100644 --- a/src/compiler/scala/tools/nsc/profile/Profiler.scala +++ b/src/compiler/scala/tools/nsc/profile/Profiler.scala @@ -2,6 +2,7 @@ package scala.tools.nsc.profile import java.io.{FileWriter, PrintWriter} import java.lang.management.ManagementFactory +import java.util.ServiceLoader import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicInteger import javax.management.openmbean.CompositeData @@ -86,6 +87,7 @@ private [profile] object RealProfiler { val threadMx = ExtendedThreadMxBean.proxy if (threadMx.isThreadCpuTimeSupported) threadMx.setThreadCpuTimeEnabled(true) private val idGen = new AtomicInteger() + lazy val allPlugins = ServiceLoader.load(classOf[ProfilerPlugin]).iterator().asScala.toList } private [profile] class RealProfiler(reporter : ProfileReporter, val settings: Settings) extends Profiler with NotificationListener { @@ -101,6 +103,8 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S case gc => println(s"Cant connect gcListener to ${gc.getClass}") } + val active = RealProfiler.allPlugins map (_.generate(this, settings)) + private val mainThread = Thread.currentThread() private[profile] def snapThread( idleTimeNanos:Long): ProfileSnap = { @@ -128,6 +132,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S reporter.header(this) override def finished(): Unit = { + active foreach {_.finished()} //we may miss a GC event if gc is occurring as we call this RealProfiler.gcMx foreach { case emitter: NotificationEmitter => emitter.removeNotificationListener(this) @@ -163,6 +168,7 @@ private [profile] class 
RealProfiler(reporter : ProfileReporter, val settings: S override def afterPhase(phase: Phase, snapBefore: ProfileSnap): Unit = { assert(mainThread eq Thread.currentThread()) val initialSnap = snapThread(0) + active foreach {_.afterPhase(phase)} if (settings.YprofileExternalTool.containsPhase(phase)) { println("Profile hook stop") ExternalToolHook.after() @@ -183,7 +189,7 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S println("Profile hook start") ExternalToolHook.before() } + active foreach {_.beforePhase(phase)} snapThread(0) } diff --git a/src/compiler/scala/tools/nsc/profile/ProfilerPlugin.scala b/src/compiler/scala/tools/nsc/profile/ProfilerPlugin.scala new file mode 100644 index 000000000000..9418771558ff --- /dev/null +++ b/src/compiler/scala/tools/nsc/profile/ProfilerPlugin.scala @@ -0,0 +1,35 @@ +package scala.tools.nsc.profile + +import scala.tools.nsc.{Phase, Settings} + +/** + * Specifies a plugin to the profiler. + * This is generated via the ServiceLoader. See [[java.util.ServiceLoader]] javadoc for configuration information + * + * Note: this must generate a java interface only + */ +trait ProfilerPlugin { + /** + * Generate a run specific profiler + * + * @param profiler the currently enabled profiler + * @param settings the setting for the current compile + * @return the run specific profiler, that will receive updates as the compile progresses + */ + def generate(profiler: RealProfiler, settings: Settings): ProfilerPluginRun +} + +/** + * Generated by [[ProfilerPlugin]], the plugin information for a single run of the compiler + */ +trait ProfilerPluginRun { + /** called before a phase */ + def beforePhase(phase: Phase): Unit + + /** called after a phase */ + def afterPhase(phase: Phase): Unit + + /** called when the compile run completes */ + def finished(): Unit + +} From c6eba1cd46bea912210479fc4a71748800d2a3c8 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 22 Jan 2018 15:40:36 +1000 Subject: [PATCH 1200/2793] Correct, stable position for mixed-in outer accessors Previously, the position was incorrectly taken from the outer accessor in the base trait. Not only was this wrong, but it was only available when jointly compiling the trait and subclass, so it was also a source of unstable output.
--- .../tools/nsc/transform/ExplicitOuter.scala | 2 +- .../scala/tools/nsc/transform/MixinTest.scala | 39 +++++++++++++++++++ 2 files changed, 40 insertions(+), 1 deletion(-) create mode 100644 test/junit/scala/tools/nsc/transform/MixinTest.scala diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index ffac6c60f130..94dcb8405f3b 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -182,7 +182,7 @@ abstract class ExplicitOuter extends InfoTransform debuglog(s"Reusing outer accessor symbol of $clazz for the mixin outer accessor of $mc") else { if (decls1 eq decls) decls1 = decls.cloneScope - val newAcc = mixinOuterAcc.cloneSymbol(clazz, mixinOuterAcc.flags & ~DEFERRED) + val newAcc = mixinOuterAcc.cloneSymbol(clazz, mixinOuterAcc.flags & ~DEFERRED).setPos(clazz.pos) newAcc setInfo (clazz.thisType memberType mixinOuterAcc) decls1 enter newAcc } diff --git a/test/junit/scala/tools/nsc/transform/MixinTest.scala b/test/junit/scala/tools/nsc/transform/MixinTest.scala new file mode 100644 index 000000000000..9288a4106df1 --- /dev/null +++ b/test/junit/scala/tools/nsc/transform/MixinTest.scala @@ -0,0 +1,39 @@ +package scala.tools.nsc +package transform + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.partest.ASMConverters.LineNumber +import scala.tools.testing.BytecodeTesting +import scala.tools.testing.BytecodeTesting._ + +@RunWith(classOf[JUnit4]) +class MixinTest extends BytecodeTesting { + import compiler._ + + @Test + def outerAccessorPosition(): Unit = { + val code = + """ // 1 + |class a { // 2 + | trait inner { // 3 + | def aa = a.this // 4 + | } // 5 + |} // 6 + |class b extends a { // 7 + | class z extends inner // 8 + |} // 9 + |""".stripMargin + + val List(_, _, _, bz) = compileClasses(code) + assertEquals("b$z", bz.name) + val method = getMethod(bz, "a$inner$$$outer") + val lineNumbers = method.instructions.collect { + case LineNumber(l, _) => l + } + assertEquals(List(8), lineNumbers) // this used to be "line 3". + } +} From 6587b19f6f8af2b85a5c47fce63ea5d711d8205e Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Thu, 25 Jan 2018 11:51:12 -0500 Subject: [PATCH 1201/2793] Ensure that parameter names read from bytecode aren't obliterated by generic signatures. c78d771e added code to parse the `MethodParameters` attribute from Java classfiles. However, if `javac` emits a `Signature` attribute after the `MethodParameters` attribute, the method info (previously parsed from the descriptor) is overwritten with the generic info, which doesn't keep the parameter symbols from the description-based info. Therefore, collect names in a buffer until all attributes are parsed, then attach them to the parameter symbols in the final info. Also, use the Java reflection `getParameters` method to populate these parameter symbols in runtime reflection. Fixes scala/bug#t10699. 
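For reference, the effect is observable with plain Java reflection; a small sketch (the JDK method here is only a stand-in, and since the JDK is typically not compiled with `-parameters` it falls back to synthetic `argN` names, whereas a class built with `javac -parameters`, like the tests below, reports `isNamePresent == true` and the real names):

```
object ShowParamNames extends App {
  val m = classOf[String].getMethod("substring", classOf[Int], classOf[Int])
  // Parameter#isNamePresent tells whether a MethodParameters attribute survived.
  m.getParameters.foreach(p => println(s"${p.isNamePresent}  ${p.getName}"))
}
```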
--- src/compiler/scala/tools/nsc/Global.scala | 2 +- .../symtab/classfile/ClassfileParser.scala | 39 ++++++++++++++----- .../scala/reflect/internal/SymbolTable.scala | 2 +- .../scala/reflect/internal/Types.scala | 13 ------- .../reflect/internal/util/Collections.scala | 13 +++++++ .../scala/reflect/runtime/JavaMirrors.scala | 30 ++++++++++---- .../run/reflect-java-param-names/J_1.java | 8 ++++ .../run/reflect-java-param-names/Test_2.scala | 16 ++++++++ test/files/run/t10699/A_1.java | 7 ++++ test/files/run/t10699/Test_2.scala | 7 ++++ .../run/{t9437b/Test.scala => t9437b.scala} | 0 11 files changed, 104 insertions(+), 33 deletions(-) create mode 100644 test/files/run/reflect-java-param-names/J_1.java create mode 100644 test/files/run/reflect-java-param-names/Test_2.scala create mode 100644 test/files/run/t10699/A_1.java create mode 100644 test/files/run/t10699/Test_2.scala rename test/files/run/{t9437b/Test.scala => t9437b.scala} (100%) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 295b174f66a0..9dc5b21f9521 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -287,7 +287,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) body } - override protected def isDeveloper = settings.developer || super.isDeveloper + override def isDeveloper = settings.developer || super.isDeveloper /** This is for WARNINGS which should reach the ears of scala developers * whenever they occur, but are not useful for normal users. They should diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index e53039d408f0..1639265796d9 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -17,6 +17,7 @@ import scala.annotation.switch import scala.reflect.internal.JavaAccFlags import scala.reflect.internal.pickling.{ByteCodecs, PickleBuffer} import scala.reflect.io.NoAbstractFile +import scala.reflect.internal.util.Collections._ import scala.tools.nsc.util.ClassPath import scala.tools.nsc.io.AbstractFile import scala.util.control.NonFatal @@ -802,6 +803,7 @@ abstract class ClassfileParser { } // sigToType def parseAttributes(sym: Symbol, symtype: Type, removedOuterParameter: Boolean = false) { + var paramNames: ListBuffer[Name] = null // null means we didn't find any def convertTo(c: Constant, pt: Type): Constant = { if (pt.typeSymbol == BooleanClass && c.tag == IntTag) Constant(c.value != 0) @@ -843,18 +845,16 @@ abstract class ClassfileParser { in.skip(4) i += 1 } - var remainingParams = sym.paramss.head // Java only has exactly one parameter list + paramNames = new ListBuffer() while (i < paramCount) { - val name = pool.getName(u2) + val rawname = pool.getName(u2) val access = u2 - if (remainingParams.nonEmpty) { - val param = remainingParams.head - remainingParams = remainingParams.tail - if ((access & ACC_SYNTHETIC) != ACC_SYNTHETIC) { // name not synthetic - param.name = name.encode - param.resetFlag(SYNTHETIC) - } - } + + val name = + if ((access & ACC_SYNTHETIC) == 0) rawname.encode + else nme.NO_NAME + + paramNames += name i += 1 } } @@ -1088,8 +1088,27 @@ abstract class ClassfileParser { scalaSigAnnot } + def addParamNames(): Unit = + if ((paramNames ne null) && sym.hasRawInfo && sym.isMethod) { + val params = sym.rawInfo.params + (paramNames zip params).foreach { + case (nme.NO_NAME, 
_) => // param was ACC_SYNTHETIC; ignore + case (name, param) => + param.resetFlag(SYNTHETIC) + param.name = name + } + if (isDeveloper && !sameLength(paramNames.toList, params)) { + // there's not anything we can do, but it's slightly worrisome + devWarning( + sm"""MethodParameters length mismatch while parsing $sym: + | rawInfo.params: ${sym.rawInfo.params} + | MethodParameters: ${paramNames.toList}""") + } + } + // begin parseAttributes for (i <- 0 until u2) parseAttribute() + addParamNames() } /** Apply `@native`/`@transient`/`@volatile` annotations to `sym`, diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 01df81a59498..19e9cc84abfb 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -71,7 +71,7 @@ abstract class SymbolTable extends macros.Universe def shouldLogAtThisPhase = false def isPastTyper = false - protected def isDeveloper: Boolean = settings.debug + def isDeveloper: Boolean = settings.debug @deprecated("use devWarning if this is really a warning; otherwise use log", "2.11.0") def debugwarn(msg: => String): Unit = devWarning(msg) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index f90f78ee01d7..64afe45cd9b3 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -4069,19 +4069,6 @@ trait Types /** Are `tps1` and `tps2` lists of pairwise equivalent types? */ def isSameTypes(tps1: List[Type], tps2: List[Type]): Boolean = (tps1 corresponds tps2)(_ =:= _) - /** True if two lists have the same length. Since calling length on linear sequences - * is O(n), it is an inadvisable way to test length equality. - */ - final def sameLength(xs1: List[_], xs2: List[_]) = compareLengths(xs1, xs2) == 0 - @tailrec final def compareLengths(xs1: List[_], xs2: List[_]): Int = - if (xs1.isEmpty) { if (xs2.isEmpty) 0 else -1 } - else if (xs2.isEmpty) 1 - else compareLengths(xs1.tail, xs2.tail) - - /** Again avoiding calling length, but the lengthCompare interface is clunky. - */ - final def hasLength(xs: List[_], len: Int) = xs.lengthCompare(len) == 0 - private var _basetypeRecursions: Int = 0 def basetypeRecursions = _basetypeRecursions def basetypeRecursions_=(value: Int) = _basetypeRecursions = value diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index 821f19095e73..970a5d300f8f 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -298,6 +298,19 @@ trait Collections { } catch { case _: IllegalArgumentException => None } + + /** True if two lists have the same length. Since calling length on linear sequences + * is O(n), it is an inadvisable way to test length equality. + */ + final def sameLength(xs1: List[_], xs2: List[_]) = compareLengths(xs1, xs2) == 0 + @tailrec final def compareLengths(xs1: List[_], xs2: List[_]): Int = + if (xs1.isEmpty) { if (xs2.isEmpty) 0 else -1 } + else if (xs2.isEmpty) 1 + else compareLengths(xs1.tail, xs2.tail) + + /** Again avoiding calling length, but the lengthCompare interface is clunky. 
+ */ + final def hasLength(xs: List[_], len: Int) = xs.lengthCompare(len) == 0 } object Collections extends Collections diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 635be86233ee..7d0ef7ba3c31 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -11,7 +11,7 @@ import java.lang.{Class => jClass, Package => jPackage} import java.lang.reflect.{ Method => jMethod, Constructor => jConstructor, Field => jField, Member => jMember, Type => jType, TypeVariable => jTypeVariable, - Modifier => jModifier, GenericDeclaration, GenericArrayType, + Parameter => jParameter, GenericDeclaration, GenericArrayType, ParameterizedType, WildcardType, AnnotatedElement } import java.lang.annotation.{Annotation => jAnnotation} import java.io.IOException @@ -1143,8 +1143,8 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive field } - private def setMethType(meth: Symbol, tparams: List[Symbol], paramtpes: List[Type], restpe: Type) = { - meth setInfo GenPolyType(tparams, MethodType(meth.owner.newSyntheticValueParams(paramtpes map objToAny), restpe)) + private def setMethType(meth: Symbol, tparams: List[Symbol], params: List[Symbol], restpe: Type) = { + meth setInfo GenPolyType(tparams, MethodType(params, restpe)) } /** @@ -1161,9 +1161,9 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive val meth = clazz.newMethod(newTermName(jmeth.getName), NoPosition, jmeth.scalaFlags) methodCache enter (jmeth, meth) val tparams = jmeth.getTypeParameters.toList map createTypeParameter - val paramtpes = jmeth.getGenericParameterTypes.toList map typeToScala + val params = jparamsAsScala(meth, jmeth.getParameters.toList) val resulttpe = typeToScala(jmeth.getGenericReturnType) - setMethType(meth, tparams, paramtpes, resulttpe) + setMethType(meth, tparams, params, resulttpe) propagatePackageBoundary(jmeth.javaFlags, meth) copyAnnotations(meth, jmeth) if (jmeth.javaFlags.isVarargs) meth modifyInfo arrayToRepeated @@ -1187,9 +1187,8 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive val constr = clazz.newConstructor(NoPosition, jconstr.scalaFlags) constructorCache enter (jconstr, constr) val tparams = jconstr.getTypeParameters.toList map createTypeParameter - val paramtpes = jconstr.getGenericParameterTypes.toList map typeToScala - setMethType(constr, tparams, paramtpes, clazz.tpe_*) - constr setInfo GenPolyType(tparams, MethodType(clazz.newSyntheticValueParams(paramtpes), clazz.tpe)) + val params = jparamsAsScala(constr, jconstr.getParameters.toList) + setMethType(constr, tparams, params, clazz.tpe) propagatePackageBoundary(jconstr.javaFlags, constr) copyAnnotations(constr, jconstr) if (jconstr.javaFlags.isVarargs) constr modifyInfo arrayToRepeated @@ -1197,6 +1196,21 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive constr } + /** Transform Java parameters `params` into a list of value parameters + * for `meth`. 
+ */ + private def jparamsAsScala(meth: MethodSymbol, params: List[jParameter]): List[Symbol] = { + params.zipWithIndex.map { + case (param, ix) => + val name = + if (param.isNamePresent) TermName(param.getName) + else nme.syntheticParamName(ix + 1) + meth.owner.newValueParameter(name, meth.pos) + .setInfo(objToAny(typeToScala(param.getParameterizedType))) + .setFlag(if (param.isNamePresent) 0 else SYNTHETIC) + } + } + // -------------------- Scala to Java ----------------------------------- /** The Java class corresponding to given Scala class. diff --git a/test/files/run/reflect-java-param-names/J_1.java b/test/files/run/reflect-java-param-names/J_1.java new file mode 100644 index 000000000000..61e2a765a148 --- /dev/null +++ b/test/files/run/reflect-java-param-names/J_1.java @@ -0,0 +1,8 @@ +/* + * javac: -parameters + */ +public class J_1 { + public J_1(int i, int j) {} + public void inst(int i, J j) {} + public static void statik(int i, J j) {} +} \ No newline at end of file diff --git a/test/files/run/reflect-java-param-names/Test_2.scala b/test/files/run/reflect-java-param-names/Test_2.scala new file mode 100644 index 000000000000..ffb0debe688a --- /dev/null +++ b/test/files/run/reflect-java-param-names/Test_2.scala @@ -0,0 +1,16 @@ +object Test extends App { + import reflect.runtime.universe._ + + val j_1 = symbolOf[J_1[_]] + val constr = j_1.info.decl(termNames.CONSTRUCTOR) + val inst = j_1.info.decl(TermName("inst")) + val statik = j_1.companion.info.decl(TermName("statik")) + + def check(info: Type) { + assert(info.paramLists.head.map(_.name) == List(TermName("i"), TermName("j")), info) + } + + check(constr.info) + check(inst.info) + check(statik.info) +} \ No newline at end of file diff --git a/test/files/run/t10699/A_1.java b/test/files/run/t10699/A_1.java new file mode 100644 index 000000000000..7e16862e1ec2 --- /dev/null +++ b/test/files/run/t10699/A_1.java @@ -0,0 +1,7 @@ +/* + * javac: -parameters + */ +public class A_1 { + public T identity_inst(T t, T other) { return t; } + public static T identity_static(T t, T other) { return t; } +} \ No newline at end of file diff --git a/test/files/run/t10699/Test_2.scala b/test/files/run/t10699/Test_2.scala new file mode 100644 index 000000000000..842b30d41c13 --- /dev/null +++ b/test/files/run/t10699/Test_2.scala @@ -0,0 +1,7 @@ +object Test extends App { + val a_1 = new A_1 + val t = "t" + val other = "other" + assert(a_1.identity_inst(other = other, t = t) == t) + assert(A_1.identity_static(other = other, t = t) == t) +} \ No newline at end of file diff --git a/test/files/run/t9437b/Test.scala b/test/files/run/t9437b.scala similarity index 100% rename from test/files/run/t9437b/Test.scala rename to test/files/run/t9437b.scala From 10b09dd228de6e5ee403e4bd816a40cc8948c606 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Sat, 27 Jan 2018 18:57:34 -0500 Subject: [PATCH 1202/2793] Deprecate static forwarders along with their forwardees. Evaluates one of the flags that was being dropped on the floor in `addForwarder`, and finds it being unjustly so. The attached test case shows the issue: javac won't warn on deprecation as scalac would. Fixes scala/bug#10701. 
--- .../scala/tools/nsc/backend/jvm/BCodeHelpers.scala | 6 +++--- test/files/neg/t10701.check | 6 ++++++ test/files/neg/t10701/Meh.scala | 3 +++ test/files/neg/t10701/Test.java | 8 ++++++++ 4 files changed, 20 insertions(+), 3 deletions(-) create mode 100644 test/files/neg/t10701.check create mode 100644 test/files/neg/t10701/Meh.scala create mode 100644 test/files/neg/t10701/Test.java diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 0f65f9e4c7cc..dcdd51e4e659 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -818,9 +818,9 @@ abstract class BCodeHelpers extends BCodeIdiomatic { */ // TODO: evaluate the other flags we might be dropping on the floor here. // TODO: ACC_SYNTHETIC ? - val flags = GenBCode.PublicStatic | ( - if (m.isVarargsMethod) asm.Opcodes.ACC_VARARGS else 0 - ) + val flags = GenBCode.PublicStatic | + (if (m.isVarargsMethod) asm.Opcodes.ACC_VARARGS else 0) | + (if (m.isDeprecated) asm.Opcodes.ACC_DEPRECATED else 0) // TODO needed? for(ann <- m.annotations) { ann.symbol.initialize } val jgensig = staticForwarderGenericSignature diff --git a/test/files/neg/t10701.check b/test/files/neg/t10701.check new file mode 100644 index 000000000000..d58fdf52fa81 --- /dev/null +++ b/test/files/neg/t10701.check @@ -0,0 +1,6 @@ +t10701/Test.java:6: warning: [deprecation] whatever() in Meh has been deprecated + Meh.whatever(); + ^ +error: warnings found and -Werror specified +1 error +1 warning diff --git a/test/files/neg/t10701/Meh.scala b/test/files/neg/t10701/Meh.scala new file mode 100644 index 000000000000..afac4fea5a86 --- /dev/null +++ b/test/files/neg/t10701/Meh.scala @@ -0,0 +1,3 @@ +object Meh { + @deprecated("","") def whatever {} +} \ No newline at end of file diff --git a/test/files/neg/t10701/Test.java b/test/files/neg/t10701/Test.java new file mode 100644 index 000000000000..c55bc52e128b --- /dev/null +++ b/test/files/neg/t10701/Test.java @@ -0,0 +1,8 @@ +/* + * javac: -Werror -deprecation + */ +public class Test { + public static void main(String [] args) { + Meh.whatever(); + } +} \ No newline at end of file From a10566188704d2577e0676544a7360c5359f8d68 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 30 Jan 2018 12:44:59 +1000 Subject: [PATCH 1203/2793] Allow statistics printing after arbitrary phases MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This is the first phase setting with a default of multiple phases, so I had to tweak the option parsing code a little. 
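As a rough sketch of that tweak (the real change is in `PhasesSetting` in `MutableSettings.scala` below), the default may now name several phases, so it is split on commas before going through the usual `-Ysetting:phases` parsing path:

```scala
// Sketch only, not the actual MutableSettings code.
val default = "parser,typer,patmat,erasure,cleanup,jvm"
def splitDefault: List[String] = default.split(',').toList
// -Ystatistics        -> splitDefault (the six phases above)
// -Ystatistics:typer  -> List("typer")
// -Ystatistics:all    -> every phase, as in the first run below
```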
``` ⚡ qscalac -Ystatistics:all sandbox/test.scala 2>&1 | grep "Cumulative statistics" *** Cumulative statistics at phase parser *** Cumulative statistics at phase namer *** Cumulative statistics at phase packageobjects *** Cumulative statistics at phase typer *** Cumulative statistics at phase patmat *** Cumulative statistics at phase superaccessors *** Cumulative statistics at phase extmethods *** Cumulative statistics at phase pickler *** Cumulative statistics at phase refchecks *** Cumulative statistics at phase uncurry *** Cumulative statistics at phase fields *** Cumulative statistics at phase tailcalls *** Cumulative statistics at phase specialize *** Cumulative statistics at phase explicitouter *** Cumulative statistics at phase erasure *** Cumulative statistics at phase posterasure *** Cumulative statistics at phase lambdalift *** Cumulative statistics at phase constructors *** Cumulative statistics at phase flatten *** Cumulative statistics at phase mixin *** Cumulative statistics at phase cleanup *** Cumulative statistics at phase delambdafy *** Cumulative statistics at phase jvm ⚡ qscalac -Ystatistics sandbox/test.scala 2>&1 | grep "Cumulative statistics" *** Cumulative statistics at phase parser *** Cumulative statistics at phase typer *** Cumulative statistics at phase patmat *** Cumulative statistics at phase erasure *** Cumulative statistics at phase cleanup *** Cumulative statistics at phase jvm ⚡ qscalac -Ystatistics:typer sandbox/test.scala 2>&1 | grep "Cumulative statistics" *** Cumulative statistics at phase typer ``` --- src/compiler/scala/tools/nsc/MainBench.scala | 2 +- .../scala/tools/nsc/settings/MutableSettings.scala | 6 ++++-- .../scala/tools/nsc/settings/ScalaSettings.scala | 12 +----------- 3 files changed, 6 insertions(+), 14 deletions(-) diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala index 3bfb24699e75..c5575b8a4c5c 100644 --- a/src/compiler/scala/tools/nsc/MainBench.scala +++ b/src/compiler/scala/tools/nsc/MainBench.scala @@ -24,7 +24,7 @@ object MainBench extends Driver with EvalLoop { var start = System.nanoTime() for (i <- 0 until NIter) { if (i == NIter-1) { - theCompiler.settings.Ystatistics.default.get foreach theCompiler.settings.Ystatistics.add + theCompiler.settings.Ystatistics.value = List("all") theCompiler.statistics.enabled = true theCompiler.statistics.hotEnabled = true } diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index 6d1d9802f236..198a3e06bc68 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -928,12 +928,14 @@ class MutableSettings(val errorFn: String => Unit) def tryToSet(args: List[String]) = if (default == "") errorAndValue("missing phase", None) - else tryToSetColon(List(default)) map (_ => args) + else tryToSetColon(splitDefault) map (_ => args) + + private def splitDefault = default.split(',').toList override def tryToSetColon(args: List[String]) = try { args match { case Nil => if (default == "") errorAndValue("missing phase", None) - else tryToSetColon(List(default)) + else tryToSetColon(splitDefault) case xs => value = (value ++ xs).distinct.sorted ; Some(Nil) } } catch { case _: NumberFormatException => None } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index de79ac93152b..fe29ae0406eb 100644 --- 
a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -371,17 +371,7 @@ trait ScalaSettings extends AbsScalaSettings val YoptLogInline = StringSetting("-Yopt-log-inline", "package/Class.method", "Print a summary of inliner activity; `_` to print all, prefix match to select.", "") import scala.reflect.internal.util.Statistics - object YstatisticsPhases extends MultiChoiceEnumeration { val parser, typer, patmat, erasure, cleanup, jvm = Value } - val Ystatistics = { - val description = "Print compiler statistics for specific phases" - MultiChoiceSetting( - name = "-Ystatistics", - helpArg = "phase", - descr = description, - domain = YstatisticsPhases, - default = Some(List("_")) - ) - } + val Ystatistics = PhasesSetting("-Ystatistics", "Print compiler statistics for specific phases", "parser,typer,patmat,erasure,cleanup,jvm") override def YstatisticsEnabled = Ystatistics.value.nonEmpty val YhotStatistics = BooleanSetting("-Yhot-statistics-enabled", s"Enable `${Ystatistics.name}` to print hot statistics.") From 233939726317a1de59dc677a0796dec6ec8eb35d Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Wed, 31 Jan 2018 01:27:45 +0000 Subject: [PATCH 1204/2793] minor tidyup of files and paths --- .../scala/tools/nsc/classpath/FileUtils.scala | 17 +++++++++++------ src/reflect/scala/reflect/io/Path.scala | 8 ++------ 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala index d402f2a61ae2..6b8dee627355 100644 --- a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala +++ b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala @@ -32,8 +32,11 @@ object FileUtils { implicit class FileOps(val file: JFile) extends AnyVal { def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.getName) - def isClass: Boolean = file.isFile && file.getName.endsWith(".class") + def isClass: Boolean = file.isFile && endsClass(file.getName) } + private val SUFFIX_CLASS = ".class" + private val SUFFIX_SCALA = ".scala" + private val SUFFIX_JAVA = ".java" def stripSourceExtension(fileName: String): String = { if (endsScala(fileName)) stripClassExtension(fileName) @@ -43,23 +46,25 @@ object FileUtils { def dirPath(forPackage: String) = forPackage.replace('.', '/') + @inline private def ends (filename:String, suffix:String) = filename.endsWith(suffix) && filename.length > suffix.length + def endsClass(fileName: String): Boolean = - fileName.length > 6 && fileName.substring(fileName.length - 6) == ".class" + ends (fileName, SUFFIX_CLASS) def endsScalaOrJava(fileName: String): Boolean = endsScala(fileName) || endsJava(fileName) def endsJava(fileName: String): Boolean = - fileName.length > 5 && fileName.substring(fileName.length - 5) == ".java" + ends (fileName, SUFFIX_JAVA) def endsScala(fileName: String): Boolean = - fileName.length > 6 && fileName.substring(fileName.length - 6) == ".scala" + ends (fileName, SUFFIX_SCALA) def stripClassExtension(fileName: String): String = - fileName.substring(0, fileName.length - 6) // equivalent of fileName.length - ".class".length + fileName.substring(0, fileName.length - 6) // equivalent of fileName.length - SUFFIX_CLASS.length def stripJavaExtension(fileName: String): String = - fileName.substring(0, fileName.length - 5) + fileName.substring(0, fileName.length - 5) // equivalent of fileName.length - SUFFIX_JAVA.length // probably it should match a pattern like [a-z_]{1}[a-z0-9_]* but 
it cannot be changed // because then some tests in partest don't pass diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala index c5b5ae24baeb..b62e3085de3b 100644 --- a/src/reflect/scala/reflect/io/Path.scala +++ b/src/reflect/scala/reflect/io/Path.scala @@ -33,14 +33,10 @@ import scala.reflect.internal.util.Statistics object Path { def isExtensionJarOrZip(jfile: JFile): Boolean = isExtensionJarOrZip(jfile.getName) def isExtensionJarOrZip(name: String): Boolean = { - val ext = extension(name) - ext == "jar" || ext == "zip" + name.endsWith(".jar") || name.endsWith(".zip") } def extension(name: String): String = { - var i = name.length - 1 - while (i >= 0 && name.charAt(i) != '.') - i -= 1 - + val i = name.lastIndexOf('.') if (i < 0) "" else name.substring(i + 1).toLowerCase } From 8a2e71ffadbd8d979d25e1fc246982b034b9ed0c Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Wed, 31 Jan 2018 02:44:53 +0000 Subject: [PATCH 1205/2793] simple classpath tidyups --- .../tools/nsc/classpath/AggregateClassPath.scala | 14 ++++++-------- .../scala/tools/nsc/classpath/ClassPath.scala | 15 ++++++++------- .../nsc/classpath/ZipArchiveFileLookup.scala | 2 +- 3 files changed, 15 insertions(+), 16 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala index 020d0a5b5449..fb1119a71ea2 100644 --- a/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala @@ -82,7 +82,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { } }.unzip val distinctPackages = packages.flatten.distinct - val distinctClassesAndSources = mergeClassesAndSources(classesAndSources: _*) + val distinctClassesAndSources = mergeClassesAndSources(classesAndSources) ClassPathEntries(distinctPackages, distinctClassesAndSources) } @@ -91,8 +91,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { * creates an entry containing both of them. If there would be more than one class or source * entries for the same class it always would use the first entry of each type found on a classpath. 
*/ - private def mergeClassesAndSources(entries: Seq[ClassRepresentation]*): Seq[ClassRepresentation] = { - // based on the implementation from MergedClassPath + private def mergeClassesAndSources(entries: Seq[Seq[ClassRepresentation]]): Seq[ClassRepresentation] = { var count = 0 val indices = collection.mutable.HashMap[String, Int]() val mergedEntries = new ArrayBuffer[ClassRepresentation](1024) @@ -117,7 +116,7 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { count += 1 } } - mergedEntries.toIndexedSeq + if (mergedEntries isEmpty) Nil else mergedEntries.toIndexedSeq } private def getDistinctEntries[EntryType <: ClassRepresentation](getEntries: ClassPath => Seq[EntryType]): Seq[EntryType] = { @@ -125,12 +124,11 @@ case class AggregateClassPath(aggregates: Seq[ClassPath]) extends ClassPath { val entriesBuffer = new ArrayBuffer[EntryType](1024) for { cp <- aggregates - entry <- getEntries(cp) if !seenNames.contains(entry.name) + entry <- getEntries(cp) } { - entriesBuffer += entry - seenNames += entry.name + if (seenNames.add(entry.name)) entriesBuffer += entry } - entriesBuffer.toIndexedSeq + if (entriesBuffer isEmpty) Nil else entriesBuffer.toIndexedSeq } } diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPath.scala b/src/compiler/scala/tools/nsc/classpath/ClassPath.scala index 08bd98b1d8de..6ad4142977eb 100644 --- a/src/compiler/scala/tools/nsc/classpath/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/ClassPath.scala @@ -12,6 +12,7 @@ object ClassPathEntries { import scala.language.implicitConversions // to have working unzip method implicit def entry2Tuple(entry: ClassPathEntries): (Seq[PackageEntry], Seq[ClassRepresentation]) = (entry.packages, entry.classesAndSources) + val empty = ClassPathEntries(Seq.empty, Seq.empty) } trait ClassFileEntry extends ClassRepresentation { @@ -27,21 +28,21 @@ trait PackageEntry { } private[nsc] case class ClassFileEntryImpl(file: AbstractFile) extends ClassFileEntry { - override def name = FileUtils.stripClassExtension(file.name) // class name + override val name = FileUtils.stripClassExtension(file.name) // class name override def binary: Option[AbstractFile] = Some(file) override def source: Option[AbstractFile] = None } private[nsc] case class SourceFileEntryImpl(file: AbstractFile) extends SourceFileEntry { - override def name = FileUtils.stripSourceExtension(file.name) + override val name = FileUtils.stripSourceExtension(file.name) override def binary: Option[AbstractFile] = None override def source: Option[AbstractFile] = Some(file) } private[nsc] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFile: AbstractFile) extends ClassRepresentation { - override def name = FileUtils.stripClassExtension(classFile.name) + override val name = FileUtils.stripClassExtension(classFile.name) override def binary: Option[AbstractFile] = Some(classFile) override def source: Option[AbstractFile] = Some(srcFile) @@ -50,11 +51,11 @@ private[nsc] case class ClassAndSourceFilesEntry(classFile: AbstractFile, srcFil private[nsc] case class PackageEntryImpl(name: String) extends PackageEntry private[nsc] trait NoSourcePaths { - def asSourcePathString: String = "" - private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = Seq.empty + final def asSourcePathString: String = "" + final private[nsc] def sources(inPackage: String): Seq[SourceFileEntry] = Seq.empty } private[nsc] trait NoClassPaths { - def findClassFile(className: String): Option[AbstractFile] = None - private[nsc] def 
classes(inPackage: String): Seq[ClassFileEntry] = Seq.empty + final def findClassFile(className: String): Option[AbstractFile] = None + private[nsc] final def classes(inPackage: String): Seq[ClassFileEntry] = Seq.empty } diff --git a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala index 31d971c25db1..0fbb6342a35a 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala @@ -63,7 +63,7 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPa fileBuf += createFileEntry(entry) } ClassPathEntries(pkgBuf, fileBuf) - } getOrElse ClassPathEntries(Seq.empty, Seq.empty) + } getOrElse ClassPathEntries.empty } private def findDirEntry(pkg: String): Option[archive.DirEntry] = { From bf57ad3dbd1e57a1c8fe04f721605d1fcad0dd71 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 17 Jan 2018 09:22:59 -0800 Subject: [PATCH 1206/2793] upgrade sbt 0.13.16 -> 0.13.17 not because of any particular expected benefit. just dogfooding, keeping current. --- project/build.properties | 2 +- scripts/common | 2 +- src/intellij/scala.ipr.SAMPLE | 66 ++++++++++++------------ test/benchmarks/project/build.properties | 2 +- 4 files changed, 36 insertions(+), 36 deletions(-) diff --git a/project/build.properties b/project/build.properties index c091b86ca467..133a8f197e36 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=0.13.16 +sbt.version=0.13.17 diff --git a/scripts/common b/scripts/common index 20cb4b244639..316d8ed5a0fb 100644 --- a/scripts/common +++ b/scripts/common @@ -19,7 +19,7 @@ mkdir -p $IVY_CACHE rm -rf $IVY_CACHE/cache/org.scala-lang SBT_CMD=${sbtCmd-sbt} -SBT_CMD="$SBT_CMD -sbt-version 0.13.16" +SBT_CMD="$SBT_CMD -sbt-version 0.13.17" # temp dir where all 'non-build' operation are performed TMP_ROOT_DIR=$(mktemp -d -t pr-scala.XXXX) diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index 6384b4863f94..16cddfa1d431 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -332,31 +332,31 @@ - - - - - - - + + + + + + + - - + + - - - - - - - - - + + + + + + + + + - - - + + + @@ -366,20 +366,20 @@ - - - - - - - + + + + + + + - - - + + + - - + +
diff --git a/test/benchmarks/project/build.properties b/test/benchmarks/project/build.properties index c091b86ca467..133a8f197e36 100644 --- a/test/benchmarks/project/build.properties +++ b/test/benchmarks/project/build.properties @@ -1 +1 @@ -sbt.version=0.13.16 +sbt.version=0.13.17 From e17353d72c905356dde8cf7d2464e52d38a63426 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 1 Feb 2018 09:25:29 -0800 Subject: [PATCH 1207/2793] Spot mistaken single-quote more broadly When erroring for unclosed char literal, broaden the previous advice by looking at the source text. If there are other single-quotes on the line, then it smells like an intended string literal, so it's worth adding that ' is not ". --- .../scala/tools/nsc/ast/parser/Scanners.scala | 11 ++++++++++- test/files/neg/badtok-1-212.check | 4 ++-- test/files/neg/badtok-1.check | 16 +++++++++++----- test/files/neg/badtok-1.scala | 4 ++++ 4 files changed, 27 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 302dfdf3e5d7..4dbba5a01000 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -546,9 +546,18 @@ trait Scanners extends ScannersCommon { def unclosedCharLit() = { val unclosed = "unclosed character literal" // advise if previous token was Symbol contiguous with the orphan single quote at offset - val msg = + val msg = { + val maybeMistakenQuote = + this match { + case sfs: SourceFileScanner => + val wholeLine = sfs.source.lineToString(sfs.source.offsetToLine(offset)) + wholeLine.count(_ == '\'') > 1 + case _ => false + } if (token == SYMBOLLIT && offset == lastOffset) s"""$unclosed (or use " for string literal "$strVal")""" + else if (maybeMistakenQuote) s"""$unclosed (or use " not ' for string literal)""" else unclosed + } syntaxError(msg) } def fetchSingleQuote() = { diff --git a/test/files/neg/badtok-1-212.check b/test/files/neg/badtok-1-212.check index 723b9160adb9..e3e1fc0efff5 100644 --- a/test/files/neg/badtok-1-212.check +++ b/test/files/neg/badtok-1-212.check @@ -1,7 +1,7 @@ -badtok-1-212.scala:2: error: unclosed character literal +badtok-1-212.scala:2: error: unclosed character literal (or use " not ' for string literal) '42' ^ -badtok-1-212.scala:2: error: unclosed character literal +badtok-1-212.scala:2: error: unclosed character literal (or use " not ' for string literal) '42' ^ badtok-1-212.scala:6: warning: deprecated syntax for character literal (use '\'' for single quote) diff --git a/test/files/neg/badtok-1.check b/test/files/neg/badtok-1.check index 68b2d0a406f4..7b8bfbbafc1b 100644 --- a/test/files/neg/badtok-1.check +++ b/test/files/neg/badtok-1.check @@ -1,13 +1,13 @@ -badtok-1.scala:2: error: unclosed character literal +badtok-1.scala:2: error: unclosed character literal (or use " not ' for string literal) '42' ^ -badtok-1.scala:2: error: unclosed character literal +badtok-1.scala:2: error: unclosed character literal (or use " not ' for string literal) '42' ^ badtok-1.scala:6: error: empty character literal (use '\'' for single quote) ''' ^ -badtok-1.scala:6: error: unclosed character literal +badtok-1.scala:6: error: unclosed character literal (or use " not ' for string literal) ''' ^ badtok-1.scala:8: error: empty character literal @@ -16,10 +16,16 @@ badtok-1.scala:8: error: empty character literal badtok-1.scala:11: error: unclosed character literal (or use " for string literal "''abc") 'abc' ^ -badtok-1.scala:13: 
error: unclosed character literal +badtok-1.scala:13: error: unclosed character literal (or use " for string literal "utf_8") +'utf_8' + ^ +badtok-1.scala:15: error: unclosed character literal (or use " not ' for string literal) +'utf-8' + ^ +badtok-1.scala:17: error: unclosed character literal ' ^ badtok-1.scala:11: error: expected class or object definition 'abc' ^ -8 errors found +10 errors found diff --git a/test/files/neg/badtok-1.scala b/test/files/neg/badtok-1.scala index 8118180b61f5..88351d0cebca 100644 --- a/test/files/neg/badtok-1.scala +++ b/test/files/neg/badtok-1.scala @@ -10,4 +10,8 @@ // SI-10120 'abc' +'utf_8' + +'utf-8' + ' From 90ba38fc1b053b614b0d9f3f0ee790a18e8dcbe9 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 1 Feb 2018 16:40:12 -0800 Subject: [PATCH 1208/2793] bundle scala-parser-combinators 1.0.7 it doesn't matter a whole lot what version we bundle, but on the balance, I suggest we bundle the latest 1.0.x release (there is also a 1.1.0, but let's be conservative) --- versions.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/versions.properties b/versions.properties index 785d0eb4144d..31184131638c 100644 --- a/versions.properties +++ b/versions.properties @@ -20,7 +20,7 @@ scala.binary.version=2.12 # - jline: shaded with JarJar and included in scala-compiler # - partest: used for running the tests scala-xml.version.number=1.0.6 -scala-parser-combinators.version.number=1.0.6 +scala-parser-combinators.version.number=1.0.7 scala-swing.version.number=2.0.0 partest.version.number=1.1.1 scala-asm.version=6.0.0-scala-1 From ef9b61b9517f30cb8a13cc6c0242eb0d88680677 Mon Sep 17 00:00:00 2001 From: Antoine Gourlay Date: Sat, 27 Jan 2018 15:03:30 +0100 Subject: [PATCH 1209/2793] Fix scaladoc links for Any{Ref,Val,} and Nothing. The symbols for `Any`, `AnyRef` and friends don't have an `associatedFile`, so another (hardcoded) symbol is used to make scaladoc pick the url for scala-library. Same thing is done for package objects using the special `package` member. But once the external url is picked from the mapping list, we should link to the real symbol, not the fake one. That part regressed in #5799 (c6ed953). Also, using `ListClass` as the replacement symbol doesn't work if the codebase being documented doesn't reference `List` at all: in that case it has no `associatedFile` either. The Akka codebase (from the ticket) obviously uses `List`, so it worked for them, but failed when minimized. There is some symbol initialization magic here that I don't understand, but using the root scala package always works and makes more sense anyway. Fixes scala/bug#10673. 
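In sketch form (the real change is in `MemberLookup.findExternalLink` below), a stand-in symbol that does have an `associatedFile` is still used to pick the classpath entry, and hence the external URL, but the link itself now targets the original symbol:

```scala
// Simplified sketch of findExternalLink: sym1 only locates the classpath
// entry for the URL mapping; the rendered link points at the real `sym`.
val sym1 =
  if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass)
    definitions.ScalaPackageClass.info.member(newTermName("package"))
  else if (sym.hasPackageFlag) sym.info.member(newTermName("package"))
  else sym
classpathEntryFor(sym1) flatMap { path =>
  settings.extUrlMapping get path map { url =>
    LinkToExternalTpl(name, url, makeTemplate(sym)) // `sym`, not `sym1`
  }
}
```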
--- .../tools/nsc/doc/model/MemberLookup.scala | 7 +-- test/scaladoc/run/t10673.check | 4 ++ test/scaladoc/run/t10673.scala | 43 +++++++++++++++++++ 3 files changed, 51 insertions(+), 3 deletions(-) create mode 100644 test/scaladoc/run/t10673.check create mode 100644 test/scaladoc/run/t10673.scala diff --git a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala index 0e96f8220ccd..719d2a86db23 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala @@ -9,7 +9,7 @@ trait MemberLookup extends base.MemberLookupBase { thisFactory: ModelFactory => import global._ - import definitions.{ NothingClass, AnyClass, AnyValClass, AnyRefClass, ListClass } + import definitions.{ NothingClass, AnyClass, AnyValClass, AnyRefClass } override def internalLink(sym: Symbol, site: Symbol): Option[LinkTo] = findTemplateMaybe(sym) match { @@ -39,7 +39,8 @@ trait MemberLookup extends base.MemberLookupBase { override def findExternalLink(sym: Symbol, name: String): Option[LinkTo] = { val sym1 = - if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) ListClass + if (sym == AnyClass || sym == AnyRefClass || sym == AnyValClass || sym == NothingClass) + definitions.ScalaPackageClass.info.member(newTermName("package")) else if (sym.hasPackageFlag) /* Get package object which has associatedFile ne null */ sym.info.member(newTermName("package")) @@ -61,7 +62,7 @@ trait MemberLookup extends base.MemberLookupBase { } classpathEntryFor(sym1) flatMap { path => settings.extUrlMapping get path map { url => { - LinkToExternalTpl(name, url, makeTemplate(sym1)) + LinkToExternalTpl(name, url, makeTemplate(sym)) } } } diff --git a/test/scaladoc/run/t10673.check b/test/scaladoc/run/t10673.check new file mode 100644 index 000000000000..853c64c274c0 --- /dev/null +++ b/test/scaladoc/run/t10673.check @@ -0,0 +1,4 @@ +'scala.AnyRef' links to scala.AnyRef +'scala.collection.immutable.Seq' links to scala.collection.immutable.Seq +'scala.Nothing' links to scala.Nothing +Done. 
diff --git a/test/scaladoc/run/t10673.scala b/test/scaladoc/run/t10673.scala new file mode 100644 index 000000000000..4d747b41d7a5 --- /dev/null +++ b/test/scaladoc/run/t10673.scala @@ -0,0 +1,43 @@ +import scala.tools.nsc.doc.model._ +import scala.tools.nsc.doc.base._ +import scala.tools.nsc.doc.base.comment._ +import scala.tools.nsc.doc.html.Page +import scala.tools.partest.ScaladocModelTest +import java.net.{URI, URL} +import java.io.File + +object Test extends ScaladocModelTest { + + override def code = + """ + trait Foo extends AnyRef + + class Bar extends scala.collection.immutable.Seq[Nothing] + """ + + def scalaURL = "http://bog.us" + + override def scaladocSettings = { + val samplePath = getClass.getClassLoader.getResource("scala/Function1.class").getPath + val scalaLibPath = if(samplePath.contains("!")) { // in scala-library.jar + val scalaLibUri = samplePath.split("!")(0) + new URI(scalaLibUri).getPath + } else { // individual class files on disk + samplePath.replace('\\', '/').dropRight("scala/Function1.class".length) + } + s"-no-link-warnings -doc-external-doc $scalaLibPath#$scalaURL" + } + + def testModel(rootPackage: Package) { + import access._ + def showParents(e: MemberTemplateEntity): Unit = { + e.parentTypes.foreach(_._2.refEntity.foreach { + case (_, (LinkToExternalTpl(name, _, tpl), _)) => println(s"'$name' links to $tpl") + case (_, (Tooltip(name), _)) => println(s"'$name' no link!") + }) + } + + showParents(rootPackage._trait("Foo")) + showParents(rootPackage._class("Bar")) + } +} From 8e4c9aca230da0db82c6d8618f84a5c933934b01 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 3 Feb 2018 16:53:46 +1000 Subject: [PATCH 1210/2793] Avoid IIOBE in phase stack Flatten's info transform ends calls `enteringMixin(enteringFlaten(...))` all the way up the prefix, which violate my assumption that 128 recursive atPhase calls should enough for anyone when we encounter heavily nested code (such as that regrettably still emitted by the REPL when many imports precede the current line). This commit switches to using an ArrayBuffer that will grow as needed, and also disables the entire maintenance of the phase stack when logging is disabled. 
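To illustrate the failure mode (a hypothetical helper, assuming the usual `SymbolTable` context, not code from this change): every nested `enteringPhase` call pushes one frame onto the phase stack, so a long enough chain of re-entries overruns a fixed-size buffer.

```scala
// Hypothetical illustration: one stack frame per nested enteringPhase call.
def enteringPhases[T](phases: List[Phase])(op: => T): T = phases match {
  case p :: rest => enteringPhase(p)(enteringPhases(rest)(op))
  case Nil       => op
}
// With more than 128 nested frames the old fixed Array(128) threw an
// IndexOutOfBoundsException; a growable buffer avoids that, and the stack is
// only maintained at all when -Ylog is in use.
```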
--- src/compiler/scala/tools/nsc/Global.scala | 1 + .../scala/reflect/internal/SymbolTable.scala | 18 +++++++++++------- 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 9dc5b21f9521..748737d4ca38 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1098,6 +1098,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) var currentUnit: CompilationUnit = NoCompilationUnit val profiler: Profiler = Profiler(settings) + keepPhaseStack = settings.log.isSetByUser // used in sbt def uncheckedWarnings: List[(Position, String)] = reporting.uncheckedWarnings.map{case (pos, (msg, since)) => (pos, msg)} diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 19e9cc84abfb..f0b86d8cae53 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -11,6 +11,8 @@ import scala.annotation.elidable import scala.collection.mutable import util._ import java.util.concurrent.TimeUnit + +import scala.collection.mutable.ArrayBuffer import scala.reflect.internal.{TreeGen => InternalTreeGen} abstract class SymbolTable extends macros.Universe @@ -181,12 +183,11 @@ abstract class SymbolTable extends macros.Universe final val NoRunId = 0 // sigh, this has to be public or enteringPhase doesn't inline. - var phStack: Array[Phase] = new Array(128) - var phStackIndex = 0 + val phStack: ArrayBuffer[Phase] = new ArrayBuffer(128) private[this] var ph: Phase = NoPhase private[this] var per = NoPeriod - final def atPhaseStack: List[Phase] = List.tabulate(phStackIndex)(i => phStack(i)) + final def atPhaseStack: List[Phase] = phStack.toList final def phase: Phase = { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(statistics.phaseCounter) @@ -207,15 +208,18 @@ abstract class SymbolTable extends macros.Universe final def pushPhase(ph: Phase): Phase = { val current = phase phase = ph - phStack(phStackIndex) = ph - phStackIndex += 1 + if (keepPhaseStack) { + phStack += ph + } current } final def popPhase(ph: Phase) { - phStack(phStackIndex) = null - phStackIndex -= 1 + if (keepPhaseStack) { + phStack.remove(phStack.size) + } phase = ph } + var keepPhaseStack: Boolean = false /** The current compiler run identifier. */ def currentRunId: RunId From 5c7b14219b746812089c39876edaba9ee226a4bb Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 5 Feb 2018 08:58:08 -0800 Subject: [PATCH 1211/2793] Iterator.scanLeft is lazy at initial value Deliver the initial value before querying the iterator. --- src/library/scala/collection/Iterator.scala | 24 +++++++++------ .../junit/scala/collection/IteratorTest.scala | 29 +++++++++++++++++++ 2 files changed, 44 insertions(+), 9 deletions(-) diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index c43dd1711f95..3e865e851273 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -590,7 +590,7 @@ trait Iterator[+A] extends TraversableOnce[A] { } /** Produces a collection containing cumulative results of applying the - * operator going left to right. + * operator going left to right, including the initial value. 
* * $willNotTerminateInf * $orderDependent @@ -602,14 +602,20 @@ trait Iterator[+A] extends TraversableOnce[A] { * @note Reuse: $consumesAndProducesIterator */ def scanLeft[B](z: B)(op: (B, A) => B): Iterator[B] = new AbstractIterator[B] { - var hasNext = true - var elem = z - def next() = if (hasNext) { - val res = elem - if (self.hasNext) elem = op(elem, self.next()) - else hasNext = false - res - } else Iterator.empty.next() + private[this] var state = 0 // 1 consumed initial, 2 self.hasNext, 3 done + private[this] var accum = z + private[this] def gen() = { val res = op(accum, self.next()) ; accum = res ; res } + def hasNext = state match { + case 0 | 2 => true + case 3 => false + case _ => if (self.hasNext) { state = 2 ; true } else { state = 3 ; false } + } + def next() = state match { + case 0 => state = 1 ; accum + case 1 => gen() + case 2 => state = 1 ; gen() + case 3 => Iterator.empty.next() + } } /** Produces a collection containing cumulative results of applying the operator going right to left. diff --git a/test/junit/scala/collection/IteratorTest.scala b/test/junit/scala/collection/IteratorTest.scala index 6b28845112de..37b5092cb590 100644 --- a/test/junit/scala/collection/IteratorTest.scala +++ b/test/junit/scala/collection/IteratorTest.scala @@ -296,4 +296,33 @@ class IteratorTest { assertEquals(v2, v4) assertEquals(Some(v1), v2) } + // scala/bug#10709 + @Test def `scan is lazy enough`(): Unit = { + val results = collection.mutable.ListBuffer.empty[Int] + val it = new AbstractIterator[Int] { + var cur = 1 + val max = 3 + override def hasNext = { + results += -cur + cur < max + } + override def next() = { + val res = cur + results += -res + cur += 1 + res + } + } + val xy = it.scanLeft(10)((sum, x) => { + results += -(sum + x) + sum + x + }) + val scan = collection.mutable.ListBuffer.empty[Int] + for (i <- xy) { + scan += i + results += i + } + assertSameElements(List(10,11,13), scan) + assertSameElements(List(10,-1,-1,-11,11,-2,-2,-13,13,-3), results) + } } From ab9ad2097f69fa6111c6d921c57cd109cb5fbe06 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 5 Feb 2018 12:54:55 -0800 Subject: [PATCH 1212/2793] Test that ScriptEngine loads Previously, changes around class path handling and a previous bug with class loading may have conspired to break initialzing the ScriptEngine. 
--- test/files/run/t10488.scala | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 test/files/run/t10488.scala diff --git a/test/files/run/t10488.scala b/test/files/run/t10488.scala new file mode 100644 index 000000000000..205251b9d6e1 --- /dev/null +++ b/test/files/run/t10488.scala @@ -0,0 +1,13 @@ + + +import javax.script._ + +object Test { + def run() = { + val sem = new ScriptEngineManager() + val eng = sem.getEngineByName("scala") + assert(eng != null) + assert(eng.eval("42", eng.getContext).asInstanceOf[Int] == 42) + } + def main(args: Array[String]): Unit = run() +} From 3e74e03faab2ce95837456b97db7712d001c5d9e Mon Sep 17 00:00:00 2001 From: Piotr Kukielka Date: Thu, 8 Feb 2018 10:25:42 +0100 Subject: [PATCH 1213/2793] Add JMH benchmarks for distinct --- .../scala/collection/DistinctBenchmark.scala | 70 +++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 test/benchmarks/src/main/scala/scala/collection/DistinctBenchmark.scala diff --git a/test/benchmarks/src/main/scala/scala/collection/DistinctBenchmark.scala b/test/benchmarks/src/main/scala/scala/collection/DistinctBenchmark.scala new file mode 100644 index 000000000000..6f49a94c25c8 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/collection/DistinctBenchmark.scala @@ -0,0 +1,70 @@ +package scala.collection + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class DistinctBenchmark { + @Param(Array("0", "1", "2", "5", "10", "20", "50", "100", "1000")) + var size: Int = _ + + @Param(Array("List", "Vector")) + var collectionType: String = _ + + var distinctDataSet: Seq[String] = null + var lastDuplicatedDataSet: Seq[String] = null + var firstDuplicatedDataSet: Seq[String] = null + var interleavedDuplicationDataSet: Seq[String] = null + var sequentialDuplicationDataSet: Seq[String] = null + + @Setup(Level.Trial) def init(): Unit = { + val b1 = List.newBuilder[String] + val b2 = List.newBuilder[String] + 0 until size foreach { i => + b1 += i.toString + b2 += i.toString + b2 += i.toString + } + + val adjustCollectionType = collectionType match { + case "List" => (col: Seq[String]) => col.toList + case "Vector" => (col: Seq[String]) => col.toVector + } + + distinctDataSet = adjustCollectionType(b1.result()) + interleavedDuplicationDataSet = adjustCollectionType(b2.result()) + sequentialDuplicationDataSet = adjustCollectionType(distinctDataSet ++ distinctDataSet) + + if (size > 0) { + firstDuplicatedDataSet = adjustCollectionType(distinctDataSet.head +: distinctDataSet) + lastDuplicatedDataSet = adjustCollectionType(distinctDataSet :+ distinctDataSet.head) + } + } + + @Benchmark def testDistinct: Any = { + distinctDataSet.distinct + } + + @Benchmark def testFirstDuplicated: Any = { + firstDuplicatedDataSet.distinct + } + + @Benchmark def testLastDuplicated: Any = { + lastDuplicatedDataSet.distinct + } + + @Benchmark def testInterleavedDuplication: Any = { + interleavedDuplicationDataSet.distinct + } + + @Benchmark def testSequentialDuplication: Any = { + sequentialDuplicationDataSet.distinct + } +} From 153ccc27d73868dbf1f8adad5cc94a8e3cd6121a Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 9 Feb 2018 10:05:07 -0800 Subject: [PATCH 1214/2793] Warn when implicit is enclosing owner Maybe put it behind a flag? 
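The diff below does put it behind a flag, `-Ywarn-self-implicit`. The pattern it reports, in minimal form (see the test for more cases), is an implicit whose right-hand side's implicit search resolves back to the enclosing definition itself, which at runtime yields a default/null value or endless recursion rather than a useful implicit:

```scala
// Minimal examples of what -Ywarn-self-implicit flags; mirrors the test below.
implicit def c: Char   = implicitly[Char]    // resolves to `c` itself
implicit val s: String = implicitly[String]  // resolves to `s` itself
```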
--- .../scala/tools/nsc/settings/Warnings.scala | 2 ++ .../scala/tools/nsc/typechecker/Implicits.scala | 8 ++++++++ test/files/neg/implicitly-self.check | 15 +++++++++++++++ test/files/neg/implicitly-self.flags | 1 + test/files/neg/implicitly-self.scala | 12 ++++++++++++ 5 files changed, 38 insertions(+) create mode 100644 test/files/neg/implicitly-self.check create mode 100644 test/files/neg/implicitly-self.flags create mode 100644 test/files/neg/implicitly-self.scala diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index 66e51a02b3e6..c274687fd4f2 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -67,6 +67,8 @@ trait Warnings { val warnExtraImplicit = BooleanSetting("-Ywarn-extra-implicit", "Warn when more than one implicit parameter section is defined.") + val warnSelfImplicit = BooleanSetting("-Ywarn-self-implicit", "Warn when an implicit resolves to an enclosing self-definition.") + // Experimental lint warnings that are turned off, but which could be turned on programmatically. // They are not activated by -Xlint and can't be enabled on the command line because they are not // created using the standard factory methods. diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index b8bd86a709ea..bc6917ef34b6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -109,6 +109,14 @@ trait Implicits { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(findMemberImpl, findMemberStart) if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopCounter(subtypeImpl, subtypeStart) + if (result.isSuccess && settings.warnSelfImplicit && result.tree.symbol != null) { + val s = + if (result.tree.symbol.isAccessor) result.tree.symbol.accessed + else if (result.tree.symbol.isModule) result.tree.symbol.moduleClass + else result.tree.symbol + if (context.owner.hasTransOwner(s)) + context.warning(result.tree.pos, s"Implicit resolves to enclosing ${result.tree.symbol}") + } result } diff --git a/test/files/neg/implicitly-self.check b/test/files/neg/implicitly-self.check new file mode 100644 index 000000000000..d9b411ab67c2 --- /dev/null +++ b/test/files/neg/implicitly-self.check @@ -0,0 +1,15 @@ +implicitly-self.scala:5: warning: Implicit resolves to enclosing method c + implicit def c: Char = implicitly[Char] + ^ +implicitly-self.scala:6: warning: Implicit resolves to enclosing value s + implicit val s: String = implicitly[String] + ^ +implicitly-self.scala:8: warning: Implicit resolves to enclosing value t + def f = implicitly[Int] + ^ +implicitly-self.scala:11: warning: Implicit resolves to enclosing object tcString + implicit object tcString extends TC[String] { def ix = implicitly[TC[String]].ix + 1 } + ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+four warnings found +one error found diff --git a/test/files/neg/implicitly-self.flags b/test/files/neg/implicitly-self.flags new file mode 100644 index 000000000000..3561bb51ccd2 --- /dev/null +++ b/test/files/neg/implicitly-self.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Ywarn-self-implicit diff --git a/test/files/neg/implicitly-self.scala b/test/files/neg/implicitly-self.scala new file mode 100644 index 000000000000..8293b521e59b --- /dev/null +++ b/test/files/neg/implicitly-self.scala @@ -0,0 +1,12 @@ + +trait TC[T] { def ix: Int } + +object Test { + implicit def c: Char = implicitly[Char] + implicit val s: String = implicitly[String] + implicit val t: Int = { + def f = implicitly[Int] + f + } + implicit object tcString extends TC[String] { def ix = implicitly[TC[String]].ix + 1 } +} From a05cd477ea279f9214cfc371d22bc10517310d2e Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 10 Feb 2018 16:44:12 -0800 Subject: [PATCH 1215/2793] Restrict attachments for warnings Don't add attachments when not warning. Avoid warning when already in error. --- .../scala/tools/nsc/ast/parser/Parsers.scala | 3 +-- .../nsc/typechecker/TypeDiagnostics.scala | 6 +++--- .../scala/reflect/internal/TreeGen.scala | 21 +++++++++++-------- 3 files changed, 16 insertions(+), 14 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 0084c21e684d..b0f304c01434 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1965,8 +1965,7 @@ self => atPos(p.pos.start, p.pos.start, body.pos.end) { val t = Bind(name, body) body match { - case Ident(nme.WILDCARD) => t updateAttachment AtBoundIdentifierAttachment - case _ if !settings.warnUnusedPatVars => t updateAttachment AtBoundIdentifierAttachment + case Ident(nme.WILDCARD) if settings.warnUnusedPatVars => t updateAttachment AtBoundIdentifierAttachment case _ => t } } diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index dbfcfb1b031c..1dfdd77e1e0c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -502,14 +502,14 @@ trait TypeDiagnostics { && (sym.isTerm && qualifiesTerm(sym) || sym.isType && qualifiesType(sym)) ) - override def traverse(t: Tree): Unit = { + override def traverse(t: Tree): Unit = if (!t.isErrorTyped) { val sym = t.symbol var bail = false t match { case m: MemberDef if qualifies(sym) => t match { case ValDef(mods@_, name@_, tpt@_, rhs@_) if wasPatVarDef(t) => - if (!atBounded(t)) patvars += sym + if (settings.warnUnusedPatVars && !atBounded(t)) patvars += sym case DefDef(mods@_, name@_, tparams@_, vparamss, tpt@_, rhs@_) if !sym.isAbstract && !sym.isDeprecated && !sym.isMacro => if (sym.isPrimaryConstructor) for (cpa <- sym.owner.constrParamAccessors if cpa.isPrivateLocal) params += cpa @@ -705,7 +705,7 @@ trait TypeDiagnostics { context.warning(s.pos, s"parameter $s in ${s.owner} is never used") } } - def apply(unit: CompilationUnit): Unit = if (warningsEnabled && !unit.isJava) { + def apply(unit: CompilationUnit): Unit = if (warningsEnabled && !unit.isJava && !context.reporter.hasErrors) { val body = unit.body // TODO the message should distinguish whether the unusage is before or after macro expansion. 
settings.warnMacros.value match { diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index 25dfe73b0037..3ca58a7e7b1d 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -732,16 +732,19 @@ abstract class TreeGen { def mkPatDef(pat: Tree, rhs: Tree)(implicit fresh: FreshNameCreator): List[ValDef] = mkPatDef(Modifiers(0), pat, rhs) - private def cpAtBoundAttachment(from: Tree, to: ValDef): to.type = - if (from.hasAttachment[AtBoundIdentifierAttachment.type]) to.updateAttachment(AtBoundIdentifierAttachment) else to - private def cpPatVarDefAttachments(from: Tree, to: ValDef): to.type = - cpAtBoundAttachment(from, to).updateAttachment(PatVarDefAttachment) + private def propagateAtBoundAttachment(from: Tree, to: ValDef): to.type = + if (isPatVarWarnable && from.hasAttachment[AtBoundIdentifierAttachment.type]) to.updateAttachment(AtBoundIdentifierAttachment) + else to + + // Keep marker for `x@_`, add marker for `val C(x) = ???` to distinguish from ordinary `val x = ???`. + private def propagatePatVarDefAttachments(from: Tree, to: ValDef): to.type = + propagateAtBoundAttachment(from, to).updateAttachment(PatVarDefAttachment) /** Create tree for pattern definition */ def mkPatDef(mods: Modifiers, pat: Tree, rhs: Tree)(implicit fresh: FreshNameCreator): List[ValDef] = matchVarPattern(pat) match { case Some((name, tpt)) => List(atPos(pat.pos union rhs.pos) { - cpAtBoundAttachment(pat, ValDef(mods, name.toTermName, tpt, rhs)) + propagateAtBoundAttachment(pat, ValDef(mods, name.toTermName, tpt, rhs)) }) case None => @@ -785,7 +788,7 @@ abstract class TreeGen { vars match { case List((vname, tpt, pos, original)) => List(atPos(pat.pos union pos union rhs.pos) { - cpPatVarDefAttachments(original, ValDef(mods, vname.toTermName, tpt, matchExpr)) + propagatePatVarDefAttachments(original, ValDef(mods, vname.toTermName, tpt, matchExpr)) }) case _ => val tmp = freshTermName() @@ -797,7 +800,7 @@ abstract class TreeGen { var cnt = 0 val restDefs = for ((vname, tpt, pos, original) <- vars) yield atPos(pos) { cnt += 1 - cpPatVarDefAttachments(original, ValDef(mods, vname.toTermName, tpt, Select(Ident(tmp), newTermName("_" + cnt)))) + propagatePatVarDefAttachments(original, ValDef(mods, vname.toTermName, tpt, Select(Ident(tmp), TermName("_" + cnt)))) } firstDef :: restDefs } @@ -906,8 +909,8 @@ abstract class TreeGen { case Ident(name) if treeInfo.isVarPattern(tree) && name != nme.WILDCARD => atPos(tree.pos) { val b = Bind(name, atPos(tree.pos.focus) (Ident(nme.WILDCARD))) - if (!forFor && isPatVarWarnable) b - else b updateAttachment AtBoundIdentifierAttachment + if (forFor && isPatVarWarnable) b updateAttachment AtBoundIdentifierAttachment + else b } case Typed(id @ Ident(name), tpt) if treeInfo.isVarPattern(id) && name != nme.WILDCARD => atPos(tree.pos.withPoint(id.pos.point)) { From e8d44c73ce5089051bd32b3766fe20fff3e17e44 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 15 Feb 2018 12:51:22 +1000 Subject: [PATCH 1216/2793] Fix off by one error regression under -Ylog --- src/reflect/scala/reflect/internal/SymbolTable.scala | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index f0b86d8cae53..0d4a3500ce0a 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -182,8 +182,7 @@ 
abstract class SymbolTable extends macros.Universe type RunId = Int final val NoRunId = 0 - // sigh, this has to be public or enteringPhase doesn't inline. - val phStack: ArrayBuffer[Phase] = new ArrayBuffer(128) + private val phStack: collection.mutable.ArrayStack[Phase] = new collection.mutable.ArrayStack() private[this] var ph: Phase = NoPhase private[this] var per = NoPeriod @@ -209,13 +208,13 @@ abstract class SymbolTable extends macros.Universe val current = phase phase = ph if (keepPhaseStack) { - phStack += ph + phStack.push(ph) } current } final def popPhase(ph: Phase) { if (keepPhaseStack) { - phStack.remove(phStack.size) + phStack.pop() } phase = ph } From bb80fccdfea4772a8a443b14b4168ee464a4dca2 Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Mon, 12 Feb 2018 22:41:30 +0000 Subject: [PATCH 1217/2793] Several changes to enable multi-threaded post-processing and class writing for GenBCode indyLamdaMethods - Optimise use of indyLamdaMethods map Use a java concurrent map for performance Provide API to perform conditional operation based on presence ClassBType Rework ClassBType to enable parallelism, move logic in the companion Rewrite ClasfileWriter, specialising for JAR/dir, and providing wrappers for the less common cases Rework directory classfile writing to be threadsafe and NIO based Tune the NIO flags for the common case which cannot be handled in a single call in windows (create and truncate) PerRunInit make PerRunInit theadsafe BackendUtils Make some data structure/APIs theadsafe (indyLambdaMethods and maxLocalStackComputed) Settings add extra parameter -Ybackend-parallelism .. "maximum worker threads for backend" add a GeneratedClassHandler as a multi-threaded delegate that allows the minimal set in post-processing steps to be performed warn if statistics is used when -Ybackend-parallelism > 1 as stats are not threadsafe add parameter -Yjar-compression-level to allow the user to adjust the jar file compression Classname case insensitivity Improve classname case insensitivity checking Move case insensitive check to back end Make check threadsafe Remove double map access for case insensitivity javaDefinedClasses use Lazy, optimise calculation --- .../scala/tools/nsc/backend/jvm/BTypes.scala | 4 +- .../nsc/backend/jvm/ClassfileWriter.scala | 322 ++++++++++++------ .../scala/tools/nsc/backend/jvm/CodeGen.scala | 60 ++-- .../tools/nsc/backend/jvm/GenBCode.scala | 30 +- .../backend/jvm/GeneratedClassHandler.scala | 295 ++++++++++++++++ .../tools/nsc/backend/jvm/PerRunInit.scala | 1 - .../tools/nsc/backend/jvm/PostProcessor.scala | 110 +++--- .../jvm/PostProcessorFrontendAccess.scala | 63 ++-- .../nsc/backend/jvm/ThreadFactories.scala | 20 ++ .../backend/jvm/analysis/BackendUtils.scala | 2 +- .../backend/jvm/opt/ByteCodeRepository.scala | 11 +- .../tools/nsc/backend/jvm/opt/CallGraph.scala | 3 + .../tools/nsc/backend/jvm/opt/LocalOpt.scala | 2 - ...ncHelper.scala => ThreadPoolFactory.scala} | 73 ++-- .../tools/nsc/settings/ScalaSettings.scala | 10 +- test/files/run/t5717.scala | 6 +- 16 files changed, 740 insertions(+), 272 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala create mode 100644 src/compiler/scala/tools/nsc/backend/jvm/ThreadFactories.scala rename src/compiler/scala/tools/nsc/profile/{AsyncHelper.scala => ThreadPoolFactory.scala} (64%) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index e78f0a945d4b..5f870ba5c448 100644 --- 
a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -1005,7 +1005,6 @@ abstract class BTypes { * to be executed when it's forced. */ private final class LazyWithLock[T <: AnyRef](t: () => T) extends AbstractLazy[T](t) { - def init(t: () => T): T = frontendSynch { if (value == null) value = t() value @@ -1017,7 +1016,6 @@ abstract class BTypes { * to be executed when it's forced. */ private final class LazyWithoutLock[T <: AnyRef](t: () => T) extends AbstractLazy[T](t) { - def init(t: () => T): T = this.synchronized { if (value == null) value = t() value @@ -1057,7 +1055,7 @@ abstract class BTypes { } } - def reInitialize(): Unit = frontendSynch{ + def reInitialize(): Unit = frontendSynch { v = null.asInstanceOf[T] isInit = false } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriter.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriter.scala index a7b32b597eeb..8ead9856f512 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriter.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriter.scala @@ -1,132 +1,264 @@ package scala.tools.nsc.backend.jvm -import java.io.{DataOutputStream, IOException} +import java.io.{BufferedOutputStream, DataOutputStream, FileOutputStream, IOException} +import java.nio.ByteBuffer +import java.nio.channels.FileChannel import java.nio.charset.StandardCharsets -import java.nio.file.Files -import java.util.jar.Attributes.Name +import java.nio.file.attribute.FileAttribute +import java.nio.file.{FileAlreadyExistsException, Files, Path, Paths, StandardOpenOption} +import java.util +import java.util.concurrent.ConcurrentHashMap +import java.util.zip.{CRC32, Deflater, ZipEntry, ZipOutputStream} import scala.reflect.internal.util.{NoPosition, Statistics} -import scala.reflect.io._ +import scala.tools.nsc.Global import scala.tools.nsc.backend.jvm.BTypes.InternalName -import scala.tools.nsc.io.{AbstractFile, Jar, JarWriter} - -class ClassfileWriter(frontendAccess: PostProcessorFrontendAccess, - statistics: Statistics with BackendStats) { - import frontendAccess.{backendReporting, compilerSettings} - - // if non-null, asm text files are written to this directory - private val asmOutputDir: AbstractFile = getDirectoryOrNull(compilerSettings.genAsmpDirectory) - - // if non-null, classfiles are additionally written to this directory - private val dumpOutputDir: AbstractFile = getDirectoryOrNull(compilerSettings.dumpClassesDirectory) - - // if non-null, classfiles are written to a jar instead of the output directory - private val jarWriter: JarWriter = compilerSettings.singleOutputDirectory match { - case Some(f) if f hasExtension "jar" => - // If no main class was specified, see if there's only one - // entry point among the classes going into the jar. - val mainClass = compilerSettings.mainClass match { - case c @ Some(m) => - backendReporting.log(s"Main-Class was specified: $m") - c - - case None => frontendAccess.getEntryPoints match { - case Nil => - backendReporting.log("No Main-Class designated or discovered.") - None - case name :: Nil => - backendReporting.log(s"Unique entry point: setting Main-Class to $name") - Some(name) - case names => - backendReporting.log(s"No Main-Class due to multiple entry points:\n ${names.mkString("\n ")}") - None +import scala.tools.nsc.io.AbstractFile + +/** + * The interface to writing classfiles. 
GeneratedClassHandler calls these methods to generate the + * directory and files that are created, and eventually calls `close` when the writing is complete. + * + * The companion object is responsible for constructing a appropriate and optimal implementation for + * the supplied settings. + * + * Operations are threadsafe. + */ +sealed trait ClassfileWriter { + /** + * Write a classfile + */ + def write(unit: SourceUnit, name: InternalName, bytes: Array[Byte]) + + /** + * Close the writer. Behavior is undefined after a call to `close`. + */ + def close() : Unit +} + +object ClassfileWriter { + private def getDirectory(dir: String): Path = Paths.get(dir) + + def apply(global: Global): ClassfileWriter = { + //Note dont import global._ - its too easy to leak non threadsafe structures + import global.{cleanup, genBCode, log, settings, statistics} + def jarManifestMainClass: Option[String] = settings.mainClass.valueSetByUser.orElse { + cleanup.getEntryPoints match { + case List(name) => Some(name) + case es => + if (es.isEmpty) log("No Main-Class designated or discovered.") + else log(s"No Main-Class due to multiple entry points:\n ${es.mkString("\n ")}") + None + } + } + + def singleWriter(file: AbstractFile): UnderlyingClassfileWriter = { + if (file hasExtension "jar") { + new JarClassWriter(file, jarManifestMainClass, settings.YjarCompressionLevel.value) + } else if (file.isVirtual) { + new VirtualClassWriter() + } else if (file.isDirectory) { + new DirClassWriter(genBCode.postProcessorFrontendAccess) + } else { + throw new IllegalStateException(s"don't know how to handle an output of $file [${file.getClass}]") + } + } + + val basicClassWriter = settings.outputDirs.getSingleOutput match { + case Some(dest) => singleWriter(dest) + case None => + val distinctOutputs: Set[AbstractFile] = settings.outputDirs.outputs.map(_._2)(scala.collection.breakOut) + if (distinctOutputs.size == 1) singleWriter(distinctOutputs.head) + else new MultiClassWriter(distinctOutputs.map { output: AbstractFile => output -> singleWriter(output) }(scala.collection.breakOut)) + } + + val withAdditionalFormats = if (settings.Ygenasmp.valueSetByUser.isEmpty && settings.Ydumpclasses.valueSetByUser.isEmpty) basicClassWriter else { + val asmp = settings.Ygenasmp.valueSetByUser map { dir: String => new AsmClassWriter(getDirectory(dir), genBCode.postProcessorFrontendAccess) } + val dump = settings.Ydumpclasses.valueSetByUser map { dir: String => new DumpClassWriter(getDirectory(dir), genBCode.postProcessorFrontendAccess) } + new AllClassWriter(basicClassWriter, asmp, dump) + } + + if (statistics.enabled) new WithStatsWriter(statistics, withAdditionalFormats) else withAdditionalFormats + } + + /** + * a trait to specify the Classfilewriters that actually write, rather than layer functionality + */ + sealed trait UnderlyingClassfileWriter extends ClassfileWriter + + private final class JarClassWriter(file: AbstractFile, mainClass: Option[String], compressionLevel: Int) extends UnderlyingClassfileWriter { + //keep these imports local - avoid confusion with scala naming + import java.util.jar.Attributes.Name + import java.util.jar.{JarOutputStream, Manifest} + val storeOnly = compressionLevel == Deflater.NO_COMPRESSION + + val jarWriter: JarOutputStream = { + val manifest = new Manifest() + mainClass foreach { c => manifest.getMainAttributes.put(Name.MAIN_CLASS, c) } + val jar = new JarOutputStream(new BufferedOutputStream(new FileOutputStream(file.file), 64000), manifest) + jar.setLevel(compressionLevel) + if (storeOnly) 
jar.setMethod(ZipOutputStream.STORED) + jar + } + + lazy val crc = new CRC32 + + override def write(unit: SourceUnit, className: InternalName, bytes: Array[Byte]): Unit = this.synchronized { + val path = className + ".class" + val entry = new ZipEntry(path) + if (storeOnly) { + crc.reset() + crc.update(bytes) + entry.setCrc(crc.getValue) + } + jarWriter.putNextEntry(entry) + try jarWriter.write(bytes, 0, bytes.length) + finally jarWriter.flush() + } + + override def close(): Unit = this.synchronized(jarWriter.close()) + } + + private sealed class DirClassWriter(frontendAccess: PostProcessorFrontendAccess) extends UnderlyingClassfileWriter { + val builtPaths = new ConcurrentHashMap[Path, java.lang.Boolean]() + val noAttributes = Array.empty[FileAttribute[_]] + + def ensureDirForPath(baseDir: Path, filePath: Path): Unit = { + import java.lang.Boolean.TRUE + val parent = filePath.getParent + if (!builtPaths.containsKey(parent)) { + try Files.createDirectories(parent, noAttributes: _*) + catch { + case e: FileAlreadyExistsException => + throw new FileConflictException(s"Can't create directory $parent; there is an existing (non-directory) file in its path", e) + } + builtPaths.put(baseDir, TRUE) + var current = parent + while ((current ne null) && (null ne builtPaths.put(current, TRUE))) { + current = current.getParent } } - val jarMainAttrs = mainClass.map(c => Name.MAIN_CLASS -> c).toList - new Jar(f.file).jarWriter(jarMainAttrs: _*) + } + + protected def getPath(unit: SourceUnit, className: InternalName) = unit.outputPath.resolve(className + ".class") + + protected def formatData(rawBytes: Array[Byte]) = rawBytes - case _ => null + protected def qualifier: String = "" + + // the common case is that we are are creating a new file, and on MS Windows the create and truncate is expensive + // because there is not an options in the windows API that corresponds to this so the truncate is applied as a separate call + // even if the file is new. 
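The surrounding comment describes the file-creation strategy used here: open with CREATE_NEW first (the common case, which avoids the truncate call that is costly on Windows), and only when the file already exists reopen it with CREATE plus TRUNCATE_EXISTING. The following is a standalone sketch of that idiom using only java.nio, not the compiler's code:

```
import java.nio.ByteBuffer
import java.nio.channels.FileChannel
import java.nio.file.{FileAlreadyExistsException, Path, StandardOpenOption}
import java.util

object WriteNewOrTruncate {
  // Fast path: fails if the file already exists, so no separate truncate is needed.
  private val fastOptions     = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE)
  // Fallback for the rare case that the file is already there.
  private val fallbackOptions = util.EnumSet.of(
    StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING)

  def write(path: Path, bytes: Array[Byte]): Unit = {
    val channel =
      try FileChannel.open(path, fastOptions)
      catch { case _: FileAlreadyExistsException => FileChannel.open(path, fallbackOptions) }
    try channel.write(ByteBuffer.wrap(bytes))
    finally channel.close()
  }
}
```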
+ // as this is rare, its best to always try to create a new file, and it that fails, then open with truncate if that fails + + private val fastOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE) + private val fallbackOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING) + + override def write(unit: SourceUnit, className: InternalName, rawBytes: Array[Byte]): Unit = try { + val path = getPath(unit, className) + val bytes = formatData(rawBytes) + ensureDirForPath(unit.outputPath, path) + val os = try FileChannel.open(path, fastOpenOptions) + catch { + case _: FileAlreadyExistsException => FileChannel.open(path, fallbackOpenOptions) + } + + os.write(ByteBuffer.wrap(bytes), 0L) + os.close() + } catch { + case e: FileConflictException => + frontendAccess.backendReporting.error(NoPosition, s"error writing $className$qualifier: ${e.getMessage}") + case e: java.nio.file.FileSystemException => + if (frontendAccess.compilerSettings.debug) + e.printStackTrace() + frontendAccess.backendReporting.error(NoPosition, s"error writing $className$qualifier: ${e.getClass.getName} ${e.getMessage}") + + } + override def close(): Unit = () } - private def getDirectoryOrNull(dir: Option[String]): AbstractFile = - dir.map(d => new PlainDirectory(Directory(Path(d)))).orNull + private final class AsmClassWriter( + asmOutputPath: Path, + frontendAccess: PostProcessorFrontendAccess) + extends DirClassWriter(frontendAccess) { + override protected def getPath(unit: SourceUnit, className: InternalName) = asmOutputPath.resolve(className + ".asmp") - private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { - if (base.file != null) { - fastGetFile(base, clsName, suffix) - } else { + override protected def formatData(rawBytes: Array[Byte]) = AsmUtils.textify(AsmUtils.readClass(rawBytes)).getBytes(StandardCharsets.UTF_8) + + override protected def qualifier: String = " [for asmp]" + } + + private final class DumpClassWriter( + dumpOutputPath: Path, + frontendAccess: PostProcessorFrontendAccess) + extends DirClassWriter(frontendAccess) { + override protected def getPath(unit: SourceUnit, className: InternalName) = dumpOutputPath.resolve(className + ".class") + + override protected def qualifier: String = " [for dump]" + } + + private final class VirtualClassWriter() extends UnderlyingClassfileWriter { + private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { def ensureDirectory(dir: AbstractFile): AbstractFile = if (dir.isDirectory) dir - else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory", dir) + else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory") + var dir = base val pathParts = clsName.split("[./]").toList for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part ensureDirectory(dir) fileNamed pathParts.last + suffix } - } - private def fastGetFile(base: AbstractFile, clsName: String, suffix: String) = { - val index = clsName.lastIndexOf('/') - val (packageName, simpleName) = if (index > 0) { - (clsName.substring(0, index), clsName.substring(index + 1)) - } else ("", clsName) - val directory = base.file.toPath.resolve(packageName) - new PlainNioFile(directory.resolve(simpleName + suffix)) - } - - private def writeBytes(outFile: AbstractFile, bytes: Array[Byte]): Unit = { - if (outFile.file != null) { - val outPath = outFile.file.toPath - 
try Files.write(outPath, bytes) - catch { - case _: java.nio.file.NoSuchFileException => - Files.createDirectories(outPath.getParent) - Files.write(outPath, bytes) - } - } else { + private def writeBytes(outFile: AbstractFile, bytes: Array[Byte]): Unit = { val out = new DataOutputStream(outFile.bufferedOutput) try out.write(bytes, 0, bytes.length) finally out.close() } - } - def write(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): Unit = try { - val writeStart = statistics.startTimer(statistics.bcodeWriteTimer) - if (jarWriter == null) { - val outFolder = compilerSettings.outputDirectoryFor(sourceFile) - val outFile = getFile(outFolder, className, ".class") + override def write(unit: SourceUnit, className: InternalName, bytes: Array[Byte]): Unit = { + val outFile = getFile(unit.outputDir, className, ".class") writeBytes(outFile, bytes) - } else { - val path = className + ".class" - val out = jarWriter.newOutputStream(path) - try out.write(bytes, 0, bytes.length) - finally out.flush() } - statistics.stopTimer(statistics.bcodeWriteTimer, writeStart) - if (asmOutputDir != null) { - val asmpFile = getFile(asmOutputDir, className, ".asmp") - val asmpString = AsmUtils.textify(AsmUtils.readClass(bytes)) - writeBytes(asmpFile, asmpString.getBytes(StandardCharsets.UTF_8)) + override def close(): Unit = () + } + + private final class MultiClassWriter(underlying: Map[AbstractFile, UnderlyingClassfileWriter]) extends ClassfileWriter { + private def getUnderlying(unit: SourceUnit) = underlying.getOrElse(unit.outputDir, { + throw new Exception(s"Cannot determine output directory for ${unit.sourceFile} with output ${unit.outputDir}. Configured outputs are ${underlying.keySet}") + }) + + override def write(unit: SourceUnit, className: InternalName, bytes: Array[Byte]): Unit = { + getUnderlying(unit).write(unit, className, bytes) + } + + override def close(): Unit = underlying.values.foreach(_.close()) + } + + private final class AllClassWriter(basic: ClassfileWriter, asmp: Option[UnderlyingClassfileWriter], dump: Option[UnderlyingClassfileWriter]) extends ClassfileWriter { + override def write(unit: SourceUnit, className: InternalName, bytes: Array[Byte]): Unit = { + basic.write(unit, className, bytes) + asmp.foreach(_.write(unit, className, bytes)) + dump.foreach(_.write(unit, className, bytes)) } - if (dumpOutputDir != null) { - val dumpFile = getFile(dumpOutputDir, className, ".class") - writeBytes(dumpFile, bytes) + override def close(): Unit = { + basic.close() + asmp.foreach(_.close()) + dump.foreach(_.close()) } - } catch { - case e: FileConflictException => - backendReporting.error(NoPosition, s"error writing $className: ${e.getMessage}") - case e: java.nio.file.FileSystemException => - if (compilerSettings.debug) - e.printStackTrace() - backendReporting.error(NoPosition, s"error writing $className: ${e.getClass.getName} ${e.getMessage}") } - def close(): Unit = { - if (jarWriter != null) jarWriter.close() + private final class WithStatsWriter(statistics: Statistics with BackendStats, underlying: ClassfileWriter) extends ClassfileWriter { + override def write(unit: SourceUnit, className: InternalName, bytes: Array[Byte]): Unit = { + val snap = statistics.startTimer(statistics.bcodeWriteTimer) + underlying.write(unit, className, bytes) + statistics.stopTimer(statistics.bcodeWriteTimer, snap) + } + + override def close(): Unit = underlying.close() } } /** Can't output a file due to the state of the file system. 
*/ -class FileConflictException(msg: String, val file: AbstractFile) extends IOException(msg) +class FileConflictException(msg: String, cause:Throwable = null) extends IOException(msg, cause) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala index 22bb904b3e82..34a68869b6bf 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala @@ -1,7 +1,8 @@ package scala.tools.nsc package backend.jvm -import scala.collection.mutable +import scala.collection.mutable.ListBuffer +import scala.reflect.internal.util.Statistics import scala.tools.asm.tree.ClassNode abstract class CodeGen[G <: Global](val global: G) extends PerRunInit { @@ -9,29 +10,39 @@ abstract class CodeGen[G <: Global](val global: G) extends PerRunInit { import global._ import bTypes._ - - private val caseInsensitively = perRunCaches.newMap[String, Symbol]() + import genBCode.generatedClassHandler // TODO: do we really need a new instance per run? Is there state that depends on the compiler frontend (symbols, types, settings)? private[this] lazy val mirrorCodeGen: LazyVar[CodeGenImpl.JMirrorBuilder] = perRunLazy(this)(new CodeGenImpl.JMirrorBuilder()) private[this] lazy val beanInfoCodeGen: LazyVar[CodeGenImpl.JBeanInfoBuilder] = perRunLazy(this)(new CodeGenImpl.JBeanInfoBuilder()) - def genUnit(unit: CompilationUnit): List[GeneratedClass] = { - val res = mutable.ListBuffer.empty[GeneratedClass] + /** + * Generate ASM ClassNodes for classes found in a compilation unit. The resulting classes are + * passed to the `genBCode.generatedClassHandler`. + */ + def genUnit(unit: CompilationUnit): Unit = { + val generatedClasses = ListBuffer.empty[GeneratedClass] + val sourceFile = unit.source def genClassDef(cd: ClassDef): Unit = try { val sym = cd.symbol - val sourceFile = unit.source.file - res += GeneratedClass(genClass(cd, unit), sourceFile, isArtifact = false) + val position = sym.pos + val fullSymbolName = sym.javaClassName + val mainClassNode = genClass(cd, unit) + generatedClasses += GeneratedClass(mainClassNode, fullSymbolName, position, sourceFile, isArtifact = false) if (bTypes.isTopLevelModuleClass(sym)) { - if (sym.companionClass == NoSymbol) - res += GeneratedClass(genMirrorClass(sym, unit), sourceFile, isArtifact = true) + if (sym.companionClass == NoSymbol) { + val mirrorClassNode = genMirrorClass(sym, unit) + generatedClasses += GeneratedClass(mirrorClassNode, fullSymbolName, position, sourceFile, isArtifact = true) + } else log(s"No mirror class for module with linked class: ${sym.fullName}") } - if (sym hasAnnotation coreBTypes.BeanInfoAttr) - res += GeneratedClass(genBeanInfoClass(cd, unit), sourceFile, isArtifact = true) + if (sym hasAnnotation coreBTypes.BeanInfoAttr) { + val beanClassNode = genBeanInfoClass(cd, unit) + generatedClasses += GeneratedClass(beanClassNode, fullSymbolName, position, sourceFile, isArtifact = true) + } } catch { case ex: Throwable => ex.printStackTrace() @@ -41,16 +52,19 @@ abstract class CodeGen[G <: Global](val global: G) extends PerRunInit { def genClassDefs(tree: Tree): Unit = tree match { case EmptyTree => () case PackageDef(_, stats) => stats foreach genClassDefs - case cd: ClassDef => genClassDef(cd) + case cd: ClassDef => frontendAccess.frontendSynch(genClassDef(cd)) } - genClassDefs(unit.body) - res.toList + statistics.timed(statistics.bcodeGenStat) { + genClassDefs(unit.body) + } + + 
generatedClassHandler.process(GeneratedCompilationUnit(unit.source.file, generatedClasses.toList)) } def genClass(cd: ClassDef, unit: CompilationUnit): ClassNode = { - warnCaseInsensitiveOverwrite(cd) addSbtIClassShim(cd) + // TODO: do we need a new builder for each class? could we use one per run? or one per Global compiler instance? val b = new CodeGenImpl.SyncAndTryBuilder(unit) b.genPlainClass(cd) @@ -66,22 +80,6 @@ abstract class CodeGen[G <: Global](val global: G) extends PerRunInit { beanInfoCodeGen.get.genBeanInfoClass(sym, unit, CodeGenImpl.fieldSymbols(sym), CodeGenImpl.methodSymbols(cd)) } - private def warnCaseInsensitiveOverwrite(cd: ClassDef): Unit = { - val sym = cd.symbol - // GenASM checks this before classfiles are emitted, https://github.com/scala/scala/commit/e4d1d930693ac75d8eb64c2c3c69f2fc22bec739 - val lowercaseJavaClassName = sym.javaClassName.toLowerCase - caseInsensitively.get(lowercaseJavaClassName) match { - case None => - caseInsensitively.put(lowercaseJavaClassName, sym) - case Some(dupClassSym) => - reporter.warning( - sym.pos, - s"Class ${sym.javaClassName} differs only in case from ${dupClassSym.javaClassName}. " + - "Such classes will overwrite one another on case-insensitive filesystems." - ) - } - } - private def addSbtIClassShim(cd: ClassDef): Unit = { // shim for SBT, see https://github.com/sbt/sbt/issues/2076 // TODO put this closer to classfile writing once we have closure elimination diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala index 677756bbb893..5e70220262c5 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala @@ -20,9 +20,10 @@ abstract class GenBCode extends SubComponent { val codeGen: CodeGen[global.type] = new { val bTypes: self.bTypes.type = self.bTypes } with CodeGen[global.type](global) - val postProcessor: PostProcessor { val bTypes: self.bTypes.type } = new { - val bTypes: self.bTypes.type = self.bTypes - } with PostProcessor(statistics) + val postProcessor: PostProcessor { val bTypes: self.bTypes.type } = new { val bTypes: self.bTypes.type = self.bTypes } with PostProcessor(statistics) + + // re-initialized per run, as it depends on compiler settings that may change + var generatedClassHandler: GeneratedClassHandler = _ val phaseName = "jvm" @@ -33,29 +34,17 @@ abstract class GenBCode extends SubComponent { override val erasedTypes = true - private val globalOptsEnabled = { - import postProcessorFrontendAccess._ - compilerSettings.optInlinerEnabled || compilerSettings.optClosureInvocations - } - - def apply(unit: CompilationUnit): Unit = { - val generated = statistics.timed(bcodeGenStat) { - codeGen.genUnit(unit) - } - if (globalOptsEnabled) postProcessor.generatedClasses ++= generated - else postProcessor.postProcessAndSendToDisk(generated) - } + def apply(unit: CompilationUnit): Unit = codeGen.genUnit(unit) override def run(): Unit = { statistics.timed(bcodeTimer) { try { initialize() super.run() // invokes `apply` for each compilation unit - if (globalOptsEnabled) postProcessor.postProcessAndSendToDisk(postProcessor.generatedClasses) + generatedClassHandler.complete() } finally { - // When writing to a jar, we need to close the jarWriter. Since we invoke the postProcessor - // multiple times if (!globalOptsEnabled), we have to do it here at the end. 
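The control flow being introduced here, in rough outline: the jvm phase's `apply` hands each compilation unit to a handler as soon as it is generated, and `run` finishes the outstanding work and closes the writer in a finally block. The sketch below is a simplified, hypothetical shape of that hand-off (UnitHandler and runBackend are invented names, not the actual GeneratedClassHandler API):

```
object BackendLifecycle {
  trait UnitHandler {
    def process(unitName: String): Unit // may queue work onto background threads
    def complete(): Unit                // block until all queued units are handled
    def close(): Unit                   // release writers, e.g. an open jar
  }

  def runBackend(units: List[String], handler: UnitHandler): Unit =
    try {
      units.foreach(handler.process) // one call per compilation unit, like `apply`
      handler.complete()             // end of the run: wait for outstanding work
    } finally {
      handler.close()                // always close, as the jarWriter comment notes
    }
}
```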
- postProcessor.classfileWriter.get.close() + // When writing to a jar, we need to close the jarWriter. + generatedClassHandler.close() } } } @@ -71,7 +60,8 @@ abstract class GenBCode extends SubComponent { codeGen.initialize() postProcessorFrontendAccess.initialize() postProcessor.initialize() - statistics.stopTimer(bcodeInitTimer, initStart) + generatedClassHandler = GeneratedClassHandler(global) + statistics.stopTimer(statistics.bcodeInitTimer, initStart) } } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala new file mode 100644 index 000000000000..72b24c526e04 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala @@ -0,0 +1,295 @@ +package scala.tools.nsc +package backend.jvm + +import java.util.concurrent.ThreadPoolExecutor.CallerRunsPolicy +import java.util.concurrent._ + +import scala.collection.mutable.ListBuffer +import scala.concurrent.duration.Duration +import scala.concurrent.{Await, ExecutionContext, ExecutionContextExecutor, Future, Promise} +import scala.reflect.internal.util.{NoPosition, Position, SourceFile} +import scala.tools.nsc.backend.jvm.PostProcessorFrontendAccess.BackendReporting +import scala.tools.nsc.io.AbstractFile +import scala.tools.nsc.profile.ThreadPoolFactory +import scala.util.control.NonFatal + +/** + * Interface to handle post-processing (see [[PostProcessor]]) and classfile writing of generated + * classes, potentially in parallel. + */ +private[jvm] sealed trait GeneratedClassHandler { + val postProcessor: PostProcessor + + /** + * Pass the result of code generation for a compilation unit to this handler for post-processing + */ + def process(unit: GeneratedCompilationUnit) + + /** + * If running in parallel, block until all generated classes are handled + */ + def complete(): Unit + + /** + * Invoked at the end of the jvm phase + */ + def close(): Unit +} + +private[jvm] object GeneratedClassHandler { + def apply(global: Global): GeneratedClassHandler = { + import global._ + import genBCode.postProcessor + + val cfWriter = ClassfileWriter(global) + + val unitInfoLookup = settings.outputDirs.getSingleOutput match { + case Some(dir) => new SingleUnitInfo(postProcessor.bTypes.frontendAccess, dir) + case None => new LookupUnitInfo(postProcessor.bTypes.frontendAccess) + } + val handler = settings.YaddBackendThreads.value match { + case 1 => + new SyncWritingClassHandler(unitInfoLookup, postProcessor, cfWriter) + + case maxThreads => + if (global.statistics.enabled) + global.reporter.warning(global.NoPosition, "jvm statistics are not reliable with multi-threaded jvm class writing") + val additionalThreads = maxThreads -1 + // the queue size is taken to be large enough to ensure that the a 'CallerRun' will not take longer to + // run that it takes to exhaust the queue for the backend workers + // when the queue is full, the main thread will no some background work + // so this provides back-pressure + val queueSize = if (settings.YmaxQueue.isSetByUser) settings.YmaxQueue.value else maxThreads * 2 + val threadPoolFactory = ThreadPoolFactory(global, currentRun.jvmPhase) + val javaExecutor = threadPoolFactory.newBoundedQueueFixedThreadPool(additionalThreads, queueSize, new CallerRunsPolicy, "non-ast") + val execInfo = ExecutorServiceInfo(additionalThreads, javaExecutor, javaExecutor.getQueue) + new AsyncWritingClassHandler(unitInfoLookup, postProcessor, cfWriter, execInfo) + } + + if (settings.optInlinerEnabled || 
settings.optClosureInvocations) + new GlobalOptimisingGeneratedClassHandler(postProcessor, handler) + else handler + } + + private class GlobalOptimisingGeneratedClassHandler( + val postProcessor: PostProcessor, + underlying: WritingClassHandler) + extends GeneratedClassHandler { + + private val generatedUnits = ListBuffer.empty[GeneratedCompilationUnit] + + def process(unit: GeneratedCompilationUnit): Unit = generatedUnits += unit + + def complete(): Unit = { + val allGeneratedUnits = generatedUnits.result() + generatedUnits.clear() + postProcessor.runGlobalOptimizations(allGeneratedUnits) + allGeneratedUnits.foreach(underlying.process) + underlying.complete() + } + + def close(): Unit = underlying.close() + + override def toString: String = s"GloballyOptimising[$underlying]" + } + + sealed abstract class WritingClassHandler(val javaExecutor: Executor) extends GeneratedClassHandler { + val unitInfoLookup: UnitInfoLookup + val cfWriter: ClassfileWriter + + def tryStealing: Option[Runnable] + + private val processingUnits = ListBuffer.empty[UnitResult] + + def process(unit: GeneratedCompilationUnit): Unit = { + val unitProcess = new UnitResult(unitInfoLookup, unit.classes, unit.sourceFile) + postProcessUnit(unitProcess) + processingUnits += unitProcess + } + + protected implicit val executionContext: ExecutionContextExecutor = ExecutionContext.fromExecutor(javaExecutor) + + final def postProcessUnit(unitProcess: UnitResult): Unit = { + unitProcess.task = Future { + unitProcess.withBufferedReporter { + // we 'take' classes to reduce the memory pressure + // as soon as the class is consumed and written, we release its data + unitProcess.takeClasses foreach { + postProcessor.sendToDisk(unitProcess, _, cfWriter) + } + } + } + } + + protected def getAndClearProcessingUnits(): List[UnitResult] = { + val result = processingUnits.result() + processingUnits.clear() + result + } + + override def complete(): Unit = { + val directBackendReporting = postProcessor.bTypes.frontendAccess.directBackendReporting + + def stealWhileWaiting(unitResult: UnitResult, fut: Future[Unit]): Unit = { + while (!fut.isCompleted) + tryStealing match { + case Some(r) => r.run() + case None => Await.ready(fut, Duration.Inf) + } + //we know that they are complete by we need to check for exception + //but first get any reports + unitResult.relayReports(directBackendReporting) + fut.value.get.get // throw the exception if the future completed with a failure + } + + + /** We could consume the results when yey are ready, via use of a [[java.util.concurrent.CompletionService]] + * or something similar, but that would lead to non deterministic reports from backend threads, as the + * compilation unit could complete in a different order that when they were submitted, and thus the relayed + * reports would be in a different order. + * To avoid that non-determinism we read the result in order or submission, with a potential minimal performance + * loss, do to the memory being retained longer for tasks that it might otherwise. 
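Two mechanisms used by this handler can be shown in isolation: back-pressure from a bounded work queue whose rejection policy runs the task on the submitting thread, and consuming results in submission order so that relayed diagnostics stay deterministic. The following is a self-contained sketch under those assumptions, using plain futures instead of the compiler's UnitResult:

```
import java.util.concurrent.{ArrayBlockingQueue, ThreadPoolExecutor, TimeUnit}
import java.util.concurrent.ThreadPoolExecutor.CallerRunsPolicy
import scala.concurrent.duration.Duration
import scala.concurrent.{Await, ExecutionContext, Future}

object OrderedBackPressure {
  def main(args: Array[String]): Unit = {
    val threads = 4
    // Bound the queue at roughly twice the worker count: once it fills up,
    // CallerRunsPolicy makes the submitting (main) thread run the task itself,
    // which is the back-pressure described above.
    val executor = new ThreadPoolExecutor(
      threads, threads, 0L, TimeUnit.MILLISECONDS,
      new ArrayBlockingQueue[Runnable](threads * 2), new CallerRunsPolicy)
    implicit val ec: ExecutionContext = ExecutionContext.fromExecutor(executor)

    // Submit in order and remember the futures in that same order ...
    val results: List[Future[String]] =
      (1 to 20).toList.map(i => Future(s"unit $i written"))

    // ... then consume them in submission order: completion order may differ,
    // but the printed output (the "relayed reports") is deterministic.
    results.foreach(f => println(Await.result(f, Duration.Inf)))
    executor.shutdown()
  }
}
```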
+ * Most of the memory in the UnitResult is reclaimable anyway as the classes are deferenced after use + */ + getAndClearProcessingUnits().foreach { unitResult => + try { + stealWhileWaiting(unitResult, unitResult.task) + } catch { + case NonFatal(t) => + t.printStackTrace() + postProcessor.bTypes.frontendAccess.backendReporting.error(NoPosition, s"unable to write ${unitResult.sourceFile} $t") + } + } + } + + def close(): Unit = cfWriter.close() + } + + private final class SyncWritingClassHandler( + val unitInfoLookup: UnitInfoLookup, + val postProcessor: PostProcessor, + val cfWriter: ClassfileWriter) + extends WritingClassHandler((r) => r.run()) { + + override def toString: String = s"SyncWriting [$cfWriter]" + + override def tryStealing: Option[Runnable] = None + } + + private final case class ExecutorServiceInfo(maxThreads: Int, javaExecutor: ExecutorService, queue: BlockingQueue[Runnable]) + + private final class AsyncWritingClassHandler(val unitInfoLookup: UnitInfoLookup, + val postProcessor: PostProcessor, + val cfWriter: ClassfileWriter, + val executorServiceInfo: ExecutorServiceInfo) + extends WritingClassHandler(executorServiceInfo.javaExecutor) { + + override def toString: String = s"AsyncWriting[additional threads:${executorServiceInfo.maxThreads} writer:$cfWriter]" + + override def close(): Unit = { + super.close() + executorServiceInfo.javaExecutor.shutdownNow() + } + + override def tryStealing: Option[Runnable] = Option(executorServiceInfo.queue.poll()) + } + +} +//we avoid the lock on frontendSync for the common case, when compiling to a single target +sealed trait UnitInfoLookup { + def outputDir(source:AbstractFile) : AbstractFile + val frontendAccess: PostProcessorFrontendAccess +} +final class SingleUnitInfo(val frontendAccess: PostProcessorFrontendAccess, constantOutputDir:AbstractFile) extends UnitInfoLookup { + override def outputDir(source: AbstractFile) = constantOutputDir +} +final class LookupUnitInfo(val frontendAccess: PostProcessorFrontendAccess) extends UnitInfoLookup { + lazy val outputDirectories = frontendAccess.compilerSettings.outputDirectories + override def outputDir(source: AbstractFile) = outputDirectories.outputDirFor(source) +} +sealed trait SourceUnit { + def withBufferedReporter[T](fn: => T): T + + val outputDir: AbstractFile + val outputPath: java.nio.file.Path + def sourceFile:AbstractFile +} + +final class UnitResult(unitInfoLookup: UnitInfoLookup, _classes : List[GeneratedClass], val sourceFile: AbstractFile) extends SourceUnit with BackendReporting { + lazy val outputDir = unitInfoLookup.outputDir(sourceFile) + lazy val outputPath = outputDir.file.toPath + + private var classes: List[GeneratedClass] = _classes + + def copyClasses = classes + + def takeClasses(): List[GeneratedClass] = { + val c = classes + classes = Nil + c + } + + /** the main async task submitted onto the scheduler */ + var task: Future[Unit] = _ + + def relayReports(backendReporting: BackendReporting): Unit = this.synchronized { + if (bufferedReports nonEmpty) { + for (report: Report <- bufferedReports.reverse) { + report.relay(backendReporting) + } + } + bufferedReports = Nil + } + + // We optimise access to the buffered reports for the common case - that there are no warning/errors to report + // We could use a listBuffer etc - but that would be extra allocation in the common case + // Note - all access is externally synchronized, as this allow the reports to be generated in on thread and + // consumed in another + private var bufferedReports = List.empty[Report] + + 
override def withBufferedReporter[T](fn: => T) = unitInfoLookup.frontendAccess.withLocalReporter(this)(fn) + + override def inlinerWarning(pos: Position, message: String): Unit = + this.synchronized(bufferedReports ::= new ReportInlinerWarning(pos, message)) + + override def error(pos: Position, message: String): Unit = + this.synchronized(bufferedReports ::= new ReportError(pos, message)) + + override def warning(pos: Position, message: String): Unit = + this.synchronized(bufferedReports ::= new ReportWarning(pos, message)) + + override def inform(message: String): Unit = + this.synchronized(bufferedReports ::= new ReportInform(message)) + + override def log(message: String): Unit = + this.synchronized(bufferedReports ::= new ReportLog(message)) + + private sealed trait Report { + def relay(backendReporting: BackendReporting): Unit + } + + private class ReportInlinerWarning(pos: Position, message: String) extends Report { + override def relay(reporting: BackendReporting): Unit = + reporting.inlinerWarning(pos, message) + } + + private class ReportError(pos: Position, message: String) extends Report { + override def relay(reporting: BackendReporting): Unit = + reporting.error(pos, message) + } + + private class ReportWarning(pos: Position, message: String) extends Report { + override def relay(reporting: BackendReporting): Unit = + reporting.warning(pos, message) + } + + private class ReportInform(message: String) extends Report { + override def relay(reporting: BackendReporting): Unit = + reporting.inform(message) + } + + private class ReportLog(message: String) extends Report { + override def relay(reporting: BackendReporting): Unit = + reporting.log(message) + } +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PerRunInit.scala b/src/compiler/scala/tools/nsc/backend/jvm/PerRunInit.scala index ac6107aaa06b..a27fe22653ac 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PerRunInit.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PerRunInit.scala @@ -17,4 +17,3 @@ trait PerRunInit { def initialize(): Unit = inits.synchronized(inits.foreach(_.apply())) } - diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index 82f4f6348412..67bd45b19284 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -1,8 +1,9 @@ package scala.tools.nsc package backend.jvm -import scala.collection.mutable.ListBuffer -import scala.reflect.internal.util.{NoPosition, Statistics} +import java.util.concurrent.ConcurrentHashMap + +import scala.reflect.internal.util.{NoPosition, Position, SourceFile, Statistics} import scala.reflect.io.AbstractFile import scala.tools.asm.ClassWriter import scala.tools.asm.tree.ClassNode @@ -18,7 +19,7 @@ abstract class PostProcessor(statistics: Statistics with BackendStats) extends P val bTypes: BTypes import bTypes._ - import frontendAccess.{backendReporting, compilerSettings, recordPerRunCache} + import frontendAccess.{backendReporting, compilerSettings, recordPerRunJavaMapCache} val backendUtils : BackendUtils { val postProcessor: self.type } = new { val postProcessor: self.type = self } with BackendUtils val byteCodeRepository : ByteCodeRepository { val postProcessor: self.type } = new { val postProcessor: self.type = self } with ByteCodeRepository @@ -29,66 +30,78 @@ abstract class PostProcessor(statistics: Statistics with BackendStats) extends P val callGraph : CallGraph { val postProcessor: 
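The Report classes above implement a buffer-and-relay scheme: worker threads never call the single-threaded reporter directly, they append messages to a per-unit buffer which is later replayed, in original order, on the thread that owns the reporter. A simplified standalone sketch of that pattern, with invented names (Reporter, BufferedReporter):

```
object BufferAndRelay {
  // The real reporter: may only be used from the thread that relays.
  trait Reporter { def warning(msg: String): Unit; def log(msg: String): Unit }

  // A per-unit buffer that any worker thread may append to. Messages are kept
  // newest-first and replayed in their original order when relayed.
  final class BufferedReporter extends Reporter {
    private var buffered: List[Reporter => Unit] = Nil

    def warning(msg: String): Unit = this.synchronized { buffered = ((r: Reporter) => r.warning(msg)) :: buffered }
    def log(msg: String): Unit     = this.synchronized { buffered = ((r: Reporter) => r.log(msg)) :: buffered }

    def relayTo(target: Reporter): Unit = this.synchronized {
      buffered.reverse.foreach(report => report(target)) // restore submission order
      buffered = Nil
    }
  }

  def main(args: Array[String]): Unit = {
    val console = new Reporter {
      def warning(msg: String): Unit = println(s"warning: $msg")
      def log(msg: String): Unit     = println(s"log: $msg")
    }
    val perUnit = new BufferedReporter
    perUnit.log("generated Foo.class")     // could happen on a worker thread
    perUnit.warning("Foo is deprecated")   // ditto
    perUnit.relayTo(console)               // replayed later, on the reporting thread
  }
}
```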
self.type } = new { val postProcessor: self.type = self } with CallGraph val bTypesFromClassfile : BTypesFromClassfile { val postProcessor: self.type } = new { val postProcessor: self.type = self } with BTypesFromClassfile - // re-initialized per run because it reads compiler settings that might change - lazy val classfileWriter: LazyVar[ClassfileWriter] = - perRunLazy(this)(new ClassfileWriter(frontendAccess, statistics)) - - lazy val generatedClasses = recordPerRunCache(new ListBuffer[GeneratedClass]) + private val caseInsensitively = recordPerRunJavaMapCache(new ConcurrentHashMap[String, String]) override def initialize(): Unit = { super.initialize() backendUtils.initialize() - byteCodeRepository.initialize() inlinerHeuristics.initialize() + byteCodeRepository.initialize() } - def postProcessAndSendToDisk(classes: Traversable[GeneratedClass]): Unit = { - runGlobalOptimizations(classes) - - for (GeneratedClass(classNode, sourceFile, isArtifact) <- classes) { - val bytes = try { - if (!isArtifact) { - localOptimizations(classNode) - backendUtils.onIndyLambdaImplMethodIfPresent(classNode.name) { - methods => if (methods.nonEmpty) backendUtils.addLambdaDeserialize(classNode, methods) - } + def sendToDisk(unit:SourceUnit, clazz: GeneratedClass, writer: ClassfileWriter): Unit = { + val classNode = clazz.classNode + val internalName = classNode.name + val bytes = try { + if (!clazz.isArtifact) { + warnCaseInsensitiveOverwrite(clazz) + localOptimizations(classNode) + backendUtils.onIndyLambdaImplMethodIfPresent(internalName) { + methods => if (methods.nonEmpty) backendUtils.addLambdaDeserialize(classNode, methods) } - setInnerClasses(classNode) - serializeClass(classNode) - } catch { - case e: java.lang.RuntimeException if e.getMessage != null && (e.getMessage contains "too large!") => - backendReporting.error(NoPosition, - s"Could not write class ${classNode.name} because it exceeds JVM code size limits. ${e.getMessage}") - null - case ex: Throwable => - ex.printStackTrace() - backendReporting.error(NoPosition, s"Error while emitting ${classNode.name}\n${ex.getMessage}") - null } - if (bytes != null) { - if (AsmUtils.traceSerializedClassEnabled && classNode.name.contains(AsmUtils.traceSerializedClassPattern)) - AsmUtils.traceClass(bytes) + setInnerClasses(classNode) + serializeClass(classNode) + } catch { + case e: java.lang.RuntimeException if e.getMessage != null && (e.getMessage contains "too large!") => + backendReporting.error(NoPosition, + s"Could not write class ${internalName} because it exceeds JVM code size limits. ${e.getMessage}") + null + case ex: Throwable => + ex.printStackTrace() + backendReporting.error(NoPosition, s"Error while emitting ${internalName}\n${ex.getMessage}") + null + } - classfileWriter.get.write(classNode.name, bytes, sourceFile) - } + if (bytes != null) { + if (AsmUtils.traceSerializedClassEnabled && internalName.contains(AsmUtils.traceSerializedClassPattern)) + AsmUtils.traceClass(bytes) + + writer.write(unit, internalName, bytes) + } + } + private def warnCaseInsensitiveOverwrite(clazz: GeneratedClass): Unit = { + val name = clazz.classNode.name + val lowercaseJavaClassName = name.toLowerCase + val sourceClassName = clazz.sourceClassName + + val duplicate = caseInsensitively.putIfAbsent(lowercaseJavaClassName, sourceClassName) + if (duplicate != null) { + backendReporting.warning( + clazz.position, + s"Class ${sourceClassName} differs only in case from ${duplicate}. " + + "Such classes will overwrite one another on case-insensitive filesystems." 
+ ) } } - def runGlobalOptimizations(classes: Traversable[GeneratedClass]): Unit = { + def runGlobalOptimizations(generatedUnits: Traversable[GeneratedCompilationUnit]): Unit = { // add classes to the bytecode repo before building the call graph: the latter needs to // look up classes and methods in the code repo. - if (compilerSettings.optAddToBytecodeRepository) for (c <- classes) { - byteCodeRepository.add(c.classNode, Some(c.sourceFile.canonicalPath)) - } - if (compilerSettings.optBuildCallGraph) for (c <- classes if !c.isArtifact) { - // skip call graph for mirror / bean: we don't inline into them, and they are not referenced from other classes - callGraph.addClass(c.classNode) + if (compilerSettings.optAddToBytecodeRepository) { + for (u <- generatedUnits; c <- u.classes) { + byteCodeRepository.add(c.classNode, Some(u.sourceFile.canonicalPath)) + } + if (compilerSettings.optBuildCallGraph) for (u <- generatedUnits; c <- u.classes if !c.isArtifact) { + // skip call graph for mirror / bean: we don't inline into them, and they are not referenced from other classes + callGraph.addClass(c.classNode) + } + if (compilerSettings.optInlinerEnabled) + inliner.runInliner() + if (compilerSettings.optClosureInvocations) + closureOptimizer.rewriteClosureApplyInvocations() } - if (compilerSettings.optInlinerEnabled) - inliner.runInliner() - if (compilerSettings.optClosureInvocations) - closureOptimizer.rewriteClosureApplyInvocations() } def localOptimizations(classNode: ClassNode): Unit = { @@ -108,7 +121,7 @@ abstract class PostProcessor(statistics: Statistics with BackendStats) extends P /** * An asm ClassWriter that uses ClassBType.jvmWiseLUB to compute the common superclass of class - * types. This operation is used for computing statck map frames. + * types. This operation is used for computing stack map frames. */ final class ClassWriterWithBTypeLub(flags: Int) extends ClassWriter(flags) { /** @@ -132,4 +145,5 @@ abstract class PostProcessor(statistics: Statistics with BackendStats) extends P /** * The result of code generation. [[isArtifact]] is `true` for mirror and bean-info classes. 
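The warnCaseInsensitiveOverwrite check above leans on ConcurrentHashMap.putIfAbsent, which atomically stores a value for a free key and otherwise returns the value already present; that makes the check-and-record step safe even with several writer threads. A small sketch of the same idiom, with invented names:

```
import java.util.concurrent.ConcurrentHashMap

object CaseInsensitiveCheck {
  private val seen = new ConcurrentHashMap[String, String]()

  /** Returns the previously registered name clashing with `className` on a
    * case-insensitive filesystem, or None if this is the first occurrence. */
  def register(className: String): Option[String] =
    // putIfAbsent is atomic: exactly one thread "wins" a given lowercased key,
    // every later caller gets the winner's original spelling back (else null).
    Option(seen.putIfAbsent(className.toLowerCase, className))

  def main(args: Array[String]): Unit = {
    println(register("a/Foo")) // None
    println(register("a/foo")) // Some(a/Foo): the two would overwrite each other
  }
}
```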
*/ -case class GeneratedClass(classNode: ClassNode, sourceFile: AbstractFile, isArtifact: Boolean) +case class GeneratedClass(classNode: ClassNode, sourceClassName: String, position: Position, sourceFile: SourceFile, isArtifact: Boolean) +case class GeneratedCompilationUnit(sourceFile: AbstractFile, classes: List[GeneratedClass]) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala index 077c18630b36..33e82a683bb2 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala @@ -21,7 +21,9 @@ sealed abstract class PostProcessorFrontendAccess { def compilerSettings: CompilerSettings + def withLocalReporter[T](reporter: BackendReporting)(fn: => T): T def backendReporting: BackendReporting + def directBackendReporting: BackendReporting def backendClassPath: BackendClassPath @@ -42,13 +44,7 @@ object PostProcessorFrontendAccess { def target: String - def genAsmpDirectory: Option[String] - def dumpClassesDirectory: Option[String] - - def singleOutputDirectory: Option[AbstractFile] - def outputDirectoryFor(src: AbstractFile): AbstractFile - - def mainClass: Option[String] + def outputDirectories : Settings#OutputDirs def optAddToBytecodeRepository: Boolean def optBuildCallGraph: Boolean @@ -80,9 +76,11 @@ object PostProcessorFrontendAccess { def optTrace: Option[String] } - sealed trait BackendReporting { + trait BackendReporting { def inlinerWarning(pos: Position, message: String): Unit def error(pos: Position, message: String): Unit + def warning(pos: Position, message: String): Unit + def inform(message: String): Unit def log(message: String): Unit } @@ -104,14 +102,7 @@ object PostProcessorFrontendAccess { val debug: Boolean = s.debug val target: String = s.target.value - - val genAsmpDirectory: Option[String] = s.Ygenasmp.valueSetByUser - val dumpClassesDirectory: Option[String] = s.Ydumpclasses.valueSetByUser - - val singleOutputDirectory: Option[AbstractFile] = s.outputDirs.getSingleOutput - def outputDirectoryFor(src: AbstractFile): AbstractFile = frontendSynch(s.outputDirs.outputDirFor(src)) - - val mainClass: Option[String] = s.mainClass.valueSetByUser + val outputDirectories = s.outputDirs val optAddToBytecodeRepository: Boolean = s.optAddToBytecodeRepository val optBuildCallGraph: Boolean = s.optBuildCallGraph @@ -146,24 +137,50 @@ object PostProcessorFrontendAccess { val optTrace: Option[String] = s.YoptTrace.valueSetByUser } - object backendReporting extends BackendReporting { + private lazy val localReporter = perRunLazy(this)(new ThreadLocal[BackendReporting]) + + override def withLocalReporter[T](reporter: BackendReporting)(fn: => T): T = { + val threadLocal = localReporter.get + val old = threadLocal.get() + threadLocal.set(reporter) + try fn finally + if (old eq null) threadLocal.remove() else threadLocal.set(old) + } + + override def backendReporting: BackendReporting = { + val local = localReporter.get.get() + if (local eq null) directBackendReporting else local + } + + object directBackendReporting extends BackendReporting { def inlinerWarning(pos: Position, message: String): Unit = frontendSynch { currentRun.reporting.inlinerWarning(pos, message) } - def error(pos: Position, message: String): Unit = frontendSynch(reporter.error(pos, message)) - def log(message: String): Unit = frontendSynch(global.log(message)) + def error(pos: Position, message: String): 
Unit = frontendSynch { + reporter.error(pos, message) + } + def warning(pos: Position, message: String): Unit = frontendSynch { + global.warning(pos, message) + } + def inform(message: String): Unit = frontendSynch { + global.inform(message) + } + def log(message: String): Unit = frontendSynch { + global.log(message) + } } + private lazy val cp = perRunLazy(this)(frontendSynch(optimizerClassPath(classPath))) object backendClassPath extends BackendClassPath { - def findClassFile(className: String): Option[AbstractFile] = frontendSynch(optimizerClassPath(classPath).findClassFile(className)) + def findClassFile(className: String): Option[AbstractFile] = cp.get.findClassFile(className) } def getEntryPoints: List[String] = frontendSynch(cleanup.getEntryPoints) def javaDefinedClasses: Set[InternalName] = frontendSynch { - currentRun.symSource.collect({ - case (sym, _) if sym.isJavaDefined => sym.javaBinaryNameString - }).toSet + currentRun.symSource.keys.collect{ + case sym if sym.isJavaDefined => sym.javaBinaryNameString + }(scala.collection.breakOut) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ThreadFactories.scala b/src/compiler/scala/tools/nsc/backend/jvm/ThreadFactories.scala new file mode 100644 index 000000000000..97409b080ecb --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/ThreadFactories.scala @@ -0,0 +1,20 @@ +package scala.tools.nsc.backend.jvm + +import java.util.concurrent.ThreadFactory +import java.util.concurrent.atomic.AtomicInteger + +class CommonThreadFactory(namePrefix:String, + threadGroup: ThreadGroup = Thread.currentThread().getThreadGroup, + daemon:Boolean = true, + priority:Int = Thread.NORM_PRIORITY) extends ThreadFactory { + private val group: ThreadGroup = Thread.currentThread().getThreadGroup + private val threadNumber: AtomicInteger = new AtomicInteger(1) + + + override def newThread(r: Runnable): Thread = { + val t: Thread = new Thread(group, r, namePrefix + threadNumber.getAndIncrement, 0) + if (t.isDaemon != daemon) t.setDaemon(daemon) + if (t.getPriority != priority) t.setPriority(priority) + t + } +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index 8e33ddd56b78..d4d49b0ca0cf 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -5,8 +5,8 @@ package analysis import java.lang.invoke.LambdaMetafactory import scala.annotation.{switch, tailrec} -import scala.collection.JavaConverters._ import scala.collection.mutable +import scala.collection.JavaConverters._ import java.util.concurrent.ConcurrentHashMap import scala.tools.asm diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala index 08b03343647e..1ac470883917 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala @@ -22,7 +22,7 @@ import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ * The ByteCodeRepository provides utilities to read the bytecode of classfiles from the compilation * classpath. Parsed classes are cached in the `classes` map. 
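The withLocalReporter/backendReporting pair a few hunks above is a ThreadLocal save-and-restore: a per-thread override is installed for the duration of a block, and the previous state is put back afterwards (or the slot is cleared if there was none). The pattern in isolation, with invented names and no compiler types:

```
object ThreadLocalOverride {
  trait Sink { def apply(msg: String): Unit }

  private val default: Sink = msg => println(s"[global] $msg")
  private val local = new ThreadLocal[Sink]

  // The sink the current thread should use: its local override if present, else the default.
  def sink: Sink = { val s = local.get(); if (s eq null) default else s }

  // Run `body` with `s` installed for this thread, restoring the previous state afterwards.
  def withSink[T](s: Sink)(body: => T): T = {
    val old = local.get()
    local.set(s)
    try body
    finally if (old eq null) local.remove() else local.set(old)
  }

  def main(args: Array[String]): Unit = {
    sink("before")                                   // default sink
    withSink(msg => println(s"[buffered] $msg")) {
      sink("inside")                                 // thread-local override
    }
    sink("after")                                    // default again
  }
}
```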
*/ -abstract class ByteCodeRepository { +abstract class ByteCodeRepository extends PerRunInit { val postProcessor: PostProcessor import postProcessor.{bTypes, bTypesFromClassfile} @@ -54,12 +54,7 @@ abstract class ByteCodeRepository { * Contains the internal names of all classes that are defined in Java source files of the current * compilation run (mixed compilation). Used for more detailed error reporting. */ - val javaDefinedClasses: mutable.Set[InternalName] = recordPerRunCache(mutable.Set.empty) - - - def initialize(): Unit = { - javaDefinedClasses ++= frontendAccess.javaDefinedClasses - } + private lazy val javaDefinedClasses = perRunLazy(this)(frontendAccess.javaDefinedClasses) def add(classNode: ClassNode, sourceFilePath: Option[String]) = sourceFilePath match { case Some(path) if path != "" => compilingClasses(classNode.name) = (classNode, path) @@ -273,7 +268,7 @@ abstract class ByteCodeRepository { classNode } match { case Some(node) => Right(node) - case None => Left(ClassNotFound(internalName, javaDefinedClasses(internalName))) + case None => Left(ClassNotFound(internalName, javaDefinedClasses.get(internalName))) } } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala index a19495fcf1a5..e0c7ae4f3231 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala @@ -57,6 +57,7 @@ abstract class CallGraph { * optimizer: finding callsites to re-write requires running a producers-consumers analysis on * the method. Here the closure instantiations are already grouped by method. */ + //currently single threaded access only val closureInstantiations: mutable.Map[MethodNode, Map[InvokeDynamicInsnNode, ClosureInstantiation]] = recordPerRunCache(concurrent.TrieMap.empty withDefaultValue Map.empty) /** @@ -70,7 +71,9 @@ abstract class CallGraph { * Instructions are added during code generation (BCodeBodyBuilder). The maps are then queried * when building the CallGraph, every Callsite object has an annotated(No)Inline field. 
*/ + //currently single threaded access only val inlineAnnotatedCallsites: mutable.Set[MethodInsnNode] = recordPerRunCache(mutable.Set.empty) + //currently single threaded access only val noInlineAnnotatedCallsites: mutable.Set[MethodInsnNode] = recordPerRunCache(mutable.Set.empty) def removeCallsite(invocation: MethodInsnNode, methodNode: MethodNode): Option[Callsite] = { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala index 7adcb7351ea1..6bf6f48c13ce 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala @@ -138,8 +138,6 @@ import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ abstract class LocalOpt { val postProcessor: PostProcessor - import postProcessor.bTypes.frontendAccess.recordPerRunCache - import postProcessor._ import bTypes._ import bTypesFromClassfile._ diff --git a/src/compiler/scala/tools/nsc/profile/AsyncHelper.scala b/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala similarity index 64% rename from src/compiler/scala/tools/nsc/profile/AsyncHelper.scala rename to src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala index 2258d1fe43e4..33d8cefde10b 100644 --- a/src/compiler/scala/tools/nsc/profile/AsyncHelper.scala +++ b/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala @@ -1,44 +1,52 @@ package scala.tools.nsc.profile -import java.util.Collections import java.util.concurrent.ThreadPoolExecutor.AbortPolicy import java.util.concurrent._ -import java.util.concurrent.atomic.{AtomicInteger, AtomicLong} +import java.util.concurrent.atomic.AtomicInteger import scala.tools.nsc.{Global, Phase} -sealed trait AsyncHelper { - - def newUnboundedQueueFixedThreadPool - (nThreads: Int, - shortId: String, priority : Int = Thread.NORM_PRIORITY) : ThreadPoolExecutor - def newBoundedQueueFixedThreadPool - (nThreads: Int, maxQueueSize: Int, rejectHandler: RejectedExecutionHandler, - shortId: String, priority : Int = Thread.NORM_PRIORITY) : ThreadPoolExecutor - +sealed trait ThreadPoolFactory { + def newUnboundedQueueFixedThreadPool( + nThreads: Int, + shortId: String, + priority: Int = Thread.NORM_PRIORITY): ThreadPoolExecutor + + def newBoundedQueueFixedThreadPool( + nThreads: Int, + maxQueueSize: Int, + rejectHandler: RejectedExecutionHandler, + shortId: String, + priority: Int = Thread.NORM_PRIORITY): ThreadPoolExecutor } -object AsyncHelper { - def apply(global: Global, phase: Phase): AsyncHelper = global.currentRun.profiler match { - case NoOpProfiler => new BasicAsyncHelper(global, phase) - case r: RealProfiler => new ProfilingAsyncHelper(global, phase, r) +object ThreadPoolFactory { + def apply(global: Global, phase: Phase): ThreadPoolFactory = global.currentRun.profiler match { + case NoOpProfiler => new BasicThreadPoolFactory(phase) + case r: RealProfiler => new ProfilingThreadPoolFactory(phase, r) } - private abstract class BaseAsyncHelper(global: Global, phase: Phase) extends AsyncHelper { + private abstract class BaseThreadPoolFactory(phase: Phase) extends ThreadPoolFactory { val baseGroup = new ThreadGroup(s"scalac-${phase.name}") + private def childGroup(name: String) = new ThreadGroup(baseGroup, name) - protected def wrapRunnable(r: Runnable, shortId:String): Runnable + // Invoked when a new `Worker` is created, see `CommonThreadFactory.newThread` + protected def wrapWorker(worker: Runnable, shortId: String): Runnable = worker - protected class CommonThreadFactory(shortId: 
String, - daemon: Boolean = true, - priority: Int) extends ThreadFactory { + protected final class CommonThreadFactory( + shortId: String, + daemon: Boolean = true, + priority: Int) extends ThreadFactory { private val group: ThreadGroup = childGroup(shortId) private val threadNumber: AtomicInteger = new AtomicInteger(1) private val namePrefix = s"${baseGroup.getName}-$shortId-" - override def newThread(r: Runnable): Thread = { - val wrapped = wrapRunnable(r, shortId) + // Invoked by the `ThreadPoolExecutor` when creating a new worker thread. The argument + // runnable is the `Worker` (which extends `Runnable`). Its `run` method gets tasks from + // the thread pool and executes them (on the thread created here). + override def newThread(worker: Runnable): Thread = { + val wrapped = wrapWorker(worker, shortId) val t: Thread = new Thread(group, wrapped, namePrefix + threadNumber.getAndIncrement, 0) if (t.isDaemon != daemon) t.setDaemon(daemon) if (t.getPriority != priority) t.setPriority(priority) @@ -47,8 +55,7 @@ object AsyncHelper { } } - private final class BasicAsyncHelper(global: Global, phase: Phase) extends BaseAsyncHelper(global, phase) { - + private final class BasicThreadPoolFactory(phase: Phase) extends BaseThreadPoolFactory(phase) { override def newUnboundedQueueFixedThreadPool(nThreads: Int, shortId: String, priority: Int): ThreadPoolExecutor = { val threadFactory = new CommonThreadFactory(shortId, priority = priority) //like Executors.newFixedThreadPool @@ -60,12 +67,9 @@ object AsyncHelper { //like Executors.newFixedThreadPool new ThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue[Runnable](maxQueueSize), threadFactory, rejectHandler) } - - override protected def wrapRunnable(r: Runnable, shortId:String): Runnable = r } - private class ProfilingAsyncHelper(global: Global, phase: Phase, private val profiler: RealProfiler) extends BaseAsyncHelper(global, phase) { - + private class ProfilingThreadPoolFactory(phase: Phase, profiler: RealProfiler) extends BaseThreadPoolFactory(phase) { override def newUnboundedQueueFixedThreadPool(nThreads: Int, shortId: String, priority: Int): ThreadPoolExecutor = { val threadFactory = new CommonThreadFactory(shortId, priority = priority) //like Executors.newFixedThreadPool @@ -78,12 +82,12 @@ object AsyncHelper { new SinglePhaseInstrumentedThreadPoolExecutor(nThreads, nThreads, 0L, TimeUnit.MILLISECONDS, new ArrayBlockingQueue[Runnable](maxQueueSize), threadFactory, rejectHandler) } - override protected def wrapRunnable(r: Runnable, shortId:String): Runnable = () => { + override protected def wrapWorker(worker: Runnable, shortId: String): Runnable = () => { val data = new ThreadProfileData localData.set(data) val profileStart = profiler.snapThread(0) - try r.run finally { + try worker.run finally { val snap = profiler.snapThread(data.idleNs) val threadRange = ProfileRange(profileStart, snap, phase, shortId, data.taskCount, Thread.currentThread()) profiler.completeBackground(threadRange) @@ -106,10 +110,10 @@ object AsyncHelper { val localData = new ThreadLocal[ThreadProfileData] - private class SinglePhaseInstrumentedThreadPoolExecutor - ( corePoolSize: Int, maximumPoolSize: Int, keepAliveTime: Long, unit: TimeUnit, - workQueue: BlockingQueue[Runnable], threadFactory: ThreadFactory, handler: RejectedExecutionHandler - ) extends ThreadPoolExecutor(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, handler) { + private class SinglePhaseInstrumentedThreadPoolExecutor( + corePoolSize: 
Int, maximumPoolSize: Int, keepAliveTime: Long, unit: TimeUnit, + workQueue: BlockingQueue[Runnable], threadFactory: ThreadFactory, handler: RejectedExecutionHandler) + extends ThreadPoolExecutor(corePoolSize, maximumPoolSize, keepAliveTime, unit, workQueue, threadFactory, handler) { override def beforeExecute(t: Thread, r: Runnable): Unit = { val data = localData.get @@ -133,7 +137,6 @@ object AsyncHelper { super.afterExecute(r, t) } - } } } \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index fe29ae0406eb..0ed784d3d13a 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -9,8 +9,9 @@ package tools package nsc package settings -import scala.language.existentials +import java.util.zip.Deflater +import scala.language.existentials import scala.annotation.elidable import scala.tools.util.PathResolver.Defaults import scala.collection.mutable @@ -225,6 +226,11 @@ trait ScalaSettings extends AbsScalaSettings val exposeEmptyPackage = BooleanSetting ("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly() val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "method") + val YaddBackendThreads = IntSetting ("-Ybackend-parallelism", "maximum worker threads for backend", 1, Some((1,16)), (x: String) => None ) + val YmaxQueue = IntSetting ("-Ybackend-worker-queue", "backend threads worker queue size", 0, Some((0,1000)), (x: String) => None ) + val YjarCompressionLevel = IntSetting("-Yjar-compression-level", "compression level to use when writing jar files", + Deflater.DEFAULT_COMPRESSION, Some((Deflater.DEFAULT_COMPRESSION,Deflater.BEST_COMPRESSION)), (x: String) => None) + object optChoices extends MultiChoiceEnumeration { val unreachableCode = Choice("unreachable-code", "Eliminate unreachable code, exception handlers guarding no instructions, redundant metadata (debug information, line numbers).") val simplifyJumps = Choice("simplify-jumps", "Simplify branching instructions, eliminate unnecessary ones.") @@ -382,7 +388,7 @@ trait ScalaSettings extends AbsScalaSettings withPostSetHook( _ => YprofileEnabled.value = true ) val YprofileExternalTool = PhasesSetting("-Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase", "typer"). withPostSetHook( _ => YprofileEnabled.value = true ) - val YprofileRunGcBetweenPhases = PhasesSetting("-Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or *", "_"). + val YprofileRunGcBetweenPhases = PhasesSetting("-Yprofile-run-gc", "Run a GC between phases - this allows heap size to be accurate at the expense of more time. Specify a list of phases, or all", "_"). 
withPostSetHook( _ => YprofileEnabled.value = true ) diff --git a/test/files/run/t5717.scala b/test/files/run/t5717.scala index 3f9e15ec4bdf..880d3c8e9128 100644 --- a/test/files/run/t5717.scala +++ b/test/files/run/t5717.scala @@ -19,9 +19,9 @@ object Test extends StoreReporterDirectTest { compileCode("package a { class B }") val List(i) = filteredInfos // for some reason, nio doesn't throw the same exception on windows and linux/mac - val expected = - if (util.Properties.isWin) "error writing a/B: java.nio.file.FileAlreadyExistsException \\a" - else "error writing a/B: java.nio.file.FileSystemException /a/B.class: Not a directory" + val path = if(util.Properties.isWin)"\\a" else "/a" + val expected = "error writing a/B: Can't create directory " + path + + "; there is an existing (non-directory) file in its path" val actual = i.msg.replace(testOutput.path, "") assert(actual == expected, actual) } From c8e6887bcbe6d352980b3d44d048b07716261e1f Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 7 Feb 2018 15:19:07 +0100 Subject: [PATCH 1218/2793] Review and cleanup of the code Move classfileWriter to PostProcessor Remove UnitInfoLookup, Separate concerns in UnitResult Remove ExecutorServiceInfo Set entry sizes in uncompressed jars Refer to stats from frontendAccess, warn on stats with multi-threaded writer Separate BufferedReporting Remove unused classes Add some documentation --- src/compiler/scala/tools/nsc/Global.scala | 14 - .../scala/tools/nsc/backend/jvm/BTypes.scala | 4 +- .../nsc/backend/jvm/ClassfileWriter.scala | 264 ----------------- .../nsc/backend/jvm/ClassfileWriters.scala | 275 ++++++++++++++++++ .../scala/tools/nsc/backend/jvm/CodeGen.scala | 7 +- .../tools/nsc/backend/jvm/GenBCode.scala | 49 +++- .../backend/jvm/GeneratedClassHandler.scala | 231 +++++---------- .../tools/nsc/backend/jvm/PostProcessor.scala | 19 +- .../jvm/PostProcessorFrontendAccess.scala | 84 +++++- .../nsc/backend/jvm/ThreadFactories.scala | 20 -- .../tools/nsc/settings/MutableSettings.scala | 17 +- 11 files changed, 492 insertions(+), 492 deletions(-) delete mode 100644 src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriter.scala create mode 100644 src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala delete mode 100644 src/compiler/scala/tools/nsc/backend/jvm/ThreadFactories.scala diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 748737d4ca38..6e571a7348c0 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1653,20 +1653,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) } } - def getFile(source: AbstractFile, segments: Array[String], suffix: String): File = { - val outDir = Path( - settings.outputDirs.outputDirFor(source).path match { - case "" => "." - case path => path - } - ) - val dir = segments.init.foldLeft(outDir)(_ / _).createDirectory() - new File(dir.path, segments.last + suffix) - } - - /** Returns the file with the given suffix for the given class. Used for icode writing. 
*/ - def getFile(clazz: Symbol, suffix: String): File = getFile(clazz.sourceFile, clazz.fullName split '.', suffix) - def createJavadoc = false } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index 5f870ba5c448..a1e7f18006fc 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -1025,7 +1025,7 @@ abstract class BTypes { /** * Create state that lazily evaluated (to work around / not worry about initialization ordering - * issues). The state is re-initialized in each compiler run when the component is initialized. + * issues). The state is cleared in each compiler run when the component is initialized. */ def perRunLazy[T](component: PerRunInit)(init: => T): LazyVar[T] = { val r = new LazyVar(() => init) @@ -1039,7 +1039,7 @@ abstract class BTypes { * be safely initialized in the post-processor. * * Note that values defined as `LazyVar`s are usually `lazy val`s themselves (created through the - * `perRunLazy` method). This ensures that re-initializing a component only re-initializes those + * `perRunLazy` method). This ensures that re-initializing a component only clears those * `LazyVar`s that have actually been used in the previous compiler run. */ class LazyVar[T](init: () => T) { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriter.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriter.scala deleted file mode 100644 index 8ead9856f512..000000000000 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriter.scala +++ /dev/null @@ -1,264 +0,0 @@ -package scala.tools.nsc.backend.jvm - -import java.io.{BufferedOutputStream, DataOutputStream, FileOutputStream, IOException} -import java.nio.ByteBuffer -import java.nio.channels.FileChannel -import java.nio.charset.StandardCharsets -import java.nio.file.attribute.FileAttribute -import java.nio.file.{FileAlreadyExistsException, Files, Path, Paths, StandardOpenOption} -import java.util -import java.util.concurrent.ConcurrentHashMap -import java.util.zip.{CRC32, Deflater, ZipEntry, ZipOutputStream} - -import scala.reflect.internal.util.{NoPosition, Statistics} -import scala.tools.nsc.Global -import scala.tools.nsc.backend.jvm.BTypes.InternalName -import scala.tools.nsc.io.AbstractFile - -/** - * The interface to writing classfiles. GeneratedClassHandler calls these methods to generate the - * directory and files that are created, and eventually calls `close` when the writing is complete. - * - * The companion object is responsible for constructing a appropriate and optimal implementation for - * the supplied settings. - * - * Operations are threadsafe. - */ -sealed trait ClassfileWriter { - /** - * Write a classfile - */ - def write(unit: SourceUnit, name: InternalName, bytes: Array[Byte]) - - /** - * Close the writer. Behavior is undefined after a call to `close`. 
- */ - def close() : Unit -} - -object ClassfileWriter { - private def getDirectory(dir: String): Path = Paths.get(dir) - - def apply(global: Global): ClassfileWriter = { - //Note dont import global._ - its too easy to leak non threadsafe structures - import global.{cleanup, genBCode, log, settings, statistics} - def jarManifestMainClass: Option[String] = settings.mainClass.valueSetByUser.orElse { - cleanup.getEntryPoints match { - case List(name) => Some(name) - case es => - if (es.isEmpty) log("No Main-Class designated or discovered.") - else log(s"No Main-Class due to multiple entry points:\n ${es.mkString("\n ")}") - None - } - } - - def singleWriter(file: AbstractFile): UnderlyingClassfileWriter = { - if (file hasExtension "jar") { - new JarClassWriter(file, jarManifestMainClass, settings.YjarCompressionLevel.value) - } else if (file.isVirtual) { - new VirtualClassWriter() - } else if (file.isDirectory) { - new DirClassWriter(genBCode.postProcessorFrontendAccess) - } else { - throw new IllegalStateException(s"don't know how to handle an output of $file [${file.getClass}]") - } - } - - val basicClassWriter = settings.outputDirs.getSingleOutput match { - case Some(dest) => singleWriter(dest) - case None => - val distinctOutputs: Set[AbstractFile] = settings.outputDirs.outputs.map(_._2)(scala.collection.breakOut) - if (distinctOutputs.size == 1) singleWriter(distinctOutputs.head) - else new MultiClassWriter(distinctOutputs.map { output: AbstractFile => output -> singleWriter(output) }(scala.collection.breakOut)) - } - - val withAdditionalFormats = if (settings.Ygenasmp.valueSetByUser.isEmpty && settings.Ydumpclasses.valueSetByUser.isEmpty) basicClassWriter else { - val asmp = settings.Ygenasmp.valueSetByUser map { dir: String => new AsmClassWriter(getDirectory(dir), genBCode.postProcessorFrontendAccess) } - val dump = settings.Ydumpclasses.valueSetByUser map { dir: String => new DumpClassWriter(getDirectory(dir), genBCode.postProcessorFrontendAccess) } - new AllClassWriter(basicClassWriter, asmp, dump) - } - - if (statistics.enabled) new WithStatsWriter(statistics, withAdditionalFormats) else withAdditionalFormats - } - - /** - * a trait to specify the Classfilewriters that actually write, rather than layer functionality - */ - sealed trait UnderlyingClassfileWriter extends ClassfileWriter - - private final class JarClassWriter(file: AbstractFile, mainClass: Option[String], compressionLevel: Int) extends UnderlyingClassfileWriter { - //keep these imports local - avoid confusion with scala naming - import java.util.jar.Attributes.Name - import java.util.jar.{JarOutputStream, Manifest} - val storeOnly = compressionLevel == Deflater.NO_COMPRESSION - - val jarWriter: JarOutputStream = { - val manifest = new Manifest() - mainClass foreach { c => manifest.getMainAttributes.put(Name.MAIN_CLASS, c) } - val jar = new JarOutputStream(new BufferedOutputStream(new FileOutputStream(file.file), 64000), manifest) - jar.setLevel(compressionLevel) - if (storeOnly) jar.setMethod(ZipOutputStream.STORED) - jar - } - - lazy val crc = new CRC32 - - override def write(unit: SourceUnit, className: InternalName, bytes: Array[Byte]): Unit = this.synchronized { - val path = className + ".class" - val entry = new ZipEntry(path) - if (storeOnly) { - crc.reset() - crc.update(bytes) - entry.setCrc(crc.getValue) - } - jarWriter.putNextEntry(entry) - try jarWriter.write(bytes, 0, bytes.length) - finally jarWriter.flush() - } - - override def close(): Unit = this.synchronized(jarWriter.close()) - } - - private sealed 
class DirClassWriter(frontendAccess: PostProcessorFrontendAccess) extends UnderlyingClassfileWriter { - val builtPaths = new ConcurrentHashMap[Path, java.lang.Boolean]() - val noAttributes = Array.empty[FileAttribute[_]] - - def ensureDirForPath(baseDir: Path, filePath: Path): Unit = { - import java.lang.Boolean.TRUE - val parent = filePath.getParent - if (!builtPaths.containsKey(parent)) { - try Files.createDirectories(parent, noAttributes: _*) - catch { - case e: FileAlreadyExistsException => - throw new FileConflictException(s"Can't create directory $parent; there is an existing (non-directory) file in its path", e) - } - builtPaths.put(baseDir, TRUE) - var current = parent - while ((current ne null) && (null ne builtPaths.put(current, TRUE))) { - current = current.getParent - } - } - } - - protected def getPath(unit: SourceUnit, className: InternalName) = unit.outputPath.resolve(className + ".class") - - protected def formatData(rawBytes: Array[Byte]) = rawBytes - - protected def qualifier: String = "" - - // the common case is that we are are creating a new file, and on MS Windows the create and truncate is expensive - // because there is not an options in the windows API that corresponds to this so the truncate is applied as a separate call - // even if the file is new. - // as this is rare, its best to always try to create a new file, and it that fails, then open with truncate if that fails - - private val fastOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE) - private val fallbackOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING) - - override def write(unit: SourceUnit, className: InternalName, rawBytes: Array[Byte]): Unit = try { - val path = getPath(unit, className) - val bytes = formatData(rawBytes) - ensureDirForPath(unit.outputPath, path) - val os = try FileChannel.open(path, fastOpenOptions) - catch { - case _: FileAlreadyExistsException => FileChannel.open(path, fallbackOpenOptions) - } - - os.write(ByteBuffer.wrap(bytes), 0L) - os.close() - } catch { - case e: FileConflictException => - frontendAccess.backendReporting.error(NoPosition, s"error writing $className$qualifier: ${e.getMessage}") - case e: java.nio.file.FileSystemException => - if (frontendAccess.compilerSettings.debug) - e.printStackTrace() - frontendAccess.backendReporting.error(NoPosition, s"error writing $className$qualifier: ${e.getClass.getName} ${e.getMessage}") - - } - override def close(): Unit = () - } - - private final class AsmClassWriter( - asmOutputPath: Path, - frontendAccess: PostProcessorFrontendAccess) - extends DirClassWriter(frontendAccess) { - override protected def getPath(unit: SourceUnit, className: InternalName) = asmOutputPath.resolve(className + ".asmp") - - override protected def formatData(rawBytes: Array[Byte]) = AsmUtils.textify(AsmUtils.readClass(rawBytes)).getBytes(StandardCharsets.UTF_8) - - override protected def qualifier: String = " [for asmp]" - } - - private final class DumpClassWriter( - dumpOutputPath: Path, - frontendAccess: PostProcessorFrontendAccess) - extends DirClassWriter(frontendAccess) { - override protected def getPath(unit: SourceUnit, className: InternalName) = dumpOutputPath.resolve(className + ".class") - - override protected def qualifier: String = " [for dump]" - } - - private final class VirtualClassWriter() extends UnderlyingClassfileWriter { - private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { - def 
ensureDirectory(dir: AbstractFile): AbstractFile = - if (dir.isDirectory) dir - else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory") - - var dir = base - val pathParts = clsName.split("[./]").toList - for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part - ensureDirectory(dir) fileNamed pathParts.last + suffix - } - - private def writeBytes(outFile: AbstractFile, bytes: Array[Byte]): Unit = { - val out = new DataOutputStream(outFile.bufferedOutput) - try out.write(bytes, 0, bytes.length) - finally out.close() - } - - override def write(unit: SourceUnit, className: InternalName, bytes: Array[Byte]): Unit = { - val outFile = getFile(unit.outputDir, className, ".class") - writeBytes(outFile, bytes) - } - - override def close(): Unit = () - } - - private final class MultiClassWriter(underlying: Map[AbstractFile, UnderlyingClassfileWriter]) extends ClassfileWriter { - private def getUnderlying(unit: SourceUnit) = underlying.getOrElse(unit.outputDir, { - throw new Exception(s"Cannot determine output directory for ${unit.sourceFile} with output ${unit.outputDir}. Configured outputs are ${underlying.keySet}") - }) - - override def write(unit: SourceUnit, className: InternalName, bytes: Array[Byte]): Unit = { - getUnderlying(unit).write(unit, className, bytes) - } - - override def close(): Unit = underlying.values.foreach(_.close()) - } - - private final class AllClassWriter(basic: ClassfileWriter, asmp: Option[UnderlyingClassfileWriter], dump: Option[UnderlyingClassfileWriter]) extends ClassfileWriter { - override def write(unit: SourceUnit, className: InternalName, bytes: Array[Byte]): Unit = { - basic.write(unit, className, bytes) - asmp.foreach(_.write(unit, className, bytes)) - dump.foreach(_.write(unit, className, bytes)) - } - - override def close(): Unit = { - basic.close() - asmp.foreach(_.close()) - dump.foreach(_.close()) - } - } - - private final class WithStatsWriter(statistics: Statistics with BackendStats, underlying: ClassfileWriter) extends ClassfileWriter { - override def write(unit: SourceUnit, className: InternalName, bytes: Array[Byte]): Unit = { - val snap = statistics.startTimer(statistics.bcodeWriteTimer) - underlying.write(unit, className, bytes) - statistics.stopTimer(statistics.bcodeWriteTimer, snap) - } - - override def close(): Unit = underlying.close() - } -} - -/** Can't output a file due to the state of the file system. 
*/ -class FileConflictException(msg: String, cause:Throwable = null) extends IOException(msg, cause) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala new file mode 100644 index 000000000000..125a343de704 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala @@ -0,0 +1,275 @@ +package scala.tools.nsc.backend.jvm + +import java.io.{BufferedOutputStream, DataOutputStream, FileOutputStream, IOException} +import java.nio.ByteBuffer +import java.nio.channels.FileChannel +import java.nio.charset.StandardCharsets +import java.nio.file._ +import java.nio.file.attribute.FileAttribute +import java.util +import java.util.concurrent.ConcurrentHashMap +import java.util.zip.{CRC32, Deflater, ZipEntry, ZipOutputStream} + +import scala.reflect.internal.util.{NoPosition, Statistics} +import scala.tools.nsc.Global +import scala.tools.nsc.backend.jvm.BTypes.InternalName +import scala.tools.nsc.io.AbstractFile + +abstract class ClassfileWriters { + val postProcessor: PostProcessor + import postProcessor.bTypes.frontendAccess + + /** + * The interface to writing classfiles. GeneratedClassHandler calls these methods to generate the + * directory and files that are created, and eventually calls `close` when the writing is complete. + * + * The companion object is responsible for constructing a appropriate and optimal implementation for + * the supplied settings. + * + * Operations are threadsafe. + */ + sealed trait ClassfileWriter { + /** + * Write a classfile + */ + def write(name: InternalName, bytes: Array[Byte], paths: CompilationUnitPaths) + + /** + * Close the writer. Behavior is undefined after a call to `close`. + */ + def close(): Unit + } + + object ClassfileWriter { + private def getDirectory(dir: String): Path = Paths.get(dir) + + def apply(global: Global): ClassfileWriter = { + //Note dont import global._ - its too easy to leak non threadsafe structures + import global.{cleanup, log, settings, statistics} + def jarManifestMainClass: Option[String] = settings.mainClass.valueSetByUser.orElse { + cleanup.getEntryPoints match { + case List(name) => Some(name) + case es => + if (es.isEmpty) log("No Main-Class designated or discovered.") + else log(s"No Main-Class due to multiple entry points:\n ${es.mkString("\n ")}") + None + } + } + + def singleWriter(file: AbstractFile): UnderlyingClassfileWriter = { + if (file hasExtension "jar") { + new JarClassWriter(file, jarManifestMainClass, settings.YjarCompressionLevel.value) + } else if (file.isVirtual) { + new VirtualClassWriter() + } else if (file.isDirectory) { + new DirClassWriter() + } else { + throw new IllegalStateException(s"don't know how to handle an output of $file [${file.getClass}]") + } + } + + val basicClassWriter = settings.outputDirs.getSingleOutput match { + case Some(dest) => singleWriter(dest) + case None => + val distinctOutputs: Set[AbstractFile] = settings.outputDirs.outputs.map(_._2)(scala.collection.breakOut) + if (distinctOutputs.size == 1) singleWriter(distinctOutputs.head) + else new MultiClassWriter(distinctOutputs.map { output: AbstractFile => output -> singleWriter(output) }(scala.collection.breakOut)) + } + + val withAdditionalFormats = if (settings.Ygenasmp.valueSetByUser.isEmpty && settings.Ydumpclasses.valueSetByUser.isEmpty) basicClassWriter else { + val asmp = settings.Ygenasmp.valueSetByUser map { dir: String => new AsmClassWriter(getDirectory(dir)) } + val dump = settings.Ydumpclasses.valueSetByUser 
map { dir: String => new DumpClassWriter(getDirectory(dir)) } + new AllClassWriter(basicClassWriter, asmp, dump) + } + + val enableStats = statistics.enabled && settings.YaddBackendThreads.value == 1 + if (enableStats) new WithStatsWriter(statistics, withAdditionalFormats) else withAdditionalFormats + } + + /** + * A marker trait for Classfilewriters that actually write, rather than layer functionality + */ + sealed trait UnderlyingClassfileWriter extends ClassfileWriter + + private final class JarClassWriter(file: AbstractFile, mainClass: Option[String], compressionLevel: Int) extends UnderlyingClassfileWriter { + //keep these imports local - avoid confusion with scala naming + import java.util.jar.Attributes.Name + import java.util.jar.{JarOutputStream, Manifest} + + val storeOnly = compressionLevel == Deflater.NO_COMPRESSION + + val jarWriter: JarOutputStream = { + val manifest = new Manifest() + mainClass foreach { c => manifest.getMainAttributes.put(Name.MAIN_CLASS, c) } + val jar = new JarOutputStream(new BufferedOutputStream(new FileOutputStream(file.file), 64000), manifest) + jar.setLevel(compressionLevel) + if (storeOnly) jar.setMethod(ZipOutputStream.STORED) + jar + } + + lazy val crc = new CRC32 + + override def write(className: InternalName, bytes: Array[Byte], paths: CompilationUnitPaths): Unit = this.synchronized { + val path = className + ".class" + val entry = new ZipEntry(path) + if (storeOnly) { + // When using compression method `STORED`, the ZIP spec requires the CRC and compressed/ + // uncompressed sizes to be written before the data. The JarOutputStream could compute the + // values while writing the data, but not patch them into the stream after the fact. So we + // need to pre-compute them here. The compressed size is taken from size. + // https://stackoverflow.com/questions/1206970/how-to-create-uncompressed-zip-archive-in-java/5868403 + // With compression method `DEFLATED` JarOutputStream computes and sets the values. + entry.setSize(bytes.length) + crc.reset() + crc.update(bytes) + entry.setCrc(crc.getValue) + } + jarWriter.putNextEntry(entry) + try jarWriter.write(bytes, 0, bytes.length) + finally jarWriter.flush() + } + + override def close(): Unit = this.synchronized(jarWriter.close()) + } + + private sealed class DirClassWriter extends UnderlyingClassfileWriter { + val builtPaths = new ConcurrentHashMap[Path, java.lang.Boolean]() + val noAttributes = Array.empty[FileAttribute[_]] + + def ensureDirForPath(baseDir: Path, filePath: Path): Unit = { + import java.lang.Boolean.TRUE + val parent = filePath.getParent + if (!builtPaths.containsKey(parent)) { + try Files.createDirectories(parent, noAttributes: _*) + catch { + case e: FileAlreadyExistsException => + throw new FileConflictException(s"Can't create directory $parent; there is an existing (non-directory) file in its path", e) + } + builtPaths.put(baseDir, TRUE) + var current = parent + while ((current ne null) && (null ne builtPaths.put(current, TRUE))) { + current = current.getParent + } + } + } + + protected def getPath(className: InternalName, paths: CompilationUnitPaths) = paths.outputPath.resolve(className + ".class") + + protected def formatData(rawBytes: Array[Byte]) = rawBytes + + protected def qualifier: String = "" + + // the common case is that we are are creating a new file, and on MS Windows the create and truncate is expensive + // because there is not an options in the windows API that corresponds to this so the truncate is applied as a separate call + // even if the file is new. 
+ // as this is rare, its best to always try to create a new file, and it that fails, then open with truncate if that fails + + private val fastOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE) + private val fallbackOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING) + + override def write(className: InternalName, rawBytes: Array[Byte], paths: CompilationUnitPaths): Unit = try { + val path = getPath(className, paths) + val bytes = formatData(rawBytes) + ensureDirForPath(paths.outputPath, path) + val os = try FileChannel.open(path, fastOpenOptions) + catch { + case _: FileAlreadyExistsException => FileChannel.open(path, fallbackOpenOptions) + } + + os.write(ByteBuffer.wrap(bytes), 0L) + os.close() + } catch { + case e: FileConflictException => + frontendAccess.backendReporting.error(NoPosition, s"error writing $className$qualifier: ${e.getMessage}") + case e: java.nio.file.FileSystemException => + if (frontendAccess.compilerSettings.debug) + e.printStackTrace() + frontendAccess.backendReporting.error(NoPosition, s"error writing $className$qualifier: ${e.getClass.getName} ${e.getMessage}") + + } + + override def close(): Unit = () + } + + private final class AsmClassWriter(asmOutputPath: Path) extends DirClassWriter { + override protected def getPath(className: InternalName, paths: CompilationUnitPaths) = asmOutputPath.resolve(className + ".asmp") + + override protected def formatData(rawBytes: Array[Byte]) = AsmUtils.textify(AsmUtils.readClass(rawBytes)).getBytes(StandardCharsets.UTF_8) + + override protected def qualifier: String = " [for asmp]" + } + + private final class DumpClassWriter(dumpOutputPath: Path) extends DirClassWriter { + override protected def getPath(className: InternalName, paths: CompilationUnitPaths) = dumpOutputPath.resolve(className + ".class") + + override protected def qualifier: String = " [for dump]" + } + + private final class VirtualClassWriter extends UnderlyingClassfileWriter { + private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { + def ensureDirectory(dir: AbstractFile): AbstractFile = + if (dir.isDirectory) dir + else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory") + + var dir = base + val pathParts = clsName.split("[./]").toList + for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part + ensureDirectory(dir) fileNamed pathParts.last + suffix + } + + private def writeBytes(outFile: AbstractFile, bytes: Array[Byte]): Unit = { + val out = new DataOutputStream(outFile.bufferedOutput) + try out.write(bytes, 0, bytes.length) + finally out.close() + } + + override def write(className: InternalName, bytes: Array[Byte], paths: CompilationUnitPaths): Unit = { + val outFile = getFile(paths.outputDir, className, ".class") + writeBytes(outFile, bytes) + } + + override def close(): Unit = () + } + + private final class MultiClassWriter(underlying: Map[AbstractFile, UnderlyingClassfileWriter]) extends ClassfileWriter { + private def getUnderlying(paths: CompilationUnitPaths) = underlying.getOrElse(paths.outputDir, { + throw new Exception(s"Cannot determine output directory for ${paths.sourceFile} with output ${paths.outputDir}. 
Configured outputs are ${underlying.keySet}") + }) + + override def write(className: InternalName, bytes: Array[Byte], paths: CompilationUnitPaths): Unit = { + getUnderlying(paths).write(className, bytes, paths) + } + + override def close(): Unit = underlying.values.foreach(_.close()) + } + + private final class AllClassWriter(basic: ClassfileWriter, asmp: Option[UnderlyingClassfileWriter], dump: Option[UnderlyingClassfileWriter]) extends ClassfileWriter { + override def write(className: InternalName, bytes: Array[Byte], paths: CompilationUnitPaths): Unit = { + basic.write(className, bytes, paths) + asmp.foreach(_.write(className, bytes, paths)) + dump.foreach(_.write(className, bytes, paths)) + } + + override def close(): Unit = { + basic.close() + asmp.foreach(_.close()) + dump.foreach(_.close()) + } + } + + private final class WithStatsWriter(statistics: Statistics with Global#GlobalStats, underlying: ClassfileWriter) + extends ClassfileWriter { + override def write(className: InternalName, bytes: Array[Byte], paths: CompilationUnitPaths): Unit = { + val snap = statistics.startTimer(statistics.bcodeWriteTimer) + underlying.write(className, bytes, paths) + statistics.stopTimer(statistics.bcodeWriteTimer, snap) + } + + override def close(): Unit = underlying.close() + } + + } + + /** Can't output a file due to the state of the file system. */ + class FileConflictException(msg: String, cause: Throwable = null) extends IOException(msg, cause) +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala index 34a68869b6bf..0b01bbaab6aa 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala @@ -23,25 +23,24 @@ abstract class CodeGen[G <: Global](val global: G) extends PerRunInit { */ def genUnit(unit: CompilationUnit): Unit = { val generatedClasses = ListBuffer.empty[GeneratedClass] - val sourceFile = unit.source def genClassDef(cd: ClassDef): Unit = try { val sym = cd.symbol val position = sym.pos val fullSymbolName = sym.javaClassName val mainClassNode = genClass(cd, unit) - generatedClasses += GeneratedClass(mainClassNode, fullSymbolName, position, sourceFile, isArtifact = false) + generatedClasses += GeneratedClass(mainClassNode, fullSymbolName, position, isArtifact = false) if (bTypes.isTopLevelModuleClass(sym)) { if (sym.companionClass == NoSymbol) { val mirrorClassNode = genMirrorClass(sym, unit) - generatedClasses += GeneratedClass(mirrorClassNode, fullSymbolName, position, sourceFile, isArtifact = true) + generatedClasses += GeneratedClass(mirrorClassNode, fullSymbolName, position, isArtifact = true) } else log(s"No mirror class for module with linked class: ${sym.fullName}") } if (sym hasAnnotation coreBTypes.BeanInfoAttr) { val beanClassNode = genBeanInfoClass(cd, unit) - generatedClasses += GeneratedClass(beanClassNode, fullSymbolName, position, sourceFile, isArtifact = true) + generatedClasses += GeneratedClass(beanClassNode, fullSymbolName, position, isArtifact = true) } } catch { case ex: Throwable => diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala index 5e70220262c5..3d826901d807 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala @@ -9,6 +9,37 @@ package jvm import scala.tools.asm.Opcodes +/** + * Some notes about the backend's state and its initialization and release. 
+ * + * State that is used in a single run is allocated through `recordPerRunCache`, for example + * `ByteCodeRepository.compilingClasses` or `CallGraph.callsites`. This state is cleared at the end + * of each run. + * + * Some state needs to be re-initialized per run, for example `CoreBTypes` (computed from Symbols / + * Types) or the `GeneratedClassHandler` (depends on the compiler settings). This state is + * (re-) initialized in the `GenBCode.initialize` method. There are two categories: + * + * 1. State that is stored in a `var` field and (re-) assigned in the `initialize` method, for + * example the `GeneratedClassHandler` + * 2. State that uses the `PerRunInit` / `bTypes.perRunLazy` / `LazyVar` infrastructure, for + * example the types in `CoreBTypes` + * + * The reason to use the `LazyVar` infrastructure is to prevent eagerly computing all the state + * even if it's never used in a run. It can also be used to work around initialization ordering + * issues, just like ordinary lazy vals. For state that is known to be accessed, a `var` field is + * just fine. + * + * Typical `LazyVar` use: `lazy val state: LazyVar[T] = perRunLazy(component)(initializer)` + * - The `initializer` expression is executed lazily + * - When the initializer actually runs, it synchronizes on the + * `PostProcessorFrontendAccess.frontendLock` + * - The `component.initialize` method causes the `LazyVar` to be re-initialized on the next `get` + * - The `state` is itself a `lazy val` to make sure the `component.initialize` method only + * clears those `LazyVar`s that were ever accessed + * + * TODO: convert some uses of `LazyVar` to ordinary `var`. + */ abstract class GenBCode extends SubComponent { self => import global._ @@ -20,9 +51,8 @@ abstract class GenBCode extends SubComponent { val codeGen: CodeGen[global.type] = new { val bTypes: self.bTypes.type = self.bTypes } with CodeGen[global.type](global) - val postProcessor: PostProcessor { val bTypes: self.bTypes.type } = new { val bTypes: self.bTypes.type = self.bTypes } with PostProcessor(statistics) + val postProcessor: PostProcessor { val bTypes: self.bTypes.type } = new { val bTypes: self.bTypes.type = self.bTypes } with PostProcessor - // re-initialized per run, as it depends on compiler settings that may change var generatedClassHandler: GeneratedClassHandler = _ val phaseName = "jvm" @@ -43,26 +73,27 @@ abstract class GenBCode extends SubComponent { super.run() // invokes `apply` for each compilation unit generatedClassHandler.complete() } finally { - // When writing to a jar, we need to close the jarWriter. - generatedClassHandler.close() + this.close() } } } - /** - * Several backend components have state that needs to be initialized in each run, because - * it depends on frontend data that may change between runs: Symbols, Types, Settings.
- */ + /** See comment in [[GenBCode]] */ private def initialize(): Unit = { val initStart = statistics.startTimer(bcodeInitTimer) scalaPrimitives.init() bTypes.initialize() codeGen.initialize() postProcessorFrontendAccess.initialize() - postProcessor.initialize() + postProcessor.initialize(global) generatedClassHandler = GeneratedClassHandler(global) statistics.stopTimer(statistics.bcodeInitTimer, initStart) } + + private def close(): Unit = { + postProcessor.classfileWriter.close() + generatedClassHandler.close() + } } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala index 72b24c526e04..1b4e9483541c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala @@ -1,20 +1,21 @@ package scala.tools.nsc package backend.jvm +import java.nio.file.Path import java.util.concurrent.ThreadPoolExecutor.CallerRunsPolicy import java.util.concurrent._ import scala.collection.mutable.ListBuffer import scala.concurrent.duration.Duration -import scala.concurrent.{Await, ExecutionContext, ExecutionContextExecutor, Future, Promise} -import scala.reflect.internal.util.{NoPosition, Position, SourceFile} -import scala.tools.nsc.backend.jvm.PostProcessorFrontendAccess.BackendReporting +import scala.concurrent.{Await, ExecutionContext, ExecutionContextExecutor, Future} +import scala.reflect.internal.util.NoPosition +import scala.tools.nsc.backend.jvm.PostProcessorFrontendAccess.BufferingBackendReporting import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.profile.ThreadPoolFactory import scala.util.control.NonFatal /** - * Interface to handle post-processing (see [[PostProcessor]]) and classfile writing of generated + * Interface to handle post-processing and classfile writing (see [[PostProcessor]]) of generated * classes, potentially in parallel. */ private[jvm] sealed trait GeneratedClassHandler { @@ -33,7 +34,7 @@ private[jvm] sealed trait GeneratedClassHandler { /** * Invoked at the end of the jvm phase */ - def close(): Unit + def close(): Unit = () } private[jvm] object GeneratedClassHandler { @@ -41,29 +42,22 @@ private[jvm] object GeneratedClassHandler { import global._ import genBCode.postProcessor - val cfWriter = ClassfileWriter(global) - - val unitInfoLookup = settings.outputDirs.getSingleOutput match { - case Some(dir) => new SingleUnitInfo(postProcessor.bTypes.frontendAccess, dir) - case None => new LookupUnitInfo(postProcessor.bTypes.frontendAccess) - } val handler = settings.YaddBackendThreads.value match { case 1 => - new SyncWritingClassHandler(unitInfoLookup, postProcessor, cfWriter) + new SyncWritingClassHandler(postProcessor) case maxThreads => if (global.statistics.enabled) global.reporter.warning(global.NoPosition, "jvm statistics are not reliable with multi-threaded jvm class writing") - val additionalThreads = maxThreads -1 - // the queue size is taken to be large enough to ensure that the a 'CallerRun' will not take longer to - // run that it takes to exhaust the queue for the backend workers - // when the queue is full, the main thread will no some background work - // so this provides back-pressure + val additionalThreads = maxThreads - 1 + // The thread pool queue is limited in size. When it's full, the `CallerRunsPolicy` causes + // a new task to be executed on the main thread, which provides back-pressure. 
+ // The queue size is large enough to ensure that running a task on the main thread does + // not take longer than to exhaust the queue for the backend workers. val queueSize = if (settings.YmaxQueue.isSetByUser) settings.YmaxQueue.value else maxThreads * 2 val threadPoolFactory = ThreadPoolFactory(global, currentRun.jvmPhase) val javaExecutor = threadPoolFactory.newBoundedQueueFixedThreadPool(additionalThreads, queueSize, new CallerRunsPolicy, "non-ast") - val execInfo = ExecutorServiceInfo(additionalThreads, javaExecutor, javaExecutor.getQueue) - new AsyncWritingClassHandler(unitInfoLookup, postProcessor, cfWriter, execInfo) + new AsyncWritingClassHandler(postProcessor, javaExecutor) } if (settings.optInlinerEnabled || settings.optClosureInvocations) @@ -88,141 +82,125 @@ private[jvm] object GeneratedClassHandler { underlying.complete() } - def close(): Unit = underlying.close() + override def close(): Unit = underlying.close() override def toString: String = s"GloballyOptimising[$underlying]" } sealed abstract class WritingClassHandler(val javaExecutor: Executor) extends GeneratedClassHandler { - val unitInfoLookup: UnitInfoLookup - val cfWriter: ClassfileWriter + import postProcessor.bTypes.frontendAccess def tryStealing: Option[Runnable] - private val processingUnits = ListBuffer.empty[UnitResult] + private val processingUnits = ListBuffer.empty[CompilationUnitInPostProcess] def process(unit: GeneratedCompilationUnit): Unit = { - val unitProcess = new UnitResult(unitInfoLookup, unit.classes, unit.sourceFile) - postProcessUnit(unitProcess) - processingUnits += unitProcess + val unitInPostProcess = new CompilationUnitInPostProcess(unit.classes, unit.sourceFile, + frontendAccess.compilerSettings.outputDirectory(unit.sourceFile)) + postProcessUnit(unitInPostProcess) + processingUnits += unitInPostProcess } protected implicit val executionContext: ExecutionContextExecutor = ExecutionContext.fromExecutor(javaExecutor) - final def postProcessUnit(unitProcess: UnitResult): Unit = { - unitProcess.task = Future { - unitProcess.withBufferedReporter { + final def postProcessUnit(unitInPostProcess: CompilationUnitInPostProcess): Unit = { + unitInPostProcess.task = Future { + frontendAccess.withThreadLocalReporter(unitInPostProcess.bufferedReporting) { // we 'take' classes to reduce the memory pressure // as soon as the class is consumed and written, we release its data - unitProcess.takeClasses foreach { - postProcessor.sendToDisk(unitProcess, _, cfWriter) + unitInPostProcess.takeClasses() foreach { + postProcessor.sendToDisk(_, unitInPostProcess) } } } } - protected def getAndClearProcessingUnits(): List[UnitResult] = { + protected def takeProcessingUnits(): List[CompilationUnitInPostProcess] = { val result = processingUnits.result() processingUnits.clear() result } - override def complete(): Unit = { - val directBackendReporting = postProcessor.bTypes.frontendAccess.directBackendReporting + final def complete(): Unit = { + import frontendAccess.directBackendReporting - def stealWhileWaiting(unitResult: UnitResult, fut: Future[Unit]): Unit = { - while (!fut.isCompleted) + def stealWhileWaiting(unitInPostProcess: CompilationUnitInPostProcess): Unit = { + val task = unitInPostProcess.task + while (!task.isCompleted) tryStealing match { case Some(r) => r.run() - case None => Await.ready(fut, Duration.Inf) - } - //we know that they are complete by we need to check for exception - //but first get any reports - unitResult.relayReports(directBackendReporting) - fut.value.get.get // throw the exception 
if the future completed with a failure + case None => Await.ready(task, Duration.Inf) + } } - - /** We could consume the results when yey are ready, via use of a [[java.util.concurrent.CompletionService]] - * or something similar, but that would lead to non deterministic reports from backend threads, as the - * compilation unit could complete in a different order that when they were submitted, and thus the relayed - * reports would be in a different order. - * To avoid that non-determinism we read the result in order or submission, with a potential minimal performance - * loss, do to the memory being retained longer for tasks that it might otherwise. - * Most of the memory in the UnitResult is reclaimable anyway as the classes are deferenced after use - */ - getAndClearProcessingUnits().foreach { unitResult => + /** + * Go through each task in submission order, wait for it to finish and report its messages. + * When finding task that has not completed, steal work from the executor's queue and run + * it on the main thread (which we are on here), until the task is done. + * + * We could consume the results when they are ready, via use of a [[java.util.concurrent.CompletionService]] + * or something similar, but that would lead to non deterministic reports from backend threads, as the + * compilation unit could complete in a different order than when they were submitted, and thus the relayed + * reports would be in a different order. + * To avoid that non-determinism we read the result in order of submission, with a potential minimal performance + * loss, due to the memory being retained longer for tasks than it might otherwise. + * Most of the memory in the CompilationUnitInPostProcess is reclaimable anyway as the classes are dereferenced after use. 
+ */ + takeProcessingUnits().foreach { unitInPostProcess => try { - stealWhileWaiting(unitResult, unitResult.task) + stealWhileWaiting(unitInPostProcess) + unitInPostProcess.bufferedReporting.relayReports(directBackendReporting) + // We know the future is complete, throw the exception if it completed with a failure + unitInPostProcess.task.value.get.get } catch { case NonFatal(t) => t.printStackTrace() - postProcessor.bTypes.frontendAccess.backendReporting.error(NoPosition, s"unable to write ${unitResult.sourceFile} $t") + frontendAccess.backendReporting.error(NoPosition, s"unable to write ${unitInPostProcess.sourceFile} $t") } } } - - def close(): Unit = cfWriter.close() } - private final class SyncWritingClassHandler( - val unitInfoLookup: UnitInfoLookup, - val postProcessor: PostProcessor, - val cfWriter: ClassfileWriter) + private final class SyncWritingClassHandler(val postProcessor: PostProcessor) extends WritingClassHandler((r) => r.run()) { - override def toString: String = s"SyncWriting [$cfWriter]" + override def toString: String = s"SyncWriting" - override def tryStealing: Option[Runnable] = None + def tryStealing: Option[Runnable] = None } - private final case class ExecutorServiceInfo(maxThreads: Int, javaExecutor: ExecutorService, queue: BlockingQueue[Runnable]) - - private final class AsyncWritingClassHandler(val unitInfoLookup: UnitInfoLookup, - val postProcessor: PostProcessor, - val cfWriter: ClassfileWriter, - val executorServiceInfo: ExecutorServiceInfo) - extends WritingClassHandler(executorServiceInfo.javaExecutor) { + private final class AsyncWritingClassHandler(val postProcessor: PostProcessor, override val javaExecutor: ThreadPoolExecutor) + extends WritingClassHandler(javaExecutor) { - override def toString: String = s"AsyncWriting[additional threads:${executorServiceInfo.maxThreads} writer:$cfWriter]" + override def toString: String = s"AsyncWriting[additional threads:${javaExecutor.getMaximumPoolSize}]" override def close(): Unit = { super.close() - executorServiceInfo.javaExecutor.shutdownNow() + javaExecutor.shutdownNow() } - override def tryStealing: Option[Runnable] = Option(executorServiceInfo.queue.poll()) + def tryStealing: Option[Runnable] = Option(javaExecutor.getQueue.poll()) } } -//we avoid the lock on frontendSync for the common case, when compiling to a single target -sealed trait UnitInfoLookup { - def outputDir(source:AbstractFile) : AbstractFile - val frontendAccess: PostProcessorFrontendAccess -} -final class SingleUnitInfo(val frontendAccess: PostProcessorFrontendAccess, constantOutputDir:AbstractFile) extends UnitInfoLookup { - override def outputDir(source: AbstractFile) = constantOutputDir -} -final class LookupUnitInfo(val frontendAccess: PostProcessorFrontendAccess) extends UnitInfoLookup { - lazy val outputDirectories = frontendAccess.compilerSettings.outputDirectories - override def outputDir(source: AbstractFile) = outputDirectories.outputDirFor(source) -} -sealed trait SourceUnit { - def withBufferedReporter[T](fn: => T): T +/** Paths for a compilation unit, used during classfile writing */ +sealed trait CompilationUnitPaths { + val sourceFile: AbstractFile val outputDir: AbstractFile - val outputPath: java.nio.file.Path - def sourceFile:AbstractFile + def outputPath: Path = outputDir.file.toPath // `toPath` caches its result } -final class UnitResult(unitInfoLookup: UnitInfoLookup, _classes : List[GeneratedClass], val sourceFile: AbstractFile) extends SourceUnit with BackendReporting { - lazy val outputDir = 
unitInfoLookup.outputDir(sourceFile) - lazy val outputPath = outputDir.file.toPath - - private var classes: List[GeneratedClass] = _classes - - def copyClasses = classes - +/** + * State for a compilation unit being post-processed. + * - Holds the classes to post-process (released for GC when no longer used) + * - Keeps a reference to the future that runs the post-processor + * - Buffers messages reported during post-processing + */ +final class CompilationUnitInPostProcess( + private var classes: List[GeneratedClass], + val sourceFile: AbstractFile, + val outputDir: AbstractFile) extends CompilationUnitPaths { def takeClasses(): List[GeneratedClass] = { val c = classes classes = Nil @@ -232,64 +210,5 @@ final class UnitResult(unitInfoLookup: UnitInfoLookup, _classes : List[Generated /** the main async task submitted onto the scheduler */ var task: Future[Unit] = _ - def relayReports(backendReporting: BackendReporting): Unit = this.synchronized { - if (bufferedReports nonEmpty) { - for (report: Report <- bufferedReports.reverse) { - report.relay(backendReporting) - } - } - bufferedReports = Nil - } - - // We optimise access to the buffered reports for the common case - that there are no warning/errors to report - // We could use a listBuffer etc - but that would be extra allocation in the common case - // Note - all access is externally synchronized, as this allow the reports to be generated in on thread and - // consumed in another - private var bufferedReports = List.empty[Report] - - override def withBufferedReporter[T](fn: => T) = unitInfoLookup.frontendAccess.withLocalReporter(this)(fn) - - override def inlinerWarning(pos: Position, message: String): Unit = - this.synchronized(bufferedReports ::= new ReportInlinerWarning(pos, message)) - - override def error(pos: Position, message: String): Unit = - this.synchronized(bufferedReports ::= new ReportError(pos, message)) - - override def warning(pos: Position, message: String): Unit = - this.synchronized(bufferedReports ::= new ReportWarning(pos, message)) - - override def inform(message: String): Unit = - this.synchronized(bufferedReports ::= new ReportInform(message)) - - override def log(message: String): Unit = - this.synchronized(bufferedReports ::= new ReportLog(message)) - - private sealed trait Report { - def relay(backendReporting: BackendReporting): Unit - } - - private class ReportInlinerWarning(pos: Position, message: String) extends Report { - override def relay(reporting: BackendReporting): Unit = - reporting.inlinerWarning(pos, message) - } - - private class ReportError(pos: Position, message: String) extends Report { - override def relay(reporting: BackendReporting): Unit = - reporting.error(pos, message) - } - - private class ReportWarning(pos: Position, message: String) extends Report { - override def relay(reporting: BackendReporting): Unit = - reporting.warning(pos, message) - } - - private class ReportInform(message: String) extends Report { - override def relay(reporting: BackendReporting): Unit = - reporting.inform(message) - } - - private class ReportLog(message: String) extends Report { - override def relay(reporting: BackendReporting): Unit = - reporting.log(message) - } + val bufferedReporting = new BufferingBackendReporting } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index 67bd45b19284..86eeecdbe736 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ 
b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -14,7 +14,7 @@ import scala.tools.nsc.backend.jvm.opt._ * Implements late stages of the backend that don't depend on a Global instance, i.e., * optimizations, post-processing and classfile serialization and writing. */ -abstract class PostProcessor(statistics: Statistics with BackendStats) extends PerRunInit { +abstract class PostProcessor extends PerRunInit { self => val bTypes: BTypes @@ -29,17 +29,21 @@ abstract class PostProcessor(statistics: Statistics with BackendStats) extends P val closureOptimizer : ClosureOptimizer { val postProcessor: self.type } = new { val postProcessor: self.type = self } with ClosureOptimizer val callGraph : CallGraph { val postProcessor: self.type } = new { val postProcessor: self.type = self } with CallGraph val bTypesFromClassfile : BTypesFromClassfile { val postProcessor: self.type } = new { val postProcessor: self.type = self } with BTypesFromClassfile + val classfileWriters : ClassfileWriters { val postProcessor: self.type } = new { val postProcessor: self.type = self } with ClassfileWriters + + var classfileWriter: classfileWriters.ClassfileWriter = _ private val caseInsensitively = recordPerRunJavaMapCache(new ConcurrentHashMap[String, String]) - override def initialize(): Unit = { - super.initialize() + def initialize(global: Global): Unit = { + this.initialize() backendUtils.initialize() inlinerHeuristics.initialize() byteCodeRepository.initialize() + classfileWriter = classfileWriters.ClassfileWriter(global) } - def sendToDisk(unit:SourceUnit, clazz: GeneratedClass, writer: ClassfileWriter): Unit = { + def sendToDisk(clazz: GeneratedClass, paths: CompilationUnitPaths): Unit = { val classNode = clazz.classNode val internalName = classNode.name val bytes = try { @@ -68,7 +72,7 @@ abstract class PostProcessor(statistics: Statistics with BackendStats) extends P if (AsmUtils.traceSerializedClassEnabled && internalName.contains(AsmUtils.traceSerializedClassPattern)) AsmUtils.traceClass(bytes) - writer.write(unit, internalName, bytes) + classfileWriter.write(internalName, bytes, paths) } } private def warnCaseInsensitiveOverwrite(clazz: GeneratedClass): Unit = { @@ -105,7 +109,8 @@ abstract class PostProcessor(statistics: Statistics with BackendStats) extends P } def localOptimizations(classNode: ClassNode): Unit = { - statistics.timed(statistics.methodOptTimer)(localOpt.methodOptimizations(classNode)) + val stats = frontendAccess.unsafeStatistics + stats.timed(stats.methodOptTimer)(localOpt.methodOptimizations(classNode)) } def setInnerClasses(classNode: ClassNode): Unit = { @@ -145,5 +150,5 @@ abstract class PostProcessor(statistics: Statistics with BackendStats) extends P /** * The result of code generation. [[isArtifact]] is `true` for mirror and bean-info classes. 
*/ -case class GeneratedClass(classNode: ClassNode, sourceClassName: String, position: Position, sourceFile: SourceFile, isArtifact: Boolean) +case class GeneratedClass(classNode: ClassNode, sourceClassName: String, position: Position, isArtifact: Boolean) case class GeneratedCompilationUnit(sourceFile: AbstractFile, classes: List[GeneratedClass]) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala index 33e82a683bb2..317b2873e0b9 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala @@ -2,7 +2,7 @@ package scala.tools.nsc package backend.jvm import scala.collection.generic.Clearable -import scala.reflect.internal.util.{JavaClearable, Position} +import scala.reflect.internal.util.{JavaClearable, Position, Statistics} import scala.reflect.io.AbstractFile import scala.tools.nsc.backend.jvm.BTypes.InternalName import java.util.{Collection => JCollection, Map => JMap} @@ -21,10 +21,15 @@ sealed abstract class PostProcessorFrontendAccess { def compilerSettings: CompilerSettings - def withLocalReporter[T](reporter: BackendReporting)(fn: => T): T + def withThreadLocalReporter[T](reporter: BackendReporting)(fn: => T): T def backendReporting: BackendReporting def directBackendReporting: BackendReporting + /** + * Statistics are not thread-safe, they can only be used if `compilerSettings.backendThreads == 1` + */ + def unsafeStatistics: Statistics with BackendStats + def backendClassPath: BackendClassPath def getEntryPoints: List[String] @@ -44,7 +49,7 @@ object PostProcessorFrontendAccess { def target: String - def outputDirectories : Settings#OutputDirs + def outputDirectory(source: AbstractFile): AbstractFile def optAddToBytecodeRepository: Boolean def optBuildCallGraph: Boolean @@ -84,6 +89,65 @@ object PostProcessorFrontendAccess { def log(message: String): Unit } + final class BufferingBackendReporting extends BackendReporting { + // We optimise access to the buffered reports for the common case - that there are no warning/errors to report + // We could use a listBuffer etc - but that would be extra allocation in the common case + // Note - all access is externally synchronized, as this allow the reports to be generated in on thread and + // consumed in another + private var bufferedReports = List.empty[Report] + + def inlinerWarning(pos: Position, message: String): Unit = + this.synchronized(bufferedReports ::= new ReportInlinerWarning(pos, message)) + + def error(pos: Position, message: String): Unit = + this.synchronized(bufferedReports ::= new ReportError(pos, message)) + + def warning(pos: Position, message: String): Unit = + this.synchronized(bufferedReports ::= new ReportWarning(pos, message)) + + def inform(message: String): Unit = + this.synchronized(bufferedReports ::= new ReportInform(message)) + + def log(message: String): Unit = + this.synchronized(bufferedReports ::= new ReportLog(message)) + + def relayReports(toReporting: BackendReporting): Unit = this.synchronized { + if (bufferedReports.nonEmpty) { + bufferedReports.reverse.foreach(_.relay(toReporting)) + bufferedReports = Nil + } + } + + private sealed trait Report { + def relay(backendReporting: BackendReporting): Unit + } + + private class ReportInlinerWarning(pos: Position, message: String) extends Report { + override def relay(reporting: BackendReporting): Unit = + reporting.inlinerWarning(pos, 
message) + } + + private class ReportError(pos: Position, message: String) extends Report { + override def relay(reporting: BackendReporting): Unit = + reporting.error(pos, message) + } + + private class ReportWarning(pos: Position, message: String) extends Report { + override def relay(reporting: BackendReporting): Unit = + reporting.warning(pos, message) + } + + private class ReportInform(message: String) extends Report { + override def relay(reporting: BackendReporting): Unit = + reporting.inform(message) + } + + private class ReportLog(message: String) extends Report { + override def relay(reporting: BackendReporting): Unit = + reporting.log(message) + } + } + sealed trait BackendClassPath { def findClassFile(className: String): Option[AbstractFile] } @@ -102,7 +166,10 @@ object PostProcessorFrontendAccess { val debug: Boolean = s.debug val target: String = s.target.value - val outputDirectories = s.outputDirs + + private val singleOutDir = s.outputDirs.getSingleOutput + // the call to `outputDirFor` should be frontendSynch'd, but we assume that the setting is not mutated during the backend + def outputDirectory(source: AbstractFile): AbstractFile = singleOutDir.getOrElse(s.outputDirs.outputDirFor(source)) val optAddToBytecodeRepository: Boolean = s.optAddToBytecodeRepository val optBuildCallGraph: Boolean = s.optBuildCallGraph @@ -139,7 +206,7 @@ object PostProcessorFrontendAccess { private lazy val localReporter = perRunLazy(this)(new ThreadLocal[BackendReporting]) - override def withLocalReporter[T](reporter: BackendReporting)(fn: => T): T = { + override def withThreadLocalReporter[T](reporter: BackendReporting)(fn: => T): T = { val threadLocal = localReporter.get val old = threadLocal.get() threadLocal.set(reporter) @@ -156,19 +223,24 @@ object PostProcessorFrontendAccess { def inlinerWarning(pos: Position, message: String): Unit = frontendSynch { currentRun.reporting.inlinerWarning(pos, message) } + def error(pos: Position, message: String): Unit = frontendSynch { reporter.error(pos, message) } + def warning(pos: Position, message: String): Unit = frontendSynch { global.warning(pos, message) } + def inform(message: String): Unit = frontendSynch { global.inform(message) } + def log(message: String): Unit = frontendSynch { global.log(message) } } + def unsafeStatistics: Statistics with BackendStats = global.statistics private lazy val cp = perRunLazy(this)(frontendSynch(optimizerClassPath(classPath))) object backendClassPath extends BackendClassPath { @@ -196,4 +268,4 @@ object PostProcessorFrontendAccess { cache } } -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ThreadFactories.scala b/src/compiler/scala/tools/nsc/backend/jvm/ThreadFactories.scala deleted file mode 100644 index 97409b080ecb..000000000000 --- a/src/compiler/scala/tools/nsc/backend/jvm/ThreadFactories.scala +++ /dev/null @@ -1,20 +0,0 @@ -package scala.tools.nsc.backend.jvm - -import java.util.concurrent.ThreadFactory -import java.util.concurrent.atomic.AtomicInteger - -class CommonThreadFactory(namePrefix:String, - threadGroup: ThreadGroup = Thread.currentThread().getThreadGroup, - daemon:Boolean = true, - priority:Int = Thread.NORM_PRIORITY) extends ThreadFactory { - private val group: ThreadGroup = Thread.currentThread().getThreadGroup - private val threadNumber: AtomicInteger = new AtomicInteger(1) - - - override def newThread(r: Runnable): Thread = { - val t: Thread = new Thread(group, r, namePrefix + threadNumber.getAndIncrement, 0) - if (t.isDaemon != daemon) 
t.setDaemon(daemon) - if (t.getPriority != priority) t.setPriority(priority) - t - } -} diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index 198a3e06bc68..85f65f6c6911 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -309,16 +309,13 @@ class MutableSettings(val errorFn: String => Unit) def isBelow(srcDir: AbstractFile, outDir: AbstractFile) = src.path.startsWith(srcDir.path) - singleOutDir match { - case Some(d) => d - case None => - (outputs find (isBelow _).tupled) match { - case Some((_, d)) => d - case _ => - throw new FatalError("Could not find an output directory for " - + src.path + " in " + outputs) - } - } + singleOutDir.getOrElse(outputs.find((isBelow _).tupled) match { + case Some((_, d)) => d + case _ => + throw new FatalError("Could not find an output directory for " + + src.path + " in " + outputs) + } + ) } /** Return the source file path(s) which correspond to the given From 801c5dabfba4051706139910b17b05e2a8f50976 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 15 Feb 2018 15:51:40 +1000 Subject: [PATCH 1219/2793] Issue macro expansion errors during "late expansion" The typechecker defers macro expansion while the macro application has type arguments that are yet to be determined. Instead, it proceeds with typechecking the surrounding expression, and then performs another pass on the resulting tree once type inference has fixed the type parameters. This "late expansion" typechecks the macro application with a typer focussed on the original Context of the application, and any errors issued by the macro (either explicit c.error or c.abort, or an exception) are issued to that context's reporter. However, if that reporter was set up to buffer errors, rather than immediately report them, these macro expansion errors would sit in the buffer and never be issued, and the unexpanded macro application would remain in the tree, doomed to be flagged with a "macro not expanded" error in refchecks. This commit copies any buffered errors to the currently active typer context reporter after late expansion. I refactored the existing code that did this to make it more easily reusable in this context.
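For illustration, the sketch below shows the general buffer-and-relay pattern this fix relies on: a local reporter buffers errors produced during deferred work, and those errors are replayed into the enclosing reporter once the work completes. The names (`SimpleReporter`, `BufferingReporter`, `ImmediateReporter`) are hypothetical and greatly simplified; this is not the compiler's Context/ContextReporter API.

```scala
// Hypothetical, simplified sketch of buffering errors during deferred work and
// relaying them to the enclosing reporter afterwards. Not the compiler's API.
trait SimpleReporter {
  def error(pos: Int, msg: String): Unit
}

// Reports immediately, like a non-buffering context reporter.
class ImmediateReporter extends SimpleReporter {
  def error(pos: Int, msg: String): Unit = println(s"error at $pos: $msg")
}

// Buffers errors while deferred work (e.g. a late macro expansion) runs.
class BufferingReporter extends SimpleReporter {
  private var buffered = List.empty[(Int, String)]
  def error(pos: Int, msg: String): Unit = buffered = (pos, msg) :: buffered
  def hasErrors: Boolean = buffered.nonEmpty
  // Replay buffered errors into the target reporter in the order they occurred.
  def propagateErrorsTo(target: SimpleReporter): Unit = {
    buffered.reverse.foreach { case (pos, msg) => target.error(pos, msg) }
    buffered = Nil
  }
}

object LateExpansionSketch extends App {
  val enclosing = new ImmediateReporter
  val local     = new BufferingReporter
  local.error(42, "exception during macro expansion") // buffered, not yet visible
  // Without this propagation step the error stays in the discarded buffer and the
  // failed expansion surfaces later only as a generic "macro not expanded" error.
  if (local.hasErrors) local.propagateErrorsTo(enclosing)
}
```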
--- .../tools/nsc/typechecker/Contexts.scala | 27 +++++++++++-------- .../scala/tools/nsc/typechecker/Macros.scala | 7 ++++- .../scala/tools/nsc/typechecker/Typers.scala | 6 +++-- test/files/neg/t10073.check | 4 +++ test/files/neg/t10073.scala | 8 ++++++ test/files/neg/t10073b.check | 4 +++ test/files/neg/t10073b.scala | 8 ++++++ 7 files changed, 50 insertions(+), 14 deletions(-) create mode 100644 test/files/neg/t10073.check create mode 100644 test/files/neg/t10073.scala create mode 100644 test/files/neg/t10073b.check create mode 100644 test/files/neg/t10073b.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 0351d2807f0b..a4f191720ad5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1306,18 +1306,23 @@ trait Contexts { self: Analyzer => res } - @inline final def propagatingErrorsTo[T](target: ContextReporter)(expr: => T): T = { - val res = expr // TODO: make sure we're okay skipping the try/finally overhead - if ((this ne target) && hasErrors) { // `this eq target` in e.g., test/files/neg/divergent-implicit.scala - // assert(target.errorBuffer ne _errorBuffer) - target ++= errors - // TODO: is clearAllErrors necessary? (no tests failed when dropping it) - // NOTE: even though `this ne target`, it may still be that `target.errorBuffer eq _errorBuffer`, - // so don't clear the buffer, but null out the reference so that a new one will be created when necessary (should be never??) - // (we should refactor error buffering to avoid mutation on shared buffers) - clearAllErrors() + final def propagateErrorsTo[T](target: ContextReporter): Unit = { + if (this ne target) { // `this eq target` in e.g., test/files/neg/divergent-implicit.scala + if (hasErrors) { + // assert(target.errorBuffer ne _errorBuffer) + if (target.isBuffering) { + target ++= errors + } else { + errors.foreach(e => target.handleError(e.errPos, e.errMsg)) + } + // TODO: is clearAllErrors necessary? (no tests failed when dropping it) + // NOTE: even though `this ne target`, it may still be that `target.errorBuffer eq _errorBuffer`, + // so don't clear the buffer, but null out the reference so that a new one will be created when necessary (should be never??) + // (we should refactor error buffering to avoid mutation on shared buffers) + clearAllErrors() + } + // TODO propagate warnings if no errors, like `silent` does? 
} - res } protected final def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 637864c92c85..b17cdc0ee47c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -906,7 +906,12 @@ trait Macros extends MacroRuntimes with Traces with Helpers { context.implicitsEnabled = typer.context.implicitsEnabled context.enrichmentEnabled = typer.context.enrichmentEnabled context.macrosEnabled = typer.context.macrosEnabled - macroExpand(newTyper(context), tree, EXPRmode, WildcardType) + try { + macroExpand(newTyper(context), tree, EXPRmode, WildcardType) + } finally { + if (context.reporter.isBuffering) + context.reporter.propagateErrorsTo(typer.context.reporter) + } case _ => tree }) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 08e5d73dfbce..09f978bc4c93 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -482,8 +482,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (cond) typerWithLocalContext(c)(f) else f(this) @inline - final def typerWithLocalContext[T](c: Context)(f: Typer => T): T = - c.reporter.propagatingErrorsTo(context.reporter)(f(newTyper(c))) + final def typerWithLocalContext[T](c: Context)(f: Typer => T): T = { + try f(newTyper(c)) + finally c.reporter.propagateErrorsTo(context.reporter) + } /** The typer for a label definition. If this is part of a template we * first have to enter the label definition. diff --git a/test/files/neg/t10073.check b/test/files/neg/t10073.check new file mode 100644 index 000000000000..9782135040db --- /dev/null +++ b/test/files/neg/t10073.check @@ -0,0 +1,4 @@ +t10073.scala:7: error: tpe Unused is an unresolved spliceable type + "".yo() + ^ +one error found diff --git a/test/files/neg/t10073.scala b/test/files/neg/t10073.scala new file mode 100644 index 000000000000..06f3167854a3 --- /dev/null +++ b/test/files/neg/t10073.scala @@ -0,0 +1,8 @@ +class Yo[Unused] { + def yo(hasDefault: Any = ""): String = "" +} + +class MacroNotExpanded { + implicit def toYo[Unused](a: Any)(implicit ct: reflect.ClassTag[Unused]): Yo[Unused] = new Yo[Unused] + "".yo() +} \ No newline at end of file diff --git a/test/files/neg/t10073b.check b/test/files/neg/t10073b.check new file mode 100644 index 000000000000..309fea6b9ac4 --- /dev/null +++ b/test/files/neg/t10073b.check @@ -0,0 +1,4 @@ +t10073b.scala:7: error: tpe Unused is an unresolved spliceable type + "".yo() + ^ +one error found diff --git a/test/files/neg/t10073b.scala b/test/files/neg/t10073b.scala new file mode 100644 index 000000000000..21e32587a9c7 --- /dev/null +++ b/test/files/neg/t10073b.scala @@ -0,0 +1,8 @@ +class Yo[Unused] { + def yo(hasDefault: Any = ""): String = "" +} + +class MacroNotExpanded { + implicit def toYo[Unused](a: Any)(implicit ct: reflect.ClassTag[Unused]): Yo[Unused] = new Yo[Unused] + "".yo() +} From 39dcb7ec7a7095a21dbc6451dcbdc6edbbd5444c Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 18 Feb 2018 16:45:11 -0800 Subject: [PATCH 1220/2793] Unleash test of existential inference It doesn't entirely work, so perhaps the test is noise. 
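For context, a rough sketch (not the actual test source; the real `f` lives in unchanged lines not shown in the diff below): the members being un-commented all call an `f` whose result type mentions a local class, so an existential has to be inferred, along these lines:

```scala
import scala.language.existentials

class A {
  // The inferred result type can only be written existentially, roughly
  // Bob.type forSome { type Bob <: Product with Serializable }, which is
  // what the re-enabled quux/bippy members exercise.
  def f() = { case class Bob(); Bob }

  val quux0 = f()
  lazy val quux2 = f() // one of the members this commit un-comments
}
```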
--- test/files/pos/existentials.scala | 49 ++++++++++++++++++++++++------- 1 file changed, 39 insertions(+), 10 deletions(-) diff --git a/test/files/pos/existentials.scala b/test/files/pos/existentials.scala index 9ca86d13d80d..e54115efbd95 100644 --- a/test/files/pos/existentials.scala +++ b/test/files/pos/existentials.scala @@ -1,3 +1,5 @@ +import language.existentials + /** All of these should work, some don't yet. * !!! */ @@ -6,17 +8,44 @@ class A { val quux0 = f() def quux1 = f() - // lazy val quux2 = f() - // def quux3 = { - // lazy val quux3a = f() - // quux3a - // } + lazy val quux2 = f() + def quux3 = { + lazy val quux3a = f() + quux3a + } + // spurious warning until scala/bug#10612, cf test/files/neg/t7187.scala val bippy0 = f _ def bippy1 = f _ - // lazy val bippy2 = f _ - // val bippy3 = { - // lazy val bippy3a = f _ - // bippy3a - // } + lazy val bippy2 = f _ + /* + val bippy3 = { + lazy val bippy3a = f _ + bippy3a + } + */ } + +/* +error: scala.reflect.internal.Types$TypeError: type mismatch; + found : () => Bob.type(in value $anonfun) forSome { type Bob.type(in value $anonfun) <: scala.runtime.AbstractFunction0[Bob(in value $anonfun)] with Serializable{case def unapply(x$0: Bob(in value $anonfun)): Boolean} with Singleton; type Bob(in value $anonfun) <: Product with Serializable{def copy(): Bob(in value $anonfun)} } + required: () => (some other)Bob.type(in value $anonfun) forSome { type (some other)Bob.type(in value $anonfun) <: scala.runtime.AbstractFunction0[(some other)Bob(in value $anonfun)] with Serializable{case def unapply(x$0: (some other)Bob(in value $anonfun)): Boolean} with Singleton; type (some other)Bob(in value $anonfun) <: Product with Serializable{def copy(): (some other)Bob(in value $anonfun)} } + + at scala.tools.nsc.typechecker.Contexts$ThrowingReporter.handleError(Contexts.scala:1426) + at scala.tools.nsc.typechecker.Contexts$ContextReporter.issue(Contexts.scala:1278) + at scala.tools.nsc.typechecker.Contexts$Context.issue(Contexts.scala:584) + at scala.tools.nsc.typechecker.ContextErrors$ErrorUtils$.issueTypeError(ContextErrors.scala:106) + at scala.tools.nsc.typechecker.ContextErrors$ErrorUtils$.issueNormalTypeError(ContextErrors.scala:99) + at scala.tools.nsc.typechecker.ContextErrors$TyperContextErrors$TyperErrorGen$.AdaptTypeError(ContextErrors.scala:219) + at scala.tools.nsc.typechecker.Typers$Typer.adaptMismatchedSkolems$1(Typers.scala:1058) + at scala.tools.nsc.typechecker.Typers$Typer.lastTry$1(Typers.scala:1069) + at scala.tools.nsc.typechecker.Typers$Typer.adaptExprNotFunMode$1(Typers.scala:1124) + at scala.tools.nsc.typechecker.Typers$Typer.vanillaAdapt$1(Typers.scala:1170) + at scala.tools.nsc.typechecker.Typers$Typer.adapt(Typers.scala:1214) + at scala.tools.nsc.typechecker.Typers$Typer.runTyper$1(Typers.scala:5598) + at scala.tools.nsc.typechecker.Typers$Typer.typedInternal(Typers.scala:5616) + at scala.tools.nsc.typechecker.Typers$Typer.body$2(Typers.scala:5557) + at scala.tools.nsc.typechecker.Typers$Typer.typed(Typers.scala:5562) + at scala.tools.nsc.typechecker.Typers$Typer.$anonfun$typedArg$1(Typers.scala:3247) + at scala.tools.nsc.typechecker.Typers$Typer.typedArg(Typers.scala:477) +*/ From aa39836a589d282e71f7931239e37274132de47a Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Mon, 19 Feb 2018 08:48:09 +0000 Subject: [PATCH 1221/2793] review feedback --- .../nsc/backend/jvm/ClassfileWriters.scala | 5 +++-- .../nsc/backend/jvm/GeneratedClassHandler.scala | 17 ++++++----------- 2 files changed, 9 insertions(+), 13 deletions(-) diff 
--git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala index 125a343de704..840a71311ff5 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala @@ -83,7 +83,7 @@ abstract class ClassfileWriters { } val enableStats = statistics.enabled && settings.YaddBackendThreads.value == 1 - if (enableStats) new WithStatsWriter(statistics, withAdditionalFormats) else withAdditionalFormats + if (enableStats) new WithStatsWriter(withAdditionalFormats) else withAdditionalFormats } /** @@ -257,9 +257,10 @@ abstract class ClassfileWriters { } } - private final class WithStatsWriter(statistics: Statistics with Global#GlobalStats, underlying: ClassfileWriter) + private final class WithStatsWriter(underlying: ClassfileWriter) extends ClassfileWriter { override def write(className: InternalName, bytes: Array[Byte], paths: CompilationUnitPaths): Unit = { + val statistics = frontendAccess.unsafeStatistics val snap = statistics.startTimer(statistics.bcodeWriteTimer) underlying.write(className, bytes, paths) statistics.stopTimer(statistics.bcodeWriteTimer, snap) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala index 1b4e9483541c..c4350e2ca052 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala @@ -95,8 +95,8 @@ private[jvm] object GeneratedClassHandler { private val processingUnits = ListBuffer.empty[CompilationUnitInPostProcess] def process(unit: GeneratedCompilationUnit): Unit = { - val unitInPostProcess = new CompilationUnitInPostProcess(unit.classes, unit.sourceFile, - frontendAccess.compilerSettings.outputDirectory(unit.sourceFile)) + val unitInPostProcess = new CompilationUnitInPostProcess(unit.classes, + CompilationUnitPaths(unit.sourceFile, frontendAccess.compilerSettings.outputDirectory(unit.sourceFile))) postProcessUnit(unitInPostProcess) processingUnits += unitInPostProcess } @@ -109,7 +109,7 @@ private[jvm] object GeneratedClassHandler { // we 'take' classes to reduce the memory pressure // as soon as the class is consumed and written, we release its data unitInPostProcess.takeClasses() foreach { - postProcessor.sendToDisk(_, unitInPostProcess) + postProcessor.sendToDisk(_, unitInPostProcess.paths) } } } @@ -155,7 +155,7 @@ private[jvm] object GeneratedClassHandler { } catch { case NonFatal(t) => t.printStackTrace() - frontendAccess.backendReporting.error(NoPosition, s"unable to write ${unitInPostProcess.sourceFile} $t") + frontendAccess.backendReporting.error(NoPosition, s"unable to write ${unitInPostProcess.paths.sourceFile} $t") } } } @@ -185,9 +185,7 @@ private[jvm] object GeneratedClassHandler { } /** Paths for a compilation unit, used during classfile writing */ -sealed trait CompilationUnitPaths { - val sourceFile: AbstractFile - val outputDir: AbstractFile +final case class CompilationUnitPaths(sourceFile: AbstractFile, outputDir: AbstractFile) { def outputPath: Path = outputDir.file.toPath // `toPath` caches its result } @@ -197,10 +195,7 @@ sealed trait CompilationUnitPaths { * - Keeps a reference to the future that runs the post-processor * - Buffers messages reported during post-processing */ -final class CompilationUnitInPostProcess( - private var classes: List[GeneratedClass], - val sourceFile: AbstractFile, - val 
outputDir: AbstractFile) extends CompilationUnitPaths { +final class CompilationUnitInPostProcess(private var classes: List[GeneratedClass], val paths: CompilationUnitPaths) { def takeClasses(): List[GeneratedClass] = { val c = classes classes = Nil From 149b66070d959b275fb378fba0739c1efba4e409 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 20 Feb 2018 18:43:05 -0800 Subject: [PATCH 1222/2793] Test status quo nowarn for vanishing local --- test/files/neg/warn-unused-privates.scala | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/test/files/neg/warn-unused-privates.scala b/test/files/neg/warn-unused-privates.scala index 7df4dfcfa785..280d6b15a2a2 100644 --- a/test/files/neg/warn-unused-privates.scala +++ b/test/files/neg/warn-unused-privates.scala @@ -236,3 +236,10 @@ object `classof something` { private class intrinsically def f = classOf[intrinsically].toString() } + +trait `short comings` { + def f: Int = { + val x = 42 + 17 + } +} From 2791989109d101a9d8356dd1c84f5bd5ac3ebd81 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 8 Dec 2017 20:47:50 +1000 Subject: [PATCH 1223/2793] Remove statistics reporting code from some hot paths --- src/reflect/scala/reflect/internal/SymbolTable.scala | 3 --- src/reflect/scala/reflect/internal/Symbols.scala | 11 ----------- 2 files changed, 14 deletions(-) diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 0d4a3500ce0a..76eabcfae52f 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -188,8 +188,6 @@ abstract class SymbolTable extends macros.Universe final def atPhaseStack: List[Phase] = phStack.toList final def phase: Phase = { - if (StatisticsStatics.areSomeColdStatsEnabled) - statistics.incCounter(statistics.phaseCounter) ph } @@ -462,7 +460,6 @@ abstract class SymbolTable extends macros.Universe trait SymbolTableStats { self: TypesStats with Statistics => - val phaseCounter = newCounter("#phase calls") // Defined here because `SymbolLoaders` is defined in `scala.tools.nsc` // and only has access to the `statistics` definition from `scala.reflect`. val classReadNanos = newSubTimer("time classfilereading", typerNanos) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 50ff562e1143..677a270a69ec 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -769,7 +769,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => } final def flags: Long = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(flagsCount) val fs = _rawflags & phase.flagMask (fs | ((fs & LateFlags) >>> LateShift)) & ~((fs & AntiFlags) >>> AntiShift) } @@ -1199,7 +1198,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => * `assertOwner` aborts compilation immediately if called on NoSymbol. 
*/ def owner: Symbol = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(ownerCount) rawowner } final def safeOwner: Symbol = if (this eq NoSymbol) NoSymbol else owner @@ -2785,7 +2783,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => private[this] var _rawname: TermName = initName def rawname = _rawname def name = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nameCount) _rawname } override def name_=(name: Name) { @@ -2917,13 +2914,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def moduleClass = referenced override def owner = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(ownerCount) // a non-static module symbol gets the METHOD flag in uncurry's info transform -- see isModuleNotMethod if (!isMethod && needsFlatClasses) rawowner.owner else rawowner } override def name: TermName = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nameCount) if (!isMethod && needsFlatClasses) { if (flatname eq null) flatname = nme.flattenedName(rawowner.name, rawname) @@ -3055,7 +3050,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => def rawname = _rawname def name = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nameCount) _rawname } final def asNameType(n: Name) = n.toTypeName @@ -3342,12 +3336,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => } override def owner: Symbol = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(ownerCount) if (needsFlatClasses) rawowner.owner else rawowner } override def name: TypeName = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(nameCount) if (needsFlatClasses) { if (flatname eq null) flatname = tpnme.flattenedName(rawowner.name, rawname) @@ -3761,7 +3753,4 @@ trait SymbolsStats { val symbolsCount = newView("#symbols")(symbolTable.getCurrentSymbolIdCount) val typeSymbolCount = newCounter("#type symbols") val classSymbolCount = newCounter("#class symbols") - val flagsCount = newCounter("#flags ops") - val ownerCount = newCounter("#owner ops") - val nameCount = newCounter("#name ops") } From d53b0ca9f5bb1e04ba8df999f9dcac10f75c7a6b Mon Sep 17 00:00:00 2001 From: Philippus Baalman Date: Wed, 31 Jan 2018 08:57:00 +0100 Subject: [PATCH 1224/2793] Toggle comment if no text is selected fixes scala/scala-lang/issues/553 --- src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js index 64177a772350..a9cc19a6eae1 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.js @@ -259,7 +259,8 @@ $(document).ready(function() { }; $("#template li[fullComment=yes]").click(function() { - commentToggleFct($(this)); + var sel = window.getSelection().toString(); + if (!sel) commentToggleFct($(this)); }); /* Linear super types and known subclasses */ From 7e954d607a8072fbd9ef85b42e4759bcdb4e719d Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Thu, 1 Feb 2018 15:22:27 -0500 Subject: [PATCH 1225/2793] Warn also on mirror class clobbering. ... and mention the source file if it's different. This confused someone on gitter today, and I noticed that the clobbering check misses out on `class Foo; object foo` (whereby `foo`'s mirror will overwrite `Foo`, on a Mac). 
Slap a call to the check in `genMirrorClass`, and folks are duly warned. Also drop `neg/case-collision2`, which existed just to test the same test on both JVM backends. --- .../tools/nsc/backend/jvm/PostProcessor.scala | 25 ++++++++++++------- test/files/jvm/typerep.scala | 4 +-- test/files/neg/case-collision-multifile.check | 7 ++++++ test/files/neg/case-collision-multifile.flags | 1 + .../neg/case-collision-multifile/one.scala | 1 + .../neg/case-collision-multifile/two.scala | 1 + test/files/neg/case-collision.check | 23 ++++++++++++++--- test/files/neg/case-collision.scala | 3 +++ test/files/neg/case-collision2.check | 12 --------- test/files/neg/case-collision2.flags | 1 - test/files/neg/case-collision2.scala | 12 --------- 11 files changed, 50 insertions(+), 40 deletions(-) create mode 100644 test/files/neg/case-collision-multifile.check create mode 100644 test/files/neg/case-collision-multifile.flags create mode 100644 test/files/neg/case-collision-multifile/one.scala create mode 100644 test/files/neg/case-collision-multifile/two.scala delete mode 100644 test/files/neg/case-collision2.check delete mode 100644 test/files/neg/case-collision2.flags delete mode 100644 test/files/neg/case-collision2.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index 86eeecdbe736..c4f8233de09c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -3,7 +3,7 @@ package backend.jvm import java.util.concurrent.ConcurrentHashMap -import scala.reflect.internal.util.{NoPosition, Position, SourceFile, Statistics} +import scala.reflect.internal.util.{NoPosition, Position, StringContextStripMarginOps} import scala.reflect.io.AbstractFile import scala.tools.asm.ClassWriter import scala.tools.asm.tree.ClassNode @@ -33,7 +33,8 @@ abstract class PostProcessor extends PerRunInit { var classfileWriter: classfileWriters.ClassfileWriter = _ - private val caseInsensitively = recordPerRunJavaMapCache(new ConcurrentHashMap[String, String]) + // from lowercase to first-seen name and position thereof + private val caseInsensitively = recordPerRunJavaMapCache(new ConcurrentHashMap[String, (String, Position)]) def initialize(global: Global): Unit = { this.initialize() @@ -48,13 +49,13 @@ abstract class PostProcessor extends PerRunInit { val internalName = classNode.name val bytes = try { if (!clazz.isArtifact) { - warnCaseInsensitiveOverwrite(clazz) localOptimizations(classNode) backendUtils.onIndyLambdaImplMethodIfPresent(internalName) { methods => if (methods.nonEmpty) backendUtils.addLambdaDeserialize(classNode, methods) } } + warnCaseInsensitiveOverwrite(clazz) setInnerClasses(classNode) serializeClass(classNode) } catch { @@ -75,18 +76,24 @@ abstract class PostProcessor extends PerRunInit { classfileWriter.write(internalName, bytes, paths) } } + private def warnCaseInsensitiveOverwrite(clazz: GeneratedClass): Unit = { val name = clazz.classNode.name val lowercaseJavaClassName = name.toLowerCase - val sourceClassName = clazz.sourceClassName - val duplicate = caseInsensitively.putIfAbsent(lowercaseJavaClassName, sourceClassName) - if (duplicate != null) { + val overwrites = caseInsensitively.putIfAbsent(lowercaseJavaClassName, (name, clazz.position)) + if (overwrites ne null) { + val (dupName, dupPos) = overwrites + val locationAddendum = + if (dupPos.source.path != clazz.position.source.path) + s" (defined in ${dupPos.source.file.name})" + 
else "" + def nicify(name: String): String = name.replace('/', '.') backendReporting.warning( clazz.position, - s"Class ${sourceClassName} differs only in case from ${duplicate}. " + - "Such classes will overwrite one another on case-insensitive filesystems." - ) + sm"""Generated class ${nicify(name)} differs only in case from ${nicify(dupName)}$locationAddendum. + | Such classes will overwrite one another on case-insensitive filesystems.""" + ) } } diff --git a/test/files/jvm/typerep.scala b/test/files/jvm/typerep.scala index 4f900d98d763..b6862bb116d2 100644 --- a/test/files/jvm/typerep.scala +++ b/test/files/jvm/typerep.scala @@ -117,11 +117,11 @@ class Foo { } -object foo extends Foo +object Foo extends Foo package pkg1 { class C1 - object c1 extends C1 + object C1 extends C1 } object testClasses { diff --git a/test/files/neg/case-collision-multifile.check b/test/files/neg/case-collision-multifile.check new file mode 100644 index 000000000000..f8970cd754f0 --- /dev/null +++ b/test/files/neg/case-collision-multifile.check @@ -0,0 +1,7 @@ +two.scala:1: warning: Generated class hotDog differs only in case from HotDog (defined in one.scala). + Such classes will overwrite one another on case-insensitive filesystems. +class hotDog + ^ +error: No warnings can be incurred under -Xfatal-warnings. +one warning found +one error found diff --git a/test/files/neg/case-collision-multifile.flags b/test/files/neg/case-collision-multifile.flags new file mode 100644 index 000000000000..e8fb65d50c20 --- /dev/null +++ b/test/files/neg/case-collision-multifile.flags @@ -0,0 +1 @@ +-Xfatal-warnings \ No newline at end of file diff --git a/test/files/neg/case-collision-multifile/one.scala b/test/files/neg/case-collision-multifile/one.scala new file mode 100644 index 000000000000..7c9cb4fec892 --- /dev/null +++ b/test/files/neg/case-collision-multifile/one.scala @@ -0,0 +1 @@ +class HotDog \ No newline at end of file diff --git a/test/files/neg/case-collision-multifile/two.scala b/test/files/neg/case-collision-multifile/two.scala new file mode 100644 index 000000000000..61616181f0c8 --- /dev/null +++ b/test/files/neg/case-collision-multifile/two.scala @@ -0,0 +1 @@ +class hotDog \ No newline at end of file diff --git a/test/files/neg/case-collision.check b/test/files/neg/case-collision.check index 7360833a7da3..e5ce041d073d 100644 --- a/test/files/neg/case-collision.check +++ b/test/files/neg/case-collision.check @@ -1,12 +1,27 @@ -case-collision.scala:5: warning: Class foo.BIPPY differs only in case from foo.Bippy. Such classes will overwrite one another on case-insensitive filesystems. +case-collision.scala:5: warning: Generated class foo.BIPPY differs only in case from foo.Bippy. + Such classes will overwrite one another on case-insensitive filesystems. class BIPPY ^ -case-collision.scala:8: warning: Class foo.DINGO$ differs only in case from foo.Dingo$. Such classes will overwrite one another on case-insensitive filesystems. +case-collision.scala:8: warning: Generated class foo.DINGO$ differs only in case from foo.Dingo$. + Such classes will overwrite one another on case-insensitive filesystems. object DINGO ^ -case-collision.scala:11: warning: Class foo.HyRaX$ differs only in case from foo.Hyrax$. Such classes will overwrite one another on case-insensitive filesystems. +case-collision.scala:8: warning: Generated class foo.DINGO differs only in case from foo.Dingo. + Such classes will overwrite one another on case-insensitive filesystems. 
+object DINGO + ^ +case-collision.scala:11: warning: Generated class foo.HyRaX$ differs only in case from foo.Hyrax$. + Such classes will overwrite one another on case-insensitive filesystems. object HyRaX ^ +case-collision.scala:11: warning: Generated class foo.HyRaX differs only in case from foo.Hyrax. + Such classes will overwrite one another on case-insensitive filesystems. +object HyRaX + ^ +case-collision.scala:14: warning: Generated class foo.wackO differs only in case from foo.Wacko. + Such classes will overwrite one another on case-insensitive filesystems. +object wackO + ^ error: No warnings can be incurred under -Xfatal-warnings. -three warnings found +6 warnings found one error found diff --git a/test/files/neg/case-collision.scala b/test/files/neg/case-collision.scala index 241169a77aea..bbfe469bf372 100644 --- a/test/files/neg/case-collision.scala +++ b/test/files/neg/case-collision.scala @@ -9,3 +9,6 @@ object DINGO case class Hyrax() object HyRaX + +class Wacko +object wackO \ No newline at end of file diff --git a/test/files/neg/case-collision2.check b/test/files/neg/case-collision2.check deleted file mode 100644 index b8481f46bb16..000000000000 --- a/test/files/neg/case-collision2.check +++ /dev/null @@ -1,12 +0,0 @@ -case-collision2.scala:5: warning: Class foo.BIPPY differs only in case from foo.Bippy. Such classes will overwrite one another on case-insensitive filesystems. -class BIPPY - ^ -case-collision2.scala:8: warning: Class foo.DINGO$ differs only in case from foo.Dingo$. Such classes will overwrite one another on case-insensitive filesystems. -object DINGO - ^ -case-collision2.scala:11: warning: Class foo.HyRaX$ differs only in case from foo.Hyrax$. Such classes will overwrite one another on case-insensitive filesystems. -object HyRaX - ^ -error: No warnings can be incurred under -Xfatal-warnings. -three warnings found -one error found diff --git a/test/files/neg/case-collision2.flags b/test/files/neg/case-collision2.flags deleted file mode 100644 index 85d8eb2ba295..000000000000 --- a/test/files/neg/case-collision2.flags +++ /dev/null @@ -1 +0,0 @@ --Xfatal-warnings diff --git a/test/files/neg/case-collision2.scala b/test/files/neg/case-collision2.scala deleted file mode 100644 index 924e33005a3f..000000000000 --- a/test/files/neg/case-collision2.scala +++ /dev/null @@ -1,12 +0,0 @@ -package foo - -class Bippy - -class BIPPY - -object Dingo -object DINGO - -case class Hyrax() -object HyRaX - From 29fabf0af7e28b5858f54ebd8aaf0691b1c12502 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=93lafur=20P=C3=A1ll=20Geirsson?= Date: Fri, 8 Dec 2017 09:27:29 +0100 Subject: [PATCH 1226/2793] Propagate fatal errors during macro expansion. Fixes #10552. Previously, fatal errors got swallowed during macro expansion. In the case of implicit blackbox macros, a fatal error (for example OutOfMemoryException) got reported with the message "exception during macro expansion". For implicit whitebox macros the fatal error is not even reported unless -Xlog-implicits is enabled. By default, the user only sees a cryptic "implicit not found" error message. See #10649. This commit changes the error handling of exceptions during macro expansion to propagate fatal errors. Now fatal errors are left uncaught and crash compilation with a full stack trace instead of getting swallowed. 
``` error: java.lang.OutOfMemoryError at Macros$BlackBox$.materializeImpl(so.scala:8) ``` This change caused the sip-19-macro-revised partest to fail since it previously relied on triggering a stack overflow to fail expansion if another implicit SourceContext was in scope. Instead, the test now guards against infinite recursion itself. --- .../scala/tools/nsc/typechecker/Macros.scala | 4 +++- .../run/macro-sip19-revised/Impls_Macros_1.scala | 6 +++++- test/files/run/t10552/Macros_1.scala | 7 +++++++ test/files/run/t10552/Test_2.scala | 14 ++++++++++++++ 4 files changed, 29 insertions(+), 2 deletions(-) create mode 100644 test/files/run/t10552/Macros_1.scala create mode 100644 test/files/run/t10552/Test_2.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 637864c92c85..e72f0f0f6ed8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -13,6 +13,7 @@ import scala.reflect.internal.util.ListOfNil import scala.reflect.macros.runtime.{AbortMacroException, MacroRuntimes} import scala.reflect.macros.compiler.DefaultMacroCompiler import scala.tools.reflect.FastTrack +import scala.util.control.NonFatal import Fingerprint._ /** @@ -815,7 +816,8 @@ trait Macros extends MacroRuntimes with Traces with Helpers { case ex: AbortMacroException => MacroGeneratedAbort(expandee, ex) case ex: ControlThrowable => throw ex case ex: TypeError => MacroGeneratedTypeError(expandee, ex) - case _ => MacroGeneratedException(expandee, realex) + case NonFatal(_) => MacroGeneratedException(expandee, realex) + case fatal => throw fatal } } finally { expandee.removeAttachment[MacroRuntimeAttachment] diff --git a/test/files/run/macro-sip19-revised/Impls_Macros_1.scala b/test/files/run/macro-sip19-revised/Impls_Macros_1.scala index ded4d85cfc63..0d8af43f3a7e 100644 --- a/test/files/run/macro-sip19-revised/Impls_Macros_1.scala +++ b/test/files/run/macro-sip19-revised/Impls_Macros_1.scala @@ -4,6 +4,10 @@ object Macros { def impl(c: Context) = { import c.universe._ + val thisMacro = c.macroApplication.symbol + val depth = c.enclosingMacros.count(_.macroApplication.symbol == thisMacro) + if (depth > 1) c.abort(c.enclosingPosition, "") // avoid StackOverflow + val inscope = c.inferImplicitValue(c.mirror.staticClass("SourceLocation").toType) val outer = c.Expr[SourceLocation](if (!inscope.isEmpty) inscope else Literal(Constant(null))) @@ -32,4 +36,4 @@ trait SourceLocation { val charOffset: Int } -case class SourceLocation1(val outer: SourceLocation, val fileName: String, val line: Int, val charOffset: Int) extends SourceLocation \ No newline at end of file +case class SourceLocation1(val outer: SourceLocation, val fileName: String, val line: Int, val charOffset: Int) extends SourceLocation diff --git a/test/files/run/t10552/Macros_1.scala b/test/files/run/t10552/Macros_1.scala new file mode 100644 index 000000000000..0e9b0ad1dec6 --- /dev/null +++ b/test/files/run/t10552/Macros_1.scala @@ -0,0 +1,7 @@ +import scala.language.experimental.macros +import scala.reflect.macros.whitebox +object A { + def f: Unit = macro f_impl + implicit def f_impl(c: whitebox.Context): c.Expr[Unit] = + throw new OutOfMemoryError("OOM") with scala.util.control.NoStackTrace +} diff --git a/test/files/run/t10552/Test_2.scala b/test/files/run/t10552/Test_2.scala new file mode 100644 index 000000000000..ddd8ab01efd8 --- /dev/null +++ b/test/files/run/t10552/Test_2.scala @@ -0,0 +1,14 @@ +import 
scala.tools.partest._ + +object Test extends DirectTest { + override def extraSettings: String = "-usejavacp -Ystop-after:typer" + + def code = "class C { A.f }" + + def show(): Unit = try { + compile() + throw new Error("Expected OutOfMemoryError") + } catch { + case e: OutOfMemoryError if e.getMessage == "OOM" => + } +} From 1bfd374bc32ef2ed4560668c519db01ccaef94e7 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 5 Dec 2017 12:07:30 -0800 Subject: [PATCH 1227/2793] Narrow scope of sensibility check for equals I have my doubts whether it's correct, but at least it's less ambitious in its erring. --- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 7 ++++++- test/files/pos/t10644.flags | 1 + test/files/pos/t10644/Objs_1.scala | 8 ++++++++ test/files/pos/t10644/Test_2.scala | 6 ++++++ 4 files changed, 21 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t10644.flags create mode 100644 test/files/pos/t10644/Objs_1.scala create mode 100644 test/files/pos/t10644/Test_2.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 911432d07ec6..08f6f47bf666 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1084,7 +1084,12 @@ abstract class RefChecks extends Transform { nonSensiblyNew() else if (isNew(other) && (receiver.isEffectivelyFinal || isReferenceOp)) // object X ; X == new Y nonSensiblyNew() - else if (receiver.isEffectivelyFinal && !(receiver isSubClass actual) && !actual.isRefinementClass) { // object X, Y; X == Y + else if (!(receiver.isRefinementClass || actual.isRefinementClass) && + // Rule out receiver of refinement class because checking receiver.isEffectivelyFinal does not work for them. + // (the owner of the refinement depends on where the refinement was inferred, which has no bearing on the finality of the intersected classes) + // TODO: should we try to decide finality for refinements? + // TODO: Also, is subclassing really the right relationship to detect non-sensible equals between "effectively final" types?? + receiver.isEffectivelyFinal && !(receiver isSubClass actual)) { // object X, Y; X == Y if (isEitherNullable) nonSensible("non-null ", false) else diff --git a/test/files/pos/t10644.flags b/test/files/pos/t10644.flags new file mode 100644 index 000000000000..e8fb65d50c20 --- /dev/null +++ b/test/files/pos/t10644.flags @@ -0,0 +1 @@ +-Xfatal-warnings \ No newline at end of file diff --git a/test/files/pos/t10644/Objs_1.scala b/test/files/pos/t10644/Objs_1.scala new file mode 100644 index 000000000000..18c3bdb375b7 --- /dev/null +++ b/test/files/pos/t10644/Objs_1.scala @@ -0,0 +1,8 @@ +case object A ; case object B +object C { +// inferred refinement type `Product with Serializable` of val `objs` has owner `C` +// (and thus the receiver of the equality check was seen as effectivelyFinal, +// which then boosted our confidence in being able to say something about how +// final types compare for equality...) 
+ val objs = Seq(A, B) +} diff --git a/test/files/pos/t10644/Test_2.scala b/test/files/pos/t10644/Test_2.scala new file mode 100644 index 000000000000..185cb83b66b2 --- /dev/null +++ b/test/files/pos/t10644/Test_2.scala @@ -0,0 +1,6 @@ +object Test { + // Should not result in the spurious warning: + // comparing non-null values of types Product with Serializable + // and A.type using `==' will always yield false + assert(C.objs.head == A) +} \ No newline at end of file From 14caff65eefce78105af8fc3e87ba855c77026a6 Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Wed, 8 Nov 2017 11:01:37 +0100 Subject: [PATCH 1228/2793] Fixes #10587 by removing adaptations to accommodate for t - case r => new ExecutionContextImpl.AdaptedForkJoinTask(r) - } - Thread.currentThread match { - case fjw: ForkJoinWorkerThread if fjw.getPool eq this => fjt.fork() - case _ => super.execute(fjt) - } - } - } - } - - final class AdaptedForkJoinTask(runnable: Runnable) extends ForkJoinTask[Unit] { - final override def setRawResult(u: Unit): Unit = () - final override def getRawResult(): Unit = () - final override def exec(): Boolean = try { runnable.run(); true } catch { - case anything: Throwable => - val t = Thread.currentThread - t.getUncaughtExceptionHandler match { - case null => - case some => some.uncaughtException(t, anything) - } - throw anything - } + new ForkJoinPool(desiredParallelism, threadFactory, uncaughtExceptionHandler, true) } def fromExecutor(e: Executor, reporter: Throwable => Unit = ExecutionContext.defaultReporter): ExecutionContextImpl = diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala index 7197c1d85394..e18273972ac9 100644 --- a/test/files/jvm/scala-concurrent-tck.scala +++ b/test/files/jvm/scala-concurrent-tck.scala @@ -2,17 +2,17 @@ import scala.concurrent.{ Future, Promise, TimeoutException, - SyncVar, ExecutionException, ExecutionContext, CanAwait, - Await + Await, + blocking } -import scala.concurrent.blocking import scala.util.{ Try, Success, Failure } import scala.concurrent.duration.Duration import scala.reflect.{ classTag, ClassTag } import scala.tools.partest.TestUtil.intercept +import scala.annotation.tailrec trait TestBase { trait Done { def apply(proof: => Boolean): Unit } @@ -22,7 +22,7 @@ trait TestBase { body(new Done { def apply(proof: => Boolean): Unit = q offer Try(proof) }) - assert(q.poll(2000, TimeUnit.MILLISECONDS).get) + assert(Option(q.poll(2000, TimeUnit.MILLISECONDS)).map(_.get).getOrElse(false)) // Check that we don't get more than one completion assert(q.poll(50, TimeUnit.MILLISECONDS) eq null) } @@ -737,6 +737,8 @@ trait Exceptions extends TestBase { } trait GlobalExecutionContext extends TestBase { + import ExecutionContext.Implicits._ + def testNameOfGlobalECThreads(): Unit = once { done => Future({ val expectedName = "scala-execution-context-global-"+ Thread.currentThread.getId @@ -860,6 +862,39 @@ trait CustomExecutionContext extends TestBase { assert(count >= 1) } + def testUncaughtExceptionReporting(): Unit = once { + done => + import java.util.concurrent.TimeUnit.SECONDS + val example = new InterruptedException() + val latch = new java.util.concurrent.CountDownLatch(1) + @volatile var thread: Thread = null + @volatile var reported: Throwable = null + val ec = ExecutionContext.fromExecutorService(null, t => { + reported = t + latch.countDown() + }) + + @tailrec def waitForThreadDeath(turns: Int): Boolean = + if (turns <= 0) false + else if ((thread ne null) && thread.isAlive == false) true + else { + 
Thread.sleep(10) + waitForThreadDeath(turns - 1) + } + + try { + ec.execute(() => { + thread = Thread.currentThread + throw example + }) + latch.await(2, SECONDS) + done(waitForThreadDeath(turns = 100) && (reported eq example)) + } finally { + ec.shutdown() + } + } + + testUncaughtExceptionReporting() testOnSuccessCustomEC() testKeptPromiseCustomEC() testCallbackChainCustomEC() From 1df3796485b4c72affa6eb1c185ec94ed1603798 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Mon, 4 Dec 2017 19:01:36 -0500 Subject: [PATCH 1229/2793] Compare positions in tryTypedApply by focus. Synthetic trees usually get offset positions, even with range positions enabled. The comparison previously used by `errorInResult` did not consider the error issued by `AdaptTypeError` on an `ApplyToImplicitArgs` to be part of the result expression, meaning that an implicit view wouldn't be sought on a second try. Fixes scala/bug#10643. --- .../scala/tools/nsc/typechecker/Typers.scala | 8 ++++++- .../reflect/internal/util/Position.scala | 6 +++++ test/files/pos/t10643.flags | 1 + test/files/pos/t10643.scala | 23 +++++++++++++++++++ 4 files changed, 37 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t10643.flags create mode 100644 test/files/pos/t10643.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index fbdb2e122ed5..c7f70f722697 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4637,7 +4637,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case TypeApply(fun, args) => treesInResult(fun) ++ args.flatMap(treesInResult) case _ => Nil }) - def errorInResult(tree: Tree) = treesInResult(tree) exists (err => typeErrors.exists(_.errPos == err.pos)) + /* Only retry if the error hails from a result expression of `tree` + * (for instance, it makes no sense to retry on an error from a block statement) + * compare with `samePointAs` since many synthetic trees are made with + * offset positions even under -Yrangepos. + */ + def errorInResult(tree: Tree) = + treesInResult(tree).exists(err => typeErrors.exists(_.errPos samePointAs err.pos)) val retry = (typeErrors.forall(_.errPos != null)) && (fun :: tree :: args exists errorInResult) typingStack.printTyping({ diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala index 0db91144c9ef..05577cba9b38 100644 --- a/src/reflect/scala/reflect/internal/util/Position.scala +++ b/src/reflect/scala/reflect/internal/util/Position.scala @@ -94,6 +94,8 @@ sealed abstract class UndefinedPosition extends Position { override def start = fail("start") override def point = fail("point") override def end = fail("end") + + override def samePointAs(that: Position) = false } private[util] trait InternalPositionImpl { @@ -200,6 +202,10 @@ private[util] trait InternalPositionImpl { else "[NoPosition]" ) + /* Same as `this.focus == that.focus`, but less allocation-y. 
*/ + def samePointAs(that: Position): Boolean = + that.isDefined && this.point == that.point && this.source.file == that.source.file + private def asOffset(point: Int): Position = Position.offset(source, point) private def copyRange(source: SourceFile = source, start: Int = start, point: Int = point, end: Int = end): Position = Position.range(source, start, point, end) diff --git a/test/files/pos/t10643.flags b/test/files/pos/t10643.flags new file mode 100644 index 000000000000..fcf951d90723 --- /dev/null +++ b/test/files/pos/t10643.flags @@ -0,0 +1 @@ +-Yrangepos \ No newline at end of file diff --git a/test/files/pos/t10643.scala b/test/files/pos/t10643.scala new file mode 100644 index 000000000000..697039dae4c4 --- /dev/null +++ b/test/files/pos/t10643.scala @@ -0,0 +1,23 @@ +trait AA +trait BB +trait Foo { + def consume(a: AA): Unit +} + +object FooOpss { + implicit class FooOps(val self: Foo) { + def consume(a: BB): Unit = ??? + } +} +import FooOpss._ + +class Test { + val theFoo: Foo = ??? + def doIt(id: Long): Unit = + theFoo.consume(BBFactory.create(id)) +} + +object BBFactory { + def create(id: Long)(implicit i: DummyImplicit): BB = ??? +} + From 72642f6274a74664bbaf51d8bd755c1902ec19b5 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 18 Aug 2017 15:54:01 +1000 Subject: [PATCH 1230/2793] Intrinsify StringConcat.{s,raw} and improve string concat code gen --- .../nsc/backend/jvm/BCodeBodyBuilder.scala | 18 ++++++- .../nsc/backend/jvm/BCodeIdiomatic.scala | 5 +- .../tools/nsc/typechecker/RefChecks.scala | 54 ++++++++++++++++++- src/library/scala/StringContext.scala | 4 ++ .../scala/reflect/internal/Definitions.scala | 4 ++ .../scala/reflect/internal/StdNames.scala | 2 + .../scala/reflect/internal/TreeInfo.scala | 5 ++ .../reflect/runtime/JavaUniverseForce.scala | 1 + .../files/neg/string-context-refchecked.check | 5 ++ .../files/neg/string-context-refchecked.scala | 4 ++ .../nsc/backend/jvm/StringConcatTest.scala | 43 +++++++++++++-- 11 files changed, 136 insertions(+), 9 deletions(-) create mode 100644 test/files/neg/string-context-refchecked.check create mode 100644 test/files/neg/string-context-refchecked.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index de820b7a01f1..b3d97e9afe94 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -1000,8 +1000,22 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { genCallMethod(String_valueOf, InvokeStyle.Static, arg.pos) case concatenations => - bc.genStartConcat(tree.pos) - for (elem <- concatenations) { + val approxBuilderSize = concatenations.map { + case Literal(Constant(s: String)) => s.length + case Literal(c @ Constant(value)) if c.isNonUnitAnyVal => String.valueOf(c).length + case _ => + // could add some guess based on types of primitive args. + // or, we could stringify all the args onto the stack, compute the exact size of + // the stringbuffer. 
+ // or, just let http://openjdk.java.net/jeps/280 (or a re-implementation thereof in our 2.13.x stdlib) do all the hard work at link time + 0 + }.sum + bc.genStartConcat(tree.pos, approxBuilderSize) + def isEmptyString(t: Tree) = t match { + case Literal(Constant("")) => true + case _ => false + } + for (elem <- concatenations if !isEmptyString(elem)) { val loadedElem = elem match { case Apply(boxOp, value :: Nil) if currentRun.runDefinitions.isBox(boxOp.symbol) => // Eliminate boxing of primitive values. Boxing is introduced by erasure because diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala index 33b03f4e4a45..7385011eac05 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala @@ -172,13 +172,14 @@ abstract class BCodeIdiomatic { /* * can-multi-thread */ - final def genStartConcat(pos: Position): Unit = { + final def genStartConcat(pos: Position, size: Int): Unit = { jmethod.visitTypeInsn(Opcodes.NEW, JavaStringBuilderClassName) jmethod.visitInsn(Opcodes.DUP) + jmethod.visitLdcInsn(Integer.valueOf(size)) invokespecial( JavaStringBuilderClassName, INSTANCE_CONSTRUCTOR_NAME, - "()V", + "(I)V", itf = false, pos ) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 911432d07ec6..53b099dffeca 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1499,7 +1499,26 @@ abstract class RefChecks extends Transform { isIrrefutable(pat1, tpt.tpe) && (qual.tpe <:< tree.tpe)) => transform(qual) - + case StringContextIntrinsic(treated, args) => + var result: Tree = treated.head + def concat(t: Tree): Unit = { + result = atPos(t.pos)(gen.mkMethodCall(gen.mkAttributedSelect(result, definitions.String_+), t :: Nil)).setType(StringTpe) + } + val numLits = treated.length + foreachWithIndex(treated.tail) { (lit, i) => + val treatedContents = lit.asInstanceOf[Literal].value.stringValue + val emptyLit = treatedContents.isEmpty + if (i < numLits - 1) { + concat(args(i)) + if (!emptyLit) concat(lit) + } else if (!emptyLit) { + concat(lit) + } + } + result match { + case ap: Apply => transformApply(ap) + case _ => result + } case Apply(fn, args) => // sensicality should be subsumed by the unreachability/exhaustivity/irrefutability // analyses in the pattern matcher @@ -1510,6 +1529,39 @@ abstract class RefChecks extends Transform { currentApplication = tree tree } + + private object StringContextIntrinsic { + def unapply(t: Apply): Option[(List[Tree], List[Tree])] = { + val sym = t.fun.symbol + // symbol check done first for performance + val rd = currentRun.runDefinitions + if (sym == rd.StringContext_s || sym == rd.StringContext_raw) { + t match { + case Apply(fn @ Select(Apply(qual1 @ Select(qual, _), lits), _), args) + if qual1.symbol == rd.StringContext_apply && + treeInfo.isQualifierSafeToElide(qual) && + lits.forall(lit => treeInfo.isLiteralString(lit)) && + lits.length == (args.length + 1) => + val isRaw = sym == rd.StringContext_raw + if (isRaw) Some((lits, args)) + else { + try { + val treated = lits.mapConserve { lit => + val stringVal = lit.asInstanceOf[Literal].value.stringValue + treeCopy.Literal(lit, Constant(StringContext.processEscapes(stringVal))) + } + Some((treated, args)) + } catch { + case _: StringContext.InvalidEscapeException => + None + } + } + case _ 
=> None + + } + } else None + } + } private def transformSelect(tree: Select): Tree = { val Select(qual, _) = tree val sym = tree.symbol diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala index 69533c12da34..b5e946c75acc 100644 --- a/src/library/scala/StringContext.scala +++ b/src/library/scala/StringContext.scala @@ -91,6 +91,8 @@ case class StringContext(parts: String*) { * @throws StringContext.InvalidEscapeException * if a `parts` string contains a backslash (`\`) character * that does not start a valid escape sequence. + * @note The Scala compiler may replace a call to this method with an equivalent, but more efficient, + * use of a StringBuilder. */ def s(args: Any*): String = standardInterpolator(treatEscapes, args) @@ -113,6 +115,8 @@ case class StringContext(parts: String*) { * @throws IllegalArgumentException * if the number of `parts` in the enclosing `StringContext` does not exceed * the number of arguments `arg` by exactly 1. + * @note The Scala compiler may replace a call to this method with an equivalent, but more efficient, + * use of a StringBuilder. */ def raw(args: Any*): String = standardInterpolator(identity, args) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index eeff6776b85c..20c330a56f2f 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -527,6 +527,7 @@ trait Definitions extends api.StandardDefinitions { lazy val MacroImplAnnotation = requiredClass[scala.reflect.macros.internal.macroImpl] lazy val StringContextClass = requiredClass[scala.StringContext] + lazy val StringContextModule = requiredModule[scala.StringContext.type] // scala/bug#8392 a reflection universe on classpath may not have // quasiquotes, if e.g. 
crosstyping with -Xsource on @@ -1451,6 +1452,9 @@ trait Definitions extends api.StandardDefinitions { def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+ lazy val StringContext_f = getMemberMethod(StringContextClass, nme.f) + lazy val StringContext_s = getMemberMethod(StringContextClass, nme.s) + lazy val StringContext_raw = getMemberMethod(StringContextClass, nme.raw_) + lazy val StringContext_apply = getMemberMethod(StringContextModule, nme.apply) lazy val ArrowAssocClass = getMemberClass(PredefModule, TypeName("ArrowAssoc")) // scala/bug#5731 def isArrowAssoc(sym: Symbol) = sym.owner == ArrowAssocClass diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index ea04230df3ec..a4bad5789371 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -752,6 +752,7 @@ trait StdNames { val productElement: NameType = "productElement" val productIterator: NameType = "productIterator" val productPrefix: NameType = "productPrefix" + val raw_ : NameType = "raw" val readResolve: NameType = "readResolve" val reify : NameType = "reify" val reificationSupport : NameType = "reificationSupport" @@ -759,6 +760,7 @@ trait StdNames { val runtime: NameType = "runtime" val runtimeClass: NameType = "runtimeClass" val runtimeMirror: NameType = "runtimeMirror" + val s: NameType = "s" val scala_ : NameType = "scala" val selectDynamic: NameType = "selectDynamic" val selectOverloadedMethod: NameType = "selectOverloadedMethod" diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 79a91020de0e..4e62da7650af 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -437,6 +437,11 @@ abstract class TreeInfo { case _ => false } + def isLiteralString(t: Tree): Boolean = t match { + case Literal(Constant(_: String)) => true + case _ => false + } + /** Does the tree have a structure similar to typechecked trees? 
*/ private[internal] def detectTypecheckedTree(tree: Tree) = tree.hasExistingSymbol || tree.exists { diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index bc5e259678c1..2c05a14604ec 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -321,6 +321,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.WhiteboxContextClass definitions.MacroImplAnnotation definitions.StringContextClass + definitions.StringContextModule definitions.QuasiquoteClass definitions.QuasiquoteClass_api definitions.QuasiquoteClass_api_apply diff --git a/test/files/neg/string-context-refchecked.check b/test/files/neg/string-context-refchecked.check new file mode 100644 index 000000000000..6d0d8f640aa8 --- /dev/null +++ b/test/files/neg/string-context-refchecked.check @@ -0,0 +1,5 @@ +string-context-refchecked.scala:3: error: overriding method foo in class C of type => Int; + method foo cannot override final member + s"foo${class D extends C { def foo = 2 }; new D}bar" + ^ +one error found diff --git a/test/files/neg/string-context-refchecked.scala b/test/files/neg/string-context-refchecked.scala new file mode 100644 index 000000000000..2e2231949887 --- /dev/null +++ b/test/files/neg/string-context-refchecked.scala @@ -0,0 +1,4 @@ +class C { + final def foo = 1 + s"foo${class D extends C { def foo = 2 }; new D}bar" +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala b/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala index af2c8f9ce008..3eef02b99ebb 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/StringConcatTest.scala @@ -48,6 +48,36 @@ class StringConcatTest extends BytecodeTesting { | sbuf: java.lang.StringBuffer, | chsq: java.lang.CharSequence, | chrs: Array[Char]) = this + str + v + z + c + b + s + i + f + l + d + sbuf + chsq + chrs + | + | def t3( + | v: Unit, + | z: Boolean, + | c: Char, + | b: Byte, + | s: Short, + | i: Int, + | l: Long, + | f: Float, + | d: Double, + | str: String, + | sbuf: java.lang.StringBuffer, + | chsq: java.lang.CharSequence, + | chrs: Array[Char]) = s"$str$this$v$z$c$b$s$i$f$l$d$sbuf$chsq$chrs" + | def t4( + | v: Unit, + | z: Boolean, + | c: Char, + | b: Byte, + | s: Short, + | i: Int, + | l: Long, + | f: Float, + | d: Double, + | str: String, + | sbuf: java.lang.StringBuffer, + | chsq: java.lang.CharSequence, + | chrs: Array[Char]) = raw"$str$this$v$z$c$b$s$i$f$l$d$sbuf$chsq$chrs" + | |} """.stripMargin val c = compileClass(code) @@ -55,8 +85,8 @@ class StringConcatTest extends BytecodeTesting { def invokeNameDesc(m: String): List[String] = getInstructions(c, m) collect { case Invoke(_, _, name, desc, _) => name + desc } - assertEquals(invokeNameDesc("t1"), List( - "()V", + val t1Expected = List( + "(I)V", "append(Ljava/lang/String;)Ljava/lang/StringBuilder;", "append(Ljava/lang/Object;)Ljava/lang/StringBuilder;", "append(Ljava/lang/Object;)Ljava/lang/StringBuilder;", @@ -71,10 +101,11 @@ class StringConcatTest extends BytecodeTesting { "append(Ljava/lang/StringBuffer;)Ljava/lang/StringBuilder;", "append(Ljava/lang/CharSequence;)Ljava/lang/StringBuilder;", "append(Ljava/lang/Object;)Ljava/lang/StringBuilder;", // test that we're not using the [C overload - "toString()Ljava/lang/String;")) + "toString()Ljava/lang/String;") + assertEquals(invokeNameDesc("t1"), t1Expected) 
assertEquals(invokeNameDesc("t2"), List( - "()V", + "(I)V", "any2stringadd(Ljava/lang/Object;)Ljava/lang/Object;", "$plus$extension(Ljava/lang/Object;Ljava/lang/String;)Ljava/lang/String;", "append(Ljava/lang/String;)Ljava/lang/StringBuilder;", @@ -91,6 +122,10 @@ class StringConcatTest extends BytecodeTesting { "append(Ljava/lang/CharSequence;)Ljava/lang/StringBuilder;", "append(Ljava/lang/Object;)Ljava/lang/StringBuilder;", // test that we're not using the [C overload "toString()Ljava/lang/String;")) + + // intrinsics for StringContext.{raw,s} + assertEquals(invokeNameDesc("t3"), t1Expected) + assertEquals(invokeNameDesc("t4"), t1Expected) } @Test From 32ad4a233fc927817c9bbd265cb61051f4345ce9 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Thu, 22 Feb 2018 12:18:17 -0500 Subject: [PATCH 1231/2793] Also augment assertion errors raised in reflect. And improve the augmentation. `Global` shadows `Predef`'s assertion methods to give them information about what the compiler was doing when the assertion failed. However, inside the reflect cake layer (`SymbolTable`) these methods weren't visible, so calls to `assert` in reflect went through `Predef.assert` and didn't get augmented. Moving the shadowing methods to `SymbolTable` fixes that. Also improved some things about the augmentation: - `supplementErrorMessage` is wrapped in a `try`/`catch` block to avoid erroring while reporting an error. However, there were a few places where exceptions were easy to hit: - `sym` can be `null`; don't emit symbol details in that case - `lastSeenContext` is `null` in pre-namer phases; make it `NoContext` instead. - `context_s` is wrapped in its own `try`/`catch` because the file I/O may fail. However, the tree's position's file is a `SourceFile`, so we can just look at its `content` to find the context. Add a `lines` method there to provide the lines context properly. This has the benefit of making sourcefile context work better in the repl. Thanks to retronym for the idea to use `throwAssertionError` to make the best use of inlining the assertion methods. --- src/compiler/scala/tools/nsc/Global.scala | 58 +++++++------------ .../scala/tools/reflect/ToolBoxFactory.scala | 2 +- .../scala/reflect/internal/SymbolTable.scala | 27 +++++++++ .../reflect/internal/util/SourceFile.scala | 41 +++++++++---- test/files/presentation/t7678/Runner.scala | 1 + test/files/run/t5294.scala | 4 +- test/files/run/t8029.scala | 2 +- 7 files changed, 81 insertions(+), 54 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 6e571a7348c0..7aa1e88834b5 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -261,27 +261,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) // ------------------ Debugging ------------------------------------- - // Getting in front of Predef's asserts to supplement with more info. - // This has the happy side effect of masking the one argument forms - // of assert and require (but for now I've reproduced them here, - // because there are a million to fix.) - @inline final def assert(assertion: Boolean, message: => Any) { - // calling Predef.assert would send a freshly allocated closure wrapping the one received as argument. 
- if (!assertion) - throw new java.lang.AssertionError("assertion failed: "+ supplementErrorMessage("" + message)) - } - @inline final def assert(assertion: Boolean) { - assert(assertion, "") - } - @inline final def require(requirement: Boolean, message: => Any) { - // calling Predef.require would send a freshly allocated closure wrapping the one received as argument. - if (!requirement) - throw new IllegalArgumentException("requirement failed: "+ supplementErrorMessage("" + message)) - } - @inline final def require(requirement: Boolean) { - require(requirement, "") - } - @inline final def ifDebug(body: => Unit) { if (settings.debug) body @@ -966,7 +945,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) /** Let's share a lot more about why we crash all over the place. * People will be very grateful. */ - protected var lastSeenContext: analyzer.Context = null + protected var lastSeenContext: analyzer.Context = analyzer.NoContext /** The currently active run */ @@ -1015,46 +994,49 @@ class Global(var currentSettings: Settings, var reporter: Reporter) else sym.ownerChain takeWhile (!_.isPackageClass) mkString " -> " ) - private def formatExplain(pairs: (String, Any)*): String = ( - pairs collect { case (k, v) if v != null => f"$k%20s: $v" } mkString "\n" - ) /** Don't want to introduce new errors trying to report errors, * so swallow exceptions. */ override def supplementTyperState(errorMessage: String): String = try { + def formatExplain(pairs: List[(String, Any)]): String = + pairs collect { case (k, v) if v != null => f"$k%20s: $v" } mkString "\n" + val tree = analyzer.lastTreeToTyper val sym = tree.symbol val tpe = tree.tpe val site = lastSeenContext.enclClassOrMethod.owner val pos_s = if (tree.pos.isDefined) s"line ${tree.pos.line} of ${tree.pos.source.file}" else "" val context_s = try { - import scala.reflect.io.{File => SFile} // Taking 3 before, 3 after the fingered line. 
- val start = 1 max (tree.pos.line - 3) - val xs = SFile(tree.pos.source.file.file).lines.drop(start-1).take(7) - val strs = xs.zipWithIndex map { case (line, idx) => f"${start + idx}%6d $line" } + val start = 0 max (tree.pos.line - 4) + val xs = tree.pos.source.lines(start, start + 7) + val strs = xs.zipWithIndex map { case (line, idx) => f"${start + idx + 1}%6d $line" } strs.mkString("== Source file context for tree position ==\n\n", "\n", "") } catch { case t: Exception => devWarning("" + t) ; "" } - val info1 = formatExplain( + val info1 = formatExplain(List( "while compiling" -> currentSource.path, "during phase" -> ( if (globalPhase eq phase) phase else "globalPhase=%s, enteringPhase=%s".format(globalPhase, phase) ), "library version" -> scala.util.Properties.versionString, - "compiler version" -> Properties.versionString, + "compiler version" -> scala.tools.nsc.Properties.versionString, "reconstructed args" -> settings.recreateArgs.mkString(" ") + )) + // useful things to know if we have a sym + val symbolInfos = if (sym eq null) List("symbol" -> "null") else List( + "symbol" -> sym.debugLocationString, + "symbol definition" -> s"${sym.defString} (a ${sym.shortSymbolClass})", + "symbol package" -> sym.enclosingPackage.fullName, + "symbol owners" -> ownerChainString(sym), ) - val info2 = formatExplain( + val info2 = formatExplain(List( "last tree to typer" -> tree.summaryString, "tree position" -> pos_s, - "tree tpe" -> tpe, - "symbol" -> Option(sym).fold("null")(_.debugLocationString), - "symbol definition" -> Option(sym).fold("null")(s => s.defString + s" (a ${s.shortSymbolClass})"), - "symbol package" -> sym.enclosingPackage.fullName, - "symbol owners" -> ownerChainString(sym), + "tree tpe" -> tpe + ) ::: symbolInfos ::: List( "call site" -> (site.fullLocationString + " in " + site.enclosingPackage) - ) + )) ("\n " + errorMessage + "\n" + info1) :: info2 :: context_s :: Nil mkString "\n\n" } catch { case _: Exception | _: TypeError => errorMessage } diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 999af2cbd86b..61166f4239b8 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -60,7 +60,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => undoLog.clear() analyzer.lastTreeToTyper = EmptyTree lastSeenSourceFile = NoSourceFile - lastSeenContext = null + lastSeenContext = analyzer.NoContext } def verify(expr: Tree): Tree = { diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 76eabcfae52f..40546145ba90 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -131,6 +131,33 @@ abstract class SymbolTable extends macros.Universe result } + // Getting in front of Predef's asserts to supplement with more info; see `supplementErrorMessage`. + // This has the happy side effect of masking the one argument form of assert + // (but for now it's reproduced here, because there are a million uses to fix). + @inline + final def assert(assertion: Boolean, message: => Any): Unit = { + // calling Predef.assert would send a freshly allocated closure wrapping the one received as argument. 
+ if (!assertion) throwAssertionError(message) + } + + // for those of us who use IDEs, this will now at least show up struck-through + @deprecated("prefer to use the two-argument form", since = "2.12.5") + final def assert(assertion: Boolean): Unit = { + assert(assertion, "") + } + + @inline + final def require(requirement: Boolean, message: => Any): Unit = { + // calling Predef.require would send a freshly allocated closure wrapping the one received as argument. + if (!requirement) throwRequirementError(message) + } + + // extracted from `assert`/`require` to make them as small (and inlineable) as possible + private[internal] def throwAssertionError(msg: Any): Nothing = + throw new java.lang.AssertionError(s"assertion failed: ${supplementErrorMessage(String valueOf msg)}") + private[internal] def throwRequirementError(msg: Any): Nothing = + throw new java.lang.IllegalArgumentException(s"requirement failed: ${supplementErrorMessage(String valueOf msg)}") + @inline final def findSymbol(xs: TraversableOnce[Symbol])(p: Symbol => Boolean): Symbol = { xs find p getOrElse NoSymbol } diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala index 64b697229801..18deb7d139ab 100644 --- a/src/reflect/scala/reflect/internal/util/SourceFile.scala +++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala @@ -1,9 +1,8 @@ /* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL + * Copyright 2005-2018 LAMP/EPFL * @author Martin Odersky */ - package scala package reflect.internal.util @@ -22,6 +21,7 @@ abstract class SourceFile { def isEndOfLine(idx: Int): Boolean def isSelfContained: Boolean def length : Int + def lineCount: Int def position(offset: Int): Position = { assert(offset < length, file + ": " + offset + " >= " + length) Position.offset(this, offset) @@ -49,20 +49,28 @@ abstract class SourceFile { if (content(offset).isWhitespace) skipWhitespace(offset + 1) else offset def identifier(pos: Position): Option[String] = None + + /** An iterator over the lines between `start` and `end`. + * + * Bounds are checked and clipped as necessary. + */ + def lines(start: Int = 0, end: Int = lineCount): Iterator[String] } /** An object representing a missing source file. 
*/ object NoSourceFile extends SourceFile { - def content = Array() - def file = NoFile - def isLineBreak(idx: Int) = false - def isEndOfLine(idx: Int) = false - def isSelfContained = true - def length = -1 - def offsetToLine(offset: Int) = -1 - def lineToOffset(index : Int) = -1 - override def toString = "" + def content = Array() + def file = NoFile + def isLineBreak(idx: Int) = false + def isEndOfLine(idx: Int) = false + def isSelfContained = true + def length = -1 + def lineCount = 0 + def offsetToLine(offset: Int) = -1 + def lineToOffset(index : Int) = -1 + def lines(start: Int, end: Int) = Iterator.empty + override def toString = "" } object NoFile extends VirtualFile("", "") @@ -122,7 +130,8 @@ class BatchSourceFile(val file : AbstractFile, content0: Array[Char]) extends So content0 :+ '\n' else content0 ) - val length = content.length + def length = content.length + def lineCount = lineIndices.length - 1 def start = 0 def isSelfContained = true @@ -187,6 +196,14 @@ class BatchSourceFile(val file : AbstractFile, content0: Array[Char]) extends So lastLine } + override def lines(start: Int, end: Int): Iterator[String] = + ((start max 0) until (end min lineCount)).iterator.map { ix => + val off = lineIndices(ix) + val len = 0 max (lineIndices(ix + 1) - off - 1) // drop newline character + String.valueOf(content, off, len) + } + + override def equals(that : Any) = that match { case that : BatchSourceFile => file.path == that.file.path && start == that.start case _ => false diff --git a/test/files/presentation/t7678/Runner.scala b/test/files/presentation/t7678/Runner.scala index c6736a65b020..42001813c00c 100644 --- a/test/files/presentation/t7678/Runner.scala +++ b/test/files/presentation/t7678/Runner.scala @@ -18,6 +18,7 @@ object Test extends InteractiveTest { () => { val runDefinitions = currentRun.runDefinitions import runDefinitions._ + import Predef._ assert(TypeTagsClass.map(sym => getMemberClass(sym, tpnme.TypeTag)) == TypeTagClass) assert(TypeTagsClass.map(sym => getMemberClass(sym, tpnme.WeakTypeTag)) == WeakTypeTagClass) assert(TypeTagsClass.map(sym => getMemberModule(sym, nme.WeakTypeTag)) == WeakTypeTagModule) diff --git a/test/files/run/t5294.scala b/test/files/run/t5294.scala index 2551ae89a635..d3309fe116e5 100644 --- a/test/files/run/t5294.scala +++ b/test/files/run/t5294.scala @@ -16,7 +16,7 @@ object Test { val TClass = reflect.runtime.universe.symbolOf[p.T[_, _]].asInstanceOf[symtab.Symbol] import symtab._ val from = CTpe.member(TermName("test")).paramss.head.head - assert(from.baseClasses contains TClass) - assert(from.info.baseTypeIndex(TClass) != -1) // was failing! + assert(from.baseClasses contains TClass, from.baseClasses) + assert(from.info.baseTypeIndex(TClass) != -1, from.info.baseTypeSeq) // was failing! 
} } diff --git a/test/files/run/t8029.scala b/test/files/run/t8029.scala index dbd5c41387d7..47882bf7f8c1 100644 --- a/test/files/run/t8029.scala +++ b/test/files/run/t8029.scala @@ -43,7 +43,7 @@ package object p4 { val sourceFile = newSources(code).head global.reporter.reset() r.compileSources(sourceFile :: Nil) - assert(!global.reporter.hasErrors) + assert(!global.reporter.hasErrors, global.reporter.errorCount) } def typecheckTwice(code: String): Unit = { From c5fb888d6c0045f892d1a1ca7e40b5c8f8c7c226 Mon Sep 17 00:00:00 2001 From: howtonotwin Date: Sun, 25 Feb 2018 12:50:04 -0500 Subject: [PATCH 1232/2793] Fix typo --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 604f80ecd5d5..ab3b6a23fa52 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4183,7 +4183,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val op = if(args.exists(_.isInstanceOf[AssignOrNamedArg])) nme.applyDynamicNamed else nme.applyDynamic // not supported: foo.bar(a1,..., an: _*) val fn1 = if(treeInfo.isWildcardStarArgList(args)) DynamicVarArgUnsupported(fn, op) else fn - Some((op, fn)) + Some((op, fn1)) case Assign(lhs, _) if matches(lhs) => Some((nme.updateDynamic, lhs)) case _ if matches(t) => Some((nme.selectDynamic, t)) case _ => t.children.flatMap(findSelection).headOption From 0ee887573512ecd6411ac656ca03fc43696fa710 Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Wed, 29 Nov 2017 12:36:48 +0100 Subject: [PATCH 1233/2793] don't evaluate the head of a traversable twice don't call head before looping over the traversable fixes scala/bug#10631 --- src/library/scala/collection/TraversableLike.scala | 9 ++++++--- test/files/run/view-headoption.check | 2 -- .../scala/collection/TraversableLikeTest.scala | 13 +++++++++++++ 3 files changed, 19 insertions(+), 5 deletions(-) diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala index bf6c9401374d..43aa0f7562fe 100644 --- a/src/library/scala/collection/TraversableLike.scala +++ b/src/library/scala/collection/TraversableLike.scala @@ -428,10 +428,13 @@ trait TraversableLike[+A, +Repr] extends Any * @throws NoSuchElementException If the $coll is empty. */ def last: A = { - var lst = head - for (x <- this) + var lst: A = null.asInstanceOf[A] + var hasElements = false + for (x <- this){ + hasElements = true lst = x - lst + } + if (hasElements) lst else throw new NoSuchElementException("last of empty traversable") } /** Optionally selects the last element. 
diff --git a/test/files/run/view-headoption.check b/test/files/run/view-headoption.check index 5c98b54b46cb..10e02753507f 100644 --- a/test/files/run/view-headoption.check +++ b/test/files/run/view-headoption.check @@ -17,8 +17,6 @@ f3: Some(5) fail success fail -success -fail fail success fail diff --git a/test/junit/scala/collection/TraversableLikeTest.scala b/test/junit/scala/collection/TraversableLikeTest.scala index f703abf3e47c..ba44e4a7993b 100644 --- a/test/junit/scala/collection/TraversableLikeTest.scala +++ b/test/junit/scala/collection/TraversableLikeTest.scala @@ -66,4 +66,17 @@ class TraversableLikeTest { val frenchLowercase = Foo.mkFrenchLowercase() assertEquals("étrangeNomDeClasseMinuscules", frenchLowercase.stringPrefix) } + + @Test + def test_SI10631 { + val baselist = List(1, 2) + var checklist = List.empty[Int] + val lst = baselist.view.map { x => + checklist = x :: checklist + x + } + + assertEquals(2, lst.last) + assertEquals(baselist.reverse, checklist) + } } From d4852dc6f6c0dd9a7f970059c84958325ea5adb7 Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Mon, 26 Feb 2018 10:14:59 +0100 Subject: [PATCH 1234/2793] move safe last impl to TraversableViewLike also align with lastOption impl in terms of empty/hasElements --- src/library/scala/collection/TraversableLike.scala | 9 +++------ .../scala/collection/TraversableViewLike.scala | 12 ++++++++++++ 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala index 43aa0f7562fe..bf6c9401374d 100644 --- a/src/library/scala/collection/TraversableLike.scala +++ b/src/library/scala/collection/TraversableLike.scala @@ -428,13 +428,10 @@ trait TraversableLike[+A, +Repr] extends Any * @throws NoSuchElementException If the $coll is empty. */ def last: A = { - var lst: A = null.asInstanceOf[A] - var hasElements = false - for (x <- this){ - hasElements = true + var lst = head + for (x <- this) lst = x - } - if (hasElements) lst else throw new NoSuchElementException("last of empty traversable") + lst } /** Optionally selects the last element. diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala index 0901d749c33c..5bc117ecdf40 100644 --- a/src/library/scala/collection/TraversableViewLike.scala +++ b/src/library/scala/collection/TraversableViewLike.scala @@ -112,6 +112,18 @@ trait TraversableViewLike[+A, None } + + override def last: B = { + // (Should be) better than allocating a Some for every element. + var empty = true + var result: B = null.asInstanceOf[B] + for (x <- this) { + empty = false + result = x + } + if (empty) throw new NoSuchElementException("last of empty traversable") else result + } + override def lastOption: Option[B] = { // (Should be) better than allocating a Some for every element. 
var empty = true From 289f3bbc2ccf5c4c6c2608bd010718f99b90de63 Mon Sep 17 00:00:00 2001 From: Alex Levenson Date: Mon, 18 Dec 2017 17:33:50 -0800 Subject: [PATCH 1235/2793] Make MapWrapper.Entry's hashCode conform to the contract in java.util.Map.Entry's documentation --- .../scala/collection/convert/Wrappers.scala | 13 ++++++- test/files/run/t5880.scala | 38 ------------------- .../collection/convert/MapWrapperTest.scala | 24 +++++++++++- 3 files changed, 34 insertions(+), 41 deletions(-) delete mode 100644 test/files/run/t5880.scala diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala index 9f7e3e8174ae..e580d0f7c879 100644 --- a/src/library/scala/collection/convert/Wrappers.scala +++ b/src/library/scala/collection/convert/Wrappers.scala @@ -188,11 +188,20 @@ private[collection] trait Wrappers { val (k, v) = ui.next() prev = Some(k) new ju.Map.Entry[A, B] { - import scala.util.hashing.byteswap32 def getKey = k def getValue = v def setValue(v1 : B) = self.put(k, v1) - override def hashCode = byteswap32(k.##) + (byteswap32(v.##) << 16) + + + // It's important that this implementation conform to the contract + // specified in the javadocs of java.util.Map.Entry.hashCode + // + // See https://github.com/scala/bug/issues/10663 + override def hashCode = { + (if (k == null) 0 else k.hashCode()) ^ + (if (v == null) 0 else v.hashCode()) + } + override def equals(other: Any) = other match { case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue case _ => false diff --git a/test/files/run/t5880.scala b/test/files/run/t5880.scala deleted file mode 100644 index 284ba03ff643..000000000000 --- a/test/files/run/t5880.scala +++ /dev/null @@ -1,38 +0,0 @@ - -import scala.collection.convert.ImplicitConversionsToJava._ - -object Test { - - def main(args:Array[String]) = { - val tests = 5000 - val jm: java.util.Map[Int, Int] = scala.collection.mutable.Map((0 until tests) zip (0 until tests).reverse: _*) - val es = jm.entrySet() - val it = es.iterator - - // chi square test - val groups = 10 - val hits = new Array[Int](groups) - def hit(hc: Int) { - val bucket = math.abs(hc) / (Int.MaxValue / groups) - hits(bucket) += 1 - } - def expected = tests / groups - def Dstat = { - val diffs = for (i <- 0 until groups) yield math.abs(hits(i) - expected) - diffs.sum.toDouble / expected - } - def ChiSquare = { - val diffs = for (i <- 0 until groups) yield (hits(i) - expected) * (hits(i) - expected) - diffs.sum.toDouble / expected - } - - while (it.hasNext) { - val x = it.next() - hit(x.##) - } - // println(hits.toBuffer) - // println(ChiSquare) - assert(ChiSquare < 4.0, ChiSquare + " -> " + hits.mkString(", ")) - } - -} diff --git a/test/junit/scala/collection/convert/MapWrapperTest.scala b/test/junit/scala/collection/convert/MapWrapperTest.scala index c86b582e0e97..4a7171c223b7 100644 --- a/test/junit/scala/collection/convert/MapWrapperTest.scala +++ b/test/junit/scala/collection/convert/MapWrapperTest.scala @@ -4,6 +4,7 @@ import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 +import java.util @RunWith(classOf[JUnit4]) class MapWrapperTest { @@ -49,11 +50,32 @@ class MapWrapperTest { // test for scala/bug#8504 @Test - def testHashCode() { + def testHashCodeNulls() { import scala.collection.JavaConverters._ val javaMap = Map(1 -> null).asJava // Before the fix for scala/bug#8504, this throws a NPE javaMap.hashCode } + + // regression test for 
https://github.com/scala/bug/issues/10663 + @Test + def testHashCodeEqualsMatchesJavaMap() { + import scala.collection.JavaConverters._ + val jmap = new util.HashMap[String, String]() + jmap.put("scala", "rocks") + jmap.put("java interop is fun!", "ya!") + jmap.put("Ĺởồҝ ïŧ\\'ş ūŋǐčōđẹ", "whyyyy") + jmap.put("nulls nooo", null) + jmap.put(null, "null keys are you serious??") + + // manually convert to scala map + val scalaMap = jmap.entrySet().iterator().asScala.map { e => e.getKey -> e.getValue}.toMap + + val mapWrapper = scalaMap.asJava + + assertEquals(jmap.hashCode(), mapWrapper.hashCode()) + assertTrue(jmap == mapWrapper) + assertTrue(mapWrapper == jmap) + } } From 34f51526b05e53f75b3a2fce81b6e3fafc323dc5 Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Wed, 28 Feb 2018 13:09:45 +0100 Subject: [PATCH 1236/2793] Remove .desired.sha1 files The ones for dependencies that we still use for partest are hardcoded at the top of build.sbt like all other dependencies. --- build.sbt | 25 +++++++++++-------- lib/ant/ant-contrib.jar.desired.sha1 | 1 - lib/ant/ant-dotnet-1.0.jar.desired.sha1 | 1 - lib/ant/ant.jar.desired.sha1 | 1 - .../maven-ant-tasks-2.1.1.jar.desired.sha1 | 1 - lib/ant/vizant.jar.desired.sha1 | 1 - project/VersionUtil.scala | 9 +++---- test/files/codelib/code.jar.desired.sha1 | 1 - test/files/lib/annotations.jar.desired.sha1 | 1 - test/files/lib/enums.jar.desired.sha1 | 1 - test/files/lib/genericNest.jar.desired.sha1 | 1 - test/files/lib/jsoup-1.3.1.jar.desired.sha1 | 1 - test/files/lib/macro210.jar.desired.sha1 | 1 - test/files/lib/methvsfield.jar.desired.sha1 | 1 - test/files/lib/nest.jar.desired.sha1 | 1 - .../speclib/instrumented.jar.desired.sha1 | 1 - 16 files changed, 19 insertions(+), 29 deletions(-) delete mode 100644 lib/ant/ant-contrib.jar.desired.sha1 delete mode 100644 lib/ant/ant-dotnet-1.0.jar.desired.sha1 delete mode 100644 lib/ant/ant.jar.desired.sha1 delete mode 100644 lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha1 delete mode 100644 lib/ant/vizant.jar.desired.sha1 delete mode 100644 test/files/codelib/code.jar.desired.sha1 delete mode 100644 test/files/lib/annotations.jar.desired.sha1 delete mode 100644 test/files/lib/enums.jar.desired.sha1 delete mode 100644 test/files/lib/genericNest.jar.desired.sha1 delete mode 100644 test/files/lib/jsoup-1.3.1.jar.desired.sha1 delete mode 100644 test/files/lib/macro210.jar.desired.sha1 delete mode 100644 test/files/lib/methvsfield.jar.desired.sha1 delete mode 100644 test/files/lib/nest.jar.desired.sha1 delete mode 100644 test/files/speclib/instrumented.jar.desired.sha1 diff --git a/build.sbt b/build.sbt index 9a18d06f4ba4..0d3925c961dc 100644 --- a/build.sbt +++ b/build.sbt @@ -50,6 +50,19 @@ val asmDep = "org.scala-lang.modules" % "scala-asm" % versionPr val jlineDep = "jline" % "jline" % versionProps("jline.version") val antDep = "org.apache.ant" % "ant" % "1.9.4" +val partestDependencies = Seq( + "annotations" -> "02fe2ed93766323a13f22c7a7e2ecdcd84259b6c", + "enums" -> "981392dbd1f727b152cd1c908c5fce60ad9d07f7", + "genericNest" -> "b1ec8a095cec4902b3609d74d274c04365c59c04", + "jsoup-1.3.1" -> "346d3dff4088839d6b4d163efa2892124039d216", + "macro210" -> "3794ec22d9b27f2b179bd34e9b46db771b934ec3", + "methvsfield" -> "be8454d5e7751b063ade201c225dcedefd252775", + "nest" -> "cd33e0a0ea249eb42363a2f8ba531186345ff68c" +).map(bootstrapDep("test/files/lib")) ++ Seq( + bootstrapDep("test/files/codelib")("code" -> "e737b123d31eede5594ceda07caafed1673ec472") % "test", + bootstrapDep("test/files/speclib")("instrumented" 
-> "1b11ac773055c1e942c6b5eb4aabdf02292a7194") % "test" +) + /** Publish to ./dists/maven-sbt, similar to the Ant build which publishes to ./dists/maven. This * can be used to compare the output of the sbt and Ant builds during the transition period. Any * real publishing should be done with sbt's standard `publish` task. */ @@ -663,15 +676,7 @@ lazy val test = project .settings(Defaults.itSettings) .settings( libraryDependencies ++= Seq(asmDep, partestDep, scalaXmlDep), - libraryDependencies ++= { - // Resolve the JARs for all test/files/lib/*.jar.desired.sha1 files through Ivy - val baseDir = (baseDirectory in ThisBuild).value - (baseDir / "test/files/lib").list.toSeq.filter(_.endsWith(".jar.desired.sha1")) - .map(f => bootstrapDep(baseDir, "test/files/lib", f.dropRight(17))) - }, - // Two hardcoded dependencies in partest, resolved in the otherwise unused scope "test": - libraryDependencies += bootstrapDep((baseDirectory in ThisBuild).value, "test/files/codelib", "code") % "test", - libraryDependencies += bootstrapDep((baseDirectory in ThisBuild).value, "test/files/speclib", "instrumented") % "test", + libraryDependencies ++= partestDependencies, // no main sources sources in Compile := Seq.empty, // test sources are compiled in partest run, not here @@ -685,7 +690,7 @@ lazy val test = project testOptions in IntegrationTest += Tests.Setup { () => val cp = (dependencyClasspath in Test).value val baseDir = (baseDirectory in ThisBuild).value - // Copy code.jar and instrumented.jar to the location where partest expects them + // Copy code.jar and instrumented.jar (resolved in the otherwise unused scope "test") to the location where partest expects them copyBootstrapJar(cp, baseDir, "test/files/codelib", "code") copyBootstrapJar(cp, baseDir, "test/files/speclib", "instrumented") }, diff --git a/lib/ant/ant-contrib.jar.desired.sha1 b/lib/ant/ant-contrib.jar.desired.sha1 deleted file mode 100644 index 65bcd122bf57..000000000000 --- a/lib/ant/ant-contrib.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -943cd5c8802b2a3a64a010efb86ec19bac142e40 *ant-contrib.jar diff --git a/lib/ant/ant-dotnet-1.0.jar.desired.sha1 b/lib/ant/ant-dotnet-1.0.jar.desired.sha1 deleted file mode 100644 index d8b6a1ca8527..000000000000 --- a/lib/ant/ant-dotnet-1.0.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -3fc1e35ca8c991fc3488548f7a276bd9053c179d *ant-dotnet-1.0.jar diff --git a/lib/ant/ant.jar.desired.sha1 b/lib/ant/ant.jar.desired.sha1 deleted file mode 100644 index bcb610d6dec2..000000000000 --- a/lib/ant/ant.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -7b456ca6b93900f96e58cc8371f03d90a9c1c8d1 *ant.jar diff --git a/lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha1 b/lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha1 deleted file mode 100644 index 53f87c3461c6..000000000000 --- a/lib/ant/maven-ant-tasks-2.1.1.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -7e50e3e227d834695f1e0bf018a7326e06ee4c86 *maven-ant-tasks-2.1.1.jar diff --git a/lib/ant/vizant.jar.desired.sha1 b/lib/ant/vizant.jar.desired.sha1 deleted file mode 100644 index 998da4643aec..000000000000 --- a/lib/ant/vizant.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -2c61d6e9a912b3253194d5d6d3e1db7e2545ac4b *vizant.jar diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala index f4dc467fdb3a..233bfc633346 100644 --- a/project/VersionUtil.scala +++ b/project/VersionUtil.scala @@ -196,11 +196,9 @@ object VersionUtil { "org.scala-lang.scala-sha-bootstrap." 
+ path.replace('/', '.') /** Build a dependency to a JAR file in the bootstrap repository */ - def bootstrapDep(baseDir: File, path: String, libName: String): ModuleID = { - val sha = IO.read(baseDir / path / s"$libName.jar.desired.sha1").split(' ')(0) - bootstrapOrganization(path) % libName % sha from - s"https://repo.lightbend.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap/$sha/$path/$libName.jar" - } + def bootstrapDep(path: String)(libNameAndSha: (String, String)): ModuleID = + bootstrapOrganization(path) % libNameAndSha._1 % libNameAndSha._2 from + s"https://repo.lightbend.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap/${libNameAndSha._2}/$path/${libNameAndSha._1}.jar" /** Copy a bootstrap dependency JAR that is on the classpath to a file */ def copyBootstrapJar(cp: Seq[Attributed[File]], baseDir: File, path: String, libName: String): Unit = { @@ -209,6 +207,7 @@ object VersionUtil { val mod = a.get(moduleID.key) mod.map(_.organization) == Some(org) && mod.map(_.name) == Some(libName) }.map(_.data).get + if(!(baseDir / path).exists()) IO.createDirectory(baseDir / path) IO.copyFile(resolved, baseDir / path / s"$libName.jar") } } diff --git a/test/files/codelib/code.jar.desired.sha1 b/test/files/codelib/code.jar.desired.sha1 deleted file mode 100644 index c4cc74c244a0..000000000000 --- a/test/files/codelib/code.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -e737b123d31eede5594ceda07caafed1673ec472 *code.jar diff --git a/test/files/lib/annotations.jar.desired.sha1 b/test/files/lib/annotations.jar.desired.sha1 deleted file mode 100644 index ff7bc9425e88..000000000000 --- a/test/files/lib/annotations.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -02fe2ed93766323a13f22c7a7e2ecdcd84259b6c *annotations.jar diff --git a/test/files/lib/enums.jar.desired.sha1 b/test/files/lib/enums.jar.desired.sha1 deleted file mode 100644 index 040dff448708..000000000000 --- a/test/files/lib/enums.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -981392dbd1f727b152cd1c908c5fce60ad9d07f7 *enums.jar diff --git a/test/files/lib/genericNest.jar.desired.sha1 b/test/files/lib/genericNest.jar.desired.sha1 deleted file mode 100644 index 77e4fec40891..000000000000 --- a/test/files/lib/genericNest.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -b1ec8a095cec4902b3609d74d274c04365c59c04 *genericNest.jar diff --git a/test/files/lib/jsoup-1.3.1.jar.desired.sha1 b/test/files/lib/jsoup-1.3.1.jar.desired.sha1 deleted file mode 100644 index 46fa3dae9d6d..000000000000 --- a/test/files/lib/jsoup-1.3.1.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -346d3dff4088839d6b4d163efa2892124039d216 ?jsoup-1.3.1.jar diff --git a/test/files/lib/macro210.jar.desired.sha1 b/test/files/lib/macro210.jar.desired.sha1 deleted file mode 100644 index ff87a55129ed..000000000000 --- a/test/files/lib/macro210.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -3794ec22d9b27f2b179bd34e9b46db771b934ec3 ?macro210.jar diff --git a/test/files/lib/methvsfield.jar.desired.sha1 b/test/files/lib/methvsfield.jar.desired.sha1 deleted file mode 100644 index 6655f45ddb25..000000000000 --- a/test/files/lib/methvsfield.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -be8454d5e7751b063ade201c225dcedefd252775 *methvsfield.jar diff --git a/test/files/lib/nest.jar.desired.sha1 b/test/files/lib/nest.jar.desired.sha1 deleted file mode 100644 index 056e7ada904f..000000000000 --- a/test/files/lib/nest.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -cd33e0a0ea249eb42363a2f8ba531186345ff68c *nest.jar diff --git a/test/files/speclib/instrumented.jar.desired.sha1 
b/test/files/speclib/instrumented.jar.desired.sha1 deleted file mode 100644 index 9dd577164e36..000000000000 --- a/test/files/speclib/instrumented.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -1b11ac773055c1e942c6b5eb4aabdf02292a7194 ?instrumented.jar From dfe236c412ec28fa39d0832778110db567411924 Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Wed, 28 Feb 2018 13:10:28 +0100 Subject: [PATCH 1237/2793] Compile only necessary projects for stability test --- scripts/jobs/integrate/bootstrap | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index 6f5e6fed2f8c..fd7f720a9457 100755 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -456,7 +456,7 @@ testStability() { ${updatedModuleVersions[@]} \ "setupBootstrapQuick $integrationRepoUrl $SCALA_VER" \ $clean \ - dist/mkQuick + library/compile reflect/compile compiler/compile mv build/quick build/strap mv quick1 build/quick $scriptsDir/stability-test.sh From c6544d408be415f1a23c670f0a895dfb8c6b71da Mon Sep 17 00:00:00 2001 From: sh0hei Date: Wed, 28 Feb 2018 21:42:44 +0900 Subject: [PATCH 1238/2793] Explicit type annotations and !isEmpty replace with nonEmpty --- src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala b/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala index 6dcfa173df4e..64b9db52510a 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala @@ -65,7 +65,7 @@ object Utility { rfb.clear() unescape(ref,sb) match { case null => - if (!sb.isEmpty) { // flush buffer + if (sb.nonEmpty) { // flush buffer nb += text(sb.toString()) sb.clear() } @@ -77,7 +77,7 @@ object Utility { else sb append c } - if(!sb.isEmpty) // flush buffer + if(sb.nonEmpty) // flush buffer nb += text(sb.toString()) nb.toList @@ -129,7 +129,7 @@ object Utility { * }}} * See [4] and Appendix B of XML 1.0 specification. */ - def isNameChar(ch: Char) = { + def isNameChar(ch: Char): Boolean = { import java.lang.Character._ // The constants represent groups Mc, Me, Mn, Lm, and Nd. @@ -150,7 +150,7 @@ object Utility { * We do not allow a name to start with `:`. 
* See [3] and Appendix B of XML 1.0 specification */ - def isNameStart(ch: Char) = { + def isNameStart(ch: Char): Boolean = { import java.lang.Character._ getType(ch).toByte match { From b53fdefee3a5ebac0a140063a07050bb2deae8be Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 1 Mar 2018 08:45:43 -0800 Subject: [PATCH 1239/2793] Review: be less annoying, more conforming --- .../scala/tools/nsc/reporters/AbstractReporter.scala | 5 +---- .../scala/tools/nsc/reporters/LimitingReporter.scala | 2 -- .../scala/tools/nsc/reporters/NoReporter.scala | 11 +++++------ src/compiler/scala/tools/nsc/reporters/Reporter.scala | 7 +++---- .../scala/tools/nsc/reporters/StoreReporter.scala | 8 ++------ 5 files changed, 11 insertions(+), 22 deletions(-) diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala index 75afd057afb1..c3ac5d647d05 100644 --- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala @@ -9,11 +9,8 @@ package reporters import scala.collection.mutable import scala.tools.nsc.Settings import scala.reflect.internal.util.Position -// TODO -//import scala.reflect.internal.Reporter -/** - * This reporter implements filtering. +/** This reporter implements filtering by severity and position. */ abstract class AbstractReporter extends Reporter { val settings: Settings diff --git a/src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala b/src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala index 1eedc4fff6a1..68a1319b4d4e 100644 --- a/src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala @@ -1,8 +1,6 @@ package scala.tools.nsc package reporters -// TODO -//import scala.reflect.internal.Reporter import scala.reflect.internal.{Reporter => InternalReporter, FilteringReporter} import scala.reflect.internal.util.Position diff --git a/src/compiler/scala/tools/nsc/reporters/NoReporter.scala b/src/compiler/scala/tools/nsc/reporters/NoReporter.scala index 6aa9b4315612..26335bd6c4cf 100644 --- a/src/compiler/scala/tools/nsc/reporters/NoReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/NoReporter.scala @@ -1,12 +1,11 @@ package scala.tools.nsc.reporters import scala.reflect.internal.util.Position -// TODO -//import scala.reflect.internal.Reporter -/** - * A reporter that ignores reports. - */ +/** A reporter that ignores reports. + * + * It should probably be called RudeReporter. + */ object NoReporter extends Reporter { - override protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = () + protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = () } diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala index bd438f0e755e..91a28f61f970 100644 --- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala @@ -10,11 +10,10 @@ import scala.reflect.internal.util._ /** Report information, warnings and errors. * - * This describes the internal interface for issuing information, warnings and errors. - * The only abstract method in this class must be info0. + * This describes the internal interface for issuing information, warnings and errors. + * The only abstract method in this class must be info0. 
* - * TODO: Move external clients (sbt/ide/partest) to reflect.internal.Reporter, - * and remove this class. + * TODO: Move external clients (sbt/ide/partest) to reflect.internal.Reporter, and remove this class. */ abstract class Reporter extends scala.reflect.internal.Reporter { /** Informational messages. If `!force`, they may be suppressed. */ diff --git a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala index ce1912c72c0d..735ad89c8221 100644 --- a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala @@ -8,18 +8,14 @@ package reporters import scala.collection.mutable import scala.reflect.internal.util.Position -// TODO -//import scala.reflect.internal.Reporter -/** - * This class implements a Reporter that stores its reports in the set `infos`. - */ +/** This class implements a Reporter that stores its reports in the set `infos`. */ class StoreReporter extends Reporter { case class Info(pos: Position, msg: String, severity: Severity) { override def toString() = s"pos: $pos $msg $severity" } val infos = new mutable.LinkedHashSet[Info] - override protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = { + protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = { if (!force) { infos += Info(pos, msg, severity) severity.count += 1 From 55da02d3c5b381e47cabd1acaffe489d4305748b Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Mon, 8 Jan 2018 18:28:19 -0500 Subject: [PATCH 1240/2793] Improve error message when using a Java class as a value. As pointed out in gitter, it's kinda unfair to refer to a Java class as an `object` in error messages, despite that being what the compiler sees. In general, saying "Java {class,interface}" everywhere winds up looking a bit ugly (in almost all situations, we don't really care), so changing the `symbolKind` of Java symbols would be going too far. Also, the `enum object` error emitted by `ClassfileParser` sounds kinda strange to my picky ears. `enum Foobar` is perfectly fine IMVHO. 
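To make the change concrete, here is a small illustrative sketch of the scenario (it is not part of the patch itself; the `java.lang.Thread` example mirrors the comment added to `ContextErrors`, and the message wording follows the updated check files below, e.g. `object-not-a-value.check` and `t6814.check`):

```scala
object Example {
  // A Java class used in term position: it has no companion object,
  // so the term reference cannot be treated as a value.
  val thread = java.lang.Thread
  // reported before this patch: "object java.lang.Thread is not a value"
  // reported after this patch:  "Java class java.lang.Thread is not a value"
}
```
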
--- .../symtab/classfile/ClassfileParser.scala | 5 ++- .../tools/nsc/typechecker/ContextErrors.scala | 7 +++- .../scala/reflect/internal/Symbols.scala | 16 ++++++--- test/files/neg/object-not-a-value.check | 2 +- test/files/neg/protected-static-fail.check | 2 +- test/files/neg/t0673.check | 2 +- test/files/neg/t6934.check | 4 +-- test/files/neg/t7014.check | 2 +- test/files/neg/t7251.check | 2 +- .../run/reflection-fancy-java-classes.check | 2 +- test/files/run/t6814.check | 2 +- test/files/run/t6989.check | 36 +++++++++---------- test/files/run/t7582-private-within.check | 4 +-- 13 files changed, 50 insertions(+), 36 deletions(-) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 1639265796d9..f99b85b7cfdd 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -970,7 +970,10 @@ abstract class ClassfileParser { val s = module.info.decls.lookup(n) if (s != NoSymbol) Some(LiteralAnnotArg(Constant(s))) else { - warning(s"""While parsing annotations in ${in.file}, could not find $n in enum $module.\nThis is likely due to an implementation restriction: an annotation argument cannot refer to a member of the annotated class (scala/bug#7014).""") + warning( + sm"""While parsing annotations in ${in.file}, could not find $n in enum ${module.nameString}. + |This is likely due to an implementation restriction: an annotation argument cannot refer to a member of the annotated class (scala/bug#7014).""" + ) None } diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 582a8e1a183b..48cdafb033e2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -750,7 +750,12 @@ trait ContextErrors { // def stabilize def NotAValueError(tree: Tree, sym: Symbol) = { - issueNormalTypeError(tree, sym.kindString + " " + sym.fullName + " is not a value") + /* Give a better error message for `val thread = java.lang.Thread`. 
*/ + val betterKindString = + if (sym.isJavaDefined && sym.isTrait) "Java interface" + else if (sym.isJavaDefined && (sym.isClass || sym.isModule)) "Java class" + else sym.kindString + issueNormalTypeError(tree, s"$betterKindString ${sym.fullName} is not a value") setError(tree) } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 94158bd8cfe8..40d67d8b7c83 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -197,7 +197,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => def paramLists: List[List[Symbol]] = paramss } - private[reflect] case class SymbolKind(accurate: String, sanitized: String, abbreviation: String) + private[reflect] final case class SymbolKind(accurate: String, sanitized: String, abbreviation: String) { + def skolemize: SymbolKind = copy(accurate = s"$accurate skolem", abbreviation = s"$abbreviation#SKO") + } protected def newStubSymbol(owner: Symbol, name: Name, @@ -2579,7 +2581,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => else "" private def symbolKind: SymbolKind = { - var kind = + implicit val triple2SK = (SymbolKind.apply _).tupled + val kind: SymbolKind = if (isTermMacro) ("term macro", "macro method", "MACM") else if (isInstanceOf[FreeTermSymbol]) ("free term", "free term", "FTE") else if (isInstanceOf[FreeTypeSymbol]) ("free type", "free type", "FTY") @@ -2589,6 +2592,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => else if (isPackageObjectClass) ("package object class", "package", "PKOC") else if (isAnonymousClass) ("anonymous class", "anonymous class", "AC") else if (isRefinementClass) ("refinement class", "", "RC") + else if (isJavaAnnotation) ("Java annotation", "Java annotation", "JANN") + else if (isJavaEnum + || companion.isJavaEnum) ("Java enumeration", "Java enum", "JENUM") + else if (isJava && isModule) ("Java module", "class", "JMOD") + else if (isJava && isModuleClass) ("Java module class", "class", "JMODC") else if (isModule) ("module", "object", "MOD") else if (isModuleClass) ("module class", "object", "MODC") else if (isAccessor && @@ -2606,9 +2614,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => else if (isTerm) ("value", "value", "VAL") else ("", "", "???") - if (isSkolem) kind = (kind._1, kind._2, kind._3 + "#SKO") - - SymbolKind(kind._1, kind._2, kind._3) + if (isSkolem) kind.skolemize else kind } /** Accurate string representation of symbols' kind, suitable for developers. 
*/ diff --git a/test/files/neg/object-not-a-value.check b/test/files/neg/object-not-a-value.check index 613210f27c4d..b181210877fe 100644 --- a/test/files/neg/object-not-a-value.check +++ b/test/files/neg/object-not-a-value.check @@ -1,4 +1,4 @@ -object-not-a-value.scala:5: error: object java.util.List is not a value +object-not-a-value.scala:5: error: Java class java.util.List is not a value List(1) map (_ + 1) ^ one error found diff --git a/test/files/neg/protected-static-fail.check b/test/files/neg/protected-static-fail.check index 9f0bc92e7dc7..1d1d32653c97 100644 --- a/test/files/neg/protected-static-fail.check +++ b/test/files/neg/protected-static-fail.check @@ -1,4 +1,4 @@ -S.scala:5: error: method f in object J cannot be accessed in object bippy.J +S.scala:5: error: method f in class J cannot be accessed in object bippy.J J.f() ^ S.scala:6: error: method f1 in object S1 cannot be accessed in object bippy.S1 diff --git a/test/files/neg/t0673.check b/test/files/neg/t0673.check index fd27afc23fff..2d11d0ef9f3b 100644 --- a/test/files/neg/t0673.check +++ b/test/files/neg/t0673.check @@ -1,4 +1,4 @@ -Test.scala:2: error: object JavaClass.InnerClass is not a value +Test.scala:2: error: Java class JavaClass.InnerClass is not a value val x = JavaClass.InnerClass ^ one error found diff --git a/test/files/neg/t6934.check b/test/files/neg/t6934.check index 7a51439eaeb6..6ec2ebdbfd05 100644 --- a/test/files/neg/t6934.check +++ b/test/files/neg/t6934.check @@ -1,7 +1,7 @@ -ScalaMain.scala:6: error: variable STATIC_PROTECTED_FIELD in object JavaClass cannot be accessed in object test.JavaClass +ScalaMain.scala:6: error: variable STATIC_PROTECTED_FIELD in class JavaClass cannot be accessed in object test.JavaClass Access to protected variable STATIC_PROTECTED_FIELD not permitted because enclosing object ScalaMain in package test2 is not a subclass of - object JavaClass in package test where target is defined + class JavaClass in package test where target is defined val a = test.JavaClass.STATIC_PROTECTED_FIELD ^ one error found diff --git a/test/files/neg/t7014.check b/test/files/neg/t7014.check index 3554b41f9aab..9351079918e8 100644 --- a/test/files/neg/t7014.check +++ b/test/files/neg/t7014.check @@ -1,4 +1,4 @@ -warning: While parsing annotations in t7014-neg.obj/t7014/ThreadSafetyLevel_1.class, could not find COMPLETELY_THREADSAFE in enum object ThreadSafetyLevel_1. +warning: While parsing annotations in t7014-neg.obj/t7014/ThreadSafetyLevel_1.class, could not find COMPLETELY_THREADSAFE in enum ThreadSafetyLevel_1. This is likely due to an implementation restriction: an annotation argument cannot refer to a member of the annotated class (scala/bug#7014). error: No warnings can be incurred under -Xfatal-warnings. 
one warning found diff --git a/test/files/neg/t7251.check b/test/files/neg/t7251.check index 8df8984d6372..a904804e4357 100644 --- a/test/files/neg/t7251.check +++ b/test/files/neg/t7251.check @@ -1,4 +1,4 @@ -B_2.scala:5: error: object s.Outer$Triple$ is not a value +B_2.scala:5: error: Java class s.Outer$Triple$ is not a value println( s.Outer$Triple$ ) ^ one error found diff --git a/test/files/run/reflection-fancy-java-classes.check b/test/files/run/reflection-fancy-java-classes.check index 258208dd9959..9362327eaeec 100644 --- a/test/files/run/reflection-fancy-java-classes.check +++ b/test/files/run/reflection-fancy-java-classes.check @@ -9,4 +9,4 @@ isAnonymousClass = true ===== SCALA POV ===== class 1 -object Foo_1 +class Foo_1 diff --git a/test/files/run/t6814.check b/test/files/run/t6814.check index 97ada7720284..74f1ba114364 100644 --- a/test/files/run/t6814.check +++ b/test/files/run/t6814.check @@ -1,6 +1,6 @@ List[Int] scala.collection.immutable.List.type -object java.lang.RuntimeException is not a value +Java class java.lang.RuntimeException is not a value List[Int] List scala.collection.immutable.List.type diff --git a/test/files/run/t6989.check b/test/files/run/t6989.check index 43d4bbaf020b..baa118e1e5df 100644 --- a/test/files/run/t6989.check +++ b/test/files/run/t6989.check @@ -47,43 +47,43 @@ isProtected = false isPublic = true privateWithin = ============ -sym = object PackagePrivateJavaClass, signature = foo.PackagePrivateJavaClass.type, owner = package foo +sym = class PackagePrivateJavaClass, signature = foo.PackagePrivateJavaClass.type, owner = package foo isPrivate = false isProtected = false isPublic = false privateWithin = package foo ============ -sym = variable privateStaticField, signature = Int, owner = object PackagePrivateJavaClass +sym = variable privateStaticField, signature = Int, owner = class PackagePrivateJavaClass isPrivate = true isProtected = false isPublic = false privateWithin = ============ -sym = method privateStaticMethod, signature = ()Unit, owner = object PackagePrivateJavaClass +sym = method privateStaticMethod, signature = ()Unit, owner = class PackagePrivateJavaClass isPrivate = true isProtected = false isPublic = false privateWithin = ============ -sym = variable protectedStaticField, signature = Int, owner = object PackagePrivateJavaClass +sym = variable protectedStaticField, signature = Int, owner = class PackagePrivateJavaClass isPrivate = false isProtected = true isPublic = false privateWithin = package foo ============ -sym = method protectedStaticMethod, signature = ()Unit, owner = object PackagePrivateJavaClass +sym = method protectedStaticMethod, signature = ()Unit, owner = class PackagePrivateJavaClass isPrivate = false isProtected = true isPublic = false privateWithin = package foo ============ -sym = variable publicStaticField, signature = Int, owner = object PackagePrivateJavaClass +sym = variable publicStaticField, signature = Int, owner = class PackagePrivateJavaClass isPrivate = false isProtected = false isPublic = true privateWithin = ============ -sym = method publicStaticMethod, signature = ()Unit, owner = object PackagePrivateJavaClass +sym = method publicStaticMethod, signature = ()Unit, owner = class PackagePrivateJavaClass isPrivate = false isProtected = false isPublic = true @@ -113,7 +113,7 @@ isProtected = false isPublic = false privateWithin = package foo ============ -sym = object $PrivateJavaClass, signature = JavaClass_1.this.$PrivateJavaClass.type, owner = class JavaClass_1 +sym = class $PrivateJavaClass, 
signature = JavaClass_1.this.$PrivateJavaClass.type, owner = class JavaClass_1 isPrivate = true isProtected = false isPublic = false @@ -137,7 +137,7 @@ isProtected = false isPublic = false privateWithin = package foo ============ -sym = object $ProtectedJavaClass, signature = JavaClass_1.this.$ProtectedJavaClass.type, owner = class JavaClass_1 +sym = class $ProtectedJavaClass, signature = JavaClass_1.this.$ProtectedJavaClass.type, owner = class JavaClass_1 isPrivate = false isProtected = false isPublic = false @@ -161,7 +161,7 @@ isProtected = false isPublic = false privateWithin = package foo ============ -sym = object $PublicJavaClass, signature = JavaClass_1.this.$PublicJavaClass.type, owner = class JavaClass_1 +sym = class $PublicJavaClass, signature = JavaClass_1.this.$PublicJavaClass.type, owner = class JavaClass_1 isPrivate = false isProtected = false isPublic = true @@ -173,13 +173,13 @@ isProtected = false isPublic = true privateWithin = ============ -sym = object JavaClass_1, signature = foo.JavaClass_1.type, owner = package foo +sym = class JavaClass_1, signature = foo.JavaClass_1.type, owner = package foo isPrivate = false isProtected = false isPublic = true privateWithin = ============ -sym = class PrivateStaticJavaClass, signature = ClassInfoType(...), owner = object JavaClass_1 +sym = class PrivateStaticJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1 isPrivate = true isProtected = false isPublic = false @@ -191,13 +191,13 @@ isProtected = false isPublic = true privateWithin = ============ -sym = object PrivateStaticJavaClass, signature = foo.JavaClass_1.PrivateStaticJavaClass.type, owner = object JavaClass_1 +sym = class PrivateStaticJavaClass, signature = foo.JavaClass_1.PrivateStaticJavaClass.type, owner = class JavaClass_1 isPrivate = true isProtected = false isPublic = false privateWithin = ============ -sym = class ProtectedStaticJavaClass, signature = ClassInfoType(...), owner = object JavaClass_1 +sym = class ProtectedStaticJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1 isPrivate = true isProtected = false isPublic = false @@ -209,13 +209,13 @@ isProtected = false isPublic = true privateWithin = ============ -sym = object ProtectedStaticJavaClass, signature = foo.JavaClass_1.ProtectedStaticJavaClass.type, owner = object JavaClass_1 +sym = class ProtectedStaticJavaClass, signature = foo.JavaClass_1.ProtectedStaticJavaClass.type, owner = class JavaClass_1 isPrivate = true isProtected = false isPublic = false privateWithin = ============ -sym = class PublicStaticJavaClass, signature = ClassInfoType(...), owner = object JavaClass_1 +sym = class PublicStaticJavaClass, signature = ClassInfoType(...), owner = class JavaClass_1 isPrivate = false isProtected = false isPublic = true @@ -227,13 +227,13 @@ isProtected = false isPublic = true privateWithin = ============ -sym = object PublicStaticJavaClass, signature = foo.JavaClass_1.PublicStaticJavaClass.type, owner = object JavaClass_1 +sym = class PublicStaticJavaClass, signature = foo.JavaClass_1.PublicStaticJavaClass.type, owner = class JavaClass_1 isPrivate = false isProtected = false isPublic = true privateWithin = ============ -sym = variable staticField, signature = Int, owner = object JavaClass_1 +sym = variable staticField, signature = Int, owner = class JavaClass_1 isPrivate = true isProtected = false isPublic = false diff --git a/test/files/run/t7582-private-within.check b/test/files/run/t7582-private-within.check index b2743ffa06af..48773971bc01 100644 --- 
a/test/files/run/t7582-private-within.check +++ b/test/files/run/t7582-private-within.check @@ -1,6 +1,6 @@ private[package pack] class JavaPackagePrivate -private[package pack] module JavaPackagePrivate -private[package pack] module class JavaPackagePrivate +private[package pack] Java module JavaPackagePrivate +private[package pack] Java module class JavaPackagePrivate private[package pack] field field private[package pack] primary constructor private[package pack] method meth From 910c56305614a6b644e415426e82ac5f25c2dafc Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 1 Mar 2018 16:00:59 +0100 Subject: [PATCH 1241/2793] Report an error when using an unstable qualifier in a type selection The `treeInfo.admitsTypeSelection` check got lost in the refactoring in 0b055c6cf697. In 2.12.4, `typedType` of `newOuter.Inner` produces `Outer#Inner`: scala> val x: newOuter.Inner = null x: Outer#Inner = null Fixes scala/bug#10619 --- .../scala/tools/nsc/typechecker/Typers.scala | 10 +++++++--- test/files/neg/t10619.check | 10 ++++++++++ test/files/neg/t10619.scala | 13 +++++++++++++ 3 files changed, 30 insertions(+), 3 deletions(-) create mode 100644 test/files/neg/t10619.check create mode 100644 test/files/neg/t10619.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 76da5cdd40d8..22a70344f2f5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5037,9 +5037,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // the qualifier type of a supercall constructor is its first parent class typedSelect(tree, typedSelectOrSuperQualifier(qual), nme.CONSTRUCTOR) case Select(qual, name) => - if (name.isTypeName) - typedSelect(tree, typedTypeSelectionQualifier(tree.qualifier, WildcardType), name) - else { + if (name.isTypeName) { + val qualTyped = typedTypeSelectionQualifier(tree.qualifier, WildcardType) + val qualStableOrError = + if (qualTyped.isErrorTyped || treeInfo.admitsTypeSelection(qualTyped)) qualTyped + else UnstableTreeError(qualTyped) + typedSelect(tree, qualStableOrError, name) + } else { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedSelectCount) val qualTyped = checkDead(typedQualifier(qual, mode)) val tree1 = typedSelect(tree, qualTyped, name) diff --git a/test/files/neg/t10619.check b/test/files/neg/t10619.check new file mode 100644 index 000000000000..3bea5fd28b63 --- /dev/null +++ b/test/files/neg/t10619.check @@ -0,0 +1,10 @@ +t10619.scala:4: error: stable identifier required, but Test.this.newOuter found. + val a: newOuter.Inner = { val o = newOuter; new o.Inner } + ^ +t10619.scala:5: error: stable identifier required, but Test.this.newOuter found. + val b: newOuter.Inner = a + ^ +t10619.scala:12: error: stable identifier required, but Test.this.newOuter found. 
+ val f = new newOuter.Inner + ^ +three errors found diff --git a/test/files/neg/t10619.scala b/test/files/neg/t10619.scala new file mode 100644 index 000000000000..4bdc56ca4eec --- /dev/null +++ b/test/files/neg/t10619.scala @@ -0,0 +1,13 @@ +class Outer { class Inner } +object Test { + def newOuter = new Outer + val a: newOuter.Inner = { val o = newOuter; new o.Inner } + val b: newOuter.Inner = a + + val o = newOuter + val c: o.Inner = b + val d: o.Inner = new o.Inner + val e: o.Inner = d + + val f = new newOuter.Inner +} From 0714bfefea435e315dda7a254038bff0abbf4ad8 Mon Sep 17 00:00:00 2001 From: Philippus Baalman Date: Sun, 18 Feb 2018 09:45:11 +0100 Subject: [PATCH 1242/2793] Set pointer-events to none in headings closes scala/bug#10728 --- spec/public/stylesheets/screen.css | 1 + 1 file changed, 1 insertion(+) diff --git a/spec/public/stylesheets/screen.css b/spec/public/stylesheets/screen.css index fdddba0b454b..b7babaf5bf42 100644 --- a/spec/public/stylesheets/screen.css +++ b/spec/public/stylesheets/screen.css @@ -54,6 +54,7 @@ h1, h2, h3, h4, h5, h6 { -webkit-font-smoothing: antialiased; cursor: text; position: relative; + pointer-events: none; } h1, h2 { From 353d439645f50ae5c02453a8bca43e51a1abba82 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 2 Mar 2018 21:15:17 -0800 Subject: [PATCH 1243/2793] Compile from irregular files Non-directories that are not regular files do not report `isFile`, but it can be useful to compile them. --- .../scala/tools/nsc/ScriptRunner.scala | 24 +++++++++---------- .../scala/tools/nsc/io/SourceReader.scala | 10 ++++---- .../scala/reflect/io/AbstractFile.scala | 2 +- 3 files changed, 18 insertions(+), 18 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index 1f1953803ea0..41db2bb4fdba 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -176,20 +176,20 @@ class ScriptRunner extends HasCompileSocket { } } - /** Run a script file with the specified arguments and compilation - * settings. + /** Run a script file with the specified arguments and compilation settings. * - * @return true if compilation and execution succeeded, false otherwise. + * @return true if compilation and execution succeeded, false otherwise. 
*/ - def runScript( - settings: GenericRunnerSettings, - scriptFile: String, - scriptArgs: List[String]): Boolean = - { - if (File(scriptFile).isFile) - withCompiledScript(settings, scriptFile) { runCompiled(settings, _, scriptArgs) } - else - throw new IOException("no such file: " + scriptFile) + def runScript(settings: GenericRunnerSettings, scriptFile: String, scriptArgs: List[String]): Boolean = { + def checkedScript = { + val f = File(scriptFile) + if (!f.exists) throw new IOException(s"no such file: $scriptFile") + if (!f.canRead) throw new IOException(s"can't read: $scriptFile") + if (f.isDirectory) throw new IOException(s"can't compile a directory: $scriptFile") + if (!settings.nc && !f.isFile) throw new IOException(s"compile server requires a regular file: $scriptFile") + scriptFile + } + withCompiledScript(settings, checkedScript) { runCompiled(settings, _, scriptArgs) } } /** Calls runScript and catches the enumerated exceptions, routing diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala index b84c509a32b3..89964003ab24 100644 --- a/src/compiler/scala/tools/nsc/io/SourceReader.scala +++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala @@ -27,10 +27,10 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) { /** The output character buffer */ private var chars: CharBuffer = CharBuffer.allocate(0x4000) - private def reportEncodingError(filename:String) = { + private def reportEncodingError(filename: String, e: Exception) = { + val advice = "Please try specifying another one using the -encoding option" reporter.error(scala.reflect.internal.util.NoPosition, - "IO error while decoding "+filename+" with "+decoder.charset()+"\n"+ - "Please try specifying another one using the -encoding option") + s"IO error while decoding $filename with ${decoder.charset()}: ${e.getMessage}\n$advice") } /** Reads the specified file. */ @@ -38,7 +38,7 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) { val c = new FileInputStream(file).getChannel try read(c) - catch { case e: Exception => reportEncodingError("" + file) ; Array() } + catch { case e: Exception => reportEncodingError("" + file, e) ; Array() } finally c.close() } @@ -51,7 +51,7 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) { case _ => read(ByteBuffer.wrap(file.toByteArray)) } catch { - case e: Exception => reportEncodingError("" + file) ; Array() + case e: Exception => reportEncodingError("" + file, e) ; Array() } } diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index e77dd6846c09..066df2b4227c 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -30,7 +30,7 @@ object AbstractFile { * abstract regular file backed by it. Otherwise, returns `null`. */ def getFile(file: File): AbstractFile = - if (file.isFile) new PlainFile(file) else null + if (!file.isDirectory) new PlainFile(file) else null /** Returns "getDirectory(new File(path))". 
*/ def getDirectory(path: Path): AbstractFile = getDirectory(path.toFile) From 034c0bec4cec709b9bee13a83a4000bfdc1ad232 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 27 Feb 2018 10:58:08 -0800 Subject: [PATCH 1244/2793] Enable implicits to check completion enrichment --- .../scala/tools/nsc/interactive/Global.scala | 5 ++++- .../files/presentation/infix-completion.check | 20 ++++++++++++++++++- 2 files changed, 23 insertions(+), 2 deletions(-) diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index 6db2e4e10a9c..3ba7fe7b1e40 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -1106,7 +1106,10 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") */ def viewApply(view: SearchResult): Tree = { assert(view.tree != EmptyTree) - analyzer.newTyper(context.makeImplicit(reportAmbiguousErrors = false)) + val t = analyzer.newTyper(context.makeImplicit(reportAmbiguousErrors = false)) + .typed(Apply(view.tree, List(tree)) setPos tree.pos) + if (!t.tpe.isErroneous) t + else analyzer.newTyper(context.makeSilent(reportAmbiguousErrors = true)) .typed(Apply(view.tree, List(tree)) setPos tree.pos) .onTypeError(EmptyTree) } diff --git a/test/files/presentation/infix-completion.check b/test/files/presentation/infix-completion.check index f62dc81d3434..5c69cd84cb3a 100644 --- a/test/files/presentation/infix-completion.check +++ b/test/files/presentation/infix-completion.check @@ -3,7 +3,7 @@ reload: Snippet.scala askTypeCompletion at Snippet.scala(1,34) ================================================================================ [response] askTypeCompletion at (1,34) -retrieved 192 members +retrieved 211 members [inaccessible] protected def integralNum: math.Numeric.DoubleAsIfIntegral.type [inaccessible] protected def num: math.Numeric.DoubleIsFractional.type [inaccessible] protected def ord: math.Ordering.Double.type @@ -109,10 +109,16 @@ def ^(x: Short): Int def byteValue(): Byte def ceil: Double def compare(y: Double): Int +def compare(y: Float): Int +def compare(y: Int): Int def compare(y: Long): Int def compareTo(that: Double): Int +def compareTo(that: Float): Int +def compareTo(that: Int): Int def compareTo(that: Long): Int def compareTo(x$1: Double): Int +def compareTo(x$1: Float): Int +def compareTo(x$1: Integer): Int def compareTo(x$1: Long): Int def doubleValue(): Double def ensuring(cond: Boolean): Int @@ -136,6 +142,10 @@ def round: Long def shortValue(): Short def to(end: Double): Range.Partial[Double,scala.collection.immutable.NumericRange[Double]] def to(end: Double,step: Double): scala.collection.immutable.NumericRange.Inclusive[Double] +def to(end: Float): Range.Partial[Float,scala.collection.immutable.NumericRange[Float]] +def to(end: Float,step: Float): scala.collection.immutable.NumericRange.Inclusive[Float] +def to(end: Int): scala.collection.immutable.Range.Inclusive +def to(end: Int,step: Int): scala.collection.immutable.Range.Inclusive def to(end: Long): scala.collection.immutable.NumericRange.Inclusive[Long] def to(end: Long,step: Long): scala.collection.immutable.NumericRange.Inclusive[Long] def toBinaryString: String @@ -157,6 +167,10 @@ def unary_~: Int def underlying(): AnyRef def until(end: Double): Range.Partial[Double,scala.collection.immutable.NumericRange[Double]] def until(end: Double,step: Double): scala.collection.immutable.NumericRange.Exclusive[Double] +def until(end: 
Float): Range.Partial[Float,scala.collection.immutable.NumericRange[Float]] +def until(end: Float,step: Float): scala.collection.immutable.NumericRange.Exclusive[Float] +def until(end: Int): scala.collection.immutable.Range +def until(end: Int,step: Int): scala.collection.immutable.Range def until(end: Long): scala.collection.immutable.NumericRange.Exclusive[Long] def until(end: Long,step: Long): scala.collection.immutable.NumericRange.Exclusive[Long] def |(x: Byte): Int @@ -185,8 +199,12 @@ override def isValidInt: Boolean override def isValidShort: Boolean override def isWhole(): Boolean override def max(that: Double): Double +override def max(that: Float): Float +override def max(that: Int): Int override def max(that: Long): Long override def min(that: Double): Double +override def min(that: Float): Float +override def min(that: Int): Int override def min(that: Long): Long override def signum: Int private[this] val self: Double From 45e53dac4e99804e5e19cd50c90fc830089b8bb0 Mon Sep 17 00:00:00 2001 From: Shohei Shimomura Date: Fri, 2 Mar 2018 23:57:53 +0900 Subject: [PATCH 1245/2793] Test completion of char literal --- test/junit/scala/tools/nsc/interpreter/CompletionTest.scala | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala index 8b493714f121..83db7079caff 100644 --- a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala +++ b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala @@ -50,6 +50,10 @@ class CompletionTest { // Output is sorted assertEquals(List("prefix_aaa", "prefix_nnn", "prefix_zzz"), completer.complete( """class C { def prefix_nnn = 0; def prefix_zzz = 0; def prefix_aaa = 0; prefix_""").candidates) + + // Enable implicits to check completion enrichment + assert(completer.complete("""'c'.""").candidates.contains("toUpper")) + assert(completer.complete("""val c = 'c'; c.""").candidates.contains("toUpper")) } @Test From a6873a2ebe168b14b7742c3030a481c62cc589b9 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 5 Mar 2018 18:39:30 +1000 Subject: [PATCH 1246/2793] Disable parallelism in the scalacheck suite This is a workaround for a race condition we identified: https://github.com/scala/scala-jenkins-infra/issues/249 A future SBT version will include this fix: https://github.com/sbt/sbt/pull/3985 --- build.sbt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/build.sbt b/build.sbt index 0d3925c961dc..3adcfc9b4d57 100644 --- a/build.sbt +++ b/build.sbt @@ -598,6 +598,9 @@ lazy val scalacheck = project.in(file("test") / "scalacheck") libraryDependencies ++= Seq(scalacheckDep), unmanagedSourceDirectories in Compile := Nil, unmanagedSourceDirectories in Test := List(baseDirectory.value) + ).settings( + // Workaround for https://github.com/sbt/sbt/pull/3985 + List(Keys.test, Keys.testOnly).map(task => parallelExecution in task := false) : _* ) lazy val osgiTestFelix = osgiTestProject( From 02b6cdadd9caa6dbf3772490ed7714efe5929ec7 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 4 Mar 2018 15:41:21 -0800 Subject: [PATCH 1247/2793] REPL command completion is cursor sensitive Completes `:lo^x.s` to `:load^x.s` and `:lo` to `:load ^`. 
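For illustration only (not part of this patch), here is a minimal, self-contained sketch of the cursor-sensitive matching idea: only the text to the left of the cursor is used to pick a command, so `:lo^x.s` still resolves to `:load`. The object name, command list, and `complete` signature below are invented for the example and do not reflect the actual REPL internals.

```scala
// Hypothetical sketch: cursor-sensitive prefix matching for REPL commands.
object CursorCompletionSketch {
  val commands = List("help", "line", "load", "paste", "quit")

  // `input` is the full line typed so far; `cursor` is the caret offset within it.
  def complete(input: String, cursor: Int): List[String] = {
    // Only what precedes the cursor takes part in the match.
    val typed = input.take(cursor).stripPrefix(":")
    commands.filter(_.startsWith(typed))
  }

  def main(args: Array[String]): Unit = {
    println(complete(":lox.s", 3)) // List(load)       -- cursor sits after ":lo"
    println(complete(":l", 2))     // List(line, load) -- ambiguous prefix, both offered
  }
}
```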
--- .../tools/nsc/interpreter/Completion.scala | 2 +- .../tools/nsc/interpreter/LoopCommands.scala | 42 ++++++++++++------- 2 files changed, 27 insertions(+), 17 deletions(-) diff --git a/src/repl/scala/tools/nsc/interpreter/Completion.scala b/src/repl/scala/tools/nsc/interpreter/Completion.scala index 6f5194d2f9a7..fa937d3067d7 100644 --- a/src/repl/scala/tools/nsc/interpreter/Completion.scala +++ b/src/repl/scala/tools/nsc/interpreter/Completion.scala @@ -21,7 +21,7 @@ object NoCompletion extends Completion { } object Completion { - case class Candidates(cursor: Int, candidates: List[String]) { } + case class Candidates(cursor: Int, candidates: List[String]) val NoCandidates = Candidates(-1, Nil) // a leading dot plus something, but not ".." or "./", ignoring leading whitespace diff --git a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala index afbec0768ddf..fb2a1d54fafe 100644 --- a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala +++ b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala @@ -86,7 +86,7 @@ trait LoopCommands { self: { def echo(msg: String): Unit } => } def ambiguousError(cmd: String): Result = { matchingCommands(cmd) match { - case Nil => echo(cmd + ": no such command. Type :help for help.") + case Nil => echo(s"No such command '$cmd'. Type :help for help.") case xs => echo(cmd + " is ambiguous: did you mean " + xs.map(":" + _.name).mkString(" or ") + "?") } Result(keepRunning = true, None) @@ -95,7 +95,7 @@ trait LoopCommands { self: { def echo(msg: String): Unit } => // all commands with given prefix private def matchingCommands(cmd: String) = commands.filter(_.name.startsWith(cmd.stripPrefix(":"))) - // extract command from partial name, or prefer exact match if multiple matches + // extract unique command from partial name, or prefer exact match if multiple matches private object CommandMatch { def unapply(name: String): Option[LoopCommand] = matchingCommands(name) match { @@ -108,6 +108,7 @@ trait LoopCommands { self: { def echo(msg: String): Unit } => // extract command name and rest of line private val commandish = """(\S+)(?:\s+)?(.*)""".r + // expect line includes leading colon def colonCommand(line: String): Result = line.trim match { case "" => helpSummary() case commandish(CommandMatch(cmd), rest) => cmd(rest) @@ -117,21 +118,30 @@ trait LoopCommands { self: { def echo(msg: String): Unit } => import Completion.Candidates - def colonCompletion(line: String, cursor: Int): Completion = line.trim match { - case commandish(name @ CommandMatch(cmd), rest) => - if (name.length > cmd.name.length) cmd.completion - else - new Completion { - def resetVerbosity(): Unit = () - def complete(buffer: String, cursor: Int) = Candidates(cursor - name.length + 1, List(cmd.name)) + def colonCompletion(line: String, cursor: Int): Completion = + line match { + case commandish(name0, rest) => + val name = name0 take cursor + val cmds = matchingCommands(name) + val cursorAtName = cursor <= name.length + cmds match { + case Nil => NoCompletion + case cmd :: Nil if !cursorAtName => cmd.completion + case cmd :: Nil if cmd.name == name => NoCompletion + case cmd :: Nil => + val completion = if (cmd.isInstanceOf[NullaryCmd] || cursor < line.length) cmd.name else cmd.name + " " + new Completion { + def resetVerbosity(): Unit = () + def complete(buffer: String, cursor: Int) = Candidates(cursor = 1, List(completion)) + } + case cmd :: rest => + new Completion { + def resetVerbosity(): Unit = () + def complete(buffer: 
String, cursor: Int) = Candidates(cursor = 1, cmds.map(_.name)) + } } - case commandish(name, _) if matchingCommands(name).nonEmpty => - new Completion { - def resetVerbosity(): Unit = () - def complete(buffer: String, cursor: Int) = Candidates(cursor - name.length + 1, matchingCommands(name).map(_.name)) - } - case _ => NoCompletion - } + case _ => NoCompletion + } class NullaryCmd(name: String, help: String, detailedHelp: Option[String], f: String => Result) extends LoopCommand(name, help, detailedHelp) { From a36b7383d30d9771ff20230dff944ea116a31785 Mon Sep 17 00:00:00 2001 From: jvican Date: Wed, 21 Feb 2018 18:01:07 +0100 Subject: [PATCH 1248/2793] Don't add original attachment for constant folded trees Scalac seems to run twice constant-folding in certain scenarios. Consider the following example: ```scala object A { final val x = 1 } object B { def main(args: Array[String]) = assert(args(0).toInt == A.x ) } ``` The tree `A.x` in `main` is constant folded because `x` is a constant, but for some reason the resulting tree `Literal(Constant(1))` get constant-folded again. The previous logic would add the original tree attachment on the constant-folded tree, and would destroy the previous annotation that gets copied by `treeCopy.Literal`. Therefore, to avoid this issue, we don't update the original attachment if there already exists one. In the future, it would be worthwhile to figure out why scalac tries to constant-fold twice the same tree. --- .../scala/tools/nsc/typechecker/Typers.scala | 8 ++++-- test/junit/scala/tools/nsc/SampleTest.scala | 16 ----------- .../tools/nsc/typechecker/TypedTreeTest.scala | 27 +++++++++++++++++++ 3 files changed, 33 insertions(+), 18 deletions(-) delete mode 100644 test/junit/scala/tools/nsc/SampleTest.scala create mode 100644 test/junit/scala/tools/nsc/typechecker/TypedTreeTest.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 08e5d73dfbce..8d1fd4ba14cc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -994,8 +994,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val sym = tree.symbol if (sym != null && sym.isDeprecated) context.deprecationWarning(tree.pos, sym) - // Keep the original tree in an annotation to avoid losing tree information for plugins - treeCopy.Literal(tree, value).updateAttachment(OriginalTreeAttachment(original)) + tree match { + case Literal(`value`) => tree + case _ => + // If the original tree is not a literal, make it available to plugins in an attachment + treeCopy.Literal(tree, value).updateAttachment(OriginalTreeAttachment(tree)) + } } // Ignore type errors raised in later phases that are due to mismatching types with existential skolems diff --git a/test/junit/scala/tools/nsc/SampleTest.scala b/test/junit/scala/tools/nsc/SampleTest.scala deleted file mode 100644 index 60bb09e98f02..000000000000 --- a/test/junit/scala/tools/nsc/SampleTest.scala +++ /dev/null @@ -1,16 +0,0 @@ -package scala.tools.nsc - -import org.junit.Assert._ -import org.junit.Test -import org.junit.runner.RunWith -import org.junit.runners.JUnit4 - -/** Sample JUnit test that shows that all pieces - of JUnit infrastructure work correctly */ -@RunWith(classOf[JUnit4]) -class SampleTest { - @Test - def testMath: Unit = { - assertTrue("you didn't get the math right fellow", 2 + 2 == 4) - } -} diff --git a/test/junit/scala/tools/nsc/typechecker/TypedTreeTest.scala 
b/test/junit/scala/tools/nsc/typechecker/TypedTreeTest.scala new file mode 100644 index 000000000000..3baae9a85d61 --- /dev/null +++ b/test/junit/scala/tools/nsc/typechecker/TypedTreeTest.scala @@ -0,0 +1,27 @@ +package scala.tools.nsc.typechecker + +import org.junit.Assert.assertEquals +import org.junit.Test + +import scala.tools.testing.BytecodeTesting + +class TypedTreeTest extends BytecodeTesting { + override def compilerArgs = "-Ystop-after:typer" + + @Test + def constantFoldedOriginalTreeAttachment(): Unit = { + val code = + """object O { + | final val x = 42 + | def f(x: Int) = x + | def f(x: Boolean) = x + | f(O.x) + |} + """.stripMargin + val run = compiler.newRun + run.compileSources(List(BytecodeTesting.makeSourceFile(code, "UnitTestSource.scala"))) + val tree = run.units.next().body + val List(t) = tree.filter(_.attachments.all.nonEmpty).toList + assertEquals(s"$t:${t.attachments.all}", "42:Set(OriginalTreeAttachment(O.x))") + } +} From aac690c6fb8bb364374b1eb0fa5c03bcf49f4c0e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 7 Mar 2018 08:52:32 +1000 Subject: [PATCH 1249/2793] Adapt to changes in clean task in SBT 0.13.17+ We need to customize the new `cleanFilesTask`, rather than the settings `cleanFiles`. --- build.sbt | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/build.sbt b/build.sbt index 0d3925c961dc..319f60fef1ad 100644 --- a/build.sbt +++ b/build.sbt @@ -155,6 +155,25 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings + // to make sure they are being cleaned properly cleanFiles += (classDirectory in Compile).value, cleanFiles += (target in Compile in doc).value, + // SBT 0.13.17+ doesn't seem to respect `cleanFiles` anymore: https://github.com/sbt/sbt/pull/3834/files#r172686677 + // Let's override `cleanFilesTask`. + cleanFilesTask := { + val filesAndDirs = (Vector(managedDirectory.value, target.value) ++ cleanFiles.value).distinct + + // START: Copy/pasted from SBT + val preserve = cleanKeepFiles.value + val (dirs, fs) = filesAndDirs.filter(_.exists).partition(_.isDirectory) + val preserveSet = preserve.filter(_.exists).toSet + // performance reasons, only the direct items under `filesAndDirs` are allowed to be preserved. + val dirItems = dirs flatMap { _.*("*").get } + (preserveSet diff dirItems.toSet) match { + case xs if xs.isEmpty => () + case xs => sys.error(s"cleanKeepFiles contains directory/file that are not directly under cleanFiles: $xs") + } + val toClean = (dirItems filterNot { preserveSet(_) }) ++ fs + toClean + // END: Copy/pasted from SBT + }, fork in run := true, scalacOptions in Compile in doc ++= Seq( "-doc-footer", "epfl", From 41e376a265398cc1e218c471a47f66ce9cfbc268 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 2 Mar 2018 15:28:47 +0100 Subject: [PATCH 1250/2793] Switch to Travis CI for building releases Both nightly and full releases migrate to Travis. PR validation remains on our Jenkins cluster for now. Main difference is that we don't use our artifactory as a cache, nor do we need to wipe stuff, since Travis gives us a fresh machine. We do cache the ivy/sbt cache. TODO: decide whether to publish a "mergely" (a nightly for each merge, or skip this script on merge and use a scheduled job for publishing the nightly.) Note: we don't use `travis encrypt-file` because it nukes the iv/key variables on each invocation.. 
too much magic Instead, I did: ``` cat /dev/urandom | head -c 10000 | openssl sha1 > ./secret openssl aes-256-cbc -pass "file:./secret" -in gpg_subkey -out admin/files/gpg_subkey.enc travis encrypt "GPG_SUBKEY_SECRET=$(cat ./secret)" ``` --- .travis.yml | 33 ++++++++++++++--- admin/files/credentials-private-repo | 4 +++ admin/files/credentials-sonatype | 4 +++ admin/files/gpg.sbt | 1 + admin/files/gpg_subkey.enc | Bin 0 -> 7328 bytes admin/files/m2-settings.xml | 31 ++++++++++++++++ admin/files/sonatype-curl | 1 + admin/init.sh | 30 ++++++++++++++++ project/ScriptCommands.scala | 10 ++++-- scripts/common | 27 +++++++++++--- scripts/jobs/integrate/bootstrap | 51 +++++++++++++++++---------- 11 files changed, 161 insertions(+), 31 deletions(-) create mode 100644 admin/files/credentials-private-repo create mode 100644 admin/files/credentials-sonatype create mode 100644 admin/files/gpg.sbt create mode 100644 admin/files/gpg_subkey.enc create mode 100644 admin/files/m2-settings.xml create mode 100644 admin/files/sonatype-curl create mode 100755 admin/init.sh diff --git a/.travis.yml b/.travis.yml index 923ffaf44cdf..76aa5f7968ca 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,19 +1,37 @@ -# opt-in to Travis's newer/faster container-based infrastructure -sudo: false +sudo: required # GCE VMs have better performance (will be upgrading to premium VMs soon) # this builds the spec using jekyll # based on http://www.paperplanes.de/2013/8/13/deploying-your-jekyll-blog-to-s3-with-travis-ci.html -language: ruby + +language: scala +jdk: openjdk8 + +# the spec is built with jekyll rvm: - 2.2 -script: bundle exec jekyll build -s spec/ -d build/spec + +cache: + directories: + - $HOME/.ivy2/cache + - $HOME/.sbt + +script: + - (cd admin && ./init.sh) + - scripts/jobs/integrate/bootstrap + - bundle exec jekyll build -s spec/ -d build/spec + install: bundle install # cat /dev/urandom | head -c 10000 | openssl sha1 > ./secret # openssl aes-256-cbc -pass "file:./secret" -in id_dsa_spec212_b4096 -out spec/id_dsa_travis.enc -a # travis encrypt "PRIV_KEY_SECRET=`cat ./secret`" env: - - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0TNVaPIXjF8bA8KyNat9xGNzhnWm2/2BMaWpKBJWRF7Jb+zHhijMYCJEbkMtoiE5R/mY=" + global: + - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0TNVaPIXjF8bA8KyNat9xGNzhnWm2/2BMaWpKBJWRF7Jb+zHhijMYCJEbkMtoiE5R/mY=" + - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS + - secure: "feE5A8mYNpkNQKVwCj3aXrwjVrJWh/4ENpRfFlr2HOD9ORk1GORD5Yq907WZd+dTkYK54Lh1gA+qHOCIDgJHbi9ZLU+kjzEjtYKF6lQy6Wb0LI8smTOnAA6IWVVYifiXw8d66MI2MKZb2jjGeIzy8Q00SZjLhEGjLyTeCIB88Ws=" # SONA_USER + - secure: "ek3As5q2tL8UBXcxSBbv4v5YgsoPD41SCzPOSu72kzfbngyxgQxrcziU5pIM+Lib9KaWex7hVVWNL38tMyDbu+0OpDv8bPjMujzlDx5I2pJUfuOJo7QRYsJE1nsXcY4cA72cCLfbRcLEkvtDAhcdLSaUOqlyQe5BY4X4fY5eoPA=" # SONA_PASS + - secure: "dbAvl6KEuLwZ0MVQPZihFsPzCdiLbX0EFk3so+hcfEbksrmLQ1tn4X5ZM7Wy1UDR8uN9lxngEwHch7a7lKqpugzmXMew9Wnikr9WBWbJT77Z+XJ/jHI6YuiCRpRo+nvxXGp9Ry80tSIgx5eju0J83IaJL41BWlBkvyAd7YAHORI=" # GPG_SUBKEY_SECRET # ^^^ set PRIV_KEY_SECRET to password used to encrypt spec/id_dsa_travis.enc @@ -23,3 +41,8 @@ after_success: # using S3 would be simpler, but we want to upload to scala-lang.org # after_success: bundle exec s3_website push --headless + +before_cache: + # Cleanup 
the cached directories to avoid unnecessary cache updates + - find $HOME/.ivy2/cache -name "ivydata-*.properties" -print -delete + - find $HOME/.sbt -name "*.lock" -print -delete diff --git a/admin/files/credentials-private-repo b/admin/files/credentials-private-repo new file mode 100644 index 000000000000..ea665bb6b3f3 --- /dev/null +++ b/admin/files/credentials-private-repo @@ -0,0 +1,4 @@ +realm=Artifactory Realm +host=scala-ci.typesafe.com +user=scala-ci +password=${PRIVATE_REPO_PASS} \ No newline at end of file diff --git a/admin/files/credentials-sonatype b/admin/files/credentials-sonatype new file mode 100644 index 000000000000..906466c4054d --- /dev/null +++ b/admin/files/credentials-sonatype @@ -0,0 +1,4 @@ +realm=Sonatype Nexus Repository Manager +host=oss.sonatype.org +user=${SONA_USER} +password=${SONA_PASS} diff --git a/admin/files/gpg.sbt b/admin/files/gpg.sbt new file mode 100644 index 000000000000..2efcc4b691e8 --- /dev/null +++ b/admin/files/gpg.sbt @@ -0,0 +1 @@ +addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.0") diff --git a/admin/files/gpg_subkey.enc b/admin/files/gpg_subkey.enc new file mode 100644 index 0000000000000000000000000000000000000000..de7e4ea4f40246cea920785f9b7a3c869b02e317 GIT binary patch literal 7328 zcmV;R9AD#8VQh3|WM5ydc&=++@<4HWK1?dG?Gajsu`p>+6#-Nx60jQAj8Vr+x%A!& zPDGh+%<0?TMNX!xFIoAy&SeoWeymTk%>h(3H~fIx;urknsV*5F+lIDiuH1b>F~QKG zcVDtKQ!CG~)GC_yYIvb+U{c=@SH^){&sfV;!23oiRjTDU0lRYaTp8W|uPax3cXKJO zE24kg2<^T^}v44sJ#=kkW7-hEjo;6{LQ_E1on=;rS37z>H`-`3dH+m!=NqAtenJzH5~ zYE1yCd$_AVMVhQCqb~&k<@T9#`YmzLQ_-8h>Zyt#~C;~P^*3&9v?@qjRN09O_ z$u3!y?@WBVcL+5L@?}zU0tL;myy<|MXsy4WG6h^T&MK$*-#0iJ;G@etoL_a zE;q;03k|(UKoM74Aku-NNhb+WzIMWn?bNbq>Y}U{2X(eWI+~)aPY*JJJagQv*X}AN z{h-A@0k@}`0GL*++5fzS*tyo+j5a-lO2}O2QOw3sH=-smv2uex>?Z5pIdk%V#;WU? zJYQaeb%H?}B8Q6yjU5hTU|^dH7V6Q9HP1yn)^1Buv{Y%3K$59cZ@k#O-J33+=mz2U z&bxvR9Ltq;_9oIX=AIvjh)kdNJ%A@#A)(#;2qXRM-i-g3)4(b{HI0~jeI`Njgj8#L zWp3rHhr*9U1Z57(uU;?fRZHVN4cB>gDO{^H?D>AM4MaJH;IPa(2-}8O8cQvjTQL9p zne9vH%TAZ8pL``~zoDR8YDSj4IU&~Xo@CIovFBZFX*wS1UDcZ*guE-0?cjWFyHJSUim(G4TdR43|*e&@?J8-?Z=APp4g< zriutI`xkyg)O#23DNsKlNoy~sPqsZUVo~j_asuNI|DZ(j<<`GlF@=i+O@DY5@p>!o zp507$c6&p)KvR+QG>HMBD|ixKWqjPZ5OE;?Pl{JoOK)~lGQ}dgXG>Z#OfhjdeYH_4 zU`gEkpErfy7Yic<)VGw2=loPiIjQbciculrmxHbbR=7?ckc~bzj2}qhT8W$h9xvg! 
z*zhFwS>~}<>SVK+M1eaH>oyfT3#~B|-4CS_LPN5MR83?K8_BCKVkF6fxa0D;I!`VU zxc3q9=mgqRJX6T~PWv>h+7gEnyBv#P=u1 zv%IMD$57$%^5Jo;tflCK9!;Td#-kVuCdQdD@@LS48z5sz`tclIwtrs&@C`Qnb%rqv zHayO9cu142Z_!oh`H248r~HLR80eWVs)9duUR-p}mlb*o zt90&bgg&SA*$-OWfj>k-C_RTp&7TY zFG&83qU|hPk}_`s+XOLp&zZ`elBcc6<80%tO?H!$)&Zsb-KuP}JH*fDa3_x`@v{gn zy+{N;L@>dX+S%8rY1*@Nn1)@Vw~>l!!r`^F9k|0w?XY77+d3*i{Txf|hbvPAz5 zkPAP8@_4DeLbWdEiVn{LO~&siPKa#Kj$NbsHq(-DjS9R_;I^K__ z>D^I7_8sROsntvJK9g_%1dxLxVKejVZhTwYDF;m>XeoE`aCm>ljDCi_klyM&EOS34Nyglm!z`j06d{#*$995b?T-# z^9`X=A!FlUL*{;o!3k^iv%}(7sp;=Tro`|Avk_1pm{i+s`%Ht`isD4*y)NsZ!CHF# zam=`dOF*8xyTs6FD?DZ5yK^#ZGyV$B5%xb`Xv{HnZR-{Cx*d=r%N74hl4y8*j#H!O zWx_)i8ri?znP$_(>B$5Eul%B z-R(#)t=Y2xG~&TbmpT2~0uUeG#u1z$LddjVRrSyhyxmf6HRnK7?wI%gUM7kHe#L?I z7tOr!rLjj?jAvwsWw%^3QB9y5TolJhb5a z=H9xgOw)=IW#ihZXOjB|2I~3;H*FE@u35#xGPk|^ZW?mjha=vKgk=%WN2tr_1kob4 zn#l1F=SvuEddGS?*Xe1?G=DJMY^_PWmp8Piaom47suEx#GWhbfguTtPiRhy5a0zLE z=hBB0E~d^$n7EYaD|!V1e#c5(aq%$)L!fZ2lm!dhP7YE9#Zo2{LXpYyB`-XbLpcY+ z4)Y*3JkSim6y#M|uq=&aV=srLI)WxitnA>}G}@g)j>+)CO%&t-lRsy{zYmW$upB4Q z#%{a}@(iEz1f0Y$x{CBXjlV>1@Hn%W#KGMERy{3xv*lDWc32T&5}Re*0t3(ZRSLo= ziVIePK1VV)6|+TYe3b%F1ED$4`PHpcUGk@L-^tApBip`lI$~1qucTDTC3Pi$mic_x zd%g>MVkpJmx2dwy1mtAmWY@_|SN=&To1dYwB-MdEw27O8#9sQ~=Y)i_Fb+J>b$_9J z|CQ+=+OSa@b!)3~fC{D{F;;3KYX7KULQ!aIUbS9hnR@nAmVp~sIOH^;Ig{;#Ha(tu^ zP21kRW9CR!?&5McM&-T9C+MayEz}2!=?qQzNHSa0>nyVO@CH*BktTGn5V<&G zlKkk^28@8@hQ=8^J4nPr9AE2e`YPZZBywn<@4FL8W~2J3U}RNcByT=_gB9B6V6#DA zPhP_I1xo?*zy;51Sh#SBsip-_T`s4>Se5vSCEkGf_KN(dJRxIA)x(olA{yTm$kNVz_^t*?m+_xatlF|I?7~w#af}oMNfC*n34>*7T}s*d^6_s0mwGBB}y1&OUgRn zn`StlCZ`7*+J9}0jdxlU{CIK2B0aU&0_0we_a*X*GjLY9PD1?9l9Om)UuEucY&Q=W z?!h!kzmJGjkSMy#vkVz-r^Pg3BY`~?z3b?dyruiv?>ZbIicNX3mmx099a{evzkKsE zwxIS;vz(HAF_d*cPTX;jWt48J3St&9jsgb>iqb}4Nnkj9O<^1?Ybs+l8#k0t4a5Cl z_!Ne;9$cIiYen7VGe(rcJS~Y@i^9@Bv|5lB9Hs4W~_bpYq`NBVL70X9p~s>pF_A{lEmKrf9L9!4L?a2 zmN)-SRq1sJvJr-Yn~^X*&>{gW-*%%*=RlJ!j}VD<#)V!u|4-HKHQBw%0L^) zyKOyDNuFU87Q8mg28acSe%+#O(E*-Z@n54B2v{gvEd5bk`gh})7=d>913QZt;4XQb zX*f&fJ;>LAhspxgD2s0jQ<+t)*kF9=G1nGZ6O*uzMnWdz6}0D7KJ?hagbc;~GYoI2 zsYaOtsXjJXl-BTGZwRbyBXLIDC~6qPqjnRahWi?4aW2ZnpU+@^7R$p^!j%)jLIL59 zX$6Z9hBAJ%U;4H|WeDm>qZ<>e2Ll{%jS z0ber_E9joA?CKT7o@dy+`@MXWF|&cwu6Eb==W`y=>2^D~^F`Yl?F@B&GZDJlzXq(? 
zNAN_|FTCIB=T9WtY+o}$>+%2P^VvPFopH=NY{%A=k+$~{S(2xVfkN=6JBH`x zo*z+A&RyQEbd1;1QV?C1aiu;4NQi!WglfO6{y~f<90D%A=sT;X7%Ka6c)nd4C-k9( z&Y)s$6u++Ei*ncwo!Zh@R}=p^m^RnGH(hfOBp0H6|83{vjSvJJ)qS4?Z1PToFd(WFORPoW_`nPY`hN3s#pZhB#=v98~Q%FSrZ zY`#Y?6eb0$jsnLU!S+Xbhz8{v;g7C2`RAOOCITl-TIgf{+Z)pDAX*%<`~6fD-#qh( z!J0g}mIu=*Eb>>$16aGtrTmgCg`A%jZ^n#E4( zt;9VCbP}_5E5)u!S6DuxeKiWun3a#ta)L7ezbv&J84j=OpxzDiuJD5^9EZ0V{DjJt zxqj6rkH&92ZEVT#Ka5>Fcq?r`q4}HjZPYNbks)9zojhJkQre#{2bwEWbzB*xqIX1S zGj_dO0B_G%K;%&J7xdghGaLx@KpdIx+4}yE^DcU%nNW}J-WhSyN6Yqnh*qsyyt!W% zY3$f1x`v_H@{W#nhVMa7bFvkg5o-ABR(3*tL_wZjYpZYXfVH=HF30ePXu`Pk|9x3< zA&77FiFL&KT7-Ole`hQJJO&lba=LwYD&Hu$^cbUDK+v*(=uj-m(pbh`T!{E!n_~rG z-$O&uLuCN__XWaXE`Spkm3p;rQi?EdM>op7D zWkBn|`U-Z>V7Uxg|Nn5a=u&t<#Ir4BCQ1wUv1%a$4YbQJ<E%9dnqjQX0@TtW4mm2rt$y*?zls zmUPMf95mWm2DJ3N!(iBj_PtK7(8G_$V3;L8Nfj|zV7I6jumP4N<+mIJV+!PVY3g%<4`-@jbtc=qM`f_DbuL0KMvwi_Xh zDcyqUksDhv)dR6iAvHrH%npB(lIynZ5{VG(Np>J9Rh436~RgMNeWeVbw1(3}*#!|=bZolR|E zIAa>>eg`!5Si;OQ_o22#k47%^WvGMxN#+oq-Ri%hyYgW%?Nf-gofe5Q5t=I3t^SD&lT(Nu zExi#_n-XFoinTpnGgdpdR7tYlP93vhUZ;y|91OJpf50ql6BoD>>2GP?@7_u5A!Fg1 z#cO*vBJr;vJNd|M@Gt&WeP)d$jV#UC@!3yDib~%k7@KdyJ&orj#{PA1gc+R@GeCOf z&oKWk=acHg+(V<=T{mxz54h7-Z{1$hF#BORtW0kh(vi(Ho%Zf13>{UJ(IbT&X%*Pv z)7%I43=6#-=q4Dv;HI0BC`eY7^beX!XxOYTdhX|Oti;A-&7fQuX1x^4b6BS2;>teZ^HEEBi z{G@j6=KAc^M|I=xWnsvg*3Ag3`Oe!Mpv(hAe)Y`!+3{iIC~4WBX`PSJ_2B8oBoI@^ z!4YFv;HQf>>F3VIP+ff>Db6NKn(OJfrIDa)fg613z&E)Qc=^y3uj-#SK9`Sd}mf_T`zw_Q*c;e)DcID0ut#Qm){rFGvNVJc%&x?aXGY zJ-*^XN37Z1br--poD3JQn_6IJ7$STHpG0JqDp}V^wD(7mS1YI5_jYQwHp%lEjOa80 zWRue;4?fT9{&l;(I|4_&ly6w8EbqS5(Qx(G7 zhhc`(;|eV}(oIMs@b;5=DWCTP9dITOU#T}8vz>ii6| z#AgmjK1fi@dPvvEkGUp{a{gErn?E#R5+Rn(p|-Ub$J!T2bGW;wW8x3zB)45(fdh72 zgM{?a=6Xv2r0^B8TP~;UnV9UOV$#8wC$%Su&~`;!@Wv4l`_(iHSkd6W=M_yA8YhpV zRa~4%jv;G^Aj@@L?sZA+_k{Fzj8C#z74zm%(6lAWUlV*k5_8C(@~JL&F{@!d zPW4J0_K2GCHJVv3<4C;YyIorvs=LO&fo=l$MAxD0i|NkN*1gP`Wl5CM!h{|r&nHpR z*$r_t#Y`hk-Eb>t0`VzJ)(%2y@arZXQ(%aRLD_b*9!?PcKp + + + + sonatype-nexus + ${SONA_USER} + ${SONA_PASS} + + + private-repo + scala-ci + ${PRIVATE_REPO_PASS} + + + + + + + codehaus-snapshots-mirror + Maven Codehaus snapshot repository + file:///codehaus-does-not-exist-anymore + codehaus-snapshots + + + diff --git a/admin/files/sonatype-curl b/admin/files/sonatype-curl new file mode 100644 index 000000000000..47f5e8c4cdd0 --- /dev/null +++ b/admin/files/sonatype-curl @@ -0,0 +1 @@ +user = ${SONA_USER}:${SONA_PASS} \ No newline at end of file diff --git a/admin/init.sh b/admin/init.sh new file mode 100755 index 000000000000..06f2b182e3cb --- /dev/null +++ b/admin/init.sh @@ -0,0 +1,30 @@ +#!/bin/bash + + +sensitive() { + perl -p -e 's/\$\{([^}]+)\}/defined $ENV{$1} ? $ENV{$1} : $&/eg' < files/credentials-private-repo > ~/.credentials-private-repo + perl -p -e 's/\$\{([^}]+)\}/defined $ENV{$1} ? $ENV{$1} : $&/eg' < files/credentials-sonatype > ~/.credentials-sonatype + perl -p -e 's/\$\{([^}]+)\}/defined $ENV{$1} ? $ENV{$1} : $&/eg' < files/sonatype-curl > ~/.sonatype-curl + # perl -p -e 's/\$\{([^}]+)\}/defined $ENV{$1} ? $ENV{$1} : $&/eg' < files/m2-settings.xml > ~/.m2/settings.xml -- not needed anymore (used for ide integration?) + + openssl aes-256-cbc -d -pass "pass:$GPG_SUBKEY_SECRET" -in files/gpg_subkey.enc | gpg --import +} + +# directories needed by sensitive part +# mkdir -p ~/.m2 -- not needed anymore (used for ide integration?) +mkdir -p ~/.ssh + +# don't let anything escape from the sensitive part (e.g. leak environment var by echoing to log on failure) +sensitive >/dev/null 2>&1 + +# pgp signing doesn't work without public key?? 
+gpg --keyserver pgp.mit.edu --recv-keys 0xa9052b1b6d92e560 + +# just to verify +gpg --list-keys +gpg --list-secret-keys + +mkdir -p ~/.sbt/0.13/plugins +cp files/gpg.sbt ~/.sbt/0.13/plugins/ + +export SBT_CMD=$(which sbt) diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index d15edc3f6786..f8644e74cfd5 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -6,6 +6,8 @@ import BuildSettings.autoImport._ /** Custom commands for use by the Jenkins scripts. This keeps the surface area and call syntax small. */ object ScriptCommands { + def env(key: String) = Option(System.getenv(key)).getOrElse("") + def all = Seq( setupPublishCore, setupValidateTest, @@ -80,7 +82,7 @@ object ScriptCommands { baseVersionSuffix in Global := "SPLIT", resolvers in Global += "scala-pr" at url, publishTo in Global := Some("sonatype-releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2"), - credentials in Global += Credentials(Path.userHome / ".credentials-sonatype"), + credentials in Global += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", env("SONA_USER"), env("SONA_PASS")), pgpPassphrase in Global := Some(Array.empty) ) ++ enableOptimizer } @@ -114,7 +116,11 @@ object ScriptCommands { private[this] def publishTarget(url: String) = { // Append build.timestamp to Artifactory URL to get consistent build numbers (see https://github.com/sbt/sbt/issues/2088): val url2 = if(url.startsWith("file:")) url else url.replaceAll("/$", "") + ";build.timestamp=" + System.currentTimeMillis - Seq(publishTo in Global := Some("scala-pr-publish" at url2)) + + Seq( + publishTo in Global := Some("scala-pr-publish" at url2), + credentials in Global += Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", env("PRIVATE_REPO_PASS")) + ) } /** Like `Def.sequential` but accumulate all results */ diff --git a/scripts/common b/scripts/common index 316d8ed5a0fb..d8903a7d8b05 100644 --- a/scripts/common +++ b/scripts/common @@ -159,7 +159,6 @@ EOF # Takes a variable number of additional repositories as argument. 
# See http://www.scala-sbt.org/0.13/docs/Proxy-Repositories.html function generateRepositoriesConfig() { - jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"} sbtRepositoryConfig="$scriptsDir/sbt-repositories-config" echo > "$sbtRepositoryConfig" '[repositories]' if [[ $# -gt 0 ]]; then @@ -167,11 +166,29 @@ function generateRepositoriesConfig() { echo >> "$sbtRepositoryConfig" " script-repo-$i: ${!i}" done fi + + if [ "${TRAVIS}" != "true" ]; then + jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"} + echo "jcenter-cache: $jcenterCacheUrl" >> "$sbtRepositoryConfig" + fi + cat >> "$sbtRepositoryConfig" << EOF - jcenter-cache: $jcenterCacheUrl - typesafe-ivy-releases: https://repo.lightbend.com/typesafe/ivy-releases/, [organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly - sbt-plugin-releases: https://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext] - maven-central local + maven-central + typesafe-ivy-releases-boot: https://repo.lightbend.com/typesafe/ivy-releases/, [organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly + typesafe-ivy-releases: https://dl.bintray.com/typesafe/ivy-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext] + sbt-plugin-releases: https://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/, [organisation]/[module]/(scala_[scalaVersion]/)(sbt_[sbtVersion]/)[revision]/[type]s/[artifact](-[classifier]).[ext] EOF } + + +# https://github.com/travis-ci/docs-travis-ci-com/issues/949 +travis_fold_start() { + echo "" + echo -e "travis_fold:start:$1\033[33;1m$2\033[0m" +} + +travis_fold_end() { + echo -e "\ntravis_fold:end:$1\r" + echo "" +} diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index fd7f720a9457..0f41dd3939b7 100755 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -77,13 +77,14 @@ publishSonatypeTaskCore=${publishSonatypeTaskCore-"publishSigned"} publishSonatypeTaskModules=${publishSonatypeTaskModules-"publishSigned"} forceRebuild=${forceRebuild-no} - sbtBuildTask=${sbtBuildTask-"testAll"} # TESTING leave empty to avoid the sanity check testStability=${testStability-yes} clean="clean" # TESTING leave empty to speed up testing -baseDir=${WORKSPACE-`pwd`} +WORKSPACE=${WORKSPACE-`pwd`} +baseDir=${WORKSPACE} + scriptsDir="$baseDir/scripts" . $scriptsDir/common @@ -99,7 +100,9 @@ mkdir -p $baseDir/resolutionScratch_ # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} -generateRepositoriesConfig $integrationRepoUrl +if [ "${TRAVIS}" != "true" ]; then + generateRepositoriesConfig $integrationRepoUrl +fi # ARGH trying to get this to work on multiple versions of sbt-extras... 
# the old version (on jenkins, and I don't want to upgrade for risk of breaking other builds) honors -sbt-dir @@ -152,8 +155,9 @@ function st_stagingRepoClose() { #### sbt tools sbtBuild() { - echo "### sbtBuild: "$SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" - $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" >> $baseDir/logs/builds 2>&1 + travis_fold_start build "Building $(basename $PWD) with $@" + $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" + travis_fold_end build } sbtResolve() { @@ -161,10 +165,12 @@ sbtResolve() { touch build.sbt # Can be set to `full` if a module requires cross-versioning against the full Scala version, like the continuations plugin used to. cross=${4-binary} - echo "### sbtResolve: $SBT_CMD -no-colors $sbtArgs " "${scalaVersionTasks[@]}" "\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" + # echo "### sbtResolve: $SBT_CMD -no-colors $sbtArgs " "${scalaVersionTasks[@]}" "\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" + travis_fold_start resolve "Resolving \"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" \ "set libraryDependencies := Seq(\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross)" \ - 'show update' >> $baseDir/logs/resolution 2>&1 + 'show update' + travis_fold_end resolve } # Oh boy... can't use scaladoc to document scala-xml if scaladoc depends on the same version of scala-xml. @@ -229,7 +235,7 @@ buildScalaCheck(){ # build modules, using ${buildTasks[@]} (except for ScalaCheck, which is hard-coded to publish to artifactory) buildModules() { - publishTasks=('set credentials += Credentials(Path.userHome / ".credentials-private-repo")' "set every publishTo := Some(\"publish-repo\" at \"$integrationRepoUrl\")") + publishTasks=('set credentials += Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", System.getenv("PRIVATE_REPO_PASS"))' "set every publishTo := Some(\"publish-repo\" at \"$integrationRepoUrl\")") buildTasks=($publishPrivateTask) buildXML # buildScalaCheck @@ -237,7 +243,7 @@ buildModules() { } buildPublishedModules() { - publishTasks=('set credentials += Credentials(Path.userHome / ".credentials-sonatype")' "set pgpPassphrase := Some(Array.empty)") + publishTasks=('set credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", System.getenv("SONA_USER"), System.getenv("SONA_PASS"))' "set pgpPassphrase := Some(Array.empty)") buildTasks=($publishSonatypeTaskModules) buildXML buildPartest @@ -283,7 +289,9 @@ determineScalaVersion() { if [ -z "$SCALA_VER_BASE" ]; then echo "No SCALA_VER_BASE specified." + travis_fold_start determineScalaVersion "Determining Scala version" $SBT_CMD $sbtArgs 'set baseVersionSuffix in Global := "SHA"' generateBuildCharacterPropertiesFile + travis_fold_end determineScalaVersion parseScalaProperties "buildcharacter.properties" SCALA_VER_BASE="$maven_version_base" SCALA_VER_SUFFIX="$maven_version_suffix" @@ -376,8 +384,6 @@ bootstrap() { #### (Optional) STARR. if [ ! 
-z "$STARR_REF" ]; then - echo "### Building STARR" - STARR_DIR=./scala-starr STARR_VER_SUFFIX="-$(git rev-parse --short $STARR_REF)-starr" STARR_VER=$SCALA_VER_BASE$STARR_VER_SUFFIX @@ -386,21 +392,24 @@ bootstrap() { git clone --reference $WORKSPACE/.git $WORKSPACE/.git $STARR_DIR cd $STARR_DIR git co $STARR_REF - $SBT_CMD -no-colors $sbtArgs --warn "setupBootstrapStarr $integrationRepoUrl $STARR_VER" $clean publish >> $baseDir/logs/builds 2>&1 + travis_fold_start starr "Building starr" + $SBT_CMD -no-colors $sbtArgs --warn "setupBootstrapStarr $integrationRepoUrl $STARR_VER" $clean publish + travis_fold_end starr ) fi #### LOCKER - echo "### Building locker" - # for bootstrapping, publish core (or at least smallest subset we can get away with) # so that we can build modules with this version of Scala and publish them locally # must publish under $SCALA_VER so that the modules will depend on this (binary) version of Scala # publish more than just core: partest needs scalap # in sabbus lingo, the resulting Scala build will be used as starr to build the released Scala compiler if [ ! -z "$STARR_VER" ]; then SET_STARR=-Dstarr.version=$STARR_VER; fi - $SBT_CMD -no-colors $sbtArgs $SET_STARR --warn "setupBootstrapLocker $integrationRepoUrl $SCALA_VER" $clean publish >> $baseDir/logs/builds 2>&1 + + travis_fold_start locker "Building locker" + $SBT_CMD -no-colors $sbtArgs $SET_STARR --warn "setupBootstrapLocker $integrationRepoUrl $SCALA_VER" $clean publish + travis_fold_end locker echo "### Building modules using locker" @@ -425,6 +434,7 @@ bootstrap() { cd $baseDir rm -rf build/ + travis_fold_start quick "Building bootstrapped" $SBT_CMD $sbtArgs \ --warn \ -Dstarr.version=$SCALA_VER \ @@ -434,6 +444,7 @@ bootstrap() { $sbtBuildTask \ dist/mkQuick \ publish + travis_fold_end quick # clear ivy cache (and to be sure, local as well), so the next round of sbt builds sees the fresh scala rm -rf $baseDir/ivy2 @@ -443,8 +454,7 @@ bootstrap() { } testStability() { - echo "### Testing stability" - + travis_fold_start stab "Testing stability" cd $baseDir # Run stability tests using the just built version as "quick" and a new version as "strap" @@ -460,6 +470,8 @@ testStability() { mv build/quick build/strap mv quick1 build/quick $scriptsDir/stability-test.sh + + travis_fold_end stab } # assumes we just bootstrapped, and current directory is $baseDir @@ -469,15 +481,16 @@ testStability() { publishSonatype() { # stage to sonatype, along with all modules -Dmaven.version.suffix/-Dbuild.release not necessary, # since we're just publishing an existing build - echo "### Publishing core to sonatype" + travis_fold_start sona "Publishing core to sonatype" $SBT_CMD $sbtArgs \ --warn \ -Dstarr.version=$SCALA_VER \ ${updatedModuleVersions[@]} \ "setupBootstrapPublish $integrationRepoUrl $SCALA_VER" \ $publishSonatypeTaskCore + travis_fold_end sona - echo "### Publishing modules to sonatype" + # echo "### Publishing modules to sonatype" # build/test/publish scala core modules to sonatype (this will start a new staging repo) # (was hoping we could make everything go to the same staging repo, but it's not timing that causes two staging repos to be opened) # NOTE: only publish those for which versions are set From fc9a95b0a0013ee627f1ae56ae9e3ee6ed429ade Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Tue, 6 Mar 2018 10:34:33 +0000 Subject: [PATCH 1251/2793] Define testAll's task labels inline This avoids them going out of sync. (They were, by the way: "scalacheck/test" was missing.) 
--- build.sbt | 44 +++++++++++++++----------------------------- 1 file changed, 15 insertions(+), 29 deletions(-) diff --git a/build.sbt b/build.sbt index 3adcfc9b4d57..85753669f8c3 100644 --- a/build.sbt +++ b/build.sbt @@ -817,41 +817,27 @@ lazy val root: Project = (project in file(".")) state }, testAll := { - val results = ScriptCommands.sequence[Result[Unit]](List( - (Keys.test in Test in junit).result, - (Keys.test in Test in scalacheck).result, - (testOnly in IntegrationTest in testP).toTask(" -- run").result, - (testOnly in IntegrationTest in testP).toTask(" -- pos neg jvm").result, - (testOnly in IntegrationTest in testP).toTask(" -- res scalap specialized").result, - (testOnly in IntegrationTest in testP).toTask(" -- instrumented presentation").result, - (testOnly in IntegrationTest in testP).toTask(" -- --srcpath scaladoc").result, - (Keys.test in Test in osgiTestFelix).result, - (Keys.test in Test in osgiTestEclipse).result, - (mimaReportBinaryIssues in library).result, - (mimaReportBinaryIssues in reflect).result, + val results = ScriptCommands.sequence[(Result[Unit], String)](List( + (Keys.test in Test in junit).result map (_ -> "junit/test"), + (Keys.test in Test in scalacheck).result map (_ -> "scalacheck/test"), + (testOnly in IntegrationTest in testP).toTask(" -- run").result map (_ -> "partest run"), + (testOnly in IntegrationTest in testP).toTask(" -- pos neg jvm").result map (_ -> "partest pos neg jvm"), + (testOnly in IntegrationTest in testP).toTask(" -- res scalap specialized").result map (_ -> "partest res scalap specialized"), + (testOnly in IntegrationTest in testP).toTask(" -- instrumented presentation").result map (_ -> "partest instrumented presentation"), + (testOnly in IntegrationTest in testP).toTask(" -- --srcpath scaladoc").result map (_ -> "partest --srcpath scaladoc"), + (Keys.test in Test in osgiTestFelix).result map (_ -> "osgiTestFelix/test"), + (Keys.test in Test in osgiTestEclipse).result map (_ -> "osgiTestEclipse/test"), + (mimaReportBinaryIssues in library).result map (_ -> "library/mimaReportBinaryIssues"), + (mimaReportBinaryIssues in reflect).result map (_ -> "reflect/mimaReportBinaryIssues"), Def.task(()).dependsOn( // Run these in parallel: doc in Compile in library, doc in Compile in reflect, doc in Compile in compiler, doc in Compile in scalap - ).result + ).result map (_ -> "doc") )).value - // All attempts to define these together with the actual tasks due to the applicative rewriting of `.value` - val descriptions = Vector( - "junit/test", - "partest run", - "partest pos neg jvm", - "partest res scalap specialized", - "partest instrumented presentation", - "partest --srcpath scaladoc", - "osgiTestFelix/test", - "osgiTestEclipse/test", - "library/mimaReportBinaryIssues", - "reflect/mimaReportBinaryIssues", - "doc" - ) - val failed = results.map(_.toEither).zip(descriptions).collect { case (Left(i: Incomplete), d) => (i, d) } - if(failed.nonEmpty) { + val failed = results.collect { case (Inc(i), d) => (i, d) } + if (failed.nonEmpty) { val log = streams.value.log def showScopedKey(k: Def.ScopedKey[_]): String = Vector( From 4d5ce808047c58a16058a0629311cb2bdd2dfe4b Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Wed, 7 Mar 2018 11:05:04 +0100 Subject: [PATCH 1252/2793] Pull structure of bootstrap script into .travis.yml,... 
so that: - we can use build stages - we get a nicer experience with build logs - we can easily see / tweak the overall flow with env vars Currently, we are skipping the run tests (take too long), and stability (broken until Stefan's PR is merged?). To publish a build, trigger a custom build with a before_script that set these env variables: - `SCALA_VER_BASE` - `SCALA_VER_SUFFIX` - `publishToSonatype` (the yaml looks at this to skip the step, so it can't be set by our scripts I think) (TODO: let's just set `SCALA_VER`) --- .travis.yml | 70 ++++- build.sbt | 22 ++ project/ScriptCommands.scala | 2 +- scripts/bootstrap_fun | 356 ++++++++++++++++++++++ scripts/common | 94 +++++- scripts/jobs/integrate/bootstrap | 455 ++--------------------------- scripts/jobs/integrate/ide | 10 +- scripts/jobs/integrate/windows | 4 +- scripts/jobs/validate/publish-core | 4 +- scripts/jobs/validate/test | 4 +- 10 files changed, 554 insertions(+), 467 deletions(-) create mode 100644 scripts/bootstrap_fun diff --git a/.travis.yml b/.travis.yml index 76aa5f7968ca..4c5517cfae6f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,26 +1,73 @@ sudo: required # GCE VMs have better performance (will be upgrading to premium VMs soon) -# this builds the spec using jekyll -# based on http://www.paperplanes.de/2013/8/13/deploying-your-jekyll-blog-to-s3-with-travis-ci.html - language: scala jdk: openjdk8 -# the spec is built with jekyll -rvm: - - 2.2 cache: directories: - $HOME/.ivy2/cache - $HOME/.sbt + - build/ + -script: +before_script: - (cd admin && ./init.sh) - - scripts/jobs/integrate/bootstrap - - bundle exec jekyll build -s spec/ -d build/spec -install: bundle install +# buildQuick needs following env (is that propagated to stages?) +# - PRIVATE_REPO_PASS, integrationRepoUrl, +# computed: SBT_CMD sbtArgs SCALA_VER updatedModuleVersions +jobs: + include: + - stage: build + script: + - source scripts/common + - source scripts/bootstrap_fun + - mkFreshIvy + - determineScalaVersion + - deriveModuleVersions + - removeExistingBuilds $integrationRepoUrl + - if [ ! -z "$STARR_REF" ]; then buildStarr; fi + - buildLocker + - rm -rf build/ # ensure we resolve from artifactory + - buildModules + - buildQuick clean publish + - echo 'declare -a updatedModuleVersions' > build/env + - echo 'export SCALA_VER="'${SCALA_VER}'" updatedModuleVersions="'${updatedModuleVersions}'"' >> build/env + - cat build/env + + # this builds the spec using jekyll + # based on http://www.paperplanes.de/2013/8/13/deploying-your-jekyll-blog-to-s3-with-travis-ci.html + - stage: build + script: bundle exec jekyll build -s spec/ -d build/spec + rvm: 2.2 + install: bundle install + # the key is restricted using forced commands so that it can only upload to the directory we need here + after_success: ./scripts/travis-publish-spec.sh + + # be careful to not set any env vars, as this will result in a cache miss + - &test + stage: test + before_script: + - source build/env + - if [ -z "$SCALA_VER" ]; then echo "Environment not propagated. 
Caching issue?"; cat build/env ; exit 1; fi + - source scripts/common + - source scripts/bootstrap_fun + - mkFreshIvy + - find build -type f -exec touch {} + # give antStyle a chance + script: buildQuick "set antStyle := true" testRest # shouldn't rebuild, since build/ is cached + - <<: *test + script: buildQuick "set antStyle := true" testPosPres + - <<: *test + script: buildQuick "set antStyle := true" testRun + if: env(testRun) = yes + + - script: testStability + if: env(testStability) = yes + + - stage: publish + script: publishSonatype + if: env(publishToSonatype) = yes # TODO: is this environment variable evaluated afer `source scripts/common` has a chance to set it? maybe it's ok and we can just keep this as the hook for manually triggering a release # cat /dev/urandom | head -c 10000 | openssl sha1 > ./secret # openssl aes-256-cbc -pass "file:./secret" -in id_dsa_spec212_b4096 -out spec/id_dsa_travis.enc -a @@ -35,9 +82,6 @@ env: # ^^^ set PRIV_KEY_SECRET to password used to encrypt spec/id_dsa_travis.enc -# the key is restricted using forced commands so that it can only upload to the directory we need here -after_success: - - ./scripts/travis-publish-spec.sh # using S3 would be simpler, but we want to upload to scala-lang.org # after_success: bundle exec s3_website push --headless diff --git a/build.sbt b/build.sbt index 3adcfc9b4d57..bc5c5bf8f3a8 100644 --- a/build.sbt +++ b/build.sbt @@ -816,6 +816,24 @@ lazy val root: Project = (project in file(".")) GenerateAnyVals.run(dir.getAbsoluteFile) state }, + + testRun := (testOnly in IntegrationTest in testP).toTask(" -- run").result.value, + + testPosPres := (testOnly in IntegrationTest in testP).toTask(" -- pos presentation").result.value, + + testRest := ScriptCommands.sequence[Result[Unit]](List( + (mimaReportBinaryIssues in library).result, + (mimaReportBinaryIssues in reflect).result, + (Keys.test in Test in junit).result, + (Keys.test in Test in scalacheck).result, + (testOnly in IntegrationTest in testP).toTask(" -- neg jvm").result, + (testOnly in IntegrationTest in testP).toTask(" -- res scalap specialized").result, + (testOnly in IntegrationTest in testP).toTask(" -- instrumented").result, + (testOnly in IntegrationTest in testP).toTask(" -- --srcpath scaladoc").result, + (Keys.test in Test in osgiTestFelix).result, + (Keys.test in Test in osgiTestEclipse).result)).value, + + // all of testRun, testPosPres, testRest testAll := { val results = ScriptCommands.sequence[Result[Unit]](List( (Keys.test in Test in junit).result, @@ -968,6 +986,10 @@ lazy val mkQuick = taskKey[File]("Generate a full build, including scripts, in b lazy val mkPack = taskKey[File]("Generate a full build, including scripts, in build/pack") lazy val testAll = taskKey[Unit]("Run all test tasks sequentially") +lazy val testRun = taskKey[Unit]("Run compute intensive test tasks sequentially") +lazy val testPosPres = taskKey[Unit]("Run compilation test (pos + presentation) sequentially") +lazy val testRest = taskKey[Unit]("Run the remaining test tasks sequentially") + // Defining these settings is somewhat redundant as we also redefine settings that depend on them. // However, IntelliJ's project import works better when these are set correctly. 
def clearSourceAndResourceDirectories = Seq(Compile, Test).flatMap(config => inConfig(config)(Seq( diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index f8644e74cfd5..4e85d3b95525 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -67,7 +67,7 @@ object ScriptCommands { baseVersionSuffix in Global := "SPLIT", resolvers in Global += "scala-pr" at url, testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) - ) ++ publishTarget(url) ++ enableOptimizer + ) ++ publishTarget(url) ++ noDocs ++ enableOptimizer } /** Set up the environment for publishing in `validate/bootstrap`. The arguments are: diff --git a/scripts/bootstrap_fun b/scripts/bootstrap_fun new file mode 100644 index 000000000000..137aa5e32bee --- /dev/null +++ b/scripts/bootstrap_fun @@ -0,0 +1,356 @@ +publishPrivateTask=${publishPrivateTask-"publish"} +publishSonatypeTaskCore=${publishSonatypeTaskCore-"publishSigned"} +publishSonatypeTaskModules=${publishSonatypeTaskModules-"publishSigned"} + +forceRebuild=${forceRebuild-no} +# testStability=${testStability-yes} +testStability=no # currently borker by ant PR? +clean="clean" # TESTING leave empty to speed up testing (on jenkins/locally; on travis it's a fresh machine every time) + +stApi="https://oss.sonatype.org/service/local" + + +# Oh boy... can't use scaladoc to document scala-xml if scaladoc depends on the same version of scala-xml. +# Even if that version is available through the project's resolvers, sbt won't look past this project. +# SOOOOO, we set the version to a dummy (-DOC), generate documentation, +# then set the version to the right one and publish (which won't re-gen the docs). +# Also tried publish-local without docs using 'set publishArtifact in (Compile, packageDoc) := false' and republishing, no dice. + +# Each buildModule() function is invoked twice: first to build against locker and publish to artifactory, then +# to build against the release and publish to sonatype (or publish-local if publishToSonatype is not "yes"). +# In the second round, sbtResolve is always true: the module will be found in the artifactory! +# Therefore, if MODULE_BUILT is "yes" (in the second round), we know that we need to build (and publish) the +# module again. +# +# Note: we tried an alternative solution in which sbtResolve would not look at artifactory, but that fails. For example, +# scala-xml depends on scala-library, so sbt tries to find the scala-library of the version that we are currently building, +# which exists only in artifactory. + +docTask() { + if [[ "$STARR_REF" != "" && "$1" != "yes" ]]; then + # Don't build module docs on the first round of module builds when bootstrapping + # a binary incompatible compiler change to avoid linkage errors with using the old Scaladoc + echo set publishArtifact in packageDoc in Compile := false + else + echo doc + fi +} + +buildXML() { + if [ "$XML_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-xml" $XML_VER ) + then echo "Found scala-xml $XML_VER; not building." 
+ else + update scala scala-xml "$XML_REF" && gfxd + doc="$(docTask $XML_BUILT)" + sbtBuild 'set version := "'$XML_VER'-DOC"' $clean "$doc" 'set version := "'$XML_VER'"' test "${buildTasks[@]}" + XML_BUILT="yes" # ensure the module is built and published when buildXML is invoked for the second time, see comment above + fi +} + +buildPartest() { + if [ "$PARTEST_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-partest" $PARTEST_VER ) + then echo "Found scala-partest $PARTEST_VER; not building." + else + update scala scala-partest "$PARTEST_REF" && gfxd + doc="$(docTask $PARTEST_BUILT)" + sbtBuild 'set version :="'$PARTEST_VER'"' 'set VersionKeys.scalaXmlVersion := "'$XML_VER'"' $clean "$doc" test "${buildTasks[@]}" + PARTEST_BUILT="yes" + fi +} + +# should only be called with publishTasks publishing to artifactory +buildScalaCheck(){ + if [ "$SCALACHECK_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scalacheck" "scalacheck" $SCALACHECK_VER ) + then echo "Found scalacheck $SCALACHECK_VER; not building." + else + update rickynils scalacheck $SCALACHECK_REF && gfxd + doc="$(docTask $SCALACHECK_BUILT)" + sbtBuild 'set version := "'$SCALACHECK_VER'"' 'set VersionKeys.scalaParserCombinatorsVersion := "'$PARSERS_VER'"' $clean "$doc" publish # test times out NOTE: never published to sonatype + SCALACHECK_BUILT="yes" + fi +} + +# build modules, using ${buildTasks[@]} (except for ScalaCheck, which is hard-coded to publish to artifactory) +# publish to our internal repo (so we can resolve the modules in the scala build below) +# we only need to build the modules necessary to build Scala itself +# since the version of locker and quick are the same +buildModules() { + echo "### Building modules using locker" + + publishTasks=('set credentials += Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", System.getenv("PRIVATE_REPO_PASS"))' "set every publishTo := Some(\"publish-repo\" at \"$integrationRepoUrl\")") + buildTasks=($publishPrivateTask) + buildXML + # buildScalaCheck + buildPartest + + constructUpdatedModuleVersions +} + +# build/test/publish scala core modules to sonatype (this will start a new staging repo) +# (was hoping we could make everything go to the same staging repo, but it's not timing that causes two staging repos to be opened) +# NOTE: only publish those for which versions are set +# test and publish to sonatype, assuming you have ~/.sbt/0.13/sonatype.sbt and ~/.sbt/0.13/plugin/gpg.sbt +buildPublishedModules() { + echo "### Publishing modules to sonatype" + + publishTasks=('set credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", System.getenv("SONA_USER"), System.getenv("SONA_PASS"))' "set pgpPassphrase := Some(Array.empty)") + buildTasks=($publishSonatypeTaskModules) + buildXML + buildPartest +} + + +## BUILD STEPS: +# TODO: can we reuse some caching? can we stop generating a repositories config, +# since this is duplicated from sbt and may thus get out of synch... +mkFreshIvy() { + # we must change ivy home to get a fresh ivy cache, otherwise we get half-bootstrapped scala + # rm it in case it existed (and there's no ivy2-shadow, which indicates we're running in a TESTING environment)... 
+ # we don't nuke the whole ws since that clobbers the git clones needlessly + [[ -d $WORKSPACE/ivy2-shadow ]] || rm -rf $WORKSPACE/ivy2 + mkdir -p $WORKSPACE/ivy2 + + rm -rf $WORKSPACE/resolutionScratch_ + mkdir -p $WORKSPACE/resolutionScratch_ + + generateRepositoriesConfig $integrationRepoUrl +} + +scalaVerToBinary() { + # $1 = SCALA_VER + # $2 = SCALA_VER_BASE + # $3 = SCALA_VER_SUFFIX + + local RE='\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)' + local majMin="$(echo $2 | sed -e "s#$RE#\1.\2#")" + local patch="$(echo $2 | sed -e "s#$RE#\3#")" + + # The binary version is majMin (e.g. "2.12") if + # - there's no suffix : 2.12.0, 2.12.1 + # - the suffix starts with "-bin" : 2.12.1-bin-sha, 2.12.1-bin-sha-custom, 2.12.1-bin-SNAPSHOT + # - the suffix is \w+ and patch version is > 0: 2.12.1-M1, 2.12.1-RC2 (also 2.12.1-sha, 2.12.1-SNAPSHOT, which we don't use) + # + # Otherwise, the binary version is the full version: 2.12.0-M1, 2.12.0-RC2, 2.12.0-pre-sha, 2.12.0-pre-SNAPSHOT + # (also 2.12.0-sha, 2.12.0-SNAPSHOT, which we don't use) + # + # Adapted from sbt: https://github.com/sbt/sbt/blob/v0.13.13/util/cross/src/main/input_sources/CrossVersionUtil.scala#L42 + # + # During the pre-release cycle of a major release (e.g. before 2.12.0), the SCALA_BINARY_VER of integration / SNAPSHOT + # versions is the full version, e.g. 2.12.0-pre-sha, so modules are always re-built. + + if [[ "$3" == "" || "${3:0:4}" == "-bin" || ("$patch" != "0" && "$3" =~ ^-[a-zA-Z0-9_]+$) ]]; then + echo "$majMin" + else + echo "$1" + fi +} + +determineScalaVersion() { + cd $WORKSPACE + parseScalaProperties "versions.properties" + + # each of the branches below defines the following vars: SCALA_VER_BASE, SCALA_VER_SUFFIX, publishToSonatype + if [ -z "$SCALA_VER_BASE" ]; then + echo "No SCALA_VER_BASE specified." + + travis_fold_start determineScalaVersion "Determining Scala version" + $SBT_CMD $sbtArgs 'set baseVersionSuffix in Global := "SHA"' generateBuildCharacterPropertiesFile + travis_fold_end determineScalaVersion + parseScalaProperties "buildcharacter.properties" + SCALA_VER_BASE="$maven_version_base" + SCALA_VER_SUFFIX="$maven_version_suffix" + publishToSonatype="no" + else + publishToSonatype=${publishToSonatype-"yes"} # unless forced previously, publish + fi + + SCALA_VER="$SCALA_VER_BASE$SCALA_VER_SUFFIX" + SCALA_BINARY_VER=$(scalaVerToBinary $SCALA_VER $SCALA_VER_BASE $SCALA_VER_SUFFIX) + + echo "version=$SCALA_VER" >> $WORKSPACE/jenkins.properties + echo "sbtDistVersionOverride=-Dproject.version=$SCALA_VER" >> $WORKSPACE/jenkins.properties + + scalaVersionTasks=('set every scalaVersion := "'$SCALA_VER'"') + + echo "Building Scala $SCALA_VER." 
+} + +# determineScalaVersion must have been called (versions.properties is parsed to env vars) +deriveModuleVersions() { + XML_VER=${XML_VER-$scala_xml_version_number} + PARTEST_VER=${PARTEST_VER-$partest_version_number} + SCALACHECK_VER=${SCALACHECK_VER-$scalacheck_version_number} + + XML_REF="v$XML_VER" + PARTEST_REF="v$PARTEST_VER" + SCALACHECK_REF="$SCALACHECK_VER" # no `v` in their tags + + echo "PARTEST = $PARTEST_VER at $PARTEST_REF" + # echo "SCALACHECK = $SCALACHECK_VER at $SCALACHECK_REF" + echo "XML = $XML_VER at $XML_REF" + +} + +createNetrcFile() { + local netrcFile=$HOME/`basename $1`-netrc + grep 'host=' $1 | sed 's/host=\(.*\)/machine \1/' > $netrcFile + grep 'user=' $1 | sed 's/user=\(.*\)/login \1/' >> $netrcFile + grep 'password=' $1 | sed 's/password=\(.*\)/password \1/' >> $netrcFile +} + +# deletes existing artifacts (core and modules) matching the $SCALA_VER from the repository passed as argument +removeExistingBuilds() { + local repoUrl=$1 + local repoPrefix="https://scala-ci.typesafe.com/artifactory/" + if [[ $repoUrl == "$repoPrefix"* ]]; then + local repoId=${1#$repoPrefix} + local storageApiUrl="${repoPrefix}api/storage/$repoId" + + createNetrcFile "$HOME/.credentials-private-repo" + local netrcFile="$HOME/.credentials-private-repo-netrc" + + # "module" is not a scala module (like scala-xml), but an artifact of a boostrap build. the variable + # contains: "org/scala-lang/modules", "org/scala-lang/scala-compiler", "org/scala-lang/scala-library", ... + local scalaLangModules=`curl -s $storageApiUrl/org/scala-lang | jq -r '.children | .[] | "org/scala-lang" + .uri' | grep -v actors-migration` + + for module in $scalaLangModules; do + local artifacts=`curl -s $storageApiUrl/$module | jq -r ".children | .[] | select(.uri | endswith(\"$SCALA_VER\")) | .uri"` + for artifact in $artifacts; do + echo "Deleting $repoUrl$module$artifact" + curl -s --netrc-file $netrcFile -X DELETE $repoUrl$module$artifact + done + done + else + echo "Unknown repo, not deleting anything: $repoUrl" + fi +} + +constructUpdatedModuleVersions() { + updatedModuleVersions=() + + # force the new module versions for building the core. these may be different from the values in versions.properties + # if the variables (XML_VER) were provided. in the common case, the values are the same as in versions.properties. + updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-xml.version.number=$XML_VER") + updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dpartest.version.number=$PARTEST_VER") + # updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscalacheck.version.number=$SCALACHECK_VER") + + # allow overriding the jline version using a jenkins build parameter + if [ ! -z "$JLINE_VER" ] ; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Djline.version=$JLINE_VER"); fi + + if [ ! -z "$SCALA_BINARY_VER" ]; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala.binary.version=$SCALA_BINARY_VER"); fi +} + +# build locker (scala + modules) and quick, publishing everything to artifactory + +#### (Optional) STARR. 
+buildStarr() { + cd $WORKSPACE + + STARR_DIR=./scala-starr + STARR_VER_SUFFIX="-$(git rev-parse --short $STARR_REF)-starr" + STARR_VER=$SCALA_VER_BASE$STARR_VER_SUFFIX + rm -rf "$STARR_DIR" + ( + git clone --reference $WORKSPACE/.git $WORKSPACE/.git $STARR_DIR + cd $STARR_DIR + git co $STARR_REF + travis_fold_start starr "Building starr" + $SBT_CMD -no-colors $sbtArgs --warn "setupBootstrapStarr $integrationRepoUrl $STARR_VER" $clean publish + travis_fold_end starr + ) +} + +#### LOCKER +# for bootstrapping, publish core (or at least smallest subset we can get away with) +# so that we can build modules with this version of Scala and publish them locally +# must publish under $SCALA_VER so that the modules will depend on this (binary) version of Scala +# publish more than just core: partest needs scalap +# in sabbus lingo, the resulting Scala build will be used as starr to build the released Scala compiler +buildLocker() { + cd $WORKSPACE + + if [ ! -z "$STARR_VER" ]; then SET_STARR=-Dstarr.version=$STARR_VER; fi + + travis_fold_start locker "Building locker" + $SBT_CMD -no-colors $sbtArgs $SET_STARR --warn "setupBootstrapLocker $integrationRepoUrl $SCALA_VER" $clean publish + travis_fold_end locker +} + +#### QUICK +buildQuick() { + cd $WORKSPACE + + # # TODO: close all open staging repos so that we can be reasonably sure the only open one we see after publishing below is ours + # # the sbt call will create a new one + # + # Rebuild Scala with these modules so that all binary versions are consistent. + # Update versions.properties to new modules. + # Sanity check: make sure the Scala test suite passes / docs can be generated with these modules. + + travis_fold_start quick "Building bootstrapped" + $SBT_CMD $sbtArgs \ + --warn \ + -Dstarr.version=$SCALA_VER \ + ${updatedModuleVersions[@]} \ + "setupBootstrapQuick $integrationRepoUrl $SCALA_VER" \ + "$@" + travis_fold_end quick +} + +wipeIvyCache() { + # clear ivy cache (and to be sure, local as well), so the next round of sbt builds sees the fresh scala + rm -rf $WORKSPACE/ivy2 + + # TODO: create PR with following commit (note that release will have been tagged already) + # git commit versions.properties -m"Bump versions.properties for $SCALA_VER." 
+} + +testStability() { + travis_fold_start stab "Testing stability" + cd $WORKSPACE + + # Run stability tests using the just built version as "quick" and a new version as "strap" + mv build/quick quick1 + rm -rf build/ + $SBT_CMD $sbtArgs \ + --warn \ + -Dstarr.version=$SCALA_VER \ + ${updatedModuleVersions[@]} \ + "setupBootstrapQuick $integrationRepoUrl $SCALA_VER" \ + $clean \ + library/compile reflect/compile compiler/compile + mv build/quick build/strap + mv quick1 build/quick + scripts/stability-test.sh + + travis_fold_end stab +} + +# assumes we just bootstrapped, and current directory is $WORKSPACE +# publishes locker to sonatype, then builds modules again (those for which version numbers were provided), +# and publishes those to sonatype as well +# finally, the staging repos are closed +publishSonatype() { + # stage to sonatype, along with all modules -Dmaven.version.suffix/-Dbuild.release not necessary, + # since we're just publishing an existing build + travis_fold_start sona "Publishing core to sonatype" + $SBT_CMD $sbtArgs \ + --warn \ + -Dstarr.version=$SCALA_VER \ + ${updatedModuleVersions[@]} \ + "setupBootstrapPublish $integrationRepoUrl $SCALA_VER" \ + $publishSonatypeTaskCore + travis_fold_end sona + + buildPublishedModules + + open=$(st_stagingReposOpen) + allOpenUrls=$(echo $open | jq '.repositoryURI' | tr -d \") + allOpen=$(echo $open | jq '.repositoryId' | tr -d \") + + echo "Closing open repos: $allOpen" + + for repo in $allOpen; do st_stagingRepoClose $repo; done + + echo "Closed sonatype staging repos: $allOpenUrls." +} diff --git a/scripts/common b/scripts/common index d8903a7d8b05..83b39c7b9656 100644 --- a/scripts/common +++ b/scripts/common @@ -4,6 +4,11 @@ trap "exit 1" TERM export TOP_PID=$$ set -e +WORKSPACE="${WORKSPACE-`pwd`}" + +# the default (home dir) is fine on Travis, since each jobs gets its own worker (ivy cache is cached by travis) +IVY_HOME="${IVY_HOME-$HOME/.ivy2}" + # Known problems : does not fare well with interrupted, partial # compilations. We should perhaps have a multi-dependency version # of do_i_have below @@ -11,16 +16,26 @@ set -e LOGGINGDIR="$WORKSPACE/logs" mkdir -p $LOGGINGDIR -unset SBT_HOME -SBT_HOME="$WORKSPACE/.sbt" -mkdir -p $SBT_HOME -IVY_CACHE="$WORKSPACE/.ivy2" -mkdir -p $IVY_CACHE -rm -rf $IVY_CACHE/cache/org.scala-lang +# unset SBT_HOME +# SBT_HOME="$WORKSPACE/.sbt" +# mkdir -p $SBT_HOME +# IVY_CACHE="$WORKSPACE/.ivy2" +# mkdir -p $IVY_CACHE + +# TODO: do we need to nuke the cache on travis? +# rm -rf $IVY_CACHE/cache/org.scala-lang SBT_CMD=${sbtCmd-sbt} SBT_CMD="$SBT_CMD -sbt-version 0.13.17" +# repo to publish builds +integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} + +sbtRepositoryConfig="$WORKSPACE/scripts/sbt-repositories-config" + +sbtArgs="-Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig $sbtArgs" # allow supplying more args + + # temp dir where all 'non-build' operation are performed TMP_ROOT_DIR=$(mktemp -d -t pr-scala.XXXX) TMP_DIR="${TMP_ROOT_DIR}/tmp" @@ -67,12 +82,12 @@ function debug () { } function parseScalaProperties(){ - propFile="$baseDir/$1" + propFile="$WORKSPACE/$1" if [ ! -f $propFile ]; then echo "Property file $propFile not found." exit 1 else - awk -f "$scriptsDir/readproperties.awk" "$propFile" > "$propFile.sh" + awk -f "scripts/readproperties.awk" "$propFile" > "$propFile.sh" . 
"$propFile.sh" # yeah yeah, not that secure, improvements welcome (I tried, but bash made me cry again) fi } @@ -159,7 +174,6 @@ EOF # Takes a variable number of additional repositories as argument. # See http://www.scala-sbt.org/0.13/docs/Proxy-Repositories.html function generateRepositoriesConfig() { - sbtRepositoryConfig="$scriptsDir/sbt-repositories-config" echo > "$sbtRepositoryConfig" '[repositories]' if [[ $# -gt 0 ]]; then for i in $(seq 1 $#); do @@ -192,3 +206,65 @@ travis_fold_end() { echo -e "\ntravis_fold:end:$1\r" echo "" } + + +##### git +gfxd() { + git clean -fxd # TESTING +} + +update() { + [[ -d $WORKSPACE ]] || mkdir -p $WORKSPACE + cd $WORKSPACE + + if [ ! -d $WORKSPACE/$2 ]; then git clone "https://github.com/$1/$2.git"; fi + + cd $2 + + git fetch --tags "https://github.com/$1/$2.git" + (git fetch "https://github.com/$1/$2.git" $3 && git checkout -fq FETCH_HEAD) #|| git checkout -fq $3 # || fallback is for local testing on tag + git reset --hard +} + +##### sonatype interface + +st_curl(){ + curl -H "Content-Type: application/json" -H "accept: application/json,application/vnd.siesta-error-v1+json,application/vnd.siesta-validation-errors-v1+json" -K ~/.sonatype-curl -s -o - $@ +} + +st_stagingReposOpen() { + st_curl "$stApi/staging/profile_repositories" | jq '.data[] | select(.profileName == "org.scala-lang") | select(.type == "open")' +} + +st_stagingRepoDrop() { + repo=$1 + message=$2 + echo "{\"data\":{\"description\":\"$message\",\"stagedRepositoryIds\":[\"$repo\"]}}" | st_curl -X POST -d @- "$stApi/staging/bulk/drop" +} + +st_stagingRepoClose() { + repo=$1 + message=$2 + echo "{\"data\":{\"description\":\"$message\",\"stagedRepositoryIds\":[\"$repo\"]}}" | st_curl -X POST -d @- "$stApi/staging/bulk/close" +} + +#### sbt tools + +sbtBuild() { + travis_fold_start build "Building $(basename $PWD) with $@" + $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" + travis_fold_end build +} + +sbtResolve() { + cd $WORKSPACE/resolutionScratch_ + touch build.sbt + # Can be set to `full` if a module requires cross-versioning against the full Scala version, like the continuations plugin used to. + cross=${4-binary} + # echo "### sbtResolve: $SBT_CMD -no-colors $sbtArgs " "${scalaVersionTasks[@]}" "\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" + travis_fold_start resolve "Resolving \"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" + $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" \ + "set libraryDependencies := Seq(\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross)" \ + 'show update' + travis_fold_end resolve +} diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index 0f41dd3939b7..c655b0b5ea6d 100755 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -71,453 +71,48 @@ # Note: private-repo used to be private-repo.typesafe.com. now we're running artifactory on scala-ci.typesafe.com/artifactory +#### MAIN -publishPrivateTask=${publishPrivateTask-"publish"} -publishSonatypeTaskCore=${publishSonatypeTaskCore-"publishSigned"} -publishSonatypeTaskModules=${publishSonatypeTaskModules-"publishSigned"} - -forceRebuild=${forceRebuild-no} -sbtBuildTask=${sbtBuildTask-"testAll"} # TESTING leave empty to avoid the sanity check -testStability=${testStability-yes} - -clean="clean" # TESTING leave empty to speed up testing - -WORKSPACE=${WORKSPACE-`pwd`} -baseDir=${WORKSPACE} - -scriptsDir="$baseDir/scripts" -. 
$scriptsDir/common - -# we must change ivy home to get a fresh ivy cache, otherwise we get half-bootstrapped scala -# rm it in case it existed (and there's no ivy2-shadow, which indicates we're running in a TESTING environment)... -# we don't nuke the whole ws since that clobbers the git clones needlessly -[[ -d $baseDir/ivy2-shadow ]] || rm -rf $baseDir/ivy2 -mkdir -p $baseDir/ivy2 - -rm -rf $baseDir/resolutionScratch_ -mkdir -p $baseDir/resolutionScratch_ - -# repo to publish builds -integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} - -if [ "${TRAVIS}" != "true" ]; then - generateRepositoriesConfig $integrationRepoUrl -fi - -# ARGH trying to get this to work on multiple versions of sbt-extras... +# TODO: this is weird for historical reasons, simplify now that we have one version of sbt in use +# we probably don't need to override the sbt dir? just ivy +# +# (WAS: trying to get this to work on multiple versions of sbt-extras... # the old version (on jenkins, and I don't want to upgrade for risk of breaking other builds) honors -sbt-dir # the new version of sbt-extras ignores sbt-dir, so we pass it in as -Dsbt.global.base -# need to set sbt-dir to one that has the gpg.sbt plugin config -sbtArgs="-ivy $baseDir/ivy2 -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13" - -##### git -gfxd() { - git clean -fxd # TESTING -} - -update() { - [[ -d $baseDir ]] || mkdir -p $baseDir - cd $baseDir - - if [ ! -d $baseDir/$2 ]; then git clone "https://github.com/$1/$2.git"; fi - - cd $2 - - git fetch --tags "https://github.com/$1/$2.git" - (git fetch "https://github.com/$1/$2.git" $3 && git checkout -fq FETCH_HEAD) #|| git checkout -fq $3 # || fallback is for local testing on tag - git reset --hard -} - -##### sonatype interface - -stApi="https://oss.sonatype.org/service/local" - -function st_curl(){ - curl -H "Content-Type: application/json" -H "accept: application/json,application/vnd.siesta-error-v1+json,application/vnd.siesta-validation-errors-v1+json" -K ~/.sonatype-curl -s -o - $@ -} - -function st_stagingReposOpen() { - st_curl "$stApi/staging/profile_repositories" | jq '.data[] | select(.profileName == "org.scala-lang") | select(.type == "open")' -} - -function st_stagingRepoDrop() { - repo=$1 - message=$2 - echo "{\"data\":{\"description\":\"$message\",\"stagedRepositoryIds\":[\"$repo\"]}}" | st_curl -X POST -d @- "$stApi/staging/bulk/drop" -} - -function st_stagingRepoClose() { - repo=$1 - message=$2 - echo "{\"data\":{\"description\":\"$message\",\"stagedRepositoryIds\":[\"$repo\"]}}" | st_curl -X POST -d @- "$stApi/staging/bulk/close" -} - -#### sbt tools - -sbtBuild() { - travis_fold_start build "Building $(basename $PWD) with $@" - $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" - travis_fold_end build -} - -sbtResolve() { - cd $baseDir/resolutionScratch_ - touch build.sbt - # Can be set to `full` if a module requires cross-versioning against the full Scala version, like the continuations plugin used to. 
- cross=${4-binary} - # echo "### sbtResolve: $SBT_CMD -no-colors $sbtArgs " "${scalaVersionTasks[@]}" "\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" - travis_fold_start resolve "Resolving \"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" - $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" \ - "set libraryDependencies := Seq(\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross)" \ - 'show update' - travis_fold_end resolve -} - -# Oh boy... can't use scaladoc to document scala-xml if scaladoc depends on the same version of scala-xml. -# Even if that version is available through the project's resolvers, sbt won't look past this project. -# SOOOOO, we set the version to a dummy (-DOC), generate documentation, -# then set the version to the right one and publish (which won't re-gen the docs). -# Also tried publish-local without docs using 'set publishArtifact in (Compile, packageDoc) := false' and republishing, no dice. - -# Each buildModule() function is invoked twice: first to build against locker and publish to artifactory, then -# to build against the release and publish to sonatype (or publish-local if publishToSonatype is not "yes"). -# In the second round, sbtResolve is always true: the module will be found in the artifactory! -# Therefore, if MODULE_BUILT is "yes" (in the second round), we know that we need to build (and publish) the -# module again. +# need to set sbt-dir to one that has the gpg.sbt plugin config) # -# Note: we tried an alternative solution in which sbtResolve would not look at artifactory, but that fails. For example, -# scala-xml depends on scala-library, so sbt tries to find the scala-library of the version that we are currently building, -# which exists only in artifactory. - -docTask() { - if [[ "$STARR_REF" != "" && "$1" != "yes" ]]; then - # Don't build module docs on the first round of module builds when bootstrapping - # a binary incompatible compiler change to avoid linkage errors with using the old Scaladoc - echo set publishArtifact in packageDoc in Compile := false - else - echo doc - fi -} - -buildXML() { - if [ "$XML_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-xml" $XML_VER ) - then echo "Found scala-xml $XML_VER; not building." - else - update scala scala-xml "$XML_REF" && gfxd - doc="$(docTask $XML_BUILT)" - sbtBuild 'set version := "'$XML_VER'-DOC"' $clean "$doc" 'set version := "'$XML_VER'"' test "${buildTasks[@]}" - XML_BUILT="yes" # ensure the module is built and published when buildXML is invoked for the second time, see comment above - fi -} - -buildPartest() { - if [ "$PARTEST_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-partest" $PARTEST_VER ) - then echo "Found scala-partest $PARTEST_VER; not building." - else - update scala scala-partest "$PARTEST_REF" && gfxd - doc="$(docTask $PARTEST_BUILT)" - sbtBuild 'set version :="'$PARTEST_VER'"' 'set VersionKeys.scalaXmlVersion := "'$XML_VER'"' $clean "$doc" test "${buildTasks[@]}" - PARTEST_BUILT="yes" - fi -} - -# should only be called with publishTasks publishing to artifactory -buildScalaCheck(){ - if [ "$SCALACHECK_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scalacheck" "scalacheck" $SCALACHECK_VER ) - then echo "Found scalacheck $SCALACHECK_VER; not building." 
- else - update rickynils scalacheck $SCALACHECK_REF && gfxd - doc="$(docTask $SCALACHECK_BUILT)" - sbtBuild 'set version := "'$SCALACHECK_VER'"' 'set VersionKeys.scalaParserCombinatorsVersion := "'$PARSERS_VER'"' $clean "$doc" publish # test times out NOTE: never published to sonatype - SCALACHECK_BUILT="yes" - fi -} - -# build modules, using ${buildTasks[@]} (except for ScalaCheck, which is hard-coded to publish to artifactory) -buildModules() { - publishTasks=('set credentials += Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", System.getenv("PRIVATE_REPO_PASS"))' "set every publishTo := Some(\"publish-repo\" at \"$integrationRepoUrl\")") - buildTasks=($publishPrivateTask) - buildXML - # buildScalaCheck - buildPartest -} - -buildPublishedModules() { - publishTasks=('set credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", System.getenv("SONA_USER"), System.getenv("SONA_PASS"))' "set pgpPassphrase := Some(Array.empty)") - buildTasks=($publishSonatypeTaskModules) - buildXML - buildPartest -} - - -## BUILD STEPS: - -scalaVerToBinary() { - # $1 = SCALA_VER - # $2 = SCALA_VER_BASE - # $3 = SCALA_VER_SUFFIX - - local RE='\([0-9]*\)[.]\([0-9]*\)[.]\([0-9]*\)' - local majMin="$(echo $2 | sed -e "s#$RE#\1.\2#")" - local patch="$(echo $2 | sed -e "s#$RE#\3#")" - - # The binary version is majMin (e.g. "2.12") if - # - there's no suffix : 2.12.0, 2.12.1 - # - the suffix starts with "-bin" : 2.12.1-bin-sha, 2.12.1-bin-sha-custom, 2.12.1-bin-SNAPSHOT - # - the suffix is \w+ and patch version is > 0: 2.12.1-M1, 2.12.1-RC2 (also 2.12.1-sha, 2.12.1-SNAPSHOT, which we don't use) - # - # Otherwise, the binary version is the full version: 2.12.0-M1, 2.12.0-RC2, 2.12.0-pre-sha, 2.12.0-pre-SNAPSHOT - # (also 2.12.0-sha, 2.12.0-SNAPSHOT, which we don't use) - # - # Adapted from sbt: https://github.com/sbt/sbt/blob/v0.13.13/util/cross/src/main/input_sources/CrossVersionUtil.scala#L42 - # - # During the pre-release cycle of a major release (e.g. before 2.12.0), the SCALA_BINARY_VER of integration / SNAPSHOT - # versions is the full version, e.g. 2.12.0-pre-sha, so modules are always re-built. - - if [[ "$3" == "" || "${3:0:4}" == "-bin" || ("$patch" != "0" && "$3" =~ ^-[a-zA-Z0-9_]+$) ]]; then - echo "$majMin" - else - echo "$1" - fi -} - -determineScalaVersion() { - cd $WORKSPACE - parseScalaProperties "versions.properties" - - # each of the branches below defines the following vars: SCALA_VER_BASE, SCALA_VER_SUFFIX, publishToSonatype - if [ -z "$SCALA_VER_BASE" ]; then - echo "No SCALA_VER_BASE specified." 
- - travis_fold_start determineScalaVersion "Determining Scala version" - $SBT_CMD $sbtArgs 'set baseVersionSuffix in Global := "SHA"' generateBuildCharacterPropertiesFile - travis_fold_end determineScalaVersion - parseScalaProperties "buildcharacter.properties" - SCALA_VER_BASE="$maven_version_base" - SCALA_VER_SUFFIX="$maven_version_suffix" - publishToSonatype="no" - else - publishToSonatype=${publishToSonatype-"yes"} # unless forced previously, publish - fi +# scripts/common will add the repositories override +sbtArgs="-ivy $WORKSPACE/ivy2 -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13" - SCALA_VER="$SCALA_VER_BASE$SCALA_VER_SUFFIX" - SCALA_BINARY_VER=$(scalaVerToBinary $SCALA_VER $SCALA_VER_BASE $SCALA_VER_SUFFIX) +# each job has its own ivy2, sharing between jobs would lead to trouble +mkdir -p $WORKSPACE/ivy2 - echo "version=$SCALA_VER" >> $baseDir/jenkins.properties - echo "sbtDistVersionOverride=-Dproject.version=$SCALA_VER" >> $baseDir/jenkins.properties +source scripts/common - scalaVersionTasks=('set every scalaVersion := "'$SCALA_VER'"') +source scripts/bootstrap_fun - echo "Building Scala $SCALA_VER." -} +mkFreshIvy -# determineScalaVersion must have been called (versions.properties is parsed to env vars) -deriveModuleVersions() { - XML_VER=${XML_VER-$scala_xml_version_number} - PARTEST_VER=${PARTEST_VER-$partest_version_number} - SCALACHECK_VER=${SCALACHECK_VER-$scalacheck_version_number} - - XML_REF="v$XML_VER" - PARTEST_REF="v$PARTEST_VER" - SCALACHECK_REF="$SCALACHECK_VER" # no `v` in their tags - - echo "PARTEST = $PARTEST_VER at $PARTEST_REF" - # echo "SCALACHECK = $SCALACHECK_VER at $SCALACHECK_REF" - echo "XML = $XML_VER at $XML_REF" - -} - -createNetrcFile() { - local netrcFile=$HOME/`basename $1`-netrc - grep 'host=' $1 | sed 's/host=\(.*\)/machine \1/' > $netrcFile - grep 'user=' $1 | sed 's/user=\(.*\)/login \1/' >> $netrcFile - grep 'password=' $1 | sed 's/password=\(.*\)/password \1/' >> $netrcFile -} - -# deletes existing artifacts (core and modules) matching the $SCALA_VER from the repository passed as argument -removeExistingBuilds() { - local repoUrl=$1 - local repoPrefix="https://scala-ci.typesafe.com/artifactory/" - if [[ $repoUrl == "$repoPrefix"* ]]; then - local repoId=${1#$repoPrefix} - local storageApiUrl="${repoPrefix}api/storage/$repoId" - - createNetrcFile "$HOME/.credentials-private-repo" - local netrcFile="$HOME/.credentials-private-repo-netrc" - - # "module" is not a scala module (like scala-xml), but an artifact of a boostrap build. the variable - # contains: "org/scala-lang/modules", "org/scala-lang/scala-compiler", "org/scala-lang/scala-library", ... - local scalaLangModules=`curl -s $storageApiUrl/org/scala-lang | jq -r '.children | .[] | "org/scala-lang" + .uri' | grep -v actors-migration` - - for module in $scalaLangModules; do - local artifacts=`curl -s $storageApiUrl/$module | jq -r ".children | .[] | select(.uri | endswith(\"$SCALA_VER\")) | .uri"` - for artifact in $artifacts; do - echo "Deleting $repoUrl$module$artifact" - curl -s --netrc-file $netrcFile -X DELETE $repoUrl$module$artifact - done - done - else - echo "Unknown repo, not deleting anything: $repoUrl" - fi -} - -constructUpdatedModuleVersions() { - updatedModuleVersions=() - - # force the new module versions for building the core. these may be different from the values in versions.properties - # if the variables (XML_VER) were provided. in the common case, the values are the same as in versions.properties. 
- updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala-xml.version.number=$XML_VER") - updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dpartest.version.number=$PARTEST_VER") - # updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscalacheck.version.number=$SCALACHECK_VER") - - # allow overriding the jline version using a jenkins build parameter - if [ ! -z "$JLINE_VER" ] ; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Djline.version=$JLINE_VER"); fi - - if [ ! -z "$SCALA_BINARY_VER" ]; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala.binary.version=$SCALA_BINARY_VER"); fi -} - -# build locker (scala + modules) and quick, publishing everything to artifactory -bootstrap() { - echo "### Bootstrapping" - - cd $WORKSPACE - - #### (Optional) STARR. - if [ ! -z "$STARR_REF" ]; then - STARR_DIR=./scala-starr - STARR_VER_SUFFIX="-$(git rev-parse --short $STARR_REF)-starr" - STARR_VER=$SCALA_VER_BASE$STARR_VER_SUFFIX - rm -rf "$STARR_DIR" - ( - git clone --reference $WORKSPACE/.git $WORKSPACE/.git $STARR_DIR - cd $STARR_DIR - git co $STARR_REF - travis_fold_start starr "Building starr" - $SBT_CMD -no-colors $sbtArgs --warn "setupBootstrapStarr $integrationRepoUrl $STARR_VER" $clean publish - travis_fold_end starr - ) - fi - - #### LOCKER - - # for bootstrapping, publish core (or at least smallest subset we can get away with) - # so that we can build modules with this version of Scala and publish them locally - # must publish under $SCALA_VER so that the modules will depend on this (binary) version of Scala - # publish more than just core: partest needs scalap - # in sabbus lingo, the resulting Scala build will be used as starr to build the released Scala compiler - if [ ! -z "$STARR_VER" ]; then SET_STARR=-Dstarr.version=$STARR_VER; fi - - travis_fold_start locker "Building locker" - $SBT_CMD -no-colors $sbtArgs $SET_STARR --warn "setupBootstrapLocker $integrationRepoUrl $SCALA_VER" $clean publish - travis_fold_end locker - - echo "### Building modules using locker" - - # build, test and publish modules with this core - # publish to our internal repo (so we can resolve the modules in the scala build below) - # we only need to build the modules necessary to build Scala itself - # since the version of locker and quick are the same - buildModules - - constructUpdatedModuleVersions - - #### QUICK - - echo "### Bootstrapping Scala using locker" - - # # TODO: close all open staging repos so that we can be reasonably sure the only open one we see after publishing below is ours - # # the sbt call will create a new one - # - # Rebuild Scala with these modules so that all binary versions are consistent. - # Update versions.properties to new modules. - # Sanity check: make sure the Scala test suite passes / docs can be generated with these modules. - cd $baseDir - rm -rf build/ - - travis_fold_start quick "Building bootstrapped" - $SBT_CMD $sbtArgs \ - --warn \ - -Dstarr.version=$SCALA_VER \ - ${updatedModuleVersions[@]} \ - "setupBootstrapQuick $integrationRepoUrl $SCALA_VER" \ - $clean \ - $sbtBuildTask \ - dist/mkQuick \ - publish - travis_fold_end quick - - # clear ivy cache (and to be sure, local as well), so the next round of sbt builds sees the fresh scala - rm -rf $baseDir/ivy2 - - # TODO: create PR with following commit (note that release will have been tagged already) - # git commit versions.properties -m"Bump versions.properties for $SCALA_VER." 
-} - -testStability() { - travis_fold_start stab "Testing stability" - cd $baseDir - - # Run stability tests using the just built version as "quick" and a new version as "strap" - mv build/quick quick1 - rm -rf build/ - $SBT_CMD $sbtArgs \ - --warn \ - -Dstarr.version=$SCALA_VER \ - ${updatedModuleVersions[@]} \ - "setupBootstrapQuick $integrationRepoUrl $SCALA_VER" \ - $clean \ - library/compile reflect/compile compiler/compile - mv build/quick build/strap - mv quick1 build/quick - $scriptsDir/stability-test.sh - - travis_fold_end stab -} - -# assumes we just bootstrapped, and current directory is $baseDir -# publishes locker to sonatype, then builds modules again (those for which version numbers were provided), -# and publishes those to sonatype as well -# finally, the staging repos are closed -publishSonatype() { - # stage to sonatype, along with all modules -Dmaven.version.suffix/-Dbuild.release not necessary, - # since we're just publishing an existing build - travis_fold_start sona "Publishing core to sonatype" - $SBT_CMD $sbtArgs \ - --warn \ - -Dstarr.version=$SCALA_VER \ - ${updatedModuleVersions[@]} \ - "setupBootstrapPublish $integrationRepoUrl $SCALA_VER" \ - $publishSonatypeTaskCore - travis_fold_end sona - - # echo "### Publishing modules to sonatype" - # build/test/publish scala core modules to sonatype (this will start a new staging repo) - # (was hoping we could make everything go to the same staging repo, but it's not timing that causes two staging repos to be opened) - # NOTE: only publish those for which versions are set - # test and publish to sonatype, assuming you have ~/.sbt/0.13/sonatype.sbt and ~/.sbt/0.13/plugin/gpg.sbt - buildPublishedModules - - open=$(st_stagingReposOpen) - allOpenUrls=$(echo $open | jq '.repositoryURI' | tr -d \") - allOpen=$(echo $open | jq '.repositoryId' | tr -d \") - - echo "Closing open repos: $allOpen" +determineScalaVersion - for repo in $allOpen; do st_stagingRepoClose $repo; done +deriveModuleVersions - echo "Closed sonatype staging repos: $allOpenUrls." -} +removeExistingBuilds $integrationRepoUrl +if [ ! -z "$STARR_REF" ]; then + buildStarr +fi -#### MAIN +buildLocker -determineScalaVersion +# locker is now published in artifactory -- make sure we resolve from there +rm -rf build/ -deriveModuleVersions +buildModules -removeExistingBuilds $integrationRepoUrl +buildQuick clean testAll publish -bootstrap +wipeIvyCache if [ "$testStability" == "yes" ] then testStability diff --git a/scripts/jobs/integrate/ide b/scripts/jobs/integrate/ide index c39facbc3d16..1dc7b43139e8 100755 --- a/scripts/jobs/integrate/ide +++ b/scripts/jobs/integrate/ide @@ -1,7 +1,7 @@ #!/bin/bash -e # requires checkout: root is a scala checkout with which to integrate (actually, only required file is versions.properties, as documented below) # requires env: scalaVersion (specifies binary already built from above checkout), WORKSPACE (provided by jenkins), repo_ref (HEAD of the scala checkout), -# requires files: $baseDir/versions.properties (from checkout -- defines version numbers for modules used to build scala for dbuild...) +# requires files: $WORKSPACE/versions.properties (from checkout -- defines version numbers for modules used to build scala for dbuild...) echo "IDE integration not yet available on 2.12.x. Punting." 
exit 0 @@ -13,9 +13,9 @@ baseDir=${WORKSPACE-`pwd`} uberBuildUrl=${uberBuildUrl-"https://github.com/scala-ide/uber-build.git"} uberBuildConfig=${uberBuildConfig-"validator.conf"} # TODO: backport to 2.10.x: uberBuildConfig="validator-2.10.conf" -uberBuildDir="$baseDir/uber-build/" +uberBuildDir="$WORKSPACE/uber-build/" -cd $baseDir +cd $WORKSPACE if [[ -d $uberBuildDir ]]; then ( cd $uberBuildDir && git fetch $uberBuildUrl HEAD && git checkout -f FETCH_HEAD && git clean -fxd ) else @@ -26,10 +26,10 @@ echo "maven.version.number=$scalaVersion" >> versions.properties # pass prRepoUrl in, which uber-build passes along to dbuild (in sbt-builds-for-ide) # the "-P pr-scala" maven arg accomplishes the same thing for maven (directly used in uber-build) -BASEDIR="$baseDir" prRepoUrl="$prRepoUrl" IDE_M2_REPO="$prRepoUrl" MAVEN_ARGS="-P pr-scala"\ +BASEDIR="$WORKSPACE" prRepoUrl="$prRepoUrl" IDE_M2_REPO="$prRepoUrl" MAVEN_ARGS="-P pr-scala"\ $uberBuildDir/uber-build.sh $uberBuildDir/config/$uberBuildConfig $repo_ref $scalaVersion # uber-build puts its local repo under target/m2repo # wipe the org/scala-lang part, which otherwise just keeps # growing and growing due to the -$sha-SNAPSHOT approach -[[ -d $baseDir/target/m2repo/org/scala-lang ]] && rm -rf $baseDir/target/m2repo/org/scala-lang +[[ -d $WORKSPACE/target/m2repo/org/scala-lang ]] && rm -rf $WORKSPACE/target/m2repo/org/scala-lang diff --git a/scripts/jobs/integrate/windows b/scripts/jobs/integrate/windows index f5e068684e3b..2ed88c55589d 100755 --- a/scripts/jobs/integrate/windows +++ b/scripts/jobs/integrate/windows @@ -1,8 +1,6 @@ #!/bin/bash -baseDir=${WORKSPACE-`pwd`} -scriptsDir="$baseDir/scripts" -. $scriptsDir/common +source scripts/common java -version javac -version diff --git a/scripts/jobs/validate/publish-core b/scripts/jobs/validate/publish-core index c71fbd12b796..1b1f4bed9880 100755 --- a/scripts/jobs/validate/publish-core +++ b/scripts/jobs/validate/publish-core @@ -5,9 +5,7 @@ # The only downside is that backend improvements don't improve compiler performance itself until they are in STARR). # The version is suffixed with "-${sha:0:7}-SNAPSHOT" -baseDir=${WORKSPACE-`pwd`} -scriptsDir="$baseDir/scripts" -. $scriptsDir/common +source scripts/common generateRepositoriesConfig $prRepoUrl SBT="$SBT_CMD -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig" diff --git a/scripts/jobs/validate/test b/scripts/jobs/validate/test index 39fafebaef44..a155df4ce04e 100755 --- a/scripts/jobs/validate/test +++ b/scripts/jobs/validate/test @@ -1,8 +1,6 @@ #!/bin/bash -e -v -x -baseDir=${WORKSPACE-`pwd`} -scriptsDir="$baseDir/scripts" -. $scriptsDir/common +source scripts/common generateRepositoriesConfig $prRepoUrl SBT="$SBT_CMD -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig" From 9f54360df39f99f7ba183064631a10f4ef2cf1d5 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Wed, 7 Mar 2018 11:19:06 +0100 Subject: [PATCH 1253/2793] Exclude PR validation as a trigger --- .travis.yml | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/.travis.yml b/.travis.yml index 4c5517cfae6f..1d87bf896025 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,12 +14,22 @@ cache: before_script: - (cd admin && ./init.sh) +stages: + - name: build + - name: test + if: type != pull_request + - name: publish + if: type != pull_request + # buildQuick needs following env (is that propagated to stages?) 
# - PRIVATE_REPO_PASS, integrationRepoUrl, # computed: SBT_CMD sbtArgs SCALA_VER updatedModuleVersions jobs: include: - stage: build + # currently, not touching PR validation + # (also, we couldn't even, because the password to publish to artifactory is not there :-/) + if: type != pull_request script: - source scripts/common - source scripts/bootstrap_fun @@ -45,6 +55,7 @@ jobs: # the key is restricted using forced commands so that it can only upload to the directory we need here after_success: ./scripts/travis-publish-spec.sh + # be careful to not set any env vars, as this will result in a cache miss - &test stage: test @@ -65,6 +76,7 @@ jobs: - script: testStability if: env(testStability) = yes + - stage: publish script: publishSonatype if: env(publishToSonatype) = yes # TODO: is this environment variable evaluated afer `source scripts/common` has a chance to set it? maybe it's ok and we can just keep this as the hook for manually triggering a release From 8fc5de23fb00c59c7f475b0f58a58f8f328d3bc7 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Wed, 7 Mar 2018 15:27:21 +0100 Subject: [PATCH 1254/2793] Review feedback from lrytz --- .travis.yml | 2 +- admin/init.sh | 5 ----- scripts/common | 3 --- 3 files changed, 1 insertion(+), 9 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1d87bf896025..e1cf03763351 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,7 +15,7 @@ before_script: - (cd admin && ./init.sh) stages: - - name: build + - name: build # also builds the spec using jekyll - name: test if: type != pull_request - name: publish diff --git a/admin/init.sh b/admin/init.sh index 06f2b182e3cb..f53cc9641fb5 100755 --- a/admin/init.sh +++ b/admin/init.sh @@ -5,15 +5,10 @@ sensitive() { perl -p -e 's/\$\{([^}]+)\}/defined $ENV{$1} ? $ENV{$1} : $&/eg' < files/credentials-private-repo > ~/.credentials-private-repo perl -p -e 's/\$\{([^}]+)\}/defined $ENV{$1} ? $ENV{$1} : $&/eg' < files/credentials-sonatype > ~/.credentials-sonatype perl -p -e 's/\$\{([^}]+)\}/defined $ENV{$1} ? $ENV{$1} : $&/eg' < files/sonatype-curl > ~/.sonatype-curl - # perl -p -e 's/\$\{([^}]+)\}/defined $ENV{$1} ? $ENV{$1} : $&/eg' < files/m2-settings.xml > ~/.m2/settings.xml -- not needed anymore (used for ide integration?) openssl aes-256-cbc -d -pass "pass:$GPG_SUBKEY_SECRET" -in files/gpg_subkey.enc | gpg --import } -# directories needed by sensitive part -# mkdir -p ~/.m2 -- not needed anymore (used for ide integration?) -mkdir -p ~/.ssh - # don't let anything escape from the sensitive part (e.g. leak environment var by echoing to log on failure) sensitive >/dev/null 2>&1 diff --git a/scripts/common b/scripts/common index 83b39c7b9656..673c25dd4f64 100644 --- a/scripts/common +++ b/scripts/common @@ -6,9 +6,6 @@ set -e WORKSPACE="${WORKSPACE-`pwd`}" -# the default (home dir) is fine on Travis, since each jobs gets its own worker (ivy cache is cached by travis) -IVY_HOME="${IVY_HOME-$HOME/.ivy2}" - # Known problems : does not fare well with interrupted, partial # compilations. 
We should perhaps have a multi-dependency version # of do_i_have below From dd1631348ddf17ae74b9759dd674ef1b80f54ecb Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 8 Mar 2018 17:23:55 +1000 Subject: [PATCH 1255/2793] Add Automatic-Module-Name attribute to library, reflect, compiler --- build.sbt | 3 +++ project/AutomaticModuleName.scala | 22 ++++++++++++++++++++++ 2 files changed, 25 insertions(+) create mode 100644 project/AutomaticModuleName.scala diff --git a/build.sbt b/build.sbt index 3adcfc9b4d57..29bf92ea8781 100644 --- a/build.sbt +++ b/build.sbt @@ -337,6 +337,7 @@ lazy val bootstrap = project in file("target/bootstrap") lazy val library = configureAsSubproject(project) .settings(generatePropertiesFileSettings) .settings(Osgi.settings) + .settings(AutomaticModuleName.settings("scala.library")) .settings( name := "scala-library", description := "Scala Standard Library", @@ -375,6 +376,7 @@ lazy val library = configureAsSubproject(project) lazy val reflect = configureAsSubproject(project) .settings(generatePropertiesFileSettings) .settings(Osgi.settings) + .settings(AutomaticModuleName.settings("scala.reflect")) .settings( name := "scala-reflect", description := "Scala Reflection Library", @@ -400,6 +402,7 @@ lazy val compiler = configureAsSubproject(project) .settings(generatePropertiesFileSettings) .settings(generateBuildCharacterFileSettings) .settings(Osgi.settings) + .settings(AutomaticModuleName.settings("scala.tools.nsc")) .settings( name := "scala-compiler", description := "Scala Compiler", diff --git a/project/AutomaticModuleName.scala b/project/AutomaticModuleName.scala new file mode 100644 index 000000000000..8a70c67adae6 --- /dev/null +++ b/project/AutomaticModuleName.scala @@ -0,0 +1,22 @@ +package scala.build + +import sbt.{Def, _} +import sbt.Keys._ + +/** + * Helper to set Automatic-Module-Name in projects. + * + * !! DO NOT BE TEMPTED INTO AUTOMATICALLY DERIVING THE NAMES FROM PROJECT NAMES !! + * + * The names carry a lot of implications and DO NOT have to always align 1:1 with the group ids or package names, + * though there should be of course a strong relationship between them. + */ +object AutomaticModuleName { + def settings(name: String): Seq[Def.Setting[_]] = { + val pair = ("Automatic-Module-Name" -> name) + Seq( + packageOptions in (Compile, packageBin) += Package.ManifestAttributes(pair), + Osgi.headers += pair + ) + } +} \ No newline at end of file From 76b1abdba604f419ab6cfb1e9479aed58f6435b0 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 8 Mar 2018 18:42:27 +1000 Subject: [PATCH 1256/2793] Simplify checking for already-published artifacts Use a maven incantation that allows download of a dependency without needing to create a dummy project. 
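For illustration, the check now boils down to a single `dependency:get` invocation; a minimal sketch with placeholder coordinates and repository URL (the real script substitutes its `$1`..`$4` arguments):

```
# Ask Maven to fetch exactly one artifact instead of compiling a dummy pom
# that depends on it; exit status 0 means the artifact resolved.
# Coordinates and repository below are illustrative placeholders.
mvn -q -DremoteRepositories="https://scala-ci.typesafe.com/artifactory/scala-integration/" \
    -DgroupId="org.scala-lang" \
    -DartifactId="scala-library" \
    -Dversion="$SCALA_VER" \
    -Dtransitive=false \
    dependency:get
```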
--- scripts/common | 52 +------------------------------------------------- 1 file changed, 1 insertion(+), 51 deletions(-) diff --git a/scripts/common b/scripts/common index 316d8ed5a0fb..ebd5b986b558 100644 --- a/scripts/common +++ b/scripts/common @@ -88,59 +88,9 @@ function parseScalaProperties(){ function checkAvailability () { pushd "${TMP_DIR}" rm -rf * - -# pom file for the test project - cat > pom.xml << EOF - - 4.0.0 - com.typesafe - typesafeDummy - war - 1.0-SNAPSHOT - Dummy - http://127.0.0.1 - - - $1 - $2 - $3 - - - - - sonatype.snapshot - Sonatype maven snapshot repository - https://oss.sonatype.org/content/repositories/snapshots - - daily - - -EOF - - if [ -n "$4" ] - then -# adds the extra repository - cat >> pom.xml << EOF - - extrarepo - extra repository - $4 - -EOF - fi - - cat >> pom.xml << EOF - - -EOF - set +e - mvn "${MAVEN_ARGS[@]}" compile &> "${TMP_DIR}/mvn.log" + mvn -q "${MAVEN_ARGS[@]}" -DremoteRepositories="$4" -DgroupId="$1" -DartifactId="$2" -Dversion="$3" -Dtransitive=false dependency:get RES=$? - # Quiet the maven, but allow diagnosing problems. - grep -i downloading "${TMP_DIR}/mvn.log" - grep -i exception "${TMP_DIR}/mvn.log" - grep -i error "${TMP_DIR}/mvn.log" set -e # log the result From 6ca54f6fe2c9be80f3e5a641a2e6dc97322afda2 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 5 Mar 2018 21:40:39 +0100 Subject: [PATCH 1257/2793] Correctly synthesize `manifest[T]` when `T` is an alias This class A[T] object A { type T = A[_] manifest[T] } crashed the compiler. Comparing the AST generated for `manifest[T]` with the working version `manifest[A[_]]` shows that a cast is inserted to the `classOf` argument in the latter case, but not the former. For `manifest[T]`: scala.Predef.manifest[A.T]( scala.reflect.ManifestFactory.classType[A.T]( classOf[A], ... For `manifest[A[_]]` scala.Predef.manifest[A[_]]( scala.reflect.ManifestFactory.classType[A[_]]( classOf[A].asInstanceOf[Class[A[_]]], ... My approach for fixing this was simply to see what makes the compiler insert the cast. The condition is here: private def manifestOfType(tp: Type, flavor: Symbol): SearchResult = { ... def mot(tp0: Type, from: List[Symbol], to: List[Type]): SearchResult = { val tp1 = tp0.dealias ... val classarg = tp.dealias match { case _: ExistentialType => gen.mkCast(classarg0, ClassType(tp)) case _ => classarg0 } `tp` is `A.T`, the type alias. In the first call to `mot`, `tp0` is `A.T`. `tp1 = tp0.dealias` is an `ExistentialType`, so `mot` called recursively with `tp0 = tp1.skolemizeExistential`. A cast seems to be needed if the original type `tp` is an existential (not sure why that is), but we need to dealias. Not sure if we should cast to `tp` or `tp.dealias`, I guess it doesn't matter. 
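For completeness, a sketch of the expected outcome for the alias case (assumed shape, not actual compiler output): since `tp.dealias` is an existential, `manifest[A.T]` now takes the cast branch as well, so the `classOf` argument is cast just like in the `manifest[A[_]]` tree above:

```
class A[T]
object A {
  type T = A[_]
  // Expected synthesized tree (sketch), now including the cast:
  //   scala.Predef.manifest[A.T](
  //     scala.reflect.ManifestFactory.classType[A.T](
  //       classOf[A].asInstanceOf[Class[A.T]], ...
  manifest[T]   // no longer crashes the compiler
}
```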
--- .../tools/nsc/typechecker/Implicits.scala | 2 +- test/files/pos/t9155.scala | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t9155.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index bc6917ef34b6..9d2196a56720 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1304,7 +1304,7 @@ trait Implicits { manifestFactoryCall("arrayType", args.head, findManifest(args.head)) } else if (sym.isClass) { val classarg0 = gen.mkClassOf(tp1) - val classarg = tp match { + val classarg = tp.dealias match { case _: ExistentialType => gen.mkCast(classarg0, ClassType(tp)) case _ => classarg0 } diff --git a/test/files/pos/t9155.scala b/test/files/pos/t9155.scala new file mode 100644 index 000000000000..43b7f339329a --- /dev/null +++ b/test/files/pos/t9155.scala @@ -0,0 +1,19 @@ +class A[T] +object A { + type T = A[_] + manifest[T] +} + +class B[T] +object B { + type Any = B[ _ <: String] + manifest[B[_ <: String]] + manifest[B.Any] +} + +class C[T] +object C { + def f[T](implicit m: Manifest[T]) = 0 + type CAlias = C[_] + val x = f[CAlias] +} From 54546fce25abeea631ffa4a02bfd5f98489b2552 Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Wed, 14 Feb 2018 22:56:14 +0000 Subject: [PATCH 1258/2793] Threadsafe simple stats --- .../reflect/internal/util/Statistics.scala | 44 +++++++++++++------ 1 file changed, 31 insertions(+), 13 deletions(-) diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index df8f5e78065a..e4a3f6f64ffb 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -2,10 +2,11 @@ package scala package reflect.internal.util import scala.collection.mutable - import scala.reflect.internal.SymbolTable import scala.reflect.internal.settings.MutableSettings -import java.lang.invoke.{SwitchPoint, MethodHandle, MethodHandles, MethodType} +import java.util.concurrent.atomic.{AtomicInteger, AtomicLong} + +import scala.runtime.LongRef abstract class Statistics(val symbolTable: SymbolTable, settings: MutableSettings) { @@ -177,22 +178,37 @@ quant) } class Timer(val prefix: String, val phases: Seq[String]) extends Quantity { - var nanos: Long = 0 - var timings = 0 - def start() = { - (nanos, System.nanoTime()) + private val totalThreads = new AtomicInteger() + private val threadNanos = new ThreadLocal[LongRef] { + override def initialValue() = { + totalThreads.incrementAndGet() + new LongRef(0) + } + } + private[util] val totalNanos = new AtomicLong + private[util] val timings = new AtomicInteger + def nanos = totalNanos.get + def start(): TimerSnapshot = { + (threadNanos.get.elem, System.nanoTime()) } def stop(prev: TimerSnapshot) { val (nanos0, start) = prev - nanos = nanos0 + System.nanoTime() - start - timings += 1 + val newThreadNanos = nanos0 + System.nanoTime() - start + val threadNanosCount = threadNanos.get + val diff = newThreadNanos - threadNanosCount.elem + threadNanosCount.elem = newThreadNanos + totalNanos.addAndGet(diff) + timings.incrementAndGet() + } + protected def show(ns: Long) = s"${ns/1000/1000.0}ms" + override def toString = { + val threads = totalThreads.get + s"$timings spans, ${if (threads > 1) s"$threads threads, "}${show(totalNanos.get)}" } - protected def show(ns: Long) = s"${ns/1000000}ms" - override 
def toString = s"$timings spans, ${show(nanos)}" } class SubTimer(prefix: String, override val underlying: Timer) extends Timer(prefix, underlying.phases) with SubQuantity { - override protected def show(ns: Long) = super.show(ns) + showPercent(ns, underlying.nanos) + override protected def show(ns: Long) = super.show(ns) + showPercent(ns, underlying.totalNanos.get) } class StackableTimer(prefix: String, underlying: Timer) extends SubTimer(prefix, underlying) with Ordered[StackableTimer] { @@ -232,6 +248,8 @@ quant) /** A stack of timers, all active, where a timer's specific "clock" * is stopped as long as it is buried by some other timer in the stack, but * its aggregate clock keeps on ticking. + * + * Note: Not threadsafe */ class TimerStack { private var elems: List[(StackableTimer, Long)] = Nil @@ -246,9 +264,9 @@ quant) val (nanos0, start) = prev val duration = System.nanoTime() - start val (topTimer, nestedNanos) :: rest = elems - topTimer.nanos = nanos0 + duration + topTimer.totalNanos.addAndGet(nanos0 + duration) topTimer.specificNanos += duration - nestedNanos - topTimer.timings += 1 + topTimer.timings.incrementAndGet() elems = rest match { case (outerTimer, outerNested) :: elems1 => (outerTimer, outerNested + duration) :: elems1 From 2a0a742de7ecbc8972153fe6428a2544e1790984 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 8 Mar 2018 15:19:33 +0100 Subject: [PATCH 1259/2793] Cleanups, skip tests for now Main cleanup = no more repositories config! This is nice because we were duplicating the one that comes with sbt's launcher, just to add a few lines. We could do the same for the jenkins scripts in scripts/, but leaving those alone as much as possible for now. --- .travis.yml | 23 ++++++++++++----------- admin/files/gpg.sbt | 3 +++ scripts/bootstrap_fun | 15 ++++++--------- scripts/common | 7 +++---- scripts/jobs/integrate/bootstrap | 13 +++++++------ 5 files changed, 31 insertions(+), 30 deletions(-) diff --git a/.travis.yml b/.travis.yml index e1cf03763351..ae8b8251bbef 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,8 +16,9 @@ before_script: stages: - name: build # also builds the spec using jekyll + # tests are running into time limits (will re-enable once Jason's partest speedups are in) - name: test - if: type != pull_request + if: env(bla) = thisVarIsNotSet AND type != pull_request # just disabling tests for now, but commenting the stage here doesn't do the trick - name: publish if: type != pull_request @@ -65,21 +66,21 @@ jobs: - source scripts/common - source scripts/bootstrap_fun - mkFreshIvy - - find build -type f -exec touch {} + # give antStyle a chance - script: buildQuick "set antStyle := true" testRest # shouldn't rebuild, since build/ is cached + # - find build -type f -exec touch {} + # "set antStyle := true" seems to cause really long compiles for the test suite?? + script: buildQuick testRest # shouldn't rebuild, since build/ is cached - <<: *test - script: buildQuick "set antStyle := true" testPosPres + script: buildQuick testPosPres - <<: *test - script: buildQuick "set antStyle := true" testRun - if: env(testRun) = yes - + script: buildQuick testRun - script: testStability - if: env(testStability) = yes - - stage: publish - script: publishSonatype - if: env(publishToSonatype) = yes # TODO: is this environment variable evaluated afer `source scripts/common` has a chance to set it? 
maybe it's ok and we can just keep this as the hook for manually triggering a release + script: if [ "$publishToSonatype" = "yes" ]; then publishSonatype; fi + # using bash conditional, because a travis condition on the stage won't work: + # the `env` function only picks stuff up from yaml, not variables set in bash, + # and we can't supply more env vars using a custom build from the web + # It would work using the API according to https://github.com/travis-ci/docs-travis-ci-com/issues/1485#issuecomment-351726416, + # but that's too much right now. # cat /dev/urandom | head -c 10000 | openssl sha1 > ./secret # openssl aes-256-cbc -pass "file:./secret" -in id_dsa_spec212_b4096 -out spec/id_dsa_travis.enc -a diff --git a/admin/files/gpg.sbt b/admin/files/gpg.sbt index 2efcc4b691e8..5f168c76e3ad 100644 --- a/admin/files/gpg.sbt +++ b/admin/files/gpg.sbt @@ -1 +1,4 @@ +// TODO: are the resolvers needed? +resolvers ++= Seq(Resolver.typesafeIvyRepo("releases"), Resolver.sbtPluginRepo("releases")) + addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.1.0") diff --git a/scripts/bootstrap_fun b/scripts/bootstrap_fun index 137aa5e32bee..b33cf864bcae 100644 --- a/scripts/bootstrap_fun +++ b/scripts/bootstrap_fun @@ -2,9 +2,7 @@ publishPrivateTask=${publishPrivateTask-"publish"} publishSonatypeTaskCore=${publishSonatypeTaskCore-"publishSigned"} publishSonatypeTaskModules=${publishSonatypeTaskModules-"publishSigned"} -forceRebuild=${forceRebuild-no} -# testStability=${testStability-yes} -testStability=no # currently borker by ant PR? +forceBuildModules=${forceBuildModules-no} clean="clean" # TESTING leave empty to speed up testing (on jenkins/locally; on travis it's a fresh machine every time) stApi="https://oss.sonatype.org/service/local" @@ -37,7 +35,7 @@ docTask() { } buildXML() { - if [ "$XML_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-xml" $XML_VER ) + if [ "$XML_BUILT" != "yes" ] && [ "$forceBuildModules" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-xml" $XML_VER ) then echo "Found scala-xml $XML_VER; not building." else update scala scala-xml "$XML_REF" && gfxd @@ -48,19 +46,20 @@ buildXML() { } buildPartest() { - if [ "$PARTEST_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-partest" $PARTEST_VER ) + if [ "$PARTEST_BUILT" != "yes" ] && [ "$forceBuildModules" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-partest" $PARTEST_VER ) then echo "Found scala-partest $PARTEST_VER; not building." else update scala scala-partest "$PARTEST_REF" && gfxd doc="$(docTask $PARTEST_BUILT)" - sbtBuild 'set version :="'$PARTEST_VER'"' 'set VersionKeys.scalaXmlVersion := "'$XML_VER'"' $clean "$doc" test "${buildTasks[@]}" + # disable -Xfatal-warnings until https://github.com/scala/bug/issues/10763 is fixed + sbtBuild 'set version :="'$PARTEST_VER'"' 'set VersionKeys.scalaXmlVersion := "'$XML_VER'"' $clean "$doc" 'set scalacOptions := scalacOptions.value.filterNot(_.contains("fatal-warn"))' test "${buildTasks[@]}" PARTEST_BUILT="yes" fi } # should only be called with publishTasks publishing to artifactory buildScalaCheck(){ - if [ "$SCALACHECK_BUILT" != "yes" ] && [ "$forceRebuild" != "yes" ] && ( sbtResolve "org.scalacheck" "scalacheck" $SCALACHECK_VER ) + if [ "$SCALACHECK_BUILT" != "yes" ] && [ "$forceBuildModules" != "yes" ] && ( sbtResolve "org.scalacheck" "scalacheck" $SCALACHECK_VER ) then echo "Found scalacheck $SCALACHECK_VER; not building." 
else update rickynils scalacheck $SCALACHECK_REF && gfxd @@ -112,8 +111,6 @@ mkFreshIvy() { rm -rf $WORKSPACE/resolutionScratch_ mkdir -p $WORKSPACE/resolutionScratch_ - - generateRepositoriesConfig $integrationRepoUrl } scalaVerToBinary() { diff --git a/scripts/common b/scripts/common index 673c25dd4f64..b38c0f938282 100644 --- a/scripts/common +++ b/scripts/common @@ -29,9 +29,7 @@ SBT_CMD="$SBT_CMD -sbt-version 0.13.17" integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} sbtRepositoryConfig="$WORKSPACE/scripts/sbt-repositories-config" - -sbtArgs="-Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig $sbtArgs" # allow supplying more args - +addIntegrationResolver="set resolvers in Global += \"scala-pr\" at \"$integrationRepoUrl\"" # temp dir where all 'non-build' operation are performed TMP_ROOT_DIR=$(mktemp -d -t pr-scala.XXXX) @@ -249,7 +247,7 @@ st_stagingRepoClose() { sbtBuild() { travis_fold_start build "Building $(basename $PWD) with $@" - $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" + $SBT_CMD -no-colors $sbtArgs "$addIntegrationResolver" "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" travis_fold_end build } @@ -261,6 +259,7 @@ sbtResolve() { # echo "### sbtResolve: $SBT_CMD -no-colors $sbtArgs " "${scalaVersionTasks[@]}" "\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" travis_fold_start resolve "Resolving \"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" \ + "$addIntegrationResolver" \ "set libraryDependencies := Seq(\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross)" \ 'show update' travis_fold_end resolve diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index c655b0b5ea6d..03d5235c6464 100755 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -73,6 +73,11 @@ #### MAIN +# each job has its own ivy2, sharing between jobs would lead to trouble +mkdir -p $WORKSPACE/ivy2 + +source scripts/common + # TODO: this is weird for historical reasons, simplify now that we have one version of sbt in use # we probably don't need to override the sbt dir? 
just ivy # @@ -81,13 +86,9 @@ # the new version of sbt-extras ignores sbt-dir, so we pass it in as -Dsbt.global.base # need to set sbt-dir to one that has the gpg.sbt plugin config) # -# scripts/common will add the repositories override -sbtArgs="-ivy $WORKSPACE/ivy2 -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13" +# scripts/common provides sbtRepositoryConfig +sbtArgs="-ivy $WORKSPACE/ivy2 -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13 -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig" -# each job has its own ivy2, sharing between jobs would lead to trouble -mkdir -p $WORKSPACE/ivy2 - -source scripts/common source scripts/bootstrap_fun From 4769a7df5c3b6b5a8743403bef5cf5a9805241b0 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 8 Mar 2018 15:58:37 +0100 Subject: [PATCH 1260/2793] Hopefully last round of cleanups --- .travis.yml | 4 +--- scripts/bootstrap_fun | 20 -------------------- scripts/common | 22 +++++++++------------- scripts/jobs/integrate/bootstrap | 18 +++++++++++++----- 4 files changed, 23 insertions(+), 41 deletions(-) diff --git a/.travis.yml b/.travis.yml index ae8b8251bbef..5550d4fa4f82 100644 --- a/.travis.yml +++ b/.travis.yml @@ -34,7 +34,6 @@ jobs: script: - source scripts/common - source scripts/bootstrap_fun - - mkFreshIvy - determineScalaVersion - deriveModuleVersions - removeExistingBuilds $integrationRepoUrl @@ -65,7 +64,6 @@ jobs: - if [ -z "$SCALA_VER" ]; then echo "Environment not propagated. Caching issue?"; cat build/env ; exit 1; fi - source scripts/common - source scripts/bootstrap_fun - - mkFreshIvy # - find build -type f -exec touch {} + # "set antStyle := true" seems to cause really long compiles for the test suite?? script: buildQuick testRest # shouldn't rebuild, since build/ is cached - <<: *test @@ -74,7 +72,7 @@ jobs: script: buildQuick testRun - script: testStability - - stage: publish + - stage: publish # note that it's important that this runs on a fresh VM, so we're sure to resolve scala from artifactory, and not out of local cache (which was overwritten during bootstrap, somewhat unorthodoxly) script: if [ "$publishToSonatype" = "yes" ]; then publishSonatype; fi # using bash conditional, because a travis condition on the stage won't work: # the `env` function only picks stuff up from yaml, not variables set in bash, diff --git a/scripts/bootstrap_fun b/scripts/bootstrap_fun index b33cf864bcae..06850f45bb50 100644 --- a/scripts/bootstrap_fun +++ b/scripts/bootstrap_fun @@ -100,18 +100,6 @@ buildPublishedModules() { ## BUILD STEPS: -# TODO: can we reuse some caching? can we stop generating a repositories config, -# since this is duplicated from sbt and may thus get out of synch... -mkFreshIvy() { - # we must change ivy home to get a fresh ivy cache, otherwise we get half-bootstrapped scala - # rm it in case it existed (and there's no ivy2-shadow, which indicates we're running in a TESTING environment)... 
- # we don't nuke the whole ws since that clobbers the git clones needlessly - [[ -d $WORKSPACE/ivy2-shadow ]] || rm -rf $WORKSPACE/ivy2 - mkdir -p $WORKSPACE/ivy2 - - rm -rf $WORKSPACE/resolutionScratch_ - mkdir -p $WORKSPACE/resolutionScratch_ -} scalaVerToBinary() { # $1 = SCALA_VER @@ -294,14 +282,6 @@ buildQuick() { travis_fold_end quick } -wipeIvyCache() { - # clear ivy cache (and to be sure, local as well), so the next round of sbt builds sees the fresh scala - rm -rf $WORKSPACE/ivy2 - - # TODO: create PR with following commit (note that release will have been tagged already) - # git commit versions.properties -m"Bump versions.properties for $SCALA_VER." -} - testStability() { travis_fold_start stab "Testing stability" cd $WORKSPACE diff --git a/scripts/common b/scripts/common index b38c0f938282..24fd1cca69e6 100644 --- a/scripts/common +++ b/scripts/common @@ -11,16 +11,13 @@ WORKSPACE="${WORKSPACE-`pwd`}" # of do_i_have below LOGGINGDIR="$WORKSPACE/logs" -mkdir -p $LOGGINGDIR +mkdir -p "$LOGGINGDIR" -# unset SBT_HOME -# SBT_HOME="$WORKSPACE/.sbt" -# mkdir -p $SBT_HOME -# IVY_CACHE="$WORKSPACE/.ivy2" -# mkdir -p $IVY_CACHE +rm -rf "$WORKSPACE/resolutionScratch_" +mkdir -p "$WORKSPACE/resolutionScratch_" # TODO: do we need to nuke the cache on travis? -# rm -rf $IVY_CACHE/cache/org.scala-lang +# rm -rf $WORKSPACE/.ivy2/cache/org.scala-lang SBT_CMD=${sbtCmd-sbt} SBT_CMD="$SBT_CMD -sbt-version 0.13.17" @@ -31,6 +28,8 @@ integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifacto sbtRepositoryConfig="$WORKSPACE/scripts/sbt-repositories-config" addIntegrationResolver="set resolvers in Global += \"scala-pr\" at \"$integrationRepoUrl\"" +jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"} + # temp dir where all 'non-build' operation are performed TMP_ROOT_DIR=$(mktemp -d -t pr-scala.XXXX) TMP_DIR="${TMP_ROOT_DIR}/tmp" @@ -165,6 +164,7 @@ EOF popd } +# Only used on Jenkins # Generate a repositories file with all allowed repositories in our build environment. # Takes a variable number of additional repositories as argument. # See http://www.scala-sbt.org/0.13/docs/Proxy-Repositories.html @@ -176,12 +176,8 @@ function generateRepositoriesConfig() { done fi - if [ "${TRAVIS}" != "true" ]; then - jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"} - echo "jcenter-cache: $jcenterCacheUrl" >> "$sbtRepositoryConfig" - fi - cat >> "$sbtRepositoryConfig" << EOF + jcenter-cache: $jcenterCacheUrl local maven-central typesafe-ivy-releases-boot: https://repo.lightbend.com/typesafe/ivy-releases/, [organisation]/[module]/[revision]/[type]s/[artifact](-[classifier]).[ext], bootOnly @@ -252,7 +248,7 @@ sbtBuild() { } sbtResolve() { - cd $WORKSPACE/resolutionScratch_ + cd "$WORKSPACE/resolutionScratch_" touch build.sbt # Can be set to `full` if a module requires cross-versioning against the full Scala version, like the continuations plugin used to. cross=${4-binary} diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index 03d5235c6464..4a540b1a5934 100755 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -92,12 +92,18 @@ sbtArgs="-ivy $WORKSPACE/ivy2 -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/. 
source scripts/bootstrap_fun -mkFreshIvy +# On Jenkins, we must change ivy home to get a fresh ivy cache, otherwise we get half-bootstrapped scala +# rm it in case it existed (and there's no ivy2-shadow, which indicates we're running in a TESTING environment)... +# we don't nuke the whole ws since that clobbers the git clones needlessly +[[ -d $WORKSPACE/ivy2-shadow ]] || rm -rf "$WORKSPACE/ivy2" +mkdir -p "$WORKSPACE/ivy2" determineScalaVersion deriveModuleVersions +generateRepositoriesConfig $integrationRepoUrl + removeExistingBuilds $integrationRepoUrl if [ ! -z "$STARR_REF" ]; then @@ -113,12 +119,14 @@ buildModules buildQuick clean testAll publish -wipeIvyCache - if [ "$testStability" == "yes" ] then testStability fi -if [ "$publishToSonatype" == "yes" ] - then publishSonatype + +if [ "$publishToSonatype" == "yes" ]; then + # clear ivy cache so the next round of building modules sees the fresh scala + rm -rf "$WORKSPACE/ivy2/cache/org.scala-lang" + + publishSonatype fi From d76263cb2e654140f97dbd86253d20b23e89bd72 Mon Sep 17 00:00:00 2001 From: Jonathan Frawley Date: Tue, 6 Mar 2018 14:32:52 +0000 Subject: [PATCH 1261/2793] Add extra quotes to fix paths with spaces in them Fixes scala/bug#10756 --- src/compiler/scala/tools/ant/templates/tool-unix.tmpl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl index 82ed68221cfe..634190a31b4c 100755 --- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl +++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl @@ -13,7 +13,7 @@ findScalaHome () { local source="${BASH_SOURCE[0]}" while [ -h "$source" ] ; do local linked="$(readlink "$source")" - local dir="$( cd -P $(dirname "$source") && cd -P $(dirname "$linked") && pwd )" + local dir="$( cd -P "$(dirname "$source")" && cd -P "$(dirname "$linked")" && pwd )" source="$dir/$(basename "$linked")" done ( cd -P "$(dirname "$source")/.." && pwd ) From 47d38ecb2122a2e3cbf3a4ac74383a99245156ae Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 9 Mar 2018 12:35:08 +0100 Subject: [PATCH 1262/2793] Serialize env vars more robustly to build/env --- .travis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 5550d4fa4f82..e084daec32cc 100644 --- a/.travis.yml +++ b/.travis.yml @@ -42,8 +42,8 @@ jobs: - rm -rf build/ # ensure we resolve from artifactory - buildModules - buildQuick clean publish - - echo 'declare -a updatedModuleVersions' > build/env - - echo 'export SCALA_VER="'${SCALA_VER}'" updatedModuleVersions="'${updatedModuleVersions}'"' >> build/env + - set | grep "^updatedModuleVersions=" > build/env + - set | grep "^SCALA_VER=" >> build/env - cat build/env # this builds the spec using jekyll From 4485245611596c35de025f4dfe9d6f77b683aebf Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 9 Mar 2018 14:21:08 +0100 Subject: [PATCH 1263/2793] `qual` is used in `qual.withFilter(fun)`, but `fun` is exempt For unused warnings, make sure we record usages in `qual` of the `withFilter` that's synthesized for refutability checks, while excluding any patterns in the `match` that represents the `isDefinedAt` of the partial function. (Are these patterns vars in the isDefinedAt literal ever used? It's not useful to simplify the trees, but an interesting question to ponder regardless.) 
Consider the expansions of the following tests: - test/files/pos/t10763.scala ``` xs.withFilter(((check$ifrefutable$1: Int) => (check$ifrefutable$1: Int @unchecked) match { case 1 => true case _ => false } ``` - test/files/pos/t10394.scala: ``` .withFilter(((check$ifrefutable$2: Int) => (check$ifrefutable$2: Int @unchecked) match { case (i @ (_: Int)) => true case _ => false } ``` In the first case, we should identify `xs` as being used; while the second example shows that pattern bindings in `match` passed into `withFilter` should never yield warnings. I was too lazy to rename `AtBoundIdentifierAttachment`, but I think it could do with a name that more clearly signals the intent ("exempt from usage check"), rather than the mechanism ("at binding") used to indicate a pattern var should be exempt from (un)usage checking. Reworks 3e28d97e. --- project/ScalaOptionParser.scala | 4 ++-- .../scala/tools/nsc/typechecker/TypeDiagnostics.scala | 7 ++----- .../scala/reflect/internal/StdAttachments.scala | 10 ++++++---- src/reflect/scala/reflect/internal/TreeGen.scala | 2 +- test/files/pos/t10763.flags | 1 + test/files/pos/t10763.scala | 7 +++++++ 6 files changed, 19 insertions(+), 12 deletions(-) create mode 100644 test/files/pos/t10763.flags create mode 100644 test/files/pos/t10763.scala diff --git a/project/ScalaOptionParser.scala b/project/ScalaOptionParser.scala index 94a92a1acdca..26d75e51be84 100644 --- a/project/ScalaOptionParser.scala +++ b/project/ScalaOptionParser.scala @@ -91,14 +91,14 @@ object ScalaOptionParser { "-Yoverride-objects", "-Yoverride-vars", "-Ypatmat-debug", "-Yno-adapted-args", "-Ypartial-unification", "-Ypos-debug", "-Ypresentation-debug", "-Ypresentation-strict", "-Ypresentation-verbose", "-Yquasiquote-debug", "-Yrangepos", "-Yreify-copypaste", "-Yreify-debug", "-Yrepl-class-based", "-Yrepl-sync", "-Yshow-member-pos", "-Yshow-symkinds", "-Yshow-symowners", "-Yshow-syms", "-Yshow-trees", "-Yshow-trees-compact", "-Yshow-trees-stringified", "-Ytyper-debug", - "-Ywarn-adapted-args", "-Ywarn-dead-code", "-Ywarn-inaccessible", "-Ywarn-infer-any", "-Ywarn-nullary-override", "-Ywarn-nullary-unit", "-Ywarn-numeric-widen", "-Ywarn-unused", "-Ywarn-unused-import", "-Ywarn-value-discard", + "-Ywarn-adapted-args", "-Ywarn-dead-code", "-Ywarn-inaccessible", "-Ywarn-infer-any", "-Ywarn-nullary-override", "-Ywarn-nullary-unit", "-Ywarn-numeric-widen", "-Ywarn-unused-import", "-Ywarn-value-discard", "-deprecation", "-explaintypes", "-feature", "-help", "-no-specialization", "-nobootcp", "-nowarn", "-optimise", "-print", "-unchecked", "-uniqid", "-usejavacp", "-usemanifestcp", "-verbose", "-version") private def stringSettingNames = List("-Xgenerate-phase-graph", "-Xmain-class", "-Xpluginsdir", "-Xshow-class", "-Xshow-object", "-Xsource-reader", "-Ydump-classes", "-Ygen-asmp", "-Ypresentation-log", "-Ypresentation-replay", "-Yrepl-outdir", "-d", "-dependencyfile", "-encoding", "-Xscript") private def pathSettingNames = List("-bootclasspath", "-classpath", "-extdirs", "-javabootclasspath", "-javaextdirs", "-sourcepath", "-toolcp") private val phases = List("all", "parser", "namer", "packageobjects", "typer", "patmat", "superaccessors", "extmethods", "pickler", "refchecks", "uncurry", "tailcalls", "specialize", "explicitouter", "erasure", "posterasure", "fields", "lambdalift", "constructors", "flatten", "mixin", "cleanup", "delambdafy", "icode", "jvm", "terminal") private val phaseSettings = List("-Xprint-icode", "-Ystop-after", "-Yskip", "-Yshow", "-Ystop-before", "-Ybrowse", "-Ylog", 
"-Ycheck", "-Xprint") - private def multiStringSettingNames = List("-Xmacro-settings", "-Xplugin", "-Xplugin-disable", "-Xplugin-require") + private def multiStringSettingNames = List("-Xmacro-settings", "-Xplugin", "-Xplugin-disable", "-Xplugin-require", "-Ywarn-unused") private def intSettingNames = List("-Xmax-classfile-name", "-Xelide-below", "-Ypatmat-exhaust-depth", "-Ypresentation-delay", "-Yrecursion") private def choiceSettingNames = Map[String, List[String]]( "-YclasspathImpl" -> List("flat", "recursive"), diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 1dfdd77e1e0c..da3883d10c66 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -504,7 +504,6 @@ trait TypeDiagnostics { override def traverse(t: Tree): Unit = if (!t.isErrorTyped) { val sym = t.symbol - var bail = false t match { case m: MemberDef if qualifies(sym) => t match { @@ -513,7 +512,7 @@ trait TypeDiagnostics { case DefDef(mods@_, name@_, tparams@_, vparamss, tpt@_, rhs@_) if !sym.isAbstract && !sym.isDeprecated && !sym.isMacro => if (sym.isPrimaryConstructor) for (cpa <- sym.owner.constrParamAccessors if cpa.isPrivateLocal) params += cpa - else if (sym.isSynthetic && sym.isImplicit) bail = true + else if (sym.isSynthetic && sym.isImplicit) return else if (!sym.isConstructor) for (vs <- vparamss) params ++= vs.map(_.symbol) defnTrees += m @@ -527,11 +526,9 @@ trait TypeDiagnostics { } case _: RefTree if sym ne null => targets += sym case Assign(lhs, _) if lhs.symbol != null => setVars += lhs.symbol - case Apply(Select(_, nme.withFilter), Function(vparams, _) :: Nil) => - bail = vparams.exists(_.name startsWith nme.CHECK_IF_REFUTABLE_STRING) case _ => } - if (bail) return + if (t.tpe ne null) { for (tp <- t.tpe if !treeTypes(tp)) { // Include references to private/local aliases (which might otherwise refer to an enclosing class) diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala index 72d0e2bdd40c..3c2126813abb 100644 --- a/src/reflect/scala/reflect/internal/StdAttachments.scala +++ b/src/reflect/scala/reflect/internal/StdAttachments.scala @@ -63,10 +63,12 @@ trait StdAttachments { */ case object BackquotedIdentifierAttachment extends PlainAttachment - /** Indicates that the host `Ident` has been created from a pattern2 binding, `case x @ p`. - * In the absence of named parameters in patterns, allows nuanced warnings for unused variables. - * Hence, `case X(x = _) =>` would not warn; for now, `case X(x @ _) =>` is documentary if x is unused. - */ + /** A pattern binding exempt from unused warning. + * + * Its host `Ident` has been created from a pattern2 binding, `case x @ p`. + * In the absence of named parameters in patterns, allows nuanced warnings for unused variables. + * Hence, `case X(x = _) =>` would not warn; for now, `case X(x @ _) =>` is documentary if x is unused. + */ case object AtBoundIdentifierAttachment extends PlainAttachment /** Indicates that a `ValDef` was synthesized from a pattern definition, `val P(x)`. 
diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index 3ca58a7e7b1d..e69829baea69 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -817,7 +817,7 @@ abstract class TreeGen { if (treeInfo.isVarPatternDeep(pat)) rhs else { val cases = List( - CaseDef(pat.duplicate, EmptyTree, Literal(Constant(true))), + CaseDef(pat.duplicate updateAttachment AtBoundIdentifierAttachment, EmptyTree, Literal(Constant(true))), CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false))) ) val visitor = mkVisitor(cases, checkExhaustive = false, nme.CHECK_IF_REFUTABLE_STRING) diff --git a/test/files/pos/t10763.flags b/test/files/pos/t10763.flags new file mode 100644 index 000000000000..ae548523beb5 --- /dev/null +++ b/test/files/pos/t10763.flags @@ -0,0 +1 @@ +-Xfatal-warnings -Xlint:unused diff --git a/test/files/pos/t10763.scala b/test/files/pos/t10763.scala new file mode 100644 index 000000000000..42c45d2d3dd9 --- /dev/null +++ b/test/files/pos/t10763.scala @@ -0,0 +1,7 @@ +class Test { + def xsUnused = { + val xs: List[Int] = List(0) + + for (refute@1 <- xs) {} + } +} From 2ea7b2e3f53851d1fe09194e27639a8ba7992dcc Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 9 Mar 2018 13:22:14 +0100 Subject: [PATCH 1264/2793] Trigger scala-dist after a successful build --- .travis.yml | 9 +++++---- scripts/common | 20 ++++++++++++++++++++ 2 files changed, 25 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index 5550d4fa4f82..501da1d55ec0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -73,7 +73,9 @@ jobs: - script: testStability - stage: publish # note that it's important that this runs on a fresh VM, so we're sure to resolve scala from artifactory, and not out of local cache (which was overwritten during bootstrap, somewhat unorthodoxly) - script: if [ "$publishToSonatype" = "yes" ]; then publishSonatype; fi + script: + - if [ "$publishToSonatype" = "yes" ]; then publishSonatype; fi + - triggerScalaDist # using bash conditional, because a travis condition on the stage won't work: # the `env` function only picks stuff up from yaml, not variables set in bash, # and we can't supply more env vars using a custom build from the web @@ -85,13 +87,12 @@ jobs: # travis encrypt "PRIV_KEY_SECRET=`cat ./secret`" env: global: - - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0TNVaPIXjF8bA8KyNat9xGNzhnWm2/2BMaWpKBJWRF7Jb+zHhijMYCJEbkMtoiE5R/mY=" + - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0TNVaPIXjF8bA8KyNat9xGNzhnWm2/2BMaWpKBJWRF7Jb+zHhijMYCJEbkMtoiE5R/mY=" # PRIV_KEY_SECRET, for scripts/travis-publish-spec.sh - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS - secure: "feE5A8mYNpkNQKVwCj3aXrwjVrJWh/4ENpRfFlr2HOD9ORk1GORD5Yq907WZd+dTkYK54Lh1gA+qHOCIDgJHbi9ZLU+kjzEjtYKF6lQy6Wb0LI8smTOnAA6IWVVYifiXw8d66MI2MKZb2jjGeIzy8Q00SZjLhEGjLyTeCIB88Ws=" # SONA_USER - secure: "ek3As5q2tL8UBXcxSBbv4v5YgsoPD41SCzPOSu72kzfbngyxgQxrcziU5pIM+Lib9KaWex7hVVWNL38tMyDbu+0OpDv8bPjMujzlDx5I2pJUfuOJo7QRYsJE1nsXcY4cA72cCLfbRcLEkvtDAhcdLSaUOqlyQe5BY4X4fY5eoPA=" # SONA_PASS - secure: 
"dbAvl6KEuLwZ0MVQPZihFsPzCdiLbX0EFk3so+hcfEbksrmLQ1tn4X5ZM7Wy1UDR8uN9lxngEwHch7a7lKqpugzmXMew9Wnikr9WBWbJT77Z+XJ/jHI6YuiCRpRo+nvxXGp9Ry80tSIgx5eju0J83IaJL41BWlBkvyAd7YAHORI=" # GPG_SUBKEY_SECRET - -# ^^^ set PRIV_KEY_SECRET to password used to encrypt spec/id_dsa_travis.enc + - secure: "ee0z/1jehBjFa2M2JlBHRjeo6OEn/zmVl72ukBP1ISeKqz18Cswc4gDI5tV9RW9SlYFLkIlGsR2qnRCyJ/pqgQLcNdrpsCRFFc79oyLhfEtmPdAHlWfj4RSP68zINRtDdFuJ8iSy8XYP0NaqpVIYpkNdv9I6q7N85ljmMQpHO+U=" # TRAVIS_TOKEN (login with GitHub as lrytz) # using S3 would be simpler, but we want to upload to scala-lang.org diff --git a/scripts/common b/scripts/common index 3be992e557df..d65c954b9854 100644 --- a/scripts/common +++ b/scripts/common @@ -210,3 +210,23 @@ sbtResolve() { 'show update' travis_fold_end resolve } + +#### travis + +triggerScalaDist() { + local jsonTemplate='{ "request": { "branch": "%s", "message": "Scala Dist %s", "config": { "before_install": "export version=%s mode=release scala_sha=%s" } } }' + local json=$(printf "$jsonTemplate" "$TRAVIS_BRANCH" "$SCALA_VER" "$SCALA_VER" "$TRAVIS_COMMIT") + + local curlStatus=$(curl \ + -s -o /dev/null -w "%{http_code}" \ + -H "Travis-API-Version: 3" \ + -H "Authorization: token $TRAVIS_TOKEN" \ + -H "Content-Type: application/json" \ + -d "$json" \ + https://api.travis-ci.org/repo/scala%2Fscala-dist/requests) + + [[ "$curlStatus" == "202" ]] || { + echo "failed to start job" + exit 1 + } +} From 3bc6bab3fcadb00e1a6f92879ac8f5d88fff52c8 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 16 Nov 2017 11:14:49 -0800 Subject: [PATCH 1265/2793] Adapt method values more cannily, part 1 Introduce MethodValue to extract and build trees like `f _`. --- src/compiler/scala/tools/nsc/ast/parser/Parsers.scala | 2 +- .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 3 --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 +- src/reflect/scala/reflect/internal/Trees.scala | 9 +++++++++ .../scala/reflect/runtime/JavaUniverseForce.scala | 1 + 5 files changed, 12 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 93d8542a7857..6df212c4503a 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1745,7 +1745,7 @@ self => } simpleExprRest(app, canApply = true) case USCORE => - atPos(t.pos.start, in.skipToken()) { makeMethodValue(stripParens(t)) } + atPos(t.pos.start, in.skipToken()) { MethodValue(stripParens(t)) } case _ => t } diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index 7866fcf2dc69..396f1c637ee4 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -35,9 +35,6 @@ abstract class TreeBuilder { def repeatedApplication(tpe: Tree): Tree = AppliedTypeTree(rootScalaDot(tpnme.REPEATED_PARAM_CLASS_NAME), List(tpe)) - // represents `expr _`, as specified in Method Values of spec/06-expressions.md - def makeMethodValue(expr: Tree): Tree = Typed(expr, Function(Nil, EmptyTree)) - def makeImportSelector(name: Name, nameOffset: Int): ImportSelector = ImportSelector(name, nameOffset, name, nameOffset) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 72d7f1219965..fb489eccc9f8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ 
b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4634,7 +4634,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case Annotated(_, r) => treesInResult(r) case If(_, t, e) => treesInResult(t) ++ treesInResult(e) case Try(b, catches, _) => treesInResult(b) ++ catches - case Typed(r, Function(Nil, EmptyTree)) => treesInResult(r) // a method value + case MethodValue(r) => treesInResult(r) case Select(qual, name) => treesInResult(qual) case Apply(fun, args) => treesInResult(fun) ++ args.flatMap(treesInResult) case TypeApply(fun, args) => treesInResult(fun) ++ args.flatMap(treesInResult) diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 76787aeafa4f..7b78fca09b58 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -473,6 +473,15 @@ trait Trees extends api.Trees { extends TermTree with TypedApi object Typed extends TypedExtractor + // represents `expr _`, as specified in Method Values of spec/06-expressions.md + object MethodValue { + def apply(expr: Tree): Tree = Typed(expr, Function(Nil, EmptyTree)) + def unapply(tree: Tree): Option[Tree] = tree match { + case Typed(expr, Function(Nil, EmptyTree)) => Some(expr) + case _ => None + } + } + abstract class GenericApply extends TermTree with GenericApplyApi { val fun: Tree val args: List[Tree] diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index 2c05a14604ec..b50eb9814c7d 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -85,6 +85,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.Throw this.New this.Typed + this.MethodValue this.TypeApply this.Apply this.ApplyDynamic From 50b53334a318b9e12476779e165d00c8352e9a10 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 9 Mar 2018 16:30:44 +0100 Subject: [PATCH 1266/2793] travis: source scripts in publish stage... --- .travis.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.travis.yml b/.travis.yml index 501da1d55ec0..85853c1125bf 100644 --- a/.travis.yml +++ b/.travis.yml @@ -74,6 +74,10 @@ jobs: - stage: publish # note that it's important that this runs on a fresh VM, so we're sure to resolve scala from artifactory, and not out of local cache (which was overwritten during bootstrap, somewhat unorthodoxly) script: + - source build/env + - if [ -z "$SCALA_VER" ]; then echo "Environment not propagated. Caching issue?"; cat build/env ; exit 1; fi + - source scripts/common + - source scripts/bootstrap_fun - if [ "$publishToSonatype" = "yes" ]; then publishSonatype; fi - triggerScalaDist # using bash conditional, because a travis condition on the stage won't work: From aaaf9e4421f3639d230d027c217b50b274db0077 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 20 Jan 2018 07:22:18 -0800 Subject: [PATCH 1267/2793] Improve error messages for method references A method is not a value, and thus must somehow be converted to an expression -- unless we're specifically looking for a method. We can either insert an application (to implicit args or empty arg list), or lift to a function by eta-expansion. If those are not possible, we report an error (unstable tree -- could be refined further). 
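A rough sketch of the adaptations described above (hypothetical snippet, not taken from this patch or its tests):

```
class Adaptations {
  def nowMillis(): Long = System.currentTimeMillis()

  val applied: Long      = nowMillis    // an empty application is inserted: nowMillis()
  val lifted: () => Long = nowMillis _  // eta-expansion lifts the method to a function value

  // Where neither adaptation applies -- e.g. a method used as the qualifier of a
  // pattern, which requires a stable identifier -- the compiler now reports an
  // error like "stable identifier required, but Adaptations.this.nowMillis found."
  // val nowMillis.broken = 1
}
```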
Tests for scala/bug#10474, scala/bug#10731 --- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- test/files/neg/t10474.check | 7 +++++++ test/files/neg/t10474.scala | 16 ++++++++++++++++ test/files/neg/t10695.check | 4 ++++ test/files/neg/t10695.scala | 14 ++++++++++++++ test/files/neg/t10731.check | 4 ++++ test/files/neg/t10731.scala | 4 ++++ 7 files changed, 50 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/t10474.check create mode 100644 test/files/neg/t10474.scala create mode 100644 test/files/neg/t10695.check create mode 100644 test/files/neg/t10695.scala create mode 100644 test/files/neg/t10731.check create mode 100644 test/files/neg/t10731.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 72d7f1219965..624934d39c9b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -895,7 +895,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def cantAdapt = if (context.implicitsEnabled) MissingArgsForMethodTpeError(tree, meth) - else setError(tree) + else UnstableTreeError(tree) def emptyApplication: Tree = adapt(typed(Apply(tree, Nil) setPos tree.pos), mode, pt, original) diff --git a/test/files/neg/t10474.check b/test/files/neg/t10474.check new file mode 100644 index 000000000000..d12531ca902d --- /dev/null +++ b/test/files/neg/t10474.check @@ -0,0 +1,7 @@ +t10474.scala:8: error: stable identifier required, but Test.this.Foo found. + case Foo.Bar ⇒ true + ^ +t10474.scala:15: error: stable identifier required, but hrhino.this.Foo found. + val Foo.Crash = ??? + ^ +two errors found diff --git a/test/files/neg/t10474.scala b/test/files/neg/t10474.scala new file mode 100644 index 000000000000..49f8e14839c1 --- /dev/null +++ b/test/files/neg/t10474.scala @@ -0,0 +1,16 @@ + +object Test { + def Foo(a: Int): Char = ??? + + object Bar + + def crash[A](): Boolean = Bar match { + case Foo.Bar ⇒ true + case _ ⇒ false + } +} + +trait hrhino { + def Foo(i: Int) = i + val Foo.Crash = ??? +} diff --git a/test/files/neg/t10695.check b/test/files/neg/t10695.check new file mode 100644 index 000000000000..1ece3a4d9d20 --- /dev/null +++ b/test/files/neg/t10695.check @@ -0,0 +1,4 @@ +t10695.scala:6: error: stable identifier required, but X.raw found. + val node: raw.Node = null + ^ +one error found diff --git a/test/files/neg/t10695.scala b/test/files/neg/t10695.scala new file mode 100644 index 000000000000..580d915615ca --- /dev/null +++ b/test/files/neg/t10695.scala @@ -0,0 +1,14 @@ + +import X._ + +object Main extends App { + + val node: raw.Node = null + + Seq().fold(node)(_ => _) + +} + +object X { + def raw(s: String) = ??? +} diff --git a/test/files/neg/t10731.check b/test/files/neg/t10731.check new file mode 100644 index 000000000000..d5e345c6f341 --- /dev/null +++ b/test/files/neg/t10731.check @@ -0,0 +1,4 @@ +t10731.scala:3: error: stable identifier required, but C.this.eq found. 
+ val eq.a = 1 + ^ +one error found diff --git a/test/files/neg/t10731.scala b/test/files/neg/t10731.scala new file mode 100644 index 000000000000..f7445ebd1076 --- /dev/null +++ b/test/files/neg/t10731.scala @@ -0,0 +1,4 @@ + +class C { + val eq.a = 1 +} From d474de91737a7ddba4ec7d9da19e2599072c063c Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Sat, 10 Mar 2018 07:50:26 +0100 Subject: [PATCH 1268/2793] Travis: benchq webhook --- .travis.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.travis.yml b/.travis.yml index b5e795e31e52..be286b6ed08c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -106,3 +106,6 @@ before_cache: # Cleanup the cached directories to avoid unnecessary cache updates - find $HOME/.ivy2/cache -name "ivydata-*.properties" -print -delete - find $HOME/.sbt -name "*.lock" -print -delete + +notifications: + webhooks: https://scala-ci.typesafe.com/benchq/webhooks/travis From 4fd0629488dcf7219857bfda2c51bae1bc3924d1 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 1 Nov 2017 15:14:52 +1000 Subject: [PATCH 1269/2793] Support multi-release JARs / compile for older platform (JEP 238/247) - Adds a --release option to scalac (ala javac). - Uses this option to look up the right version in multi-release JARs. - By default, the ambient JDK version running the compiler is used as the multi-release version. - No JAR is considered to be multi-release when the compiler is run with JDK version 8. - When running on >=9 with --release of <=8 using older releases, use ct.sym as the source of the platform API rather than jrt://. This contains the Java standard API as-at the old release. --- .../nsc/classpath/DirectoryClassPath.scala | 115 +++++++++++++++--- .../scala/tools/nsc/classpath/FileUtils.scala | 5 +- .../nsc/classpath/PackageNameUtils.scala | 8 +- .../ZipAndJarFileLookupFactory.scala | 15 +-- .../nsc/classpath/ZipArchiveFileLookup.scala | 3 +- .../tools/nsc/settings/ScalaSettings.scala | 8 ++ .../scala/tools/util/PathResolver.scala | 4 +- .../mima-filters/2.12.0.forwards.excludes | 4 + .../reflect/internal/JDK9Reflectors.java | 91 ++++++++++++++ src/reflect/scala/reflect/io/ZipArchive.scala | 70 +++++++---- .../nsc/classpath/JrtClassPathTest.scala | 2 +- .../nsc/classpath/MultiReleaseJarTest.scala | 105 ++++++++++++++++ 12 files changed, 378 insertions(+), 52 deletions(-) create mode 100644 src/reflect/scala/reflect/internal/JDK9Reflectors.java create mode 100644 test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala index 28e025f5a0db..bfbdb1435628 100644 --- a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala @@ -14,6 +14,9 @@ import scala.reflect.io.{AbstractFile, PlainFile, PlainNioFile} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} import FileUtils._ import scala.collection.JavaConverters._ +import scala.collection.immutable +import scala.reflect.internal.JDK9Reflectors +import scala.tools.nsc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames} /** * A trait allowing to look for classpath entries in directories. 
It provides common logic for @@ -125,13 +128,35 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo object JrtClassPath { import java.nio.file._, java.net.URI - def apply(): Option[ClassPath] = { - try { - val fs = FileSystems.getFileSystem(URI.create("jrt:/")) - Some(new JrtClassPath(fs)) - } catch { - case _: ProviderNotFoundException | _: FileSystemNotFoundException => - None + def apply(release: Option[String]): Option[ClassPath] = { + import scala.util.Properties._ + if (!isJavaAtLeast("9")) None + else { + // TODO escalate errors once we're sure they are fatal + // I'm hesitant to do this immediately, because -release will still work for multi-release JARs + // even if we're running on a JRE or a non OpenJDK JDK where ct.sym is unavailable. + // + // Longer term we'd like an official API for this in the JDK + // Discussion: http://mail.openjdk.java.net/pipermail/compiler-dev/2018-March/thread.html#11738 + + val currentMajorVersion: Int = JDK9Reflectors.runtimeVersionMajor(JDK9Reflectors.runtimeVersion()).intValue() + release match { + case Some(v) if v.toInt < currentMajorVersion => + try { + val ctSym = Paths.get(javaHome).resolve("lib").resolve("ct.sym") + if (Files.notExists(ctSym)) None + else Some(new CtSymClassPath(ctSym, v.toInt)) + } catch { + case _: Throwable => None + } + case _ => + try { + val fs = FileSystems.getFileSystem(URI.create("jrt:/")) + Some(new JrtClassPath(fs)) + } catch { + case _: ProviderNotFoundException | _: FileSystemNotFoundException => None + } + } } } } @@ -161,11 +186,7 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No /** Empty string represents root package */ override private[nsc] def hasPackage(pkg: String) = packageToModuleBases.contains(pkg) override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = { - def matches(packageDottedName: String) = - if (packageDottedName.contains(".")) - packageOf(packageDottedName) == inPackage - else inPackage == "" - packageToModuleBases.keysIterator.filter(matches).map(PackageEntryImpl(_)).toVector + packageToModuleBases.keysIterator.filter(pack => packageContains(inPackage, pack)).map(PackageEntryImpl(_)).toVector } private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = { if (inPackage == "") Nil @@ -188,8 +209,8 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No def findClassFile(className: String): Option[AbstractFile] = { if (!className.contains(".")) None else { - val inPackage = packageOf(className) - packageToModuleBases.getOrElse(inPackage, Nil).iterator.flatMap{x => + val (inPackage, _) = separatePkgAndClassNames(className) + packageToModuleBases.getOrElse(inPackage, Nil).iterator.flatMap { x => val file = x.resolve(className.replace('.', '/') + ".class") if (Files.exists(file)) new scala.reflect.io.PlainNioFile(file) :: Nil else Nil }.take(1).toList.headOption @@ -199,6 +220,72 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No dottedClassName.substring(0, dottedClassName.lastIndexOf(".")) } +/** + * Implementation `ClassPath` based on the $JAVA_HOME/lib/ct.sym backing http://openjdk.java.net/jeps/247 + */ +final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths { + import java.nio.file.Path, java.nio.file._ + type F = Path + private val javaHome = System.getProperty("java.home") + private val javaSpecVersion = scala.util.Properties.javaSpecVersion + private val fileSystem: FileSystem = 
FileSystems.newFileSystem(ctSym, null) + private val root: Path = fileSystem.getRootDirectories.iterator().next + private val roots = Files.newDirectoryStream(root).iterator().asScala.toList + + // http://mail.openjdk.java.net/pipermail/compiler-dev/2018-March/011737.html + private def codeFor(major: Int): String = if (major < 10) major.toString else ('A' + (major - 10)).toChar.toString + + private val releaseCode: String = codeFor(release) + private def fileNameMatchesRelease(fileName: String) = !fileName.contains("-") && fileName.contains(releaseCode) + private val subset: List[Path] = roots.filter(root => fileNameMatchesRelease(root.getFileName.toString)) + + // e.g. "java.lang" -> Seq(/876/java/lang, /87/java/lang, /8/java/lang)) + private val packageIndex: scala.collection.Map[String, Seq[Path]] = { + val index = collection.mutable.AnyRefMap[String, collection.mutable.ListBuffer[Path]]() + subset.foreach(root => Files.walk(root).iterator().asScala.filter(Files.isDirectory(_)).foreach{ + p => + if (p.getNameCount > 1) { + val p1 = if (scala.util.Properties.isJavaAtLeast("9")) p.subpath(1, p.getNameCount) else p + val packageDotted = p1.toString.replace('/', '.') + index.getOrElseUpdate(packageDotted, new collection.mutable.ListBuffer) += p + } + }) + index + } + + /** Empty string represents root package */ + override private[nsc] def hasPackage(pkg: String) = packageIndex.contains(pkg) + override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = { + packageIndex.keysIterator.filter(pack => packageContains(inPackage, pack)).map(PackageEntryImpl(_)).toVector + } + private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = { + if (inPackage == "") Nil + else { + packageIndex.getOrElse(inPackage, Nil).flatMap(x => + Files.list(x).iterator().asScala.filter(_.getFileName.toString.endsWith(".sig"))).map(x => + ClassFileEntryImpl(new PlainNioFile(x))).toVector + } + } + + override private[nsc] def list(inPackage: String): ClassPathEntries = + if (inPackage == "") ClassPathEntries(packages(inPackage), Nil) + else ClassPathEntries(packages(inPackage), classes(inPackage)) + + def asURLs: Seq[URL] = Nil + def asClassPathStrings: Seq[String] = Nil + + def findClassFile(className: String): Option[AbstractFile] = { + if (!className.contains(".")) None + else { + val (inPackage, classSimpleName) = separatePkgAndClassNames(className) + packageIndex.getOrElse(inPackage, Nil).iterator.flatMap{x => + val file = x.resolve(classSimpleName + ".sig") + if (Files.exists(file)) new scala.reflect.io.PlainNioFile(file) :: Nil else Nil + }.take(1).toList.headOption + } + } +} + case class DirectoryClassPath(dir: File) extends JFileDirectoryLookup[ClassFileEntryImpl] with NoSourcePaths { override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl diff --git a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala index 6b8dee627355..e32ee5015d69 100644 --- a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala +++ b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala @@ -37,6 +37,7 @@ object FileUtils { private val SUFFIX_CLASS = ".class" private val SUFFIX_SCALA = ".scala" private val SUFFIX_JAVA = ".java" + private val SUFFIX_SIG = ".sig" def stripSourceExtension(fileName: String): String = { if (endsScala(fileName)) stripClassExtension(fileName) @@ -49,7 +50,7 @@ object FileUtils { @inline private def ends (filename:String, suffix:String) = filename.endsWith(suffix) 
&& filename.length > suffix.length def endsClass(fileName: String): Boolean = - ends (fileName, SUFFIX_CLASS) + ends (fileName, SUFFIX_CLASS) || fileName.endsWith(SUFFIX_SIG) def endsScalaOrJava(fileName: String): Boolean = endsScala(fileName) || endsJava(fileName) @@ -61,7 +62,7 @@ object FileUtils { ends (fileName, SUFFIX_SCALA) def stripClassExtension(fileName: String): String = - fileName.substring(0, fileName.length - 6) // equivalent of fileName.length - SUFFIX_CLASS.length + fileName.substring(0, fileName.lastIndexOf('.')) def stripJavaExtension(fileName: String): String = fileName.substring(0, fileName.length - 5) // equivalent of fileName.length - SUFFIX_JAVA.length diff --git a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala index 39b0d7813559..cea556f9eb0f 100644 --- a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala +++ b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala @@ -14,7 +14,7 @@ object PackageNameUtils { * @param fullClassName full class name with package * @return (package, simple class name) */ - def separatePkgAndClassNames(fullClassName: String): (String, String) = { + @inline def separatePkgAndClassNames(fullClassName: String): (String, String) = { val lastDotIndex = fullClassName.lastIndexOf('.') if (lastDotIndex == -1) (RootPackage, fullClassName) @@ -23,4 +23,10 @@ object PackageNameUtils { } def packagePrefix(inPackage: String): String = if (inPackage == RootPackage) "" else inPackage + "." + + def packageContains(inPackage: String, packageDottedName: String) = { + if (packageDottedName.contains(".")) + packageDottedName.startsWith(inPackage) && packageDottedName.lastIndexOf('.') == inPackage.length + else inPackage == "" + } } diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 4f4b8ace77ca..45bd0111316a 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -23,14 +23,14 @@ sealed trait ZipAndJarFileLookupFactory { private val cache = new FileBasedCache[ClassPath] def create(zipFile: AbstractFile, settings: Settings): ClassPath = { - if (settings.YdisableFlatCpCaching || zipFile.file == null) createForZipFile(zipFile) + if (settings.YdisableFlatCpCaching || zipFile.file == null) createForZipFile(zipFile, Option(settings.release.value).filter(_ != "")) else createUsingCache(zipFile, settings) } - protected def createForZipFile(zipFile: AbstractFile): ClassPath + protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath private def createUsingCache(zipFile: AbstractFile, settings: Settings): ClassPath = { - cache.getOrCreate(List(zipFile.file.toPath), () => createForZipFile(zipFile)) + cache.getOrCreate(List(zipFile.file.toPath), () => createForZipFile(zipFile, Option(settings.release.value).filter(_ != ""))) } } @@ -39,7 +39,7 @@ sealed trait ZipAndJarFileLookupFactory { * It should be the only way of creating them as it provides caching. 
*/ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { - private case class ZipArchiveClassPath(zipFile: File) + private case class ZipArchiveClassPath(zipFile: File, override val release: Option[String]) extends ZipArchiveFileLookup[ClassFileEntryImpl] with NoSourcePaths { @@ -143,9 +143,9 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { case class PackageInfo(packageName: String, subpackages: List[AbstractFile]) } - override protected def createForZipFile(zipFile: AbstractFile): ClassPath = + override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = if (zipFile.file == null) createWithoutUnderlyingFile(zipFile) - else ZipArchiveClassPath(zipFile.file) + else ZipArchiveClassPath(zipFile.file, release) private def createWithoutUnderlyingFile(zipFile: AbstractFile) = zipFile match { case manifestRes: ManifestResources => @@ -164,6 +164,7 @@ object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { private case class ZipArchiveSourcePath(zipFile: File) extends ZipArchiveFileLookup[SourceFileEntryImpl] with NoClassPaths { + def release: Option[String] = None override def asSourcePathString: String = asClassPathString @@ -173,7 +174,7 @@ object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource } - override protected def createForZipFile(zipFile: AbstractFile): ClassPath = ZipArchiveSourcePath(zipFile.file) + override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = ZipArchiveSourcePath(zipFile.file) } final class FileBasedCache[T] { diff --git a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala index 0fbb6342a35a..a433eacaae55 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala @@ -18,13 +18,14 @@ import scala.tools.nsc.util.{ClassPath, ClassRepresentation} */ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPath { val zipFile: File + def release: Option[String] assert(zipFile != null, "Zip file in ZipArchiveFileLookup cannot be null") override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL) override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath) - private val archive = new FileZipArchive(zipFile) + private val archive = new FileZipArchive(zipFile, release) override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = { val prefix = PackageNameUtils.packagePrefix(inPackage) diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index da9423c4d2ad..3132dfc2c661 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -79,6 +79,14 @@ trait ScalaSettings extends AbsScalaSettings domain = languageFeatures ) } + val release = StringSetting("-release", "", "Compile for a specific version of the Java platform. 
Supported targets: 6, 7, 8, 9", "").withPostSetHook { (value: StringSetting) => + if (value.value != "" && !scala.util.Properties.isJavaAtLeast("9")) { + errorFn.apply("-release is only supported on Java 9 and higher") + } else { + // TODO validate numeric value + // TODO validate release <= java.specification.version + } + } /* * The previous "-source" option is intended to be used mainly diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index f845656980b3..0531a9938b89 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -232,9 +232,11 @@ final class PathResolver(settings: Settings) { import classPathFactory._ + private def release: Option[String] = Option(settings.release.value).filter(_ != "") + // Assemble the elements! def basis = List[Traversable[ClassPath]]( - JrtClassPath.apply(), // 0. The Java 9 classpath (backed by the jrt:/ virtual system, if available) + JrtClassPath.apply(release), // 0. The Java 9 classpath (backed by the jrt:/ virtual system, if available) classesInPath(javaBootClassPath), // 1. The Java bootstrap class path. contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path. classesInExpandedPath(javaUserClassPath), // 3. The Java application class path. diff --git a/src/reflect/mima-filters/2.12.0.forwards.excludes b/src/reflect/mima-filters/2.12.0.forwards.excludes index af2a7900bd16..eaf76f7a435f 100644 --- a/src/reflect/mima-filters/2.12.0.forwards.excludes +++ b/src/reflect/mima-filters/2.12.0.forwards.excludes @@ -16,3 +16,7 @@ ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.FileZipArchive$Lea ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.exists") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.Settings.isScala213") + +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.FileZipArchive.this") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.this") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.getDir") \ No newline at end of file diff --git a/src/reflect/scala/reflect/internal/JDK9Reflectors.java b/src/reflect/scala/reflect/internal/JDK9Reflectors.java new file mode 100644 index 000000000000..be06356c71b6 --- /dev/null +++ b/src/reflect/scala/reflect/internal/JDK9Reflectors.java @@ -0,0 +1,91 @@ +package scala.reflect.internal; + +import java.io.IOException; +import java.lang.invoke.MethodHandle; +import java.lang.invoke.MethodHandles; +import java.lang.invoke.MethodType; +import java.util.jar.JarFile; + +public final class JDK9Reflectors { + private static final MethodHandle RUNTIME_VERSION_PARSE; + private static final MethodHandle RUNTIME_VERSION; + private static final MethodHandle RUNTIME_VERSION_MAJOR; + private static final MethodHandle NEW_JAR_FILE; + + static { + RUNTIME_VERSION_PARSE = lookupRuntimeVersionParse(); + RUNTIME_VERSION = lookupRuntimeVersion(); + RUNTIME_VERSION_MAJOR = lookupRuntimeVersionMajor(); + NEW_JAR_FILE = lookupNewJarFile(); + } + + public static /*java.lang.Runtime.Version*/ Object runtimeVersionParse(String string) { + try { + return RUNTIME_VERSION_PARSE == null ? null : RUNTIME_VERSION_PARSE.invoke(string); + } catch (Throwable t) { + return null; + } + } + + public static /*java.lang.Runtime.Version*/ Object runtimeVersion() { + try { + return RUNTIME_VERSION == null ? 
null : RUNTIME_VERSION.invoke(); + } catch (Throwable t) { + return null; + } + } + + public static /*java.lang.Runtime.Version*/ Integer runtimeVersionMajor(/*java.lang.Runtime.Version*/ Object version) { + try { + return RUNTIME_VERSION_MAJOR == null ? null : (Integer) (int) RUNTIME_VERSION_MAJOR.invoke(version); + } catch (Throwable t) { + return null; + } + } + + public static JarFile newJarFile(java.io.File file, boolean verify, int mode, /*java.lang.Runtime.Version*/ Object version) throws IOException { + try { + if (version == null) return new JarFile(file, verify, mode); + else { + return (JarFile) NEW_JAR_FILE.invoke(file, verify, mode, version); + } + } catch (IOException | IllegalArgumentException | SecurityException ex) { + throw ex; + } catch (Throwable t) { + throw new RuntimeException(t); + } + + } + + private static MethodHandle lookupRuntimeVersionParse() { + try { + return MethodHandles.lookup().findStatic(runtimeVersionClass(), "parse", MethodType.methodType(runtimeVersionClass(), String.class)); + } catch (Throwable t) { + return null; + } + } + private static MethodHandle lookupRuntimeVersion() { + try { + return MethodHandles.lookup().findStatic(Class.forName("java.lang.Runtime"), "version", MethodType.methodType(runtimeVersionClass())); + } catch (Throwable t) { + return null; + } + } + private static MethodHandle lookupRuntimeVersionMajor() { + try { + return MethodHandles.lookup().findVirtual(runtimeVersionClass(), "major", MethodType.methodType(Integer.TYPE)); + } catch (Throwable t) { + return null; + } + } + private static MethodHandle lookupNewJarFile() { + try { + return MethodHandles.lookup().findConstructor(java.util.jar.JarFile.class, MethodType.methodType(void.class, java.io.File.class, java.lang.Boolean.TYPE, Integer.TYPE, Class.forName("java.lang.Runtime$Version"))); + } catch (Throwable t) { + return null; + } + } + private static Class runtimeVersionClass() throws ClassNotFoundException { + return Class.forName("java.lang.Runtime$Version"); + } +} diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 2bd2bc1da590..4ceb2cd07e41 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -8,13 +8,15 @@ package reflect package io import java.net.URL -import java.io.{ IOException, InputStream, ByteArrayInputStream, FilterInputStream } -import java.io.{ File => JFile } -import java.util.zip.{ ZipEntry, ZipFile, ZipInputStream } +import java.io.{ByteArrayInputStream, FilterInputStream, IOException, InputStream} +import java.io.{File => JFile} +import java.util.zip.{ZipEntry, ZipFile, ZipInputStream} import java.util.jar.Manifest + import scala.collection.mutable import scala.collection.JavaConverters._ import scala.annotation.tailrec +import scala.reflect.internal.JDK9Reflectors /** An abstraction for zip files and streams. Everything is written the way * it is for performance: we come through here a lot on every run. 
Be careful @@ -63,8 +65,9 @@ object ZipArchive { } import ZipArchive._ /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -abstract class ZipArchive(override val file: JFile) extends AbstractFile with Equals { +abstract class ZipArchive(override val file: JFile, release: Option[String]) extends AbstractFile with Equals { self => + def this(file: JFile) = this(file, None) override lazy val canonicalPath = super.canonicalPath @@ -117,15 +120,24 @@ abstract class ZipArchive(override val file: JFile) extends AbstractFile with Eq dir } - protected def getDir(dirs: mutable.Map[String, DirEntry], entry: ZipEntry): DirEntry = { - if (entry.isDirectory) ensureDir(dirs, entry.getName, entry) - else ensureDir(dirs, dirName(entry.getName), null) + protected def getDir(dirs: mutable.Map[String, DirEntry], entry: ZipEntry): DirEntry = getDir(dirs, entry, entry.getName) + + protected def getDir(dirs: mutable.Map[String, DirEntry], entry: ZipEntry, entryName: String): DirEntry = { + if (entry.isDirectory) ensureDir(dirs, entryName, entry) + else ensureDir(dirs, dirName(entryName), null) } } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ -final class FileZipArchive(file: JFile) extends ZipArchive(file) { +final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArchive(file, release) { + def this(file: JFile) = this(file, None) private[this] def openZipFile(): ZipFile = try { - new ZipFile(file) + release match { + case Some(r) if file.getName.endsWith(".jar") => + val releaseVersion = JDK9Reflectors.runtimeVersionParse(r) + JDK9Reflectors.newJarFile(file, true, ZipFile.OPEN_READ, releaseVersion) + case _ => + new ZipFile(file) + } } catch { case ioe: IOException => throw new IOException("Error accessing " + file.getPath, ioe) } @@ -153,8 +165,9 @@ final class FileZipArchive(file: JFile) extends ZipArchive(file) { // faster than LazyEntry. private[this] class LeakyEntry( zipFile: ZipFile, - zipEntry: ZipEntry - ) extends Entry(zipEntry.getName) { + zipEntry: ZipEntry, + name: String + ) extends Entry(name) { override def lastModified: Long = zipEntry.getTime override def input: InputStream = zipFile.getInputStream(zipEntry) override def sizeOption: Option[Int] = Some(zipEntry.getSize.toInt) @@ -169,20 +182,27 @@ final class FileZipArchive(file: JFile) extends ZipArchive(file) { try { while (enum.hasMoreElements) { val zipEntry = enum.nextElement - val dir = getDir(dirs, zipEntry) - if (zipEntry.isDirectory) dir - else { - val f = - if (ZipArchive.closeZipFile) - new LazyEntry( - zipEntry.getName(), - zipEntry.getTime(), - zipEntry.getSize().toInt - ) - else - new LeakyEntry(zipFile, zipEntry) - - dir.entries(f.name) = f + if (!zipEntry.getName.startsWith("META-INF/versions/")) { + val zipEntryVersioned = if (release.isDefined) { + // JARFile will return the entry for the corresponding release-dependent version here under META-INF/versions + zipFile.getEntry(zipEntry.getName) + } else zipEntry + // We always use the original entry name here, which corresponds to the class FQN. 
+ val entryName = zipEntry.getName + val dir = getDir(dirs, zipEntry, entryName) + if (!zipEntry.isDirectory) { + val f = + if (ZipArchive.closeZipFile) + new LazyEntry( + entryName, + zipEntry.getTime(), + zipEntry.getSize().toInt + ) + else + new LeakyEntry(zipFile, zipEntryVersioned, entryName) + + dir.entries(f.name) = f + } } } } finally { diff --git a/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala b/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala index 2c3c5134da47..b46677d6d477 100644 --- a/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala +++ b/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala @@ -26,7 +26,7 @@ class JrtClassPathTest { val elements = new ClassPathFactory(settings).classesInPath(resolver.Calculated.javaBootClassPath) AggregateClassPath(elements) } - else JrtClassPath().get + else JrtClassPath(None).get assertEquals(Nil, cp.classes("")) assertTrue(cp.packages("java").toString, cp.packages("java").exists(_.name == "java.lang")) diff --git a/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala b/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala new file mode 100644 index 000000000000..75d4c2d30756 --- /dev/null +++ b/test/junit/scala/tools/nsc/classpath/MultiReleaseJarTest.scala @@ -0,0 +1,105 @@ +package scala.tools.nsc.classpath + +import java.io.ByteArrayOutputStream +import java.nio.file.{FileSystems, Files, Path} +import java.util.jar.Attributes +import java.util.jar.Attributes.Name + +import org.junit.{Assert, Test} + +import scala.tools.nsc.{Global, Settings} +import scala.tools.testing.BytecodeTesting +import scala.util.Properties + +class MultiReleaseJarTest extends BytecodeTesting { + import compiler._ + @Test + def mrJar(): Unit = { + if (!Properties.isJavaAtLeast("9")) { println("skipping mrJar() on old JdK"); return} // TODO test that the compiler warns that --release is unsupported. + + val temp1 = Files.createTempFile("mr-jar-test-", ".jar") + + // TODO test fails if both Global runs look at the same JAR on disk. Caching problem in our classpath implementation? + // val temp2 = temp1 + val temp2 = Files.createTempFile("mr-jar-test-", ".jar") + + try { + def code(newApi: String) = s"package p1; abstract class Versioned { def oldApi: Int; $newApi }" + + val oldC = compileToBytes(code("")).head._2 + val newC = compileToBytes(code("def newApi: Int")).head._2 + List(temp1, temp2).foreach(temp => createZip(temp, List( + "/p1/Versioned.class" -> oldC, + "/META-INF/versions/9/p1/Versioned.class" -> newC, + "/META-INF/MANIFEST.MF" -> createManifest) + )) + + def declsOfC(jarPath: Path, release: String) = { + val settings = new Settings() + settings.usejavacp.value = true + settings.classpath.value = jarPath.toAbsolutePath.toString + val g = new Global(settings) + settings.release.value = release + new g.Run + val decls = g.rootMirror.staticClass("p1.Versioned").info.decls.filterNot(_.isConstructor).map(_.name.toString).toList.sorted + decls + } + + Assert.assertEquals(List("newApi", "oldApi"), declsOfC(temp1, "9")) + Assert.assertEquals(List("oldApi"), declsOfC(temp2, "8")) + } finally + List(temp1, temp2).foreach(Files.deleteIfExists) + } + + @Test + def ctSymTest(): Unit = { + if (!Properties.isJavaAtLeast("9")) { println("skipping mrJar() on old JDK"); return} // TODO test that the compiler warns that --release is unsupported. 
+ + def lookup(className: String, release: String): Boolean = { + val settings = new Settings() + settings.usejavacp.value = true + val g = new Global(settings) + import g._ + settings.release.value = release + new Run + rootMirror.getClassIfDefined(TypeName(className)) != NoSymbol + } + Assert.assertTrue(lookup("java.lang.invoke.LambdaMetafactory", "8")) + Assert.assertFalse(lookup("java.lang.invoke.LambdaMetafactory", "7")) + Assert.assertTrue(lookup("java.lang.invoke.LambdaMetafactory", "9")) + } + + private def createManifest = { + val manifest = new java.util.jar.Manifest() + manifest.getMainAttributes.put(Name.MANIFEST_VERSION, "1.0") + manifest.getMainAttributes.put(new Attributes.Name("Multi-Release"), String.valueOf(true)) + val os = new ByteArrayOutputStream() + manifest.write(os) + val manifestBytes = os.toByteArray + manifestBytes + } + private def createZip(zipLocation: Path, content: List[(String, Array[Byte])]): Unit = { + val env = new java.util.HashMap[String, String]() + Files.deleteIfExists(zipLocation) + env.put("create", String.valueOf(true)) + val fileUri = zipLocation.toUri + val zipUri = new java.net.URI("jar:" + fileUri.getScheme, fileUri.getPath, null) + val zipfs = FileSystems.newFileSystem(zipUri, env) + try { + try { + for ((internalPath, contentBytes) <- content) { + val internalTargetPath = zipfs.getPath(internalPath) + Files.createDirectories(internalTargetPath.getParent) + Files.write(internalTargetPath, contentBytes) + } + } finally { + if (zipfs != null) zipfs.close() + } + } finally { + zipfs.close() + } + } + + + +} From 4ce0f67aae8df5523995b4772469ccce02e6a5d3 Mon Sep 17 00:00:00 2001 From: jvican Date: Fri, 9 Mar 2018 10:51:01 +0100 Subject: [PATCH 1270/2793] Handle escaping characters in URL -> file This bug was discovered in https://github.com/scala/scala/pull/6314#issuecomment-371385148 when caching classloaders for compiler plugins. The issue discovered is unrelated to that PR and is rather a bug in how scalac converts URLs to File in the `AbstractFile.getURL` method. That method was using `getPath` in `java.net.URL` when the returned path gives back a file path with escaped characters. When that incorrect file path is passed through the constructor of `java.io.File`, the construction succeeds but the underlying file is a different one (and doesn't exist). The fix to this bug is to use the safe `toURI()` method, which is correctly used in other parts of the scalac classpath infrastructure (like in the `asURLs` method in `DirectoryClasspath` and `ZipArchiveFileLookup`). The `toURI` method returns the file path with all the special characters unescaped. Fixes https://github.com/scala/bug/issues/10764. 
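To make the failure mode concrete, here is a small, self-contained sketch (an illustration added alongside this patch, not part of it; the object name `UrlToFileDemo` is invented). A file name containing a space is percent-encoded in its `file:` URL, so building a `java.io.File` from `URL.getPath` yields a path that names a non-existent file, while going through `URL.toURI` decodes it back to the real path:

```scala
import java.nio.file.Files

object UrlToFileDemo {
  def main(args: Array[String]): Unit = {
    val dir  = Files.createTempDirectory("url-demo")
    val file = Files.createFile(dir.resolve("this is a file.scala")).toFile
    val url  = file.toURI.toURL                    // file:/.../this%20is%20a%20file.scala

    val viaGetPath = new java.io.File(url.getPath) // path still contains "%20"
    val viaToUri   = new java.io.File(url.toURI)   // decoded back to the original path

    println(viaGetPath.exists())                   // false: the escaped path points at a different file
    println(viaToUri.exists())                     // true
  }
}
```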
--- .../scala/reflect/io/AbstractFile.scala | 2 +- .../scala/reflect/io/AbstractFileSpec.scala | 30 +++++++++++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) create mode 100644 test/junit/scala/reflect/io/AbstractFileSpec.scala diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index e77dd6846c09..5a4c9445c1d5 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -52,7 +52,7 @@ object AbstractFile { */ def getURL(url: URL): AbstractFile = if (url.getProtocol == "file") { - val f = new java.io.File(url.getPath) + val f = new java.io.File(url.toURI) if (f.isDirectory) getDirectory(f) else getFile(f) } else null diff --git a/test/junit/scala/reflect/io/AbstractFileSpec.scala b/test/junit/scala/reflect/io/AbstractFileSpec.scala new file mode 100644 index 000000000000..6440a5cc593e --- /dev/null +++ b/test/junit/scala/reflect/io/AbstractFileSpec.scala @@ -0,0 +1,30 @@ +package scala.reflect.io + +import java.nio.file.Files + +import org.junit.{Assert, Test} +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.testing.TempDir + +@RunWith(classOf[JUnit4]) +class AbstractFileSpec { + @Test + def handleURLEscapedCharacters(): Unit = { + val tempDir = TempDir.createTempDir().toPath + val scalaPath = tempDir.resolve("this is a file?.scala") + Files.createFile(scalaPath) + val scalaFile = scalaPath.toFile + + try { + val fileFromURLPath = new java.io.File(scalaFile.toURI.toURL.getPath) + Assert.assertTrue(!fileFromURLPath.exists()) + val scalacFile = AbstractFile.getURL(scalaFile.toURI.toURL) + Assert.assertTrue(scalacFile.file.exists()) + } finally { + Files.deleteIfExists(scalaPath) + Files.deleteIfExists(tempDir) + } + } +} From 3bbf53a5e0924b1aac72763a02d4ef031b1cfd7d Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 12 Mar 2018 11:24:29 +0100 Subject: [PATCH 1271/2793] Minor cleanups --- .../nsc/classpath/DirectoryClassPath.scala | 32 ++++++++----------- .../nsc/classpath/PackageNameUtils.scala | 5 +++ .../ZipAndJarFileLookupFactory.scala | 4 +-- .../tools/nsc/settings/ScalaSettings.scala | 1 + .../scala/tools/util/PathResolver.scala | 4 +-- .../reflect/internal/JDK9Reflectors.java | 6 ++-- src/reflect/scala/reflect/io/ZipArchive.scala | 23 ++++++------- 7 files changed, 34 insertions(+), 41 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala index bfbdb1435628..5f32fa4359e9 100644 --- a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala @@ -216,8 +216,6 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No }.take(1).toList.headOption } } - private def packageOf(dottedClassName: String): String = - dottedClassName.substring(0, dottedClassName.lastIndexOf(".")) } /** @@ -225,9 +223,7 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No */ final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths { import java.nio.file.Path, java.nio.file._ - type F = Path - private val javaHome = System.getProperty("java.home") - private val javaSpecVersion = scala.util.Properties.javaSpecVersion + private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null) private val root: Path = fileSystem.getRootDirectories.iterator().next private val 
roots = Files.newDirectoryStream(root).iterator().asScala.toList @@ -236,19 +232,17 @@ final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends Clas private def codeFor(major: Int): String = if (major < 10) major.toString else ('A' + (major - 10)).toChar.toString private val releaseCode: String = codeFor(release) - private def fileNameMatchesRelease(fileName: String) = !fileName.contains("-") && fileName.contains(releaseCode) - private val subset: List[Path] = roots.filter(root => fileNameMatchesRelease(root.getFileName.toString)) + private def fileNameMatchesRelease(fileName: String) = !fileName.contains("-") && fileName.contains(releaseCode) // exclude `9-modules` + private val rootsForRelease: List[Path] = roots.filter(root => fileNameMatchesRelease(root.getFileName.toString)) // e.g. "java.lang" -> Seq(/876/java/lang, /87/java/lang, /8/java/lang)) private val packageIndex: scala.collection.Map[String, Seq[Path]] = { val index = collection.mutable.AnyRefMap[String, collection.mutable.ListBuffer[Path]]() - subset.foreach(root => Files.walk(root).iterator().asScala.filter(Files.isDirectory(_)).foreach{ - p => - if (p.getNameCount > 1) { - val p1 = if (scala.util.Properties.isJavaAtLeast("9")) p.subpath(1, p.getNameCount) else p - val packageDotted = p1.toString.replace('/', '.') - index.getOrElseUpdate(packageDotted, new collection.mutable.ListBuffer) += p - } + rootsForRelease.foreach(root => Files.walk(root).iterator().asScala.filter(Files.isDirectory(_)).foreach { p => + if (p.getNameCount > 1) { + val packageDotted = p.subpath(1, p.getNameCount).toString.replace('/', '.') + index.getOrElseUpdate(packageDotted, new collection.mutable.ListBuffer) += p + } }) index } @@ -261,9 +255,9 @@ final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends Clas private[nsc] def classes(inPackage: String): Seq[ClassFileEntry] = { if (inPackage == "") Nil else { - packageIndex.getOrElse(inPackage, Nil).flatMap(x => - Files.list(x).iterator().asScala.filter(_.getFileName.toString.endsWith(".sig"))).map(x => - ClassFileEntryImpl(new PlainNioFile(x))).toVector + val sigFiles = packageIndex.getOrElse(inPackage, Nil).iterator.flatMap(p => + Files.list(p).iterator().asScala.filter(_.getFileName.toString.endsWith(".sig"))) + sigFiles.map(f => ClassFileEntryImpl(new PlainNioFile(f))).toVector } } @@ -278,8 +272,8 @@ final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends Clas if (!className.contains(".")) None else { val (inPackage, classSimpleName) = separatePkgAndClassNames(className) - packageIndex.getOrElse(inPackage, Nil).iterator.flatMap{x => - val file = x.resolve(classSimpleName + ".sig") + packageIndex.getOrElse(inPackage, Nil).iterator.flatMap { p => + val file = p.resolve(classSimpleName + ".sig") if (Files.exists(file)) new scala.reflect.io.PlainNioFile(file) :: Nil else Nil }.take(1).toList.headOption } diff --git a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala index cea556f9eb0f..14ac12e041bf 100644 --- a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala +++ b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala @@ -24,6 +24,11 @@ object PackageNameUtils { def packagePrefix(inPackage: String): String = if (inPackage == RootPackage) "" else inPackage + "." 
+ /** + * `true` if `packageDottedName` is a package directly nested in `inPackage`, for example: + * - `packageContains("scala", "scala.collection")` + * - `packageContains("", "scala")` + */ def packageContains(inPackage: String, packageDottedName: String) = { if (packageDottedName.contains(".")) packageDottedName.startsWith(inPackage) && packageDottedName.lastIndexOf('.') == inPackage.length diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 45bd0111316a..716eeaaa1eaa 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -23,14 +23,14 @@ sealed trait ZipAndJarFileLookupFactory { private val cache = new FileBasedCache[ClassPath] def create(zipFile: AbstractFile, settings: Settings): ClassPath = { - if (settings.YdisableFlatCpCaching || zipFile.file == null) createForZipFile(zipFile, Option(settings.release.value).filter(_ != "")) + if (settings.YdisableFlatCpCaching || zipFile.file == null) createForZipFile(zipFile, settings.releaseValue) else createUsingCache(zipFile, settings) } protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath private def createUsingCache(zipFile: AbstractFile, settings: Settings): ClassPath = { - cache.getOrCreate(List(zipFile.file.toPath), () => createForZipFile(zipFile, Option(settings.release.value).filter(_ != ""))) + cache.getOrCreate(List(zipFile.file.toPath), () => createForZipFile(zipFile, settings.releaseValue)) } } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 3132dfc2c661..7df4ca5144cb 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -87,6 +87,7 @@ trait ScalaSettings extends AbsScalaSettings // TODO validate release <= java.specification.version } } + def releaseValue: Option[String] = Option(release.value).filter(_ != "") /* * The previous "-source" option is intended to be used mainly diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index 0531a9938b89..0aff4460c08e 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -232,11 +232,9 @@ final class PathResolver(settings: Settings) { import classPathFactory._ - private def release: Option[String] = Option(settings.release.value).filter(_ != "") - // Assemble the elements! def basis = List[Traversable[ClassPath]]( - JrtClassPath.apply(release), // 0. The Java 9 classpath (backed by the jrt:/ virtual system, if available) + JrtClassPath.apply(settings.releaseValue), // 0. The Java 9 classpath (backed by the jrt:/ virtual system, if available) classesInPath(javaBootClassPath), // 1. The Java bootstrap class path. contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path. classesInExpandedPath(javaUserClassPath), // 3. The Java application class path. 
diff --git a/src/reflect/scala/reflect/internal/JDK9Reflectors.java b/src/reflect/scala/reflect/internal/JDK9Reflectors.java index be06356c71b6..6112cbaf062c 100644 --- a/src/reflect/scala/reflect/internal/JDK9Reflectors.java +++ b/src/reflect/scala/reflect/internal/JDK9Reflectors.java @@ -47,7 +47,7 @@ public static JarFile newJarFile(java.io.File file, boolean verify, int mode, /* try { if (version == null) return new JarFile(file, verify, mode); else { - return (JarFile) NEW_JAR_FILE.invoke(file, verify, mode, version); + return NEW_JAR_FILE == null ? null : (JarFile) NEW_JAR_FILE.invoke(file, verify, mode, version); } } catch (IOException | IllegalArgumentException | SecurityException ex) { throw ex; @@ -66,7 +66,7 @@ private static MethodHandle lookupRuntimeVersionParse() { } private static MethodHandle lookupRuntimeVersion() { try { - return MethodHandles.lookup().findStatic(Class.forName("java.lang.Runtime"), "version", MethodType.methodType(runtimeVersionClass())); + return MethodHandles.lookup().findStatic(java.lang.Runtime.class, "version", MethodType.methodType(runtimeVersionClass())); } catch (Throwable t) { return null; } @@ -80,7 +80,7 @@ private static MethodHandle lookupRuntimeVersionMajor() { } private static MethodHandle lookupNewJarFile() { try { - return MethodHandles.lookup().findConstructor(java.util.jar.JarFile.class, MethodType.methodType(void.class, java.io.File.class, java.lang.Boolean.TYPE, Integer.TYPE, Class.forName("java.lang.Runtime$Version"))); + return MethodHandles.lookup().findConstructor(java.util.jar.JarFile.class, MethodType.methodType(void.class, java.io.File.class, java.lang.Boolean.TYPE, Integer.TYPE, runtimeVersionClass())); } catch (Throwable t) { return null; } diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 4ceb2cd07e41..2ccb765d7899 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -120,11 +120,9 @@ abstract class ZipArchive(override val file: JFile, release: Option[String]) ext dir } - protected def getDir(dirs: mutable.Map[String, DirEntry], entry: ZipEntry): DirEntry = getDir(dirs, entry, entry.getName) - - protected def getDir(dirs: mutable.Map[String, DirEntry], entry: ZipEntry, entryName: String): DirEntry = { - if (entry.isDirectory) ensureDir(dirs, entryName, entry) - else ensureDir(dirs, dirName(entryName), null) + protected def getDir(dirs: mutable.Map[String, DirEntry], entry: ZipEntry): DirEntry = { + if (entry.isDirectory) ensureDir(dirs, entry.getName, entry) + else ensureDir(dirs, dirName(entry.getName), null) } } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ @@ -150,7 +148,7 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch override def lastModified: Long = time // could be stale override def input: InputStream = { val zipFile = openZipFile() - val entry = zipFile.getEntry(name) + val entry = zipFile.getEntry(name) // with `-release`, returns the correct version under META-INF/versions val delegate = zipFile.getInputStream(entry) new FilterInputStream(delegate) { override def close(): Unit = { zipFile.close() } @@ -187,19 +185,16 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch // JARFile will return the entry for the corresponding release-dependent version here under META-INF/versions zipFile.getEntry(zipEntry.getName) } else zipEntry - // We always use the 
original entry name here, which corresponds to the class FQN. - val entryName = zipEntry.getName - val dir = getDir(dirs, zipEntry, entryName) if (!zipEntry.isDirectory) { + val dir = getDir(dirs, zipEntry) val f = if (ZipArchive.closeZipFile) new LazyEntry( - entryName, - zipEntry.getTime(), - zipEntry.getSize().toInt - ) + zipEntry.getName, + zipEntry.getTime, + zipEntry.getSize.toInt) else - new LeakyEntry(zipFile, zipEntryVersioned, entryName) + new LeakyEntry(zipFile, zipEntryVersioned, zipEntry.getName) dir.entries(f.name) = f } From 767ea02f4104e95b0c7fa759568cdb82b17a7228 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 12 Mar 2018 21:51:37 +1000 Subject: [PATCH 1272/2793] Disable macro/plugin class loader caching, by default And change the relevant settings to be evolvable into a policy selection rather than just a on/off toggle. --- src/compiler/scala/tools/nsc/plugins/Plugins.scala | 2 +- .../scala/tools/nsc/settings/ScalaSettings.scala | 14 ++++++++++++-- .../scala/tools/nsc/typechecker/Macros.scala | 2 +- 3 files changed, 14 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 7e82dbe04710..021d9e48244e 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -31,7 +31,7 @@ trait Plugins { global: Global => def injectDefault(s: String) = if (s.isEmpty) Defaults.scalaPluginPath else s asPath(settings.pluginsDir.value) map injectDefault map Path.apply } - val maybes = Plugin.loadAllFrom(paths, dirs, settings.disable.value, settings.YdisablePluginsClassLoaderCaching.value) + val maybes = Plugin.loadAllFrom(paths, dirs, settings.disable.value, settings.YcachePluginClassLoader.value == settings.CachePolicy.None.name) val (goods, errors) = maybes partition (_.isSuccess) // Explicit parameterization of recover to avoid -Xlint warning about inferred Any errors foreach (_.recover[Any] { diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index da9423c4d2ad..2b80fafdd753 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -220,8 +220,8 @@ trait ScalaSettings extends AbsScalaSettings val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "") val YmethodInfer = BooleanSetting ("-Yinfer-argument-types", "Infer types for arguments of overridden methods.") val YdisableFlatCpCaching = BooleanSetting ("-YdisableFlatCpCaching", "Do not cache flat classpath representation of classpath elements from jars across compiler instances.") - val YdisablePluginsClassLoaderCaching = BooleanSetting ("-YdisablePluginsClassLoaderCaching", "Do not cache classloaders for compiler plugins that are dynamically loaded.") - val YdisableMacrosClassLoaderCaching = BooleanSetting ("-YdisableMacrosClassLoaderCaching", "Do not cache classloaders for macros that are dynamically loaded.") + val YcachePluginClassLoader = CachePolicy.setting("plugin", "compiler plugins") + val YcacheMacroClassLoader = CachePolicy.setting("macro", "macros") val YpartialUnification = BooleanSetting ("-Ypartial-unification", "Enable partial unification in type constructor inference") val Yvirtpatmat = BooleanSetting ("-Yvirtpatmat", "Enable pattern matcher virtualization") @@ -233,6 +233,16 @@ trait 
ScalaSettings extends AbsScalaSettings val YjarCompressionLevel = IntSetting("-Yjar-compression-level", "compression level to use when writing jar files", Deflater.DEFAULT_COMPRESSION, Some((Deflater.DEFAULT_COMPRESSION,Deflater.BEST_COMPRESSION)), (x: String) => None) + sealed abstract class CachePolicy(val name: String, val help: String) + object CachePolicy { + def setting(style: String, styleLong: String) = ChoiceSetting(s"-Ycache-$style-class-loader", "policy", s"Policy for caching class loaders for $styleLong that are dynamically loaded.", values.map(_.name), None.name, values.map(_.help)) + object None extends CachePolicy("none", "Don't cache class loader") + object LastModified extends CachePolicy("last-modified", "Cache class loader, using file last-modified time to invalidate") + // TODO Jorge to add new policy. Think about whether there is a benefit to the user on offering this as a separate policy or unifying with the previous one. + // object ZipMetadata extends CachePolicy("zip-metadata", "Cache classloade, using file last-modified time, then ZIP file metadata to invalidate") + def values: List[CachePolicy] = List(None, LastModified) + } + object optChoices extends MultiChoiceEnumeration { val unreachableCode = Choice("unreachable-code", "Eliminate unreachable code, exception handlers guarding no instructions, redundant metadata (debug information, line numbers).") val simplifyJumps = Choice("simplify-jumps", "Simplify branching instructions, eliminate unnecessary ones.") diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 683c4f4c42f7..e5dceb0a477b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -72,7 +72,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { import scala.tools.nsc.io.Jar import scala.reflect.io.{AbstractFile, Path} val locations = classpath.map(u => Path(AbstractFile.getURL(u).file)) - val disableCache = settings.YdisableMacrosClassLoaderCaching.value + val disableCache = settings.YcacheMacroClassLoader.value == settings.CachePolicy.None.name if (disableCache || locations.exists(!Jar.isJarOrZip(_))) { if (disableCache) macroLogVerbose("macro classloader: caching is disabled by the user.") else { From b9be25c6607c9b4af82dd19af9ba993575e4710c Mon Sep 17 00:00:00 2001 From: Piotr Kukielka Date: Tue, 13 Feb 2018 11:40:09 +0100 Subject: [PATCH 1273/2793] Reduce allocation and CPU usage in distinct There are three simple optimizations which improves distinct performance: - in case of empty or one-element collection return the same collection - add element to hashmap and check if it was already present in one method call - rewrite for loop to while loop --- src/library/scala/collection/SeqLike.scala | 18 +++++++++++------- test/benchmarks/.gitignore | 1 + 2 files changed, 12 insertions(+), 7 deletions(-) diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala index f4237d245dbe..f15419e54a26 100644 --- a/src/library/scala/collection/SeqLike.scala +++ b/src/library/scala/collection/SeqLike.scala @@ -504,15 +504,19 @@ trait SeqLike[+A, +Repr] extends Any with IterableLike[A, Repr] with GenSeqLike[ * @return A new $coll which contains the first occurrence of every element of this $coll. 
*/ def distinct: Repr = { - val b = newBuilder - val seen = mutable.HashSet[A]() - for (x <- this) { - if (!seen(x)) { - b += x - seen += x + val isImmutable = this.isInstanceOf[immutable.Seq[_]] + if (isImmutable && lengthCompare(1) <= 0) repr + else { + val b = newBuilder + val seen = new mutable.HashSet[A]() + var it = this.iterator + var different = false + while (it.hasNext) { + val next = it.next + if (seen.add(next)) b += next else different = true } + if (different || !isImmutable) b.result() else repr } - b.result() } def patch[B >: A, That](from: Int, patch: GenSeq[B], replaced: Int)(implicit bf: CanBuildFrom[Repr, B, That]): That = { diff --git a/test/benchmarks/.gitignore b/test/benchmarks/.gitignore index ce4d893417db..78304b6b90e9 100644 --- a/test/benchmarks/.gitignore +++ b/test/benchmarks/.gitignore @@ -7,6 +7,7 @@ # standard Eclipse output directory /bin/ +.idea # sbteclipse-generated Eclipse files /.classpath From e00d10652b4e550c209ea6cbf496d95229401cf7 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 14 Feb 2018 15:47:15 +1000 Subject: [PATCH 1274/2793] Support forthcoming unforked mode of partest - Explicitly add test output path to the compiler classpath Rather than assuming it's in the application classpath. - Ensure tests clean up threads - Fork tests that inherently require it or ones that I can't figure out how to make work without it --- src/partest-extras/scala/tools/partest/BytecodeTest.scala | 2 +- src/partest-extras/scala/tools/partest/ReplTest.scala | 7 +++++++ test/files/jvm/methvsfield.javaopts | 1 + test/files/jvm/natives.javaopts | 1 + test/files/jvm/t1600.javaopts | 1 + test/files/jvm/t8689.javaopts | 1 + test/files/presentation/memory-leaks.javaopts | 1 + test/files/run/dynamic-applyDynamic.scala | 2 +- test/files/run/dynamic-applyDynamicNamed.scala | 2 +- test/files/run/dynamic-selectDynamic.scala | 2 +- test/files/run/dynamic-updateDynamic.scala | 2 +- test/files/run/icode-reader-dead-code.scala | 4 ++-- test/files/run/lambda-serialization-gc.javaopts | 1 + test/files/run/lazy-concurrent.scala | 8 ++++++-- test/files/run/reflection-mem-glbs.javaopts | 1 + test/files/run/reflection-mem-tags.javaopts | 1 + test/files/run/reify_copypaste1.javaopts | 1 + test/files/run/shutdownhooks.javaopts | 1 + test/files/run/t10513.scala | 2 +- test/files/run/t10552/Test_2.scala | 2 +- test/files/run/t2318.javaopts | 1 + test/files/run/t4332.scala | 2 +- .../run/t4841-isolate-plugins/t4841-isolate-plugin.scala | 7 ++++--- test/files/run/t5938.scala | 2 +- test/files/run/t7805-repl-i.javaopts | 1 + test/files/run/t7817-tree-gen.scala | 2 +- test/files/run/t8046/Test.scala | 2 +- test/files/run/t8266-octal-interp.javaopts | 1 + test/files/run/t8433.scala | 2 +- 29 files changed, 44 insertions(+), 19 deletions(-) create mode 100644 test/files/jvm/methvsfield.javaopts create mode 100644 test/files/jvm/natives.javaopts create mode 100644 test/files/jvm/t1600.javaopts create mode 100644 test/files/jvm/t8689.javaopts create mode 100644 test/files/presentation/memory-leaks.javaopts create mode 100644 test/files/run/lambda-serialization-gc.javaopts create mode 100644 test/files/run/reflection-mem-glbs.javaopts create mode 100644 test/files/run/reflection-mem-tags.javaopts create mode 100644 test/files/run/reify_copypaste1.javaopts create mode 100644 test/files/run/shutdownhooks.javaopts create mode 100644 test/files/run/t2318.javaopts create mode 100644 test/files/run/t7805-repl-i.javaopts create mode 100644 test/files/run/t8266-octal-interp.javaopts diff --git
a/src/partest-extras/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala index 532dfd2a7300..2056f9d8be65 100644 --- a/src/partest-extras/scala/tools/partest/BytecodeTest.scala +++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala @@ -133,7 +133,7 @@ abstract class BytecodeTest { // logic inspired by scala.tools.util.PathResolver implementation // `Settings` is used to check YdisableFlatCpCaching in ZipArchiveFlatClassPath val factory = new ClassPathFactory(new Settings()) - val containers = factory.classesInExpandedPath(Defaults.javaUserClassPath) + val containers = factory.classesInExpandedPath(sys.props("partest.output") + ":" + Defaults.javaUserClassPath) new AggregateClassPath(containers) } } diff --git a/src/partest-extras/scala/tools/partest/ReplTest.scala b/src/partest-extras/scala/tools/partest/ReplTest.scala index 08a4a3c5f13b..1538dba394fe 100644 --- a/src/partest-extras/scala/tools/partest/ReplTest.scala +++ b/src/partest-extras/scala/tools/partest/ReplTest.scala @@ -19,6 +19,13 @@ abstract class ReplTest extends DirectTest { final override def settings: Settings = { val s = super.settings s.Xnojline.value = true + if (getClass.getClassLoader.getParent != null) { + s.classpath.value = s.classpath.value match { + case "" => testOutput.toString + case s => s + ":" + testOutput.toString + } + s.usejavacp.value = true + } transformSettings(s) } def normalize(s: String) = s diff --git a/test/files/jvm/methvsfield.javaopts b/test/files/jvm/methvsfield.javaopts new file mode 100644 index 000000000000..9740f07b079b --- /dev/null +++ b/test/files/jvm/methvsfield.javaopts @@ -0,0 +1 @@ +-Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/jvm/natives.javaopts b/test/files/jvm/natives.javaopts new file mode 100644 index 000000000000..57b2283c7fb3 --- /dev/null +++ b/test/files/jvm/natives.javaopts @@ -0,0 +1 @@ +-Dneeds.to.fork \ No newline at end of file diff --git a/test/files/jvm/t1600.javaopts b/test/files/jvm/t1600.javaopts new file mode 100644 index 000000000000..f4038254ba29 --- /dev/null +++ b/test/files/jvm/t1600.javaopts @@ -0,0 +1 @@ +-Dneeds.forked.jvm.maybe.because.context.classloader \ No newline at end of file diff --git a/test/files/jvm/t8689.javaopts b/test/files/jvm/t8689.javaopts new file mode 100644 index 000000000000..9740f07b079b --- /dev/null +++ b/test/files/jvm/t8689.javaopts @@ -0,0 +1 @@ +-Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/presentation/memory-leaks.javaopts b/test/files/presentation/memory-leaks.javaopts new file mode 100644 index 000000000000..9740f07b079b --- /dev/null +++ b/test/files/presentation/memory-leaks.javaopts @@ -0,0 +1 @@ +-Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/run/dynamic-applyDynamic.scala b/test/files/run/dynamic-applyDynamic.scala index b06041194c1e..3ce59713ded1 100644 --- a/test/files/run/dynamic-applyDynamic.scala +++ b/test/files/run/dynamic-applyDynamic.scala @@ -3,7 +3,7 @@ import scala.tools.partest.DirectTest object Test extends DirectTest { override def extraSettings: String = - s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path}" + s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path} -cp ${testOutput.path}" override def code = """ object X { diff --git a/test/files/run/dynamic-applyDynamicNamed.scala b/test/files/run/dynamic-applyDynamicNamed.scala index cc59f9058be9..500f44dc06d5 100644 --- 
a/test/files/run/dynamic-applyDynamicNamed.scala +++ b/test/files/run/dynamic-applyDynamicNamed.scala @@ -3,7 +3,7 @@ import scala.tools.partest.DirectTest object Test extends DirectTest { override def extraSettings: String = - s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path}" + s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path} -cp ${testOutput.path}" override def code = """ object X { diff --git a/test/files/run/dynamic-selectDynamic.scala b/test/files/run/dynamic-selectDynamic.scala index bd6c138c5002..937529a505f4 100644 --- a/test/files/run/dynamic-selectDynamic.scala +++ b/test/files/run/dynamic-selectDynamic.scala @@ -3,7 +3,7 @@ import scala.tools.partest.DirectTest object Test extends DirectTest { override def extraSettings: String = - s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path}" + s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path} -cp ${testOutput.path}" override def code = """ object X { diff --git a/test/files/run/dynamic-updateDynamic.scala b/test/files/run/dynamic-updateDynamic.scala index 80fe0ea35f4c..32fc530e7b0d 100644 --- a/test/files/run/dynamic-updateDynamic.scala +++ b/test/files/run/dynamic-updateDynamic.scala @@ -3,7 +3,7 @@ import scala.tools.partest.DirectTest object Test extends DirectTest { override def extraSettings: String = - s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path}" + s"-usejavacp -Xprint-pos -Xprint:typer -Yrangepos -Ystop-after:typer -d ${testOutput.path} -cp ${testOutput.path}" override def code = """ object X { diff --git a/test/files/run/icode-reader-dead-code.scala b/test/files/run/icode-reader-dead-code.scala index cdec3412cdc8..9c4f62289ce1 100644 --- a/test/files/run/icode-reader-dead-code.scala +++ b/test/files/run/icode-reader-dead-code.scala @@ -30,13 +30,13 @@ object Test extends DirectTest { |} """.stripMargin - compileString(newCompiler("-usejavacp"))(aCode) + compileString(newCompiler(s"-usejavacp", "-cp", testOutput.path))(aCode) addDeadCode() // If inlining fails, the compiler will issue an inliner warning that is not present in the // check file - compileString(newCompiler("-usejavacp", "-opt:l:inline", "-opt-inline-from:**"))(bCode) + compileString(newCompiler("-usejavacp", "-cp", testOutput.path, "-opt:l:inline", "-opt-inline-from:**"))(bCode) } def readClass(file: String) = { diff --git a/test/files/run/lambda-serialization-gc.javaopts b/test/files/run/lambda-serialization-gc.javaopts new file mode 100644 index 000000000000..9ecdb8a4dafd --- /dev/null +++ b/test/files/run/lambda-serialization-gc.javaopts @@ -0,0 +1 @@ +-Xmx512m \ No newline at end of file diff --git a/test/files/run/lazy-concurrent.scala b/test/files/run/lazy-concurrent.scala index 4699ed6a151a..d09fc4cd0663 100644 --- a/test/files/run/lazy-concurrent.scala +++ b/test/files/run/lazy-concurrent.scala @@ -7,11 +7,15 @@ object Test { lazy val Singleton = new Singleton var i = 0 + val threads = collection.mutable.ListBuffer[Thread]() while (i < 4) { - new Thread(new Runnable { + val t = new Thread(new Runnable { def run = Singleton.field - }).start + }) + threads += t + t.start i += 1 } + threads.foreach(_.join) } } diff --git a/test/files/run/reflection-mem-glbs.javaopts b/test/files/run/reflection-mem-glbs.javaopts new file mode 100644 index 000000000000..9ecdb8a4dafd --- /dev/null +++ b/test/files/run/reflection-mem-glbs.javaopts @@ -0,0 +1 @@ +-Xmx512m \ 
No newline at end of file diff --git a/test/files/run/reflection-mem-tags.javaopts b/test/files/run/reflection-mem-tags.javaopts new file mode 100644 index 000000000000..9ecdb8a4dafd --- /dev/null +++ b/test/files/run/reflection-mem-tags.javaopts @@ -0,0 +1 @@ +-Xmx512m \ No newline at end of file diff --git a/test/files/run/reify_copypaste1.javaopts b/test/files/run/reify_copypaste1.javaopts new file mode 100644 index 000000000000..9740f07b079b --- /dev/null +++ b/test/files/run/reify_copypaste1.javaopts @@ -0,0 +1 @@ +-Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/run/shutdownhooks.javaopts b/test/files/run/shutdownhooks.javaopts new file mode 100644 index 000000000000..9740f07b079b --- /dev/null +++ b/test/files/run/shutdownhooks.javaopts @@ -0,0 +1 @@ +-Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/run/t10513.scala b/test/files/run/t10513.scala index c9932879aa61..b4788e04b2cc 100644 --- a/test/files/run/t10513.scala +++ b/test/files/run/t10513.scala @@ -21,7 +21,7 @@ object Test { val longStandingPromise = Promise[Nothing] val futures = List.tabulate(numFutures) { i => - val arr = Array.tabulate(arrSz)(identity) + val arr = new Array[Int](arrSz) val idx = rng.nextInt(arrSz) val f1 = Future { arr diff --git a/test/files/run/t10552/Test_2.scala b/test/files/run/t10552/Test_2.scala index ddd8ab01efd8..189719afa0a7 100644 --- a/test/files/run/t10552/Test_2.scala +++ b/test/files/run/t10552/Test_2.scala @@ -1,7 +1,7 @@ import scala.tools.partest._ object Test extends DirectTest { - override def extraSettings: String = "-usejavacp -Ystop-after:typer" + override def extraSettings: String = s"-usejavacp -cp ${testOutput.path} -Ystop-after:typer" def code = "class C { A.f }" diff --git a/test/files/run/t2318.javaopts b/test/files/run/t2318.javaopts new file mode 100644 index 000000000000..8bf493ce91e6 --- /dev/null +++ b/test/files/run/t2318.javaopts @@ -0,0 +1 @@ +-Ddummy=fresh_jvm_needed_to_test_security_manager \ No newline at end of file diff --git a/test/files/run/t4332.scala b/test/files/run/t4332.scala index 1c7e7d73de66..6da95833eebc 100644 --- a/test/files/run/t4332.scala +++ b/test/files/run/t4332.scala @@ -2,7 +2,7 @@ import scala.tools.partest._ object Test extends DirectTest { override def code = "" - lazy val global = newCompiler("-usejavacp") + lazy val global = newCompiler("-usejavacp", "-cp", testOutput.path) import global._, definitions._ override def show() { diff --git a/test/files/run/t4841-isolate-plugins/t4841-isolate-plugin.scala b/test/files/run/t4841-isolate-plugins/t4841-isolate-plugin.scala index 5421922c9c8d..06902755ae5e 100644 --- a/test/files/run/t4841-isolate-plugins/t4841-isolate-plugin.scala +++ b/test/files/run/t4841-isolate-plugins/t4841-isolate-plugin.scala @@ -8,7 +8,7 @@ import java.io.File object Test extends DirectTest { override def code = "class Code" - override def extraSettings = s"-usejavacp" + override def extraSettings = s"-usejavacp -cp ${testOutput.jfile.getAbsolutePath}" // plugin named ploogin1_1 or ploogin1_2, but not ploogin2_x // Although the samples are in different classloaders, the plugin @@ -24,7 +24,7 @@ object Test extends DirectTest { def compilePlugin(i: Int) = { val out = (testOutput / s"p$i").createDirectory() - val args = Seq("-usejavacp", "-d", out.path) + val args = Seq("-usejavacp", "-d", out.path, "-cp", testOutput.path ) compileString(newCompiler(args: _*))(pluginCode(i)) val xml = PluginDescription(s"p$i", s"t4841.SamplePloogin$i").toXML (out / "scalac-plugin.xml").toFile 
writeAll xml @@ -33,7 +33,8 @@ object Test extends DirectTest { override def show() = { val dirs = 1 to 2 map (compilePlugin(_)) - compile("-Xdev", s"-Xplugin:${dirs mkString ","}", "-usejavacp", "-d", testOutput.path) + val plugins = dirs.map(d => s"$d:${testOutput.path}").mkString(",") + compile("-Xdev", s"-Xplugin:$plugins", "-usejavacp", "-d", testOutput.path) } } diff --git a/test/files/run/t5938.scala b/test/files/run/t5938.scala index 59a95ac37f22..7a3093102a70 100644 --- a/test/files/run/t5938.scala +++ b/test/files/run/t5938.scala @@ -3,7 +3,7 @@ import scala.tools.partest.DirectTest object Test extends DirectTest { override def extraSettings: String = - s"-usejavacp -d ${testOutput.path}" + s"-usejavacp -d ${testOutput.path} -cp ${testOutput.path} -d ${testOutput.path}" override def code = """ object O extends C { diff --git a/test/files/run/t7805-repl-i.javaopts b/test/files/run/t7805-repl-i.javaopts new file mode 100644 index 000000000000..9740f07b079b --- /dev/null +++ b/test/files/run/t7805-repl-i.javaopts @@ -0,0 +1 @@ +-Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/run/t7817-tree-gen.scala b/test/files/run/t7817-tree-gen.scala index 094c0d62896a..0b6463be10e6 100644 --- a/test/files/run/t7817-tree-gen.scala +++ b/test/files/run/t7817-tree-gen.scala @@ -9,7 +9,7 @@ class DSep { object P } object Test extends CompilerTest { import global._ - override def extraSettings = super.extraSettings + " -d " + testOutput.path + override def extraSettings = s"${super.extraSettings} -d ${testOutput.path} -cp ${testOutput.path}" override def sources = List( """ package test { class C { object O } } diff --git a/test/files/run/t8046/Test.scala b/test/files/run/t8046/Test.scala index f6b525d1b5a1..952d3d7bcc27 100644 --- a/test/files/run/t8046/Test.scala +++ b/test/files/run/t8046/Test.scala @@ -2,7 +2,7 @@ import scala.tools.partest._ object Test extends DirectTest { override def code = "" - override def extraSettings: String = "-usejavacp" + override def extraSettings: String = s"-usejavacp -cp ${testOutput.path}" override def show() { val c = newCompiler() diff --git a/test/files/run/t8266-octal-interp.javaopts b/test/files/run/t8266-octal-interp.javaopts new file mode 100644 index 000000000000..9740f07b079b --- /dev/null +++ b/test/files/run/t8266-octal-interp.javaopts @@ -0,0 +1 @@ +-Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/run/t8433.scala b/test/files/run/t8433.scala index 79e18757b897..0e8043aa367f 100644 --- a/test/files/run/t8433.scala +++ b/test/files/run/t8433.scala @@ -42,5 +42,5 @@ object Test extends DirectTest { ScalaClassLoader(getClass.getClassLoader) run ("Main", Nil) } - override def extraSettings = s"-usejavacp -d ${testOutput.path}" + override def extraSettings = s"-usejavacp -d ${testOutput.path} -cp ${testOutput.path}" } From 6f21f2a4fef0f84ea890571165428df1f76937a9 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 7 Mar 2018 22:47:05 +1000 Subject: [PATCH 1275/2793] Remove resource hungry tests Tests that take 30+ seconds to compile and/or execute really add up to a productivity loss for contributors. These tests served a purpose to show that the corresponding changes were correct. But if we want them to serve an ongoing purpose of guarding against regressions, they need to be maintained to do so more quickly or be moved into a test suite that is run less frequently.
--- test/files/neg/patmatexhaust-huge.check | 7 - test/files/neg/patmatexhaust-huge.flags | 1 - test/files/neg/patmatexhaust-huge.scala | 806 ------------------ test/files/pos/t10387.flags | 1 - test/files/pos/t10387.scala | 269 ------ test/files/pos/t9181.flags | 1 - test/files/pos/t9181.scala | 806 ------------------ test/files/presentation/memory-leaks.check | 54 -- test/files/presentation/memory-leaks.javaopts | 1 - .../memory-leaks/MemoryLeaksTest.scala | 141 --- test/files/run/t6853.scala | 18 - test/files/run/t6969.check | 1 - test/files/run/t6969.scala | 32 - 13 files changed, 2138 deletions(-) delete mode 100644 test/files/neg/patmatexhaust-huge.check delete mode 100644 test/files/neg/patmatexhaust-huge.flags delete mode 100644 test/files/neg/patmatexhaust-huge.scala delete mode 100644 test/files/pos/t10387.flags delete mode 100644 test/files/pos/t10387.scala delete mode 100644 test/files/pos/t9181.flags delete mode 100644 test/files/pos/t9181.scala delete mode 100644 test/files/presentation/memory-leaks.check delete mode 100644 test/files/presentation/memory-leaks.javaopts delete mode 100644 test/files/presentation/memory-leaks/MemoryLeaksTest.scala delete mode 100644 test/files/run/t6853.scala delete mode 100644 test/files/run/t6969.check delete mode 100644 test/files/run/t6969.scala diff --git a/test/files/neg/patmatexhaust-huge.check b/test/files/neg/patmatexhaust-huge.check deleted file mode 100644 index 66dbd42ef3ea..000000000000 --- a/test/files/neg/patmatexhaust-huge.check +++ /dev/null @@ -1,7 +0,0 @@ -patmatexhaust-huge.scala:404: warning: match may not be exhaustive. -It would fail on the following inputs: C392, C397 - def f(c: C): Int = c match { - ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found diff --git a/test/files/neg/patmatexhaust-huge.flags b/test/files/neg/patmatexhaust-huge.flags deleted file mode 100644 index 591a950f830d..000000000000 --- a/test/files/neg/patmatexhaust-huge.flags +++ /dev/null @@ -1 +0,0 @@ --Xfatal-warnings -unchecked -Ypatmat-exhaust-depth off \ No newline at end of file diff --git a/test/files/neg/patmatexhaust-huge.scala b/test/files/neg/patmatexhaust-huge.scala deleted file mode 100644 index 8f87655b7a23..000000000000 --- a/test/files/neg/patmatexhaust-huge.scala +++ /dev/null @@ -1,806 +0,0 @@ -sealed trait C -case object C1 extends C -case object C2 extends C -case object C3 extends C -case object C4 extends C -case object C5 extends C -case object C6 extends C -case object C7 extends C -case object C8 extends C -case object C9 extends C -case object C10 extends C -case object C11 extends C -case object C12 extends C -case object C13 extends C -case object C14 extends C -case object C15 extends C -case object C16 extends C -case object C17 extends C -case object C18 extends C -case object C19 extends C -case object C20 extends C -case object C21 extends C -case object C22 extends C -case object C23 extends C -case object C24 extends C -case object C25 extends C -case object C26 extends C -case object C27 extends C -case object C28 extends C -case object C29 extends C -case object C30 extends C -case object C31 extends C -case object C32 extends C -case object C33 extends C -case object C34 extends C -case object C35 extends C -case object C36 extends C -case object C37 extends C -case object C38 extends C -case object C39 extends C -case object C40 extends C -case object C41 extends C -case object C42 extends C -case object C43 extends C -case object C44 extends C -case object C45 
extends C -case object C46 extends C -case object C47 extends C -case object C48 extends C -case object C49 extends C -case object C50 extends C -case object C51 extends C -case object C52 extends C -case object C53 extends C -case object C54 extends C -case object C55 extends C -case object C56 extends C -case object C57 extends C -case object C58 extends C -case object C59 extends C -case object C60 extends C -case object C61 extends C -case object C62 extends C -case object C63 extends C -case object C64 extends C -case object C65 extends C -case object C66 extends C -case object C67 extends C -case object C68 extends C -case object C69 extends C -case object C70 extends C -case object C71 extends C -case object C72 extends C -case object C73 extends C -case object C74 extends C -case object C75 extends C -case object C76 extends C -case object C77 extends C -case object C78 extends C -case object C79 extends C -case object C80 extends C -case object C81 extends C -case object C82 extends C -case object C83 extends C -case object C84 extends C -case object C85 extends C -case object C86 extends C -case object C87 extends C -case object C88 extends C -case object C89 extends C -case object C90 extends C -case object C91 extends C -case object C92 extends C -case object C93 extends C -case object C94 extends C -case object C95 extends C -case object C96 extends C -case object C97 extends C -case object C98 extends C -case object C99 extends C -case object C100 extends C -case object C101 extends C -case object C102 extends C -case object C103 extends C -case object C104 extends C -case object C105 extends C -case object C106 extends C -case object C107 extends C -case object C108 extends C -case object C109 extends C -case object C110 extends C -case object C111 extends C -case object C112 extends C -case object C113 extends C -case object C114 extends C -case object C115 extends C -case object C116 extends C -case object C117 extends C -case object C118 extends C -case object C119 extends C -case object C120 extends C -case object C121 extends C -case object C122 extends C -case object C123 extends C -case object C124 extends C -case object C125 extends C -case object C126 extends C -case object C127 extends C -case object C128 extends C -case object C129 extends C -case object C130 extends C -case object C131 extends C -case object C132 extends C -case object C133 extends C -case object C134 extends C -case object C135 extends C -case object C136 extends C -case object C137 extends C -case object C138 extends C -case object C139 extends C -case object C140 extends C -case object C141 extends C -case object C142 extends C -case object C143 extends C -case object C144 extends C -case object C145 extends C -case object C146 extends C -case object C147 extends C -case object C148 extends C -case object C149 extends C -case object C150 extends C -case object C151 extends C -case object C152 extends C -case object C153 extends C -case object C154 extends C -case object C155 extends C -case object C156 extends C -case object C157 extends C -case object C158 extends C -case object C159 extends C -case object C160 extends C -case object C161 extends C -case object C162 extends C -case object C163 extends C -case object C164 extends C -case object C165 extends C -case object C166 extends C -case object C167 extends C -case object C168 extends C -case object C169 extends C -case object C170 extends C -case object C171 extends C -case object C172 extends C -case object C173 extends C -case object 
C174 extends C -case object C175 extends C -case object C176 extends C -case object C177 extends C -case object C178 extends C -case object C179 extends C -case object C180 extends C -case object C181 extends C -case object C182 extends C -case object C183 extends C -case object C184 extends C -case object C185 extends C -case object C186 extends C -case object C187 extends C -case object C188 extends C -case object C189 extends C -case object C190 extends C -case object C191 extends C -case object C192 extends C -case object C193 extends C -case object C194 extends C -case object C195 extends C -case object C196 extends C -case object C197 extends C -case object C198 extends C -case object C199 extends C -case object C200 extends C -case object C201 extends C -case object C202 extends C -case object C203 extends C -case object C204 extends C -case object C205 extends C -case object C206 extends C -case object C207 extends C -case object C208 extends C -case object C209 extends C -case object C210 extends C -case object C211 extends C -case object C212 extends C -case object C213 extends C -case object C214 extends C -case object C215 extends C -case object C216 extends C -case object C217 extends C -case object C218 extends C -case object C219 extends C -case object C220 extends C -case object C221 extends C -case object C222 extends C -case object C223 extends C -case object C224 extends C -case object C225 extends C -case object C226 extends C -case object C227 extends C -case object C228 extends C -case object C229 extends C -case object C230 extends C -case object C231 extends C -case object C232 extends C -case object C233 extends C -case object C234 extends C -case object C235 extends C -case object C236 extends C -case object C237 extends C -case object C238 extends C -case object C239 extends C -case object C240 extends C -case object C241 extends C -case object C242 extends C -case object C243 extends C -case object C244 extends C -case object C245 extends C -case object C246 extends C -case object C247 extends C -case object C248 extends C -case object C249 extends C -case object C250 extends C -case object C251 extends C -case object C252 extends C -case object C253 extends C -case object C254 extends C -case object C255 extends C -case object C256 extends C -case object C257 extends C -case object C258 extends C -case object C259 extends C -case object C260 extends C -case object C261 extends C -case object C262 extends C -case object C263 extends C -case object C264 extends C -case object C265 extends C -case object C266 extends C -case object C267 extends C -case object C268 extends C -case object C269 extends C -case object C270 extends C -case object C271 extends C -case object C272 extends C -case object C273 extends C -case object C274 extends C -case object C275 extends C -case object C276 extends C -case object C277 extends C -case object C278 extends C -case object C279 extends C -case object C280 extends C -case object C281 extends C -case object C282 extends C -case object C283 extends C -case object C284 extends C -case object C285 extends C -case object C286 extends C -case object C287 extends C -case object C288 extends C -case object C289 extends C -case object C290 extends C -case object C291 extends C -case object C292 extends C -case object C293 extends C -case object C294 extends C -case object C295 extends C -case object C296 extends C -case object C297 extends C -case object C298 extends C -case object C299 extends C -case object C300 extends C -case 
object C301 extends C -case object C302 extends C -case object C303 extends C -case object C304 extends C -case object C305 extends C -case object C306 extends C -case object C307 extends C -case object C308 extends C -case object C309 extends C -case object C310 extends C -case object C311 extends C -case object C312 extends C -case object C313 extends C -case object C314 extends C -case object C315 extends C -case object C316 extends C -case object C317 extends C -case object C318 extends C -case object C319 extends C -case object C320 extends C -case object C321 extends C -case object C322 extends C -case object C323 extends C -case object C324 extends C -case object C325 extends C -case object C326 extends C -case object C327 extends C -case object C328 extends C -case object C329 extends C -case object C330 extends C -case object C331 extends C -case object C332 extends C -case object C333 extends C -case object C334 extends C -case object C335 extends C -case object C336 extends C -case object C337 extends C -case object C338 extends C -case object C339 extends C -case object C340 extends C -case object C341 extends C -case object C342 extends C -case object C343 extends C -case object C344 extends C -case object C345 extends C -case object C346 extends C -case object C347 extends C -case object C348 extends C -case object C349 extends C -case object C350 extends C -case object C351 extends C -case object C352 extends C -case object C353 extends C -case object C354 extends C -case object C355 extends C -case object C356 extends C -case object C357 extends C -case object C358 extends C -case object C359 extends C -case object C360 extends C -case object C361 extends C -case object C362 extends C -case object C363 extends C -case object C364 extends C -case object C365 extends C -case object C366 extends C -case object C367 extends C -case object C368 extends C -case object C369 extends C -case object C370 extends C -case object C371 extends C -case object C372 extends C -case object C373 extends C -case object C374 extends C -case object C375 extends C -case object C376 extends C -case object C377 extends C -case object C378 extends C -case object C379 extends C -case object C380 extends C -case object C381 extends C -case object C382 extends C -case object C383 extends C -case object C384 extends C -case object C385 extends C -case object C386 extends C -case object C387 extends C -case object C388 extends C -case object C389 extends C -case object C390 extends C -case object C391 extends C -case object C392 extends C -case object C393 extends C -case object C394 extends C -case object C395 extends C -case object C396 extends C -case object C397 extends C -case object C398 extends C -case object C399 extends C -case object C400 extends C - -object M { - def f(c: C): Int = c match { - case C1 => 1 - case C2 => 2 - case C3 => 3 - case C4 => 4 - case C5 => 5 - case C6 => 6 - case C7 => 7 - case C8 => 8 - case C9 => 9 - case C10 => 10 - case C11 => 11 - case C12 => 12 - case C13 => 13 - case C14 => 14 - case C15 => 15 - case C16 => 16 - case C17 => 17 - case C18 => 18 - case C19 => 19 - case C20 => 20 - case C21 => 21 - case C22 => 22 - case C23 => 23 - case C24 => 24 - case C25 => 25 - case C26 => 26 - case C27 => 27 - case C28 => 28 - case C29 => 29 - case C30 => 30 - case C31 => 31 - case C32 => 32 - case C33 => 33 - case C34 => 34 - case C35 => 35 - case C36 => 36 - case C37 => 37 - case C38 => 38 - case C39 => 39 - case C40 => 40 - case C41 => 41 - case C42 => 42 - case C43 => 43 - 
case C44 => 44 - case C45 => 45 - case C46 => 46 - case C47 => 47 - case C48 => 48 - case C49 => 49 - case C50 => 50 - case C51 => 51 - case C52 => 52 - case C53 => 53 - case C54 => 54 - case C55 => 55 - case C56 => 56 - case C57 => 57 - case C58 => 58 - case C59 => 59 - case C60 => 60 - case C61 => 61 - case C62 => 62 - case C63 => 63 - case C64 => 64 - case C65 => 65 - case C66 => 66 - case C67 => 67 - case C68 => 68 - case C69 => 69 - case C70 => 70 - case C71 => 71 - case C72 => 72 - case C73 => 73 - case C74 => 74 - case C75 => 75 - case C76 => 76 - case C77 => 77 - case C78 => 78 - case C79 => 79 - case C80 => 80 - case C81 => 81 - case C82 => 82 - case C83 => 83 - case C84 => 84 - case C85 => 85 - case C86 => 86 - case C87 => 87 - case C88 => 88 - case C89 => 89 - case C90 => 90 - case C91 => 91 - case C92 => 92 - case C93 => 93 - case C94 => 94 - case C95 => 95 - case C96 => 96 - case C97 => 97 - case C98 => 98 - case C99 => 99 - case C100 => 100 - case C101 => 101 - case C102 => 102 - case C103 => 103 - case C104 => 104 - case C105 => 105 - case C106 => 106 - case C107 => 107 - case C108 => 108 - case C109 => 109 - case C110 => 110 - case C111 => 111 - case C112 => 112 - case C113 => 113 - case C114 => 114 - case C115 => 115 - case C116 => 116 - case C117 => 117 - case C118 => 118 - case C119 => 119 - case C120 => 120 - case C121 => 121 - case C122 => 122 - case C123 => 123 - case C124 => 124 - case C125 => 125 - case C126 => 126 - case C127 => 127 - case C128 => 128 - case C129 => 129 - case C130 => 130 - case C131 => 131 - case C132 => 132 - case C133 => 133 - case C134 => 134 - case C135 => 135 - case C136 => 136 - case C137 => 137 - case C138 => 138 - case C139 => 139 - case C140 => 140 - case C141 => 141 - case C142 => 142 - case C143 => 143 - case C144 => 144 - case C145 => 145 - case C146 => 146 - case C147 => 147 - case C148 => 148 - case C149 => 149 - case C150 => 150 - case C151 => 151 - case C152 => 152 - case C153 => 153 - case C154 => 154 - case C155 => 155 - case C156 => 156 - case C157 => 157 - case C158 => 158 - case C159 => 159 - case C160 => 160 - case C161 => 161 - case C162 => 162 - case C163 => 163 - case C164 => 164 - case C165 => 165 - case C166 => 166 - case C167 => 167 - case C168 => 168 - case C169 => 169 - case C170 => 170 - case C171 => 171 - case C172 => 172 - case C173 => 173 - case C174 => 174 - case C175 => 175 - case C176 => 176 - case C177 => 177 - case C178 => 178 - case C179 => 179 - case C180 => 180 - case C181 => 181 - case C182 => 182 - case C183 => 183 - case C184 => 184 - case C185 => 185 - case C186 => 186 - case C187 => 187 - case C188 => 188 - case C189 => 189 - case C190 => 190 - case C191 => 191 - case C192 => 192 - case C193 => 193 - case C194 => 194 - case C195 => 195 - case C196 => 196 - case C197 => 197 - case C198 => 198 - case C199 => 199 - case C200 => 200 - case C201 => 201 - case C202 => 202 - case C203 => 203 - case C204 => 204 - case C205 => 205 - case C206 => 206 - case C207 => 207 - case C208 => 208 - case C209 => 209 - case C210 => 210 - case C211 => 211 - case C212 => 212 - case C213 => 213 - case C214 => 214 - case C215 => 215 - case C216 => 216 - case C217 => 217 - case C218 => 218 - case C219 => 219 - case C220 => 220 - case C221 => 221 - case C222 => 222 - case C223 => 223 - case C224 => 224 - case C225 => 225 - case C226 => 226 - case C227 => 227 - case C228 => 228 - case C229 => 229 - case C230 => 230 - case C231 => 231 - case C232 => 232 - case C233 => 233 - case C234 => 234 - case C235 => 235 - case C236 => 236 - 
case C237 => 237 - case C238 => 238 - case C239 => 239 - case C240 => 240 - case C241 => 241 - case C242 => 242 - case C243 => 243 - case C244 => 244 - case C245 => 245 - case C246 => 246 - case C247 => 247 - case C248 => 248 - case C249 => 249 - case C250 => 250 - case C251 => 251 - case C252 => 252 - case C253 => 253 - case C254 => 254 - case C255 => 255 - case C256 => 256 - case C257 => 257 - case C258 => 258 - case C259 => 259 - case C260 => 260 - case C261 => 261 - case C262 => 262 - case C263 => 263 - case C264 => 264 - case C265 => 265 - case C266 => 266 - case C267 => 267 - case C268 => 268 - case C269 => 269 - case C270 => 270 - case C271 => 271 - case C272 => 272 - case C273 => 273 - case C274 => 274 - case C275 => 275 - case C276 => 276 - case C277 => 277 - case C278 => 278 - case C279 => 279 - case C280 => 280 - case C281 => 281 - case C282 => 282 - case C283 => 283 - case C284 => 284 - case C285 => 285 - case C286 => 286 - case C287 => 287 - case C288 => 288 - case C289 => 289 - case C290 => 290 - case C291 => 291 - case C292 => 292 - case C293 => 293 - case C294 => 294 - case C295 => 295 - case C296 => 296 - case C297 => 297 - case C298 => 298 - case C299 => 299 - case C300 => 300 - case C301 => 301 - case C302 => 302 - case C303 => 303 - case C304 => 304 - case C305 => 305 - case C306 => 306 - case C307 => 307 - case C308 => 308 - case C309 => 309 - case C310 => 310 - case C311 => 311 - case C312 => 312 - case C313 => 313 - case C314 => 314 - case C315 => 315 - case C316 => 316 - case C317 => 317 - case C318 => 318 - case C319 => 319 - case C320 => 320 - case C321 => 321 - case C322 => 322 - case C323 => 323 - case C324 => 324 - case C325 => 325 - case C326 => 326 - case C327 => 327 - case C328 => 328 - case C329 => 329 - case C330 => 330 - case C331 => 331 - case C332 => 332 - case C333 => 333 - case C334 => 334 - case C335 => 335 - case C336 => 336 - case C337 => 337 - case C338 => 338 - case C339 => 339 - case C340 => 340 - case C341 => 341 - case C342 => 342 - case C343 => 343 - case C344 => 344 - case C345 => 345 - case C346 => 346 - case C347 => 347 - case C348 => 348 - case C349 => 349 - case C350 => 350 - case C351 => 351 - case C352 => 352 - case C353 => 353 - case C354 => 354 - case C355 => 355 - case C356 => 356 - case C357 => 357 - case C358 => 358 - case C359 => 359 - case C360 => 360 - case C361 => 361 - case C362 => 362 - case C363 => 363 - case C364 => 364 - case C365 => 365 - case C366 => 366 - case C367 => 367 - case C368 => 368 - case C369 => 369 - case C370 => 370 - case C371 => 371 - case C372 => 372 - case C373 => 373 - case C374 => 374 - case C375 => 375 - case C376 => 376 - case C377 => 377 - case C378 => 378 - case C379 => 379 - case C380 => 380 - case C381 => 381 - case C382 => 382 - case C383 => 383 - case C384 => 384 - case C385 => 385 - case C386 => 386 - case C387 => 387 - case C388 => 388 - case C389 => 389 - case C390 => 390 - case C391 => 391 -// case C392 => 392 - case C393 => 393 - case C394 => 394 - case C395 => 395 - case C396 => 396 -// case C397 => 397 - case C398 => 398 - case C399 => 399 - case C400 => 400 - } -} diff --git a/test/files/pos/t10387.flags b/test/files/pos/t10387.flags deleted file mode 100644 index 2ae3d24b9ccc..000000000000 --- a/test/files/pos/t10387.flags +++ /dev/null @@ -1 +0,0 @@ --Ystop-after:patmat diff --git a/test/files/pos/t10387.scala b/test/files/pos/t10387.scala deleted file mode 100644 index 0268a14c889b..000000000000 --- a/test/files/pos/t10387.scala +++ /dev/null @@ -1,269 +0,0 @@ -object foo { - 
abstract sealed class num - final case class One() extends num - final case class Bit0(a: num) extends num - final case class Bit1(a: num) extends num - - abstract sealed class char - final case class zero_char() extends char - final case class Char(a: num) extends char - - def integer_of_char(x0: char): BigInt = x0 match { - case Char(Bit1(Bit1(Bit1(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(255) - case Char(Bit0(Bit1(Bit1(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(254) - case Char(Bit1(Bit0(Bit1(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(253) - case Char(Bit0(Bit0(Bit1(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(252) - case Char(Bit1(Bit1(Bit0(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(251) - case Char(Bit0(Bit1(Bit0(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(250) - case Char(Bit1(Bit0(Bit0(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(249) - case Char(Bit0(Bit0(Bit0(Bit1(Bit1(Bit1(Bit1(One())))))))) => BigInt(248) - case Char(Bit1(Bit1(Bit1(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(247) - case Char(Bit0(Bit1(Bit1(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(246) - case Char(Bit1(Bit0(Bit1(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(245) - case Char(Bit0(Bit0(Bit1(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(244) - case Char(Bit1(Bit1(Bit0(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(243) - case Char(Bit0(Bit1(Bit0(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(242) - case Char(Bit1(Bit0(Bit0(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(241) - case Char(Bit0(Bit0(Bit0(Bit0(Bit1(Bit1(Bit1(One())))))))) => BigInt(240) - case Char(Bit1(Bit1(Bit1(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(239) - case Char(Bit0(Bit1(Bit1(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(238) - case Char(Bit1(Bit0(Bit1(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(237) - case Char(Bit0(Bit0(Bit1(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(236) - case Char(Bit1(Bit1(Bit0(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(235) - case Char(Bit0(Bit1(Bit0(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(234) - case Char(Bit1(Bit0(Bit0(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(233) - case Char(Bit0(Bit0(Bit0(Bit1(Bit0(Bit1(Bit1(One())))))))) => BigInt(232) - case Char(Bit1(Bit1(Bit1(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(231) - case Char(Bit0(Bit1(Bit1(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(230) - case Char(Bit1(Bit0(Bit1(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(229) - case Char(Bit0(Bit0(Bit1(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(228) - case Char(Bit1(Bit1(Bit0(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(227) - case Char(Bit0(Bit1(Bit0(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(226) - case Char(Bit1(Bit0(Bit0(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(225) - case Char(Bit0(Bit0(Bit0(Bit0(Bit0(Bit1(Bit1(One())))))))) => BigInt(224) - case Char(Bit1(Bit1(Bit1(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(223) - case Char(Bit0(Bit1(Bit1(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(222) - case Char(Bit1(Bit0(Bit1(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(221) - case Char(Bit0(Bit0(Bit1(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(220) - case Char(Bit1(Bit1(Bit0(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(219) - case Char(Bit0(Bit1(Bit0(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(218) - case Char(Bit1(Bit0(Bit0(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(217) - case Char(Bit0(Bit0(Bit0(Bit1(Bit1(Bit0(Bit1(One())))))))) => BigInt(216) - case Char(Bit1(Bit1(Bit1(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(215) - case Char(Bit0(Bit1(Bit1(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(214) - case 
Char(Bit1(Bit0(Bit1(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(213) - case Char(Bit0(Bit0(Bit1(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(212) - case Char(Bit1(Bit1(Bit0(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(211) - case Char(Bit0(Bit1(Bit0(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(210) - case Char(Bit1(Bit0(Bit0(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(209) - case Char(Bit0(Bit0(Bit0(Bit0(Bit1(Bit0(Bit1(One())))))))) => BigInt(208) - case Char(Bit1(Bit1(Bit1(Bit1(Bit0(Bit0(Bit1(One())))))))) => BigInt(207) - case Char(Bit0(Bit1(Bit1(Bit1(Bit0(Bit0(Bit1(One())))))))) => BigInt(206) - case Char(Bit1(Bit0(Bit1(Bit1(Bit0(Bit0(Bit1(One())))))))) => BigInt(205) - case Char(Bit0(Bit0(Bit1(Bit1(Bit0(Bit0(Bit1(One())))))))) => BigInt(204) - case Char(Bit1(Bit1(Bit0(Bit1(Bit0(Bit0(Bit1(One())))))))) => BigInt(203) - case Char(Bit0(Bit1(Bit0(Bit1(Bit0(Bit0(Bit1(One())))))))) => BigInt(202) - case Char(Bit1(Bit0(Bit0(Bit1(Bit0(Bit0(Bit1(One())))))))) => BigInt(201) - case Char(Bit0(Bit0(Bit0(Bit1(Bit0(Bit0(Bit1(One())))))))) => BigInt(200) - case Char(Bit1(Bit1(Bit1(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(199) - case Char(Bit0(Bit1(Bit1(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(198) - case Char(Bit1(Bit0(Bit1(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(197) - case Char(Bit0(Bit0(Bit1(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(196) - case Char(Bit1(Bit1(Bit0(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(195) - case Char(Bit0(Bit1(Bit0(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(194) - case Char(Bit1(Bit0(Bit0(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(193) - case Char(Bit0(Bit0(Bit0(Bit0(Bit0(Bit0(Bit1(One())))))))) => BigInt(192) - case Char(Bit1(Bit1(Bit1(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(191) - case Char(Bit0(Bit1(Bit1(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(190) - case Char(Bit1(Bit0(Bit1(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(189) - case Char(Bit0(Bit0(Bit1(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(188) - case Char(Bit1(Bit1(Bit0(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(187) - case Char(Bit0(Bit1(Bit0(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(186) - case Char(Bit1(Bit0(Bit0(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(185) - case Char(Bit0(Bit0(Bit0(Bit1(Bit1(Bit1(Bit0(One())))))))) => BigInt(184) - case Char(Bit1(Bit1(Bit1(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(183) - case Char(Bit0(Bit1(Bit1(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(182) - case Char(Bit1(Bit0(Bit1(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(181) - case Char(Bit0(Bit0(Bit1(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(180) - case Char(Bit1(Bit1(Bit0(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(179) - case Char(Bit0(Bit1(Bit0(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(178) - case Char(Bit1(Bit0(Bit0(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(177) - case Char(Bit0(Bit0(Bit0(Bit0(Bit1(Bit1(Bit0(One())))))))) => BigInt(176) - case Char(Bit1(Bit1(Bit1(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(175) - case Char(Bit0(Bit1(Bit1(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(174) - case Char(Bit1(Bit0(Bit1(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(173) - case Char(Bit0(Bit0(Bit1(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(172) - case Char(Bit1(Bit1(Bit0(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(171) - case Char(Bit0(Bit1(Bit0(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(170) - case Char(Bit1(Bit0(Bit0(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(169) - case Char(Bit0(Bit0(Bit0(Bit1(Bit0(Bit1(Bit0(One())))))))) => BigInt(168) - case Char(Bit1(Bit1(Bit1(Bit0(Bit0(Bit1(Bit0(One())))))))) => 
BigInt(167) - case Char(Bit0(Bit1(Bit1(Bit0(Bit0(Bit1(Bit0(One())))))))) => BigInt(166) - case Char(Bit1(Bit0(Bit1(Bit0(Bit0(Bit1(Bit0(One())))))))) => BigInt(165) - case Char(Bit0(Bit0(Bit1(Bit0(Bit0(Bit1(Bit0(One())))))))) => BigInt(164) - case Char(Bit1(Bit1(Bit0(Bit0(Bit0(Bit1(Bit0(One())))))))) => BigInt(163) - case Char(Bit0(Bit1(Bit0(Bit0(Bit0(Bit1(Bit0(One())))))))) => BigInt(162) - case Char(Bit1(Bit0(Bit0(Bit0(Bit0(Bit1(Bit0(One())))))))) => BigInt(161) - case Char(Bit0(Bit0(Bit0(Bit0(Bit0(Bit1(Bit0(One())))))))) => BigInt(160) - case Char(Bit1(Bit1(Bit1(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(159) - case Char(Bit0(Bit1(Bit1(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(158) - case Char(Bit1(Bit0(Bit1(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(157) - case Char(Bit0(Bit0(Bit1(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(156) - case Char(Bit1(Bit1(Bit0(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(155) - case Char(Bit0(Bit1(Bit0(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(154) - case Char(Bit1(Bit0(Bit0(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(153) - case Char(Bit0(Bit0(Bit0(Bit1(Bit1(Bit0(Bit0(One())))))))) => BigInt(152) - case Char(Bit1(Bit1(Bit1(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(151) - case Char(Bit0(Bit1(Bit1(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(150) - case Char(Bit1(Bit0(Bit1(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(149) - case Char(Bit0(Bit0(Bit1(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(148) - case Char(Bit1(Bit1(Bit0(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(147) - case Char(Bit0(Bit1(Bit0(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(146) - case Char(Bit1(Bit0(Bit0(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(145) - case Char(Bit0(Bit0(Bit0(Bit0(Bit1(Bit0(Bit0(One())))))))) => BigInt(144) - case Char(Bit1(Bit1(Bit1(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(143) - case Char(Bit0(Bit1(Bit1(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(142) - case Char(Bit1(Bit0(Bit1(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(141) - case Char(Bit0(Bit0(Bit1(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(140) - case Char(Bit1(Bit1(Bit0(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(139) - case Char(Bit0(Bit1(Bit0(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(138) - case Char(Bit1(Bit0(Bit0(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(137) - case Char(Bit0(Bit0(Bit0(Bit1(Bit0(Bit0(Bit0(One())))))))) => BigInt(136) - case Char(Bit1(Bit1(Bit1(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(135) - case Char(Bit0(Bit1(Bit1(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(134) - case Char(Bit1(Bit0(Bit1(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(133) - case Char(Bit0(Bit0(Bit1(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(132) - case Char(Bit1(Bit1(Bit0(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(131) - case Char(Bit0(Bit1(Bit0(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(130) - case Char(Bit1(Bit0(Bit0(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(129) - case Char(Bit0(Bit0(Bit0(Bit0(Bit0(Bit0(Bit0(One())))))))) => BigInt(128) - case Char(Bit1(Bit1(Bit1(Bit1(Bit1(Bit1(One()))))))) => BigInt(127) - case Char(Bit0(Bit1(Bit1(Bit1(Bit1(Bit1(One()))))))) => BigInt(126) - case Char(Bit1(Bit0(Bit1(Bit1(Bit1(Bit1(One()))))))) => BigInt(125) - case Char(Bit0(Bit0(Bit1(Bit1(Bit1(Bit1(One()))))))) => BigInt(124) - case Char(Bit1(Bit1(Bit0(Bit1(Bit1(Bit1(One()))))))) => BigInt(123) - case Char(Bit0(Bit1(Bit0(Bit1(Bit1(Bit1(One()))))))) => BigInt(122) - case Char(Bit1(Bit0(Bit0(Bit1(Bit1(Bit1(One()))))))) => BigInt(121) - case Char(Bit0(Bit0(Bit0(Bit1(Bit1(Bit1(One()))))))) => BigInt(120) - case 
Char(Bit1(Bit1(Bit1(Bit0(Bit1(Bit1(One()))))))) => BigInt(119) - case Char(Bit0(Bit1(Bit1(Bit0(Bit1(Bit1(One()))))))) => BigInt(118) - case Char(Bit1(Bit0(Bit1(Bit0(Bit1(Bit1(One()))))))) => BigInt(117) - case Char(Bit0(Bit0(Bit1(Bit0(Bit1(Bit1(One()))))))) => BigInt(116) - case Char(Bit1(Bit1(Bit0(Bit0(Bit1(Bit1(One()))))))) => BigInt(115) - case Char(Bit0(Bit1(Bit0(Bit0(Bit1(Bit1(One()))))))) => BigInt(114) - case Char(Bit1(Bit0(Bit0(Bit0(Bit1(Bit1(One()))))))) => BigInt(113) - case Char(Bit0(Bit0(Bit0(Bit0(Bit1(Bit1(One()))))))) => BigInt(112) - case Char(Bit1(Bit1(Bit1(Bit1(Bit0(Bit1(One()))))))) => BigInt(111) - case Char(Bit0(Bit1(Bit1(Bit1(Bit0(Bit1(One()))))))) => BigInt(110) - case Char(Bit1(Bit0(Bit1(Bit1(Bit0(Bit1(One()))))))) => BigInt(109) - case Char(Bit0(Bit0(Bit1(Bit1(Bit0(Bit1(One()))))))) => BigInt(108) - case Char(Bit1(Bit1(Bit0(Bit1(Bit0(Bit1(One()))))))) => BigInt(107) - case Char(Bit0(Bit1(Bit0(Bit1(Bit0(Bit1(One()))))))) => BigInt(106) - case Char(Bit1(Bit0(Bit0(Bit1(Bit0(Bit1(One()))))))) => BigInt(105) - case Char(Bit0(Bit0(Bit0(Bit1(Bit0(Bit1(One()))))))) => BigInt(104) - case Char(Bit1(Bit1(Bit1(Bit0(Bit0(Bit1(One()))))))) => BigInt(103) - case Char(Bit0(Bit1(Bit1(Bit0(Bit0(Bit1(One()))))))) => BigInt(102) - case Char(Bit1(Bit0(Bit1(Bit0(Bit0(Bit1(One()))))))) => BigInt(101) - case Char(Bit0(Bit0(Bit1(Bit0(Bit0(Bit1(One()))))))) => BigInt(100) - case Char(Bit1(Bit1(Bit0(Bit0(Bit0(Bit1(One()))))))) => BigInt(99) - case Char(Bit0(Bit1(Bit0(Bit0(Bit0(Bit1(One()))))))) => BigInt(98) - case Char(Bit1(Bit0(Bit0(Bit0(Bit0(Bit1(One()))))))) => BigInt(97) - case Char(Bit0(Bit0(Bit0(Bit0(Bit0(Bit1(One()))))))) => BigInt(96) - case Char(Bit1(Bit1(Bit1(Bit1(Bit1(Bit0(One()))))))) => BigInt(95) - case Char(Bit0(Bit1(Bit1(Bit1(Bit1(Bit0(One()))))))) => BigInt(94) - case Char(Bit1(Bit0(Bit1(Bit1(Bit1(Bit0(One()))))))) => BigInt(93) - case Char(Bit0(Bit0(Bit1(Bit1(Bit1(Bit0(One()))))))) => BigInt(92) - case Char(Bit1(Bit1(Bit0(Bit1(Bit1(Bit0(One()))))))) => BigInt(91) - case Char(Bit0(Bit1(Bit0(Bit1(Bit1(Bit0(One()))))))) => BigInt(90) - case Char(Bit1(Bit0(Bit0(Bit1(Bit1(Bit0(One()))))))) => BigInt(89) - case Char(Bit0(Bit0(Bit0(Bit1(Bit1(Bit0(One()))))))) => BigInt(88) - case Char(Bit1(Bit1(Bit1(Bit0(Bit1(Bit0(One()))))))) => BigInt(87) - case Char(Bit0(Bit1(Bit1(Bit0(Bit1(Bit0(One()))))))) => BigInt(86) - case Char(Bit1(Bit0(Bit1(Bit0(Bit1(Bit0(One()))))))) => BigInt(85) - case Char(Bit0(Bit0(Bit1(Bit0(Bit1(Bit0(One()))))))) => BigInt(84) - case Char(Bit1(Bit1(Bit0(Bit0(Bit1(Bit0(One()))))))) => BigInt(83) - case Char(Bit0(Bit1(Bit0(Bit0(Bit1(Bit0(One()))))))) => BigInt(82) - case Char(Bit1(Bit0(Bit0(Bit0(Bit1(Bit0(One()))))))) => BigInt(81) - case Char(Bit0(Bit0(Bit0(Bit0(Bit1(Bit0(One()))))))) => BigInt(80) - case Char(Bit1(Bit1(Bit1(Bit1(Bit0(Bit0(One()))))))) => BigInt(79) - case Char(Bit0(Bit1(Bit1(Bit1(Bit0(Bit0(One()))))))) => BigInt(78) - case Char(Bit1(Bit0(Bit1(Bit1(Bit0(Bit0(One()))))))) => BigInt(77) - case Char(Bit0(Bit0(Bit1(Bit1(Bit0(Bit0(One()))))))) => BigInt(76) - case Char(Bit1(Bit1(Bit0(Bit1(Bit0(Bit0(One()))))))) => BigInt(75) - case Char(Bit0(Bit1(Bit0(Bit1(Bit0(Bit0(One()))))))) => BigInt(74) - case Char(Bit1(Bit0(Bit0(Bit1(Bit0(Bit0(One()))))))) => BigInt(73) - case Char(Bit0(Bit0(Bit0(Bit1(Bit0(Bit0(One()))))))) => BigInt(72) - case Char(Bit1(Bit1(Bit1(Bit0(Bit0(Bit0(One()))))))) => BigInt(71) - case Char(Bit0(Bit1(Bit1(Bit0(Bit0(Bit0(One()))))))) => BigInt(70) - case Char(Bit1(Bit0(Bit1(Bit0(Bit0(Bit0(One()))))))) => BigInt(69) - case 
Char(Bit0(Bit0(Bit1(Bit0(Bit0(Bit0(One()))))))) => BigInt(68) - case Char(Bit1(Bit1(Bit0(Bit0(Bit0(Bit0(One()))))))) => BigInt(67) - case Char(Bit0(Bit1(Bit0(Bit0(Bit0(Bit0(One()))))))) => BigInt(66) - case Char(Bit1(Bit0(Bit0(Bit0(Bit0(Bit0(One()))))))) => BigInt(65) - case Char(Bit0(Bit0(Bit0(Bit0(Bit0(Bit0(One()))))))) => BigInt(64) - case Char(Bit1(Bit1(Bit1(Bit1(Bit1(One())))))) => BigInt(63) - case Char(Bit0(Bit1(Bit1(Bit1(Bit1(One())))))) => BigInt(62) - case Char(Bit1(Bit0(Bit1(Bit1(Bit1(One())))))) => BigInt(61) - case Char(Bit0(Bit0(Bit1(Bit1(Bit1(One())))))) => BigInt(60) - case Char(Bit1(Bit1(Bit0(Bit1(Bit1(One())))))) => BigInt(59) - case Char(Bit0(Bit1(Bit0(Bit1(Bit1(One())))))) => BigInt(58) - case Char(Bit1(Bit0(Bit0(Bit1(Bit1(One())))))) => BigInt(57) - case Char(Bit0(Bit0(Bit0(Bit1(Bit1(One())))))) => BigInt(56) - case Char(Bit1(Bit1(Bit1(Bit0(Bit1(One())))))) => BigInt(55) - case Char(Bit0(Bit1(Bit1(Bit0(Bit1(One())))))) => BigInt(54) - case Char(Bit1(Bit0(Bit1(Bit0(Bit1(One())))))) => BigInt(53) - case Char(Bit0(Bit0(Bit1(Bit0(Bit1(One())))))) => BigInt(52) - case Char(Bit1(Bit1(Bit0(Bit0(Bit1(One())))))) => BigInt(51) - case Char(Bit0(Bit1(Bit0(Bit0(Bit1(One())))))) => BigInt(50) - case Char(Bit1(Bit0(Bit0(Bit0(Bit1(One())))))) => BigInt(49) - case Char(Bit0(Bit0(Bit0(Bit0(Bit1(One())))))) => BigInt(48) - case Char(Bit1(Bit1(Bit1(Bit1(Bit0(One())))))) => BigInt(47) - case Char(Bit0(Bit1(Bit1(Bit1(Bit0(One())))))) => BigInt(46) - case Char(Bit1(Bit0(Bit1(Bit1(Bit0(One())))))) => BigInt(45) - case Char(Bit0(Bit0(Bit1(Bit1(Bit0(One())))))) => BigInt(44) - case Char(Bit1(Bit1(Bit0(Bit1(Bit0(One())))))) => BigInt(43) - case Char(Bit0(Bit1(Bit0(Bit1(Bit0(One())))))) => BigInt(42) - case Char(Bit1(Bit0(Bit0(Bit1(Bit0(One())))))) => BigInt(41) - case Char(Bit0(Bit0(Bit0(Bit1(Bit0(One())))))) => BigInt(40) - case Char(Bit1(Bit1(Bit1(Bit0(Bit0(One())))))) => BigInt(39) - case Char(Bit0(Bit1(Bit1(Bit0(Bit0(One())))))) => BigInt(38) - case Char(Bit1(Bit0(Bit1(Bit0(Bit0(One())))))) => BigInt(37) - case Char(Bit0(Bit0(Bit1(Bit0(Bit0(One())))))) => BigInt(36) - case Char(Bit1(Bit1(Bit0(Bit0(Bit0(One())))))) => BigInt(35) - case Char(Bit0(Bit1(Bit0(Bit0(Bit0(One())))))) => BigInt(34) - case Char(Bit1(Bit0(Bit0(Bit0(Bit0(One())))))) => BigInt(33) - case Char(Bit0(Bit0(Bit0(Bit0(Bit0(One())))))) => BigInt(32) - case Char(Bit1(Bit1(Bit1(Bit1(One()))))) => BigInt(31) - case Char(Bit0(Bit1(Bit1(Bit1(One()))))) => BigInt(30) - case Char(Bit1(Bit0(Bit1(Bit1(One()))))) => BigInt(29) - case Char(Bit0(Bit0(Bit1(Bit1(One()))))) => BigInt(28) - case Char(Bit1(Bit1(Bit0(Bit1(One()))))) => BigInt(27) - case Char(Bit0(Bit1(Bit0(Bit1(One()))))) => BigInt(26) - case Char(Bit1(Bit0(Bit0(Bit1(One()))))) => BigInt(25) - case Char(Bit0(Bit0(Bit0(Bit1(One()))))) => BigInt(24) - case Char(Bit1(Bit1(Bit1(Bit0(One()))))) => BigInt(23) - case Char(Bit0(Bit1(Bit1(Bit0(One()))))) => BigInt(22) - case Char(Bit1(Bit0(Bit1(Bit0(One()))))) => BigInt(21) - case Char(Bit0(Bit0(Bit1(Bit0(One()))))) => BigInt(20) - case Char(Bit1(Bit1(Bit0(Bit0(One()))))) => BigInt(19) - case Char(Bit0(Bit1(Bit0(Bit0(One()))))) => BigInt(18) - case Char(Bit1(Bit0(Bit0(Bit0(One()))))) => BigInt(17) - case Char(Bit0(Bit0(Bit0(Bit0(One()))))) => BigInt(16) - case Char(Bit1(Bit1(Bit1(One())))) => BigInt(15) - case Char(Bit0(Bit1(Bit1(One())))) => BigInt(14) - case Char(Bit1(Bit0(Bit1(One())))) => BigInt(13) - case Char(Bit0(Bit0(Bit1(One())))) => BigInt(12) - case Char(Bit1(Bit1(Bit0(One())))) => BigInt(11) - case 
Char(Bit0(Bit1(Bit0(One())))) => BigInt(10) - case Char(Bit1(Bit0(Bit0(One())))) => BigInt(9) - case Char(Bit0(Bit0(Bit0(One())))) => BigInt(8) - case Char(Bit1(Bit1(One()))) => BigInt(7) - case Char(Bit0(Bit1(One()))) => BigInt(6) - case Char(Bit1(Bit0(One()))) => BigInt(5) - case Char(Bit0(Bit0(One()))) => BigInt(4) - case Char(Bit1(One())) => BigInt(3) - case Char(Bit0(One())) => BigInt(2) - case Char(One()) => BigInt(1) - case zero_char() => BigInt(0) - } -} diff --git a/test/files/pos/t9181.flags b/test/files/pos/t9181.flags deleted file mode 100644 index 0f96f1f872a4..000000000000 --- a/test/files/pos/t9181.flags +++ /dev/null @@ -1 +0,0 @@ --nowarn \ No newline at end of file diff --git a/test/files/pos/t9181.scala b/test/files/pos/t9181.scala deleted file mode 100644 index 2edf6fe4a3c9..000000000000 --- a/test/files/pos/t9181.scala +++ /dev/null @@ -1,806 +0,0 @@ -sealed trait C -case object C1 extends C -case object C2 extends C -case object C3 extends C -case object C4 extends C -case object C5 extends C -case object C6 extends C -case object C7 extends C -case object C8 extends C -case object C9 extends C -case object C10 extends C -case object C11 extends C -case object C12 extends C -case object C13 extends C -case object C14 extends C -case object C15 extends C -case object C16 extends C -case object C17 extends C -case object C18 extends C -case object C19 extends C -case object C20 extends C -case object C21 extends C -case object C22 extends C -case object C23 extends C -case object C24 extends C -case object C25 extends C -case object C26 extends C -case object C27 extends C -case object C28 extends C -case object C29 extends C -case object C30 extends C -case object C31 extends C -case object C32 extends C -case object C33 extends C -case object C34 extends C -case object C35 extends C -case object C36 extends C -case object C37 extends C -case object C38 extends C -case object C39 extends C -case object C40 extends C -case object C41 extends C -case object C42 extends C -case object C43 extends C -case object C44 extends C -case object C45 extends C -case object C46 extends C -case object C47 extends C -case object C48 extends C -case object C49 extends C -case object C50 extends C -case object C51 extends C -case object C52 extends C -case object C53 extends C -case object C54 extends C -case object C55 extends C -case object C56 extends C -case object C57 extends C -case object C58 extends C -case object C59 extends C -case object C60 extends C -case object C61 extends C -case object C62 extends C -case object C63 extends C -case object C64 extends C -case object C65 extends C -case object C66 extends C -case object C67 extends C -case object C68 extends C -case object C69 extends C -case object C70 extends C -case object C71 extends C -case object C72 extends C -case object C73 extends C -case object C74 extends C -case object C75 extends C -case object C76 extends C -case object C77 extends C -case object C78 extends C -case object C79 extends C -case object C80 extends C -case object C81 extends C -case object C82 extends C -case object C83 extends C -case object C84 extends C -case object C85 extends C -case object C86 extends C -case object C87 extends C -case object C88 extends C -case object C89 extends C -case object C90 extends C -case object C91 extends C -case object C92 extends C -case object C93 extends C -case object C94 extends C -case object C95 extends C -case object C96 extends C -case object C97 extends C -case object C98 extends C -case object C99 
extends C -case object C100 extends C -case object C101 extends C -case object C102 extends C -case object C103 extends C -case object C104 extends C -case object C105 extends C -case object C106 extends C -case object C107 extends C -case object C108 extends C -case object C109 extends C -case object C110 extends C -case object C111 extends C -case object C112 extends C -case object C113 extends C -case object C114 extends C -case object C115 extends C -case object C116 extends C -case object C117 extends C -case object C118 extends C -case object C119 extends C -case object C120 extends C -case object C121 extends C -case object C122 extends C -case object C123 extends C -case object C124 extends C -case object C125 extends C -case object C126 extends C -case object C127 extends C -case object C128 extends C -case object C129 extends C -case object C130 extends C -case object C131 extends C -case object C132 extends C -case object C133 extends C -case object C134 extends C -case object C135 extends C -case object C136 extends C -case object C137 extends C -case object C138 extends C -case object C139 extends C -case object C140 extends C -case object C141 extends C -case object C142 extends C -case object C143 extends C -case object C144 extends C -case object C145 extends C -case object C146 extends C -case object C147 extends C -case object C148 extends C -case object C149 extends C -case object C150 extends C -case object C151 extends C -case object C152 extends C -case object C153 extends C -case object C154 extends C -case object C155 extends C -case object C156 extends C -case object C157 extends C -case object C158 extends C -case object C159 extends C -case object C160 extends C -case object C161 extends C -case object C162 extends C -case object C163 extends C -case object C164 extends C -case object C165 extends C -case object C166 extends C -case object C167 extends C -case object C168 extends C -case object C169 extends C -case object C170 extends C -case object C171 extends C -case object C172 extends C -case object C173 extends C -case object C174 extends C -case object C175 extends C -case object C176 extends C -case object C177 extends C -case object C178 extends C -case object C179 extends C -case object C180 extends C -case object C181 extends C -case object C182 extends C -case object C183 extends C -case object C184 extends C -case object C185 extends C -case object C186 extends C -case object C187 extends C -case object C188 extends C -case object C189 extends C -case object C190 extends C -case object C191 extends C -case object C192 extends C -case object C193 extends C -case object C194 extends C -case object C195 extends C -case object C196 extends C -case object C197 extends C -case object C198 extends C -case object C199 extends C -case object C200 extends C -case object C201 extends C -case object C202 extends C -case object C203 extends C -case object C204 extends C -case object C205 extends C -case object C206 extends C -case object C207 extends C -case object C208 extends C -case object C209 extends C -case object C210 extends C -case object C211 extends C -case object C212 extends C -case object C213 extends C -case object C214 extends C -case object C215 extends C -case object C216 extends C -case object C217 extends C -case object C218 extends C -case object C219 extends C -case object C220 extends C -case object C221 extends C -case object C222 extends C -case object C223 extends C -case object C224 extends C -case object C225 extends C -case object 
C226 extends C -case object C227 extends C -case object C228 extends C -case object C229 extends C -case object C230 extends C -case object C231 extends C -case object C232 extends C -case object C233 extends C -case object C234 extends C -case object C235 extends C -case object C236 extends C -case object C237 extends C -case object C238 extends C -case object C239 extends C -case object C240 extends C -case object C241 extends C -case object C242 extends C -case object C243 extends C -case object C244 extends C -case object C245 extends C -case object C246 extends C -case object C247 extends C -case object C248 extends C -case object C249 extends C -case object C250 extends C -case object C251 extends C -case object C252 extends C -case object C253 extends C -case object C254 extends C -case object C255 extends C -case object C256 extends C -case object C257 extends C -case object C258 extends C -case object C259 extends C -case object C260 extends C -case object C261 extends C -case object C262 extends C -case object C263 extends C -case object C264 extends C -case object C265 extends C -case object C266 extends C -case object C267 extends C -case object C268 extends C -case object C269 extends C -case object C270 extends C -case object C271 extends C -case object C272 extends C -case object C273 extends C -case object C274 extends C -case object C275 extends C -case object C276 extends C -case object C277 extends C -case object C278 extends C -case object C279 extends C -case object C280 extends C -case object C281 extends C -case object C282 extends C -case object C283 extends C -case object C284 extends C -case object C285 extends C -case object C286 extends C -case object C287 extends C -case object C288 extends C -case object C289 extends C -case object C290 extends C -case object C291 extends C -case object C292 extends C -case object C293 extends C -case object C294 extends C -case object C295 extends C -case object C296 extends C -case object C297 extends C -case object C298 extends C -case object C299 extends C -case object C300 extends C -case object C301 extends C -case object C302 extends C -case object C303 extends C -case object C304 extends C -case object C305 extends C -case object C306 extends C -case object C307 extends C -case object C308 extends C -case object C309 extends C -case object C310 extends C -case object C311 extends C -case object C312 extends C -case object C313 extends C -case object C314 extends C -case object C315 extends C -case object C316 extends C -case object C317 extends C -case object C318 extends C -case object C319 extends C -case object C320 extends C -case object C321 extends C -case object C322 extends C -case object C323 extends C -case object C324 extends C -case object C325 extends C -case object C326 extends C -case object C327 extends C -case object C328 extends C -case object C329 extends C -case object C330 extends C -case object C331 extends C -case object C332 extends C -case object C333 extends C -case object C334 extends C -case object C335 extends C -case object C336 extends C -case object C337 extends C -case object C338 extends C -case object C339 extends C -case object C340 extends C -case object C341 extends C -case object C342 extends C -case object C343 extends C -case object C344 extends C -case object C345 extends C -case object C346 extends C -case object C347 extends C -case object C348 extends C -case object C349 extends C -case object C350 extends C -case object C351 extends C -case object C352 extends C -case 
object C353 extends C -case object C354 extends C -case object C355 extends C -case object C356 extends C -case object C357 extends C -case object C358 extends C -case object C359 extends C -case object C360 extends C -case object C361 extends C -case object C362 extends C -case object C363 extends C -case object C364 extends C -case object C365 extends C -case object C366 extends C -case object C367 extends C -case object C368 extends C -case object C369 extends C -case object C370 extends C -case object C371 extends C -case object C372 extends C -case object C373 extends C -case object C374 extends C -case object C375 extends C -case object C376 extends C -case object C377 extends C -case object C378 extends C -case object C379 extends C -case object C380 extends C -case object C381 extends C -case object C382 extends C -case object C383 extends C -case object C384 extends C -case object C385 extends C -case object C386 extends C -case object C387 extends C -case object C388 extends C -case object C389 extends C -case object C390 extends C -case object C391 extends C -case object C392 extends C -case object C393 extends C -case object C394 extends C -case object C395 extends C -case object C396 extends C -case object C397 extends C -case object C398 extends C -case object C399 extends C -case object C400 extends C - -object M { - def f(c: C): Int = c match { - case C1 => 1 - case C2 => 2 - case C3 => 3 - case C4 => 4 - case C5 => 5 - case C6 => 6 - case C7 => 7 - case C8 => 8 - case C9 => 9 - case C10 => 10 - case C11 => 11 - case C12 => 12 - case C13 => 13 - case C14 => 14 - case C15 => 15 - case C16 => 16 - case C17 => 17 - case C18 => 18 - case C19 => 19 - case C20 => 20 - case C21 => 21 - case C22 => 22 - case C23 => 23 - case C24 => 24 - case C25 => 25 - case C26 => 26 - case C27 => 27 - case C28 => 28 - case C29 => 29 - case C30 => 30 - case C31 => 31 - case C32 => 32 - case C33 => 33 - case C34 => 34 - case C35 => 35 - case C36 => 36 - case C37 => 37 - case C38 => 38 - case C39 => 39 - case C40 => 40 - case C41 => 41 - case C42 => 42 - case C43 => 43 - case C44 => 44 - case C45 => 45 - case C46 => 46 - case C47 => 47 - case C48 => 48 - case C49 => 49 - case C50 => 50 - case C51 => 51 - case C52 => 52 - case C53 => 53 - case C54 => 54 - case C55 => 55 - case C56 => 56 - case C57 => 57 - case C58 => 58 - case C59 => 59 - case C60 => 60 - case C61 => 61 - case C62 => 62 - case C63 => 63 - case C64 => 64 - case C65 => 65 - case C66 => 66 - case C67 => 67 - case C68 => 68 - case C69 => 69 - case C70 => 70 - case C71 => 71 - case C72 => 72 - case C73 => 73 - case C74 => 74 - case C75 => 75 - case C76 => 76 - case C77 => 77 - case C78 => 78 - case C79 => 79 - case C80 => 80 - case C81 => 81 - case C82 => 82 - case C83 => 83 - case C84 => 84 - case C85 => 85 - case C86 => 86 - case C87 => 87 - case C88 => 88 - case C89 => 89 - case C90 => 90 - case C91 => 91 - case C92 => 92 - case C93 => 93 - case C94 => 94 - case C95 => 95 - case C96 => 96 - case C97 => 97 - case C98 => 98 - case C99 => 99 - case C100 => 100 - case C101 => 101 - case C102 => 102 - case C103 => 103 - case C104 => 104 - case C105 => 105 - case C106 => 106 - case C107 => 107 - case C108 => 108 - case C109 => 109 - case C110 => 110 - case C111 => 111 - case C112 => 112 - case C113 => 113 - case C114 => 114 - case C115 => 115 - case C116 => 116 - case C117 => 117 - case C118 => 118 - case C119 => 119 - case C120 => 120 - case C121 => 121 - case C122 => 122 - case C123 => 123 - case C124 => 124 - case C125 => 125 - case C126 
=> 126 - case C127 => 127 - case C128 => 128 - case C129 => 129 - case C130 => 130 - case C131 => 131 - case C132 => 132 - case C133 => 133 - case C134 => 134 - case C135 => 135 - case C136 => 136 - case C137 => 137 - case C138 => 138 - case C139 => 139 - case C140 => 140 - case C141 => 141 - case C142 => 142 - case C143 => 143 - case C144 => 144 - case C145 => 145 - case C146 => 146 - case C147 => 147 - case C148 => 148 - case C149 => 149 - case C150 => 150 - case C151 => 151 - case C152 => 152 - case C153 => 153 - case C154 => 154 - case C155 => 155 - case C156 => 156 - case C157 => 157 - case C158 => 158 - case C159 => 159 - case C160 => 160 - case C161 => 161 - case C162 => 162 - case C163 => 163 - case C164 => 164 - case C165 => 165 - case C166 => 166 - case C167 => 167 - case C168 => 168 - case C169 => 169 - case C170 => 170 - case C171 => 171 - case C172 => 172 - case C173 => 173 - case C174 => 174 - case C175 => 175 - case C176 => 176 - case C177 => 177 - case C178 => 178 - case C179 => 179 - case C180 => 180 - case C181 => 181 - case C182 => 182 - case C183 => 183 - case C184 => 184 - case C185 => 185 - case C186 => 186 - case C187 => 187 - case C188 => 188 - case C189 => 189 - case C190 => 190 - case C191 => 191 - case C192 => 192 - case C193 => 193 - case C194 => 194 - case C195 => 195 - case C196 => 196 - case C197 => 197 - case C198 => 198 - case C199 => 199 - case C200 => 200 - case C201 => 201 - case C202 => 202 - case C203 => 203 - case C204 => 204 - case C205 => 205 - case C206 => 206 - case C207 => 207 - case C208 => 208 - case C209 => 209 - case C210 => 210 - case C211 => 211 - case C212 => 212 - case C213 => 213 - case C214 => 214 - case C215 => 215 - case C216 => 216 - case C217 => 217 - case C218 => 218 - case C219 => 219 - case C220 => 220 - case C221 => 221 - case C222 => 222 - case C223 => 223 - case C224 => 224 - case C225 => 225 - case C226 => 226 - case C227 => 227 - case C228 => 228 - case C229 => 229 - case C230 => 230 - case C231 => 231 - case C232 => 232 - case C233 => 233 - case C234 => 234 - case C235 => 235 - case C236 => 236 - case C237 => 237 - case C238 => 238 - case C239 => 239 - case C240 => 240 - case C241 => 241 - case C242 => 242 - case C243 => 243 - case C244 => 244 - case C245 => 245 - case C246 => 246 - case C247 => 247 - case C248 => 248 - case C249 => 249 - case C250 => 250 - case C251 => 251 - case C252 => 252 - case C253 => 253 - case C254 => 254 - case C255 => 255 - case C256 => 256 - case C257 => 257 - case C258 => 258 - case C259 => 259 - case C260 => 260 - case C261 => 261 - case C262 => 262 - case C263 => 263 - case C264 => 264 - case C265 => 265 - case C266 => 266 - case C267 => 267 - case C268 => 268 - case C269 => 269 - case C270 => 270 - case C271 => 271 - case C272 => 272 - case C273 => 273 - case C274 => 274 - case C275 => 275 - case C276 => 276 - case C277 => 277 - case C278 => 278 - case C279 => 279 - case C280 => 280 - case C281 => 281 - case C282 => 282 - case C283 => 283 - case C284 => 284 - case C285 => 285 - case C286 => 286 - case C287 => 287 - case C288 => 288 - case C289 => 289 - case C290 => 290 - case C291 => 291 - case C292 => 292 - case C293 => 293 - case C294 => 294 - case C295 => 295 - case C296 => 296 - case C297 => 297 - case C298 => 298 - case C299 => 299 - case C300 => 300 - case C301 => 301 - case C302 => 302 - case C303 => 303 - case C304 => 304 - case C305 => 305 - case C306 => 306 - case C307 => 307 - case C308 => 308 - case C309 => 309 - case C310 => 310 - case C311 => 311 - case C312 => 312 - case C313 
=> 313 - case C314 => 314 - case C315 => 315 - case C316 => 316 - case C317 => 317 - case C318 => 318 - case C319 => 319 - case C320 => 320 - case C321 => 321 - case C322 => 322 - case C323 => 323 - case C324 => 324 - case C325 => 325 - case C326 => 326 - case C327 => 327 - case C328 => 328 - case C329 => 329 - case C330 => 330 - case C331 => 331 - case C332 => 332 - case C333 => 333 - case C334 => 334 - case C335 => 335 - case C336 => 336 - case C337 => 337 - case C338 => 338 - case C339 => 339 - case C340 => 340 - case C341 => 341 - case C342 => 342 - case C343 => 343 - case C344 => 344 - case C345 => 345 - case C346 => 346 - case C347 => 347 - case C348 => 348 - case C349 => 349 - case C350 => 350 - case C351 => 351 - case C352 => 352 - case C353 => 353 - case C354 => 354 - case C355 => 355 - case C356 => 356 - case C357 => 357 - case C358 => 358 - case C359 => 359 - case C360 => 360 - case C361 => 361 - case C362 => 362 - case C363 => 363 - case C364 => 364 - case C365 => 365 - case C366 => 366 - case C367 => 367 - case C368 => 368 - case C369 => 369 - case C370 => 370 - case C371 => 371 - case C372 => 372 - case C373 => 373 - case C374 => 374 - case C375 => 375 - case C376 => 376 - case C377 => 377 - case C378 => 378 - case C379 => 379 - case C380 => 380 - case C381 => 381 - case C382 => 382 - case C383 => 383 - case C384 => 384 - case C385 => 385 - case C386 => 386 - case C387 => 387 - case C388 => 388 - case C389 => 389 - case C390 => 390 - case C391 => 391 - case C392 => 392 - case C393 => 393 - case C394 => 394 - case C395 => 395 - case C396 => 396 - case C397 => 397 - case C398 => 398 - case C399 => 399 - case C400 => 400 - } -} diff --git a/test/files/presentation/memory-leaks.check b/test/files/presentation/memory-leaks.check deleted file mode 100644 index 86fb07757d99..000000000000 --- a/test/files/presentation/memory-leaks.check +++ /dev/null @@ -1,54 +0,0 @@ -reload: Trees.scala, Typers.scala, Types.scala -reload: Trees.scala -reload: Types.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -reload: Typers.scala -No leaks detected. 
diff --git a/test/files/presentation/memory-leaks.javaopts b/test/files/presentation/memory-leaks.javaopts deleted file mode 100644 index 9740f07b079b..000000000000 --- a/test/files/presentation/memory-leaks.javaopts +++ /dev/null @@ -1 +0,0 @@ --Dneeds.forked.jvm \ No newline at end of file diff --git a/test/files/presentation/memory-leaks/MemoryLeaksTest.scala b/test/files/presentation/memory-leaks/MemoryLeaksTest.scala deleted file mode 100644 index f09c6f8e2c79..000000000000 --- a/test/files/presentation/memory-leaks/MemoryLeaksTest.scala +++ /dev/null @@ -1,141 +0,0 @@ -import java.io.PrintWriter -import java.io.FileOutputStream -import java.util.Calendar - -import scala.reflect.internal.util.BatchSourceFile -import scala.tools.nsc.interactive -import scala.tools.nsc.interactive.tests._ -import scala.tools.nsc.io._ -import scala.tools.nsc.doc - -/** This test runs the presentation compiler on the Scala compiler project itself and records memory consumption. - * - * The test scenario is to open Typers, Trees and Types, then repeatedly add and remove one character - * in Typers.scala. Each step causes the parser, namer, and type checker to run. - * - * At each step we record the memory usage after the GC has run. At the end of the test, - * simple linear regression is used to compute the straight line that best fits the - * curve, and if the slope is higher than 1 (meaning a leak of 1MB/run), we fail the test. - * - * The Scala compiler sources are assumed to be under 'basedir/src/compiler'. - * - * The individual data points are saved under 'usedMem-.txt', under the test project - * directory. Use the cool graph-it.R (https://github.com/scala-ide/scala-ide/blob/master/org.scala-ide.sdt.core.tests/graph-it.R) - * script to see the memory curve for the given test run. 
- */ -object Test extends InteractiveTest { - final val mega = 1024 * 1024 - - import interactive.Global - trait InteractiveScaladocAnalyzer extends interactive.InteractiveAnalyzer with doc.ScaladocAnalyzer { - val global : Global - override def newTyper(context: Context) = new Typer(context) with InteractiveTyper with ScaladocTyper { - override def canAdaptConstantTypeToLiteral = false - } - } - - private class ScaladocEnabledGlobal extends Global(settings, compilerReporter) { - override lazy val analyzer = new { - val global: ScaladocEnabledGlobal.this.type = ScaladocEnabledGlobal.this - } with InteractiveScaladocAnalyzer - } - - override def createGlobal: Global = new ScaladocEnabledGlobal - - override def execute(): Unit = memoryConsumptionTest() - - def batchSource(name: String) = - new BatchSourceFile(AbstractFile.getFile(name)) - - def memoryConsumptionTest() { - val N = 50 - val filename = "usedmem-%tF.txt".format(Calendar.getInstance.getTime) - - val typerUnit = AbstractFile.getFile(baseDir.parent.parent.parent.parent / "src/compiler/scala/tools/nsc/typechecker/Typers.scala") - val typesUnit = AbstractFile.getFile(baseDir.parent.parent.parent.parent / "src/reflect/scala/reflect/internal/Types.scala") - val treesUnit = AbstractFile.getFile(baseDir.parent.parent.parent.parent / "src/reflect/scala/reflect/internal/Trees.scala") - - askReload(Seq(new BatchSourceFile(typerUnit), new BatchSourceFile(typesUnit), new BatchSourceFile(treesUnit))) - typeCheckWith(treesUnit, new String(treesUnit.toCharArray)) - typeCheckWith(typesUnit, new String(typesUnit.toCharArray)) - - val originalTyper = new String(typerUnit.toCharArray) - - val (prefix, postfix) = originalTyper.splitAt(originalTyper.indexOf("import global._")) - val changedTyper = prefix + " a\n " + postfix - - val usedMem = for (i <- 1 to N) yield { - val src = if (i % 2 == 0) originalTyper else changedTyper - - val usedMem = withGC { - typeCheckWith(typerUnit, src) - } - - usedMem / mega // report size in MB - } - - //dumpDataToFile(filename, usedMem) - // drop the first two measurements, since the compiler needs some memory when initializing - val (a, b) = linearModel((3L to N).toSeq, usedMem.drop(2)) - //println("LinearModel: constant: %.4f\tslope:%.4f".format(a, b)) - - if (b > 1.0) - println("Rate of memory consumption is alarming! %.4f MB/run".format(b)) - else - println("No leaks detected.") - } - - private def typeCheckWith(file: AbstractFile, src: String) = { - val sourceFile = new BatchSourceFile(file, src.toCharArray) - askReload(Seq(sourceFile)) - askLoadedTyped(sourceFile).get // block until it's here - } - - private def dumpDataToFile(filename: String, usedMem: Seq[Long]) { - val outputFile = new PrintWriter(new FileOutputStream(filename)) - outputFile.println("\tusedMem") - for ((dataPoint, i) <- usedMem.zipWithIndex) { - outputFile.println("%d\t%d".format(i, dataPoint)) - } - outputFile.close() - } - - - /** Return the linear model of these values, (a, b). First value is the constant factor, - * second value is the slope, i.e. `y = a + bx` - * - * The linear model of a set of points is a straight line that minimizes the square distance - * between the each point and the line. 
- * - * See: http://en.wikipedia.org/wiki/Simple_linear_regression - */ - def linearModel(xs: Seq[Long], ys: Seq[Long]): (Double, Double) = { - require(xs.length == ys.length) - - def mean(v: Seq[Long]): Double = v.sum.toDouble / v.length - - val meanXs = mean(xs) - val meanYs = mean(ys) - - val beta = (mean((xs, ys).zipped.map(_ * _)) - meanXs * meanYs) / (mean(xs.map(x => x * x)) - meanXs * meanXs) - val alfa = meanYs - beta * meanXs - - (alfa, beta) - } - - /** Run the given closure and return the amount of used memory at the end of its execution. - * - * Runs the GC before and after the execution of `f'. - */ - def withGC(f: => Unit): Long = { - val r = Runtime.getRuntime - System.gc() - - f; - - System.gc() - - r.totalMemory() - r.freeMemory() - } - -} diff --git a/test/files/run/t6853.scala b/test/files/run/t6853.scala deleted file mode 100644 index a518edb03228..000000000000 --- a/test/files/run/t6853.scala +++ /dev/null @@ -1,18 +0,0 @@ -// Test cases: the only place we can cut and paste without crying -// ourself to sleep. -object Test { - - def main(args: Array[String]): Unit = { - // First testing the basic operations - val m = collection.mutable.ListMap[String, Int]() - var i = 0 - while(i < 2) { m += ("foo" + i) -> i; i = i+1} - assert(m == Map("foo1"->1,"foo0"->0)) - m-= "foo0" - assert(m == Map("foo1"->1)) - // Now checking if it scales as described in scala/bug#6853 - i = 0 - while(i < 80000) { m += ("foo" + i) -> i; i = i+1} - assert(m.size == 80000) - } -} diff --git a/test/files/run/t6969.check b/test/files/run/t6969.check deleted file mode 100644 index 78297812c946..000000000000 --- a/test/files/run/t6969.check +++ /dev/null @@ -1 +0,0 @@ -All threads completed. diff --git a/test/files/run/t6969.scala b/test/files/run/t6969.scala deleted file mode 100644 index c4561b442466..000000000000 --- a/test/files/run/t6969.scala +++ /dev/null @@ -1,32 +0,0 @@ - - -import scala.language.{ reflectiveCalls } - -object Test { - private type Clearable = { def clear(): Unit } - private def choke() = { - try new Array[Object]((Runtime.getRuntime().maxMemory min Int.MaxValue).toInt) - catch { - case _: OutOfMemoryError => // what do you mean, out of memory? 
- case t: Throwable => println(t) - } - } - private def f(x: Clearable) = x.clear() - class Choker(id: Int) extends Thread { - private def g(iteration: Int) = { - val map = scala.collection.mutable.Map[Int, Int](1 -> 2) - try f(map) catch { case t: NullPointerException => println(s"Failed at $id/$iteration") ; throw t } - choke() - } - override def run() { - 1 to 50 foreach g - } - } - - def main(args: Array[String]): Unit = { - val threads = 1 to 3 map (id => new Choker(id)) - threads foreach (_.start()) - threads foreach (_.join()) - println("All threads completed.") - } -} From 21ada8801f0d79fe2ae4f49fda572135d39b388e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 7 Mar 2018 22:50:15 +1000 Subject: [PATCH 1276/2793] Update to partest 1.1.7 --- versions.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/versions.properties b/versions.properties index 31184131638c..a12b041e9ff9 100644 --- a/versions.properties +++ b/versions.properties @@ -22,6 +22,6 @@ scala.binary.version=2.12 scala-xml.version.number=1.0.6 scala-parser-combinators.version.number=1.0.7 scala-swing.version.number=2.0.0 -partest.version.number=1.1.1 +partest.version.number=1.1.7 scala-asm.version=6.0.0-scala-1 jline.version=2.14.5 From 2789e264f998e007e7aa5e1db64ed9f3b6fc2f76 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Mon, 12 Mar 2018 08:41:14 -0400 Subject: [PATCH 1277/2793] Put back one-argument require for the convenience of downstream users. I was inspired by paulp's comment years ago that we can remove the one-argument forms of `assert` and `require` by shadowing them with two-argument versions in `Global` (now `SymbolTable`). I tried carrying out that hope, and was reminded by the community build that people write compiler plugins against the compiler "API", so that just won't fly. I'm advised that it's kinder to put them back than to make plugin writers scramble to change their code in order to release for 2.12.5, so let's do that. I'd also like us to consider deprecating them in a future release, since raw assertions make guessing what went wrong harder. Or not; it's not my call to make. --- .../scala/reflect/internal/SymbolTable.scala | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 40546145ba90..9c2779f59412 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -132,16 +132,16 @@ abstract class SymbolTable extends macros.Universe } // Getting in front of Predef's asserts to supplement with more info; see `supplementErrorMessage`. - // This has the happy side effect of masking the one argument form of assert - // (but for now it's reproduced here, because there are a million uses to fix). + // This has the happy side effect of masking the one argument forms of assert/require + // (but for now they're reproduced here, because there are a million uses internal and external to fix). @inline final def assert(assertion: Boolean, message: => Any): Unit = { // calling Predef.assert would send a freshly allocated closure wrapping the one received as argument. if (!assertion) throwAssertionError(message) } - // for those of us who use IDEs, this will now at least show up struck-through - @deprecated("prefer to use the two-argument form", since = "2.12.5") + // Let's consider re-deprecating this in the 2.13 series, to encourage informative messages. 
+ //@deprecated("prefer to use the two-argument form", since = "2.12.5") final def assert(assertion: Boolean): Unit = { assert(assertion, "") } @@ -152,6 +152,12 @@ abstract class SymbolTable extends macros.Universe if (!requirement) throwRequirementError(message) } + // Let's consider re-deprecating this in the 2.13 series, to encourage informative messages. + //@deprecated("prefer to use the two-argument form", since = "2.12.5") + final def require(requirement: Boolean): Unit = { + require(requirement, "") + } + // extracted from `assert`/`require` to make them as small (and inlineable) as possible private[internal] def throwAssertionError(msg: Any): Nothing = throw new java.lang.AssertionError(s"assertion failed: ${supplementErrorMessage(String valueOf msg)}") From 3ce99cee351f5a72a6e90646af5b9cc76f226b6d Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 9 Mar 2018 15:27:32 +0100 Subject: [PATCH 1278/2793] Method values are always eta-expanded `m _` is shorthand for eta-expansion. Add an attachment to `m` in order to know, in adapt, that it is used as an explicit method value. --- .../nsc/typechecker/StdAttachments.scala | 3 ++ .../scala/tools/nsc/typechecker/Typers.scala | 46 +++++++++++-------- test/files/neg/t10279.check | 23 +++++++--- test/files/neg/t10279.scala | 17 ++++--- test/files/neg/t7187.check | 39 +++++++++++++--- test/files/neg/t7187.scala | 26 ++++++++++- test/files/run/byname.check | 1 - 7 files changed, 115 insertions(+), 40 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala index 731ce83c160f..6c2ac8f301bc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala +++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala @@ -182,4 +182,7 @@ trait StdAttachments { * track of other adapted trees. 
*/ case class OriginalTreeAttachment(original: Tree) + + /** Added to trees that appear in a method value, e.g., to `f(x)` in `f(x) _` */ + case object MethodValueAttachment } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index fb489eccc9f8..1499f8d3b146 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -905,22 +905,29 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else if (isFunctionType(pt) || (!mt.params.isEmpty && samOf(pt).exists)) { // scala/bug#9536 `!mt.params.isEmpty &&`: for backwards compatibility with 2.11, // we don't adapt a zero-arg method value to a SAM - // In 2.13, we won't do any eta-expansion for zero-arg method values, but we should deprecate first + // In 2.13, we won't do any eta-expansion for zero-arg methods, but we should deprecate first debuglog(s"eta-expanding $tree: ${tree.tpe} to $pt") checkParamsConvertible(tree, tree.tpe) - // scala/bug#7187 eta-expansion of zero-arg method value is deprecated, switch order of (4.3) and (4.2) in 2.13 - def isExplicitEtaExpansion = original match { - case Typed(_, Function(Nil, EmptyTree)) => true // tree shape for `f _` - case _ => false - } - val isNullaryPtEtaExpansion = mt.params.isEmpty && !isExplicitEtaExpansion - val skipEta = isNullaryPtEtaExpansion && settings.isScala213 - if (skipEta) emptyApplication + // method values (`m _`) are always eta-expanded (this syntax will disappear once we eta-expand regardless of expected type, at least for arity > 0) + // a "naked" method reference (`m`) may or not be eta expanded -- currently, this depends on the expected type and the arity (the conditions for this are in flux) + def isMethodValue = tree.getAndRemoveAttachment[MethodValueAttachment.type].isDefined + val nakedZeroAryMethod = mt.params.isEmpty && !isMethodValue + + // scala/bug#7187 eta-expansion of zero-arg method value is deprecated + // 2.13 will switch order of (4.3) and (4.2), always inserting () before attempting eta expansion + // (This effectively disables implicit eta-expansion of 0-ary methods.) + // See mind-bending stuff like scala/bug#9178 + if (nakedZeroAryMethod && settings.isScala213) emptyApplication else { - if (isNullaryPtEtaExpansion && settings.isScala212) currentRun.reporting.deprecationWarning(tree.pos, NoSymbol, - s"Eta-expansion of zero-argument method values is deprecated. Did you intend to write ${Apply(tree, Nil)}?", "2.12.0") + // eventually, we will deprecate insertion of `()` (except for java-defined methods) -- this is already the case in dotty + // Once that's done, we can more aggressively eta-expand method references, even if they are 0-arity + // 2.13 will already eta-expand non-zero-arity methods regardless of expected type (whereas 2.12 requires a function-equivalent type) + if (nakedZeroAryMethod && settings.isScala212) { + currentRun.reporting.deprecationWarning(tree.pos, NoSymbol, + s"Eta-expansion of zero-argument methods is deprecated. To avoid this warning, write ${Function(Nil, Apply(tree, Nil))}.", "2.12.0") + } val tree0 = etaExpand(context.unit, tree, this) @@ -4572,11 +4579,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * (2) If $e$ is a parameterless method or call-by-name parameter of type `=>$T$`, `$e$ _` represents * the function of type `() => $T$`, which evaluates $e$ when it is applied to the empty parameterlist `()`. 
*/ - def typedEta(methodValue: Tree, original: Tree): Tree = methodValue.tpe match { + def typedEta(methodValue: Tree): Tree = methodValue.tpe match { case tp@(MethodType(_, _) | PolyType(_, MethodType(_, _))) => // (1) val formals = tp.params if (isFunctionType(pt) || samMatchesFunctionBasedOnArity(samOf(pt), formals)) methodValue - else adapt(methodValue, mode, checkArity(methodValue)(functionTypeWildcard(formals.length)), original) + else adapt(methodValue, mode, checkArity(methodValue)(functionTypeWildcard(formals.length))) case TypeRef(_, ByNameParamClass, _) | NullaryMethodType(_) => // (2) val pos = methodValue.pos @@ -5333,14 +5340,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // to do that we need to typecheck the tree first (we need a symbol of the eta-expandee) // that typecheck must not trigger macro expansions, so we explicitly prohibit them // however we cannot do `context.withMacrosDisabled` - // because `expr` might contain nested macro calls (see scala/bug#6673) - // - // Note: apparently `Function(Nil, EmptyTree)` is the secret parser marker - // which means trailing underscore -- denoting a method value. See makeMethodValue in TreeBuilder. - case Typed(expr, Function(Nil, EmptyTree)) => + // because `expr` might contain nested macro calls (see scala/bug#6673). + // Otherwise, eta-expand, passing the original tree, which is required in adapt + // for trees of the form `f() _`: if the method type takes implicits, the fallback + // strategy will use `f()`; else if not, original is used to distinguish an explicit + // method value from eta-expansion driven by an expected function type. + case MethodValue(expr) => typed1(suppressMacroExpansion(expr), mode, pt) match { case macroDef if treeInfo.isMacroApplication(macroDef) => MacroEtaError(macroDef) - case methodValue => typedEta(checkDead(methodValue), expr) + case methodValue => typedEta(checkDead(methodValue).updateAttachment(MethodValueAttachment)) } case Typed(expr, tpt) => val tpt1 = typedType(tpt, mode) // type the ascribed type first diff --git a/test/files/neg/t10279.check b/test/files/neg/t10279.check index f573cd38d1fc..0c8fc3f79894 100644 --- a/test/files/neg/t10279.check +++ b/test/files/neg/t10279.check @@ -1,7 +1,18 @@ -t10279.scala:9: error: could not find implicit value for parameter s: String - val bar = foo(1) _ - ^ -t10279.scala:12: error: could not find implicit value for parameter x: Int - val barSimple = fooSimple _ +t10279.scala:5: error: could not find implicit value for parameter s: String + val t1 = foo(1) _ // error: no implicit string + ^ +t10279.scala:6: error: _ must follow method; cannot follow String + val t2 = foo(1)("") _ // error: _ must follow method + ^ +t10279.scala:7: error: could not find implicit value for parameter s: String + val t3 = foo _ // error: no implicit string + ^ +t10279.scala:14: error: type mismatch; + found : Int + required: ? => ? + val t6 = { implicit val i = 0; bar(0) _ } // error: type mismatch, found Int, required: ? => ? + ^ +t10279.scala:17: error: could not find implicit value for parameter x: Int + val barSimple = fooSimple _ // error: no implicit int ^ -two errors found +5 errors found diff --git a/test/files/neg/t10279.scala b/test/files/neg/t10279.scala index be0f52999dd4..5865f288d56d 100644 --- a/test/files/neg/t10279.scala +++ b/test/files/neg/t10279.scala @@ -1,13 +1,18 @@ object Test { - def foo(i: Int)(implicit s: String): String = ??? 
+ def foo(i: Int)(implicit s: String): String = "" - def test(implicit s: String) { - // foo(1) _ - } + val t1 = foo(1) _ // error: no implicit string + val t2 = foo(1)("") _ // error: _ must follow method + val t3 = foo _ // error: no implicit string + val t4 = { implicit val s = ""; foo _ } // eta-expansion over the non-implicit parameter list + val t4a: Int => String = t4 // ok + val t5 = { implicit val s = ""; foo(1) _ } // compiles as Predef.wrapString(foo(1)(s)) + val t5a: collection.immutable.WrappedString = t5 // don't ask me why - val bar = foo(1) _ + def bar(i: Int)(implicit j: Int): Int = 0 + val t6 = { implicit val i = 0; bar(0) _ } // error: type mismatch, found Int, required: ? => ? def fooSimple(implicit x: Int): Int = x - val barSimple = fooSimple _ + val barSimple = fooSimple _ // error: no implicit int } diff --git a/test/files/neg/t7187.check b/test/files/neg/t7187.check index a30803c74689..7290256a5e6c 100644 --- a/test/files/neg/t7187.check +++ b/test/files/neg/t7187.check @@ -1,6 +1,33 @@ -t7187.scala:3: warning: Eta-expansion of zero-argument method values is deprecated. Did you intend to write EtaExpandZeroArg.this.foo()? - val f: () => Any = foo - ^ -error: No warnings can be incurred under -Xfatal-warnings. -one warning found -one error found +t7187.scala:4: warning: Eta-expansion of zero-argument methods is deprecated. To avoid this warning, write (() => EtaExpandZeroArg.this.foo()). + val t1b: () => Any = foo // eta-expansion (deprecated) in 2.12, `()`-insertion in 2.13 + ^ +t7187.scala:8: error: _ must follow method; cannot follow () => String + val t1f: Any = foo() _ // error: _ must follow method + ^ +t7187.scala:11: error: type mismatch; + found : String + required: () => Any + val t2a: () => Any = bar // error: no eta-expansion of zero-arglist-methods + ^ +t7187.scala:12: error: not enough arguments for method apply: (index: Int)Char in class StringOps. +Unspecified value parameter index. + val t2b: () => Any = bar() // error: bar doesn't take arguments, so expanded to bar.apply(), which misses an argument + ^ +t7187.scala:15: error: not enough arguments for method apply: (index: Int)Char in class StringOps. +Unspecified value parameter index. + val t2e: Any = bar() _ // error: not enough arguments for method apply + ^ +t7187.scala:18: warning: Eta-expansion of zero-argument methods is deprecated. To avoid this warning, write (() => EtaExpandZeroArg.this.baz()). + val t3a: () => Any = baz // eta-expansion (deprecated) in 2.12, error in 2.13 + ^ +t7187.scala:21: error: _ must follow method; cannot follow String + val t3d: Any = baz() _ // error: _ must follow method + ^ +t7187.scala:24: warning: Eta-expansion of zero-argument methods is deprecated. To avoid this warning, write (() => EtaExpandZeroArg.this.zap()). + val t4a: () => Any = zap // eta-expansion (deprecated) in 2.12, error in 2.13 + ^ +t7187.scala:25: warning: Eta-expansion of zero-argument methods is deprecated. To avoid this warning, write (() => EtaExpandZeroArg.this.zap()()). 
+ val t4b: () => Any = zap() // ditto + ^ +four warnings found +5 errors found diff --git a/test/files/neg/t7187.scala b/test/files/neg/t7187.scala index 45d33f06af31..62f86dc51696 100644 --- a/test/files/neg/t7187.scala +++ b/test/files/neg/t7187.scala @@ -1,6 +1,28 @@ class EtaExpandZeroArg { def foo(): () => String = () => "" - val f: () => Any = foo + val t1a: () => Any = foo() // ok (obviously) + val t1b: () => Any = foo // eta-expansion (deprecated) in 2.12, `()`-insertion in 2.13 + val t1c: () => Any = { val t = foo; t } // ok, no expected type, `()`-insertion + val t1d: () => Any = foo _ // ok + val t1e: Any = foo _ // ok + val t1f: Any = foo() _ // error: _ must follow method - // f() would evaluate to instead of "" + def bar = "" + val t2a: () => Any = bar // error: no eta-expansion of zero-arglist-methods + val t2b: () => Any = bar() // error: bar doesn't take arguments, so expanded to bar.apply(), which misses an argument + val t2c: () => Any = bar _ // ok + val t2d: Any = bar _ // ok + val t2e: Any = bar() _ // error: not enough arguments for method apply + + def baz() = "" + val t3a: () => Any = baz // eta-expansion (deprecated) in 2.12, error in 2.13 + val t3b: () => Any = baz _ // ok + val t3c: Any = baz _ // ok + val t3d: Any = baz() _ // error: _ must follow method + + def zap()() = "" + val t4a: () => Any = zap // eta-expansion (deprecated) in 2.12, error in 2.13 + val t4b: () => Any = zap() // ditto + val t4c: () => Any = zap _ // ok + val t4d: () => Any = zap() _ // ok } diff --git a/test/files/run/byname.check b/test/files/run/byname.check index 6829e550a611..7e49eedec111 100644 --- a/test/files/run/byname.check +++ b/test/files/run/byname.check @@ -1,4 +1,3 @@ -warning: there were two deprecation warnings (since 2.12.0); re-run with -deprecation for details test no braces completed properly test no braces r completed properly test plain completed properly From b0a6c8343b1b03a785ca05d5c0046d72c51bcfc6 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 12 Mar 2018 17:01:05 +0100 Subject: [PATCH 1279/2793] Some comments about re-typing implicit applications --- .../scala/tools/nsc/typechecker/Typers.scala | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 1499f8d3b146..f2ace996c8b4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -821,6 +821,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * (14) When in mode EXPRmode, do SAM conversion * (15) When in mode EXPRmode, apply a view * If all this fails, error + * + * Note: the `original` tree parameter is for re-typing implicit method invocations (see below) + * and should not be used otherwise. TODO: can it be replaced with a tree attachment? 
*/ protected def adapt(tree: Tree, mode: Mode, pt: Type, original: Tree = EmptyTree): Tree = { def hasUndets = context.undetparams.nonEmpty @@ -842,13 +845,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper setError(tree) else withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree))(typer1 => - if (original != EmptyTree && pt != WildcardType) ( + if (original != EmptyTree && pt != WildcardType) { typer1 silent { tpr => val withImplicitArgs = tpr.applyImplicitArgs(tree) if (tpr.context.reporter.hasErrors) tree // silent will wrap it in SilentTypeError anyway else tpr.typed(withImplicitArgs, mode, pt) - } - orElse { _ => + } orElse { _ => + // Re-try typing (applying to implicit args) without expected type. Add in 53d98e7d42 to + // for better error message (scala/bug#2180, http://www.scala-lang.org/old/node/3453.html) val resetTree = resetAttrs(original) resetTree match { case treeInfo.Applied(fun, targs, args) => @@ -861,8 +865,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ => } debuglog(s"fallback on implicits: ${tree}/$resetTree") - // SO-10066 Need to patch the enclosing tree in the context to make translation of Dynamic - // work during fallback typechecking below. + // scala/bug#10066 Need to patch the enclosing tree in the context to make translation of Dynamic + // work during fallback typechecking below. val resetContext: Context = { object substResetForOriginal extends Transformer { override def transform(tree: Tree): Tree = { @@ -877,10 +881,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // Q: `typed` already calls `pluginsTyped` and `adapt`. the only difference here is that // we pass `EmptyTree` as the `original`. intended? added in 2009 (53d98e7d42) by martin. tree1 setType pluginsTyped(tree1.tpe, typer1, tree1, mode, pt) - if (tree1.isEmpty) tree1 else typer1.adapt(tree1, mode, pt, EmptyTree) + if (tree1.isEmpty) tree1 else typer1.adapt(tree1, mode, pt) } } - ) + } else typer1.typed(typer1.applyImplicitArgs(tree), mode, pt) ) From 94030c28d5f484b7534ece590394f6351bdfa6ac Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 4 Dec 2017 21:12:41 +1000 Subject: [PATCH 1280/2793] [backport] Elide prefixes in printed types uniformly in runtime reflection The logic that decides to print `Function`, rather than `scala.Function` did not account for the multiplicity of symbols for a given package in the JavaMirrors universe. 
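
To illustrate the user-visible effect (transcript taken from the updated
reflection-repl-elementary.check below; the example is only an illustration,
not part of the fix itself), the prefix is now elided in runtime reflection
as well:

```
scala> scala.reflect.runtime.universe.typeOf[List[Nothing]]
res0: reflect.runtime.universe.Type = List[Nothing]
```

Before this change the same expression printed `scala.List[Nothing]`, because
the mirror's `scala` package symbol was a distinct object from the one
recorded in UnqualifiedOwners, so the reference-equality check missed it.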
--- .../scala/reflect/internal/Symbols.scala | 11 +++++++---- test/files/jvm/manifests-new.check | 8 ++++---- test/files/run/abstypetags_core.check | 2 +- test/files/run/exprs_serialize.check | 2 +- test/files/run/freetypes_false_alarm1.check | 2 +- .../run/inferred-type-constructors-hou.check | 2 +- .../files/run/inferred-type-constructors.check | 2 +- .../interop_manifests_are_abstypetags.check | 2 +- .../run/interop_manifests_are_typetags.check | 2 +- test/files/run/macro-reify-unreify.check | 2 +- test/files/run/reflection-equality.check | 6 +++--- .../run/reflection-magicsymbols-invoke.check | 16 ++++++++-------- .../run/reflection-magicsymbols-repl.check | 16 ++++++++-------- .../files/run/reflection-repl-elementary.check | 2 +- .../run/reflection-valueclasses-magic.check | 18 +++++++++--------- test/files/run/reify_newimpl_26.check | 2 +- test/files/run/t5256c.check | 2 +- test/files/run/t5256d.check | 4 ++-- test/files/run/t5256h.check | 2 +- test/files/run/t5418a.check | 2 +- test/files/run/t7008.check | 8 ++++---- test/files/run/typetags_core.check | 2 +- 22 files changed, 59 insertions(+), 56 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 40d67d8b7c83..c892db898724 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -872,10 +872,13 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Conditions where we omit the prefix when printing a symbol, to avoid * unpleasantries like Predef.String, $iw.$iw.Foo and .Bippy. */ - final def isOmittablePrefix = /*!settings.debug.value &&*/ ( - UnqualifiedOwners(skipPackageObject) - || isEmptyPrefix - ) + final def isOmittablePrefix = /*!settings.debug.value &&*/ { + // scala/bug#5941 runtime reflection can have distinct symbols representing `package scala` (from different mirrors) + // We check equality by FQN here to make sure we omit prefixes uniformly for all of them. 
+ def matches(sym1: Symbol, sym2: Symbol) = (sym1 eq sym2) || (sym1.hasPackageFlag && sym2.hasPackageFlag && sym1.name == sym2.name && sym1.fullNameString == sym2.fullNameString) + val skipped = skipPackageObject + UnqualifiedOwners.exists((sym: Symbol) => matches(sym, skipped)) || isEmptyPrefix + } def isEmptyPrefix = ( isEffectiveRoot // has no prefix for real, or || isAnonOrRefinementClass // has uninteresting or prefix diff --git a/test/files/jvm/manifests-new.check b/test/files/jvm/manifests-new.check index 7b229ba6794c..73b7bcb86af3 100644 --- a/test/files/jvm/manifests-new.check +++ b/test/files/jvm/manifests-new.check @@ -2,25 +2,25 @@ x=(), t=TypeTag[Unit], k=TypeRef, s=class Unit x=true, t=TypeTag[Boolean], k=TypeRef, s=class Boolean x=a, t=TypeTag[Char], k=TypeRef, s=class Char x=1, t=TypeTag[Int], k=TypeRef, s=class Int -x=abc, t=TypeTag[java.lang.String], k=TypeRef, s=class String +x=abc, t=TypeTag[String], k=TypeRef, s=class String x='abc, t=TypeTag[Symbol], k=TypeRef, s=class Symbol x=List(()), t=TypeTag[List[Unit]], k=TypeRef, s=class List x=List(true), t=TypeTag[List[Boolean]], k=TypeRef, s=class List x=List(1), t=TypeTag[List[Int]], k=TypeRef, s=class List -x=List(abc), t=TypeTag[List[java.lang.String]], k=TypeRef, s=class List +x=List(abc), t=TypeTag[List[String]], k=TypeRef, s=class List x=List('abc), t=TypeTag[List[Symbol]], k=TypeRef, s=class List x=[Z, t=TypeTag[Array[Boolean]], k=TypeRef, s=class Array x=[C, t=TypeTag[Array[Char]], k=TypeRef, s=class Array x=[I, t=TypeTag[Array[Int]], k=TypeRef, s=class Array -x=[Ljava.lang.String;, t=TypeTag[Array[java.lang.String]], k=TypeRef, s=class Array +x=[Ljava.lang.String;, t=TypeTag[Array[String]], k=TypeRef, s=class Array x=[Lscala.Symbol;, t=TypeTag[Array[Symbol]], k=TypeRef, s=class Array x=((),()), t=TypeTag[(Unit, Unit)], k=TypeRef, s=class Tuple2 x=(true,false), t=TypeTag[(Boolean, Boolean)], k=TypeRef, s=class Tuple2 x=(1,2), t=TypeTag[(Int, Int)], k=TypeRef, s=class Tuple2 -x=(abc,xyz), t=TypeTag[(java.lang.String, java.lang.String)], k=TypeRef, s=class Tuple2 +x=(abc,xyz), t=TypeTag[(String, String)], k=TypeRef, s=class Tuple2 x=('abc,'xyz), t=TypeTag[(Symbol, Symbol)], k=TypeRef, s=class Tuple2 x=Test$, t=TypeTag[Test.type], k=SingleType, s=object Test diff --git a/test/files/run/abstypetags_core.check b/test/files/run/abstypetags_core.check index 980b4719bf07..d81842b05453 100644 --- a/test/files/run/abstypetags_core.check +++ b/test/files/run/abstypetags_core.check @@ -23,7 +23,7 @@ TypeTag[AnyVal] true TypeTag[AnyRef] true -TypeTag[java.lang.Object] +TypeTag[Object] true TypeTag[Null] true diff --git a/test/files/run/exprs_serialize.check b/test/files/run/exprs_serialize.check index 551823ccdc70..3e55376ce376 100644 --- a/test/files/run/exprs_serialize.check +++ b/test/files/run/exprs_serialize.check @@ -1,5 +1,5 @@ Expr[Int(2)](2) -Expr[java.lang.String]({ +Expr[String]({ def foo = "hello"; foo.$plus("world!") }) diff --git a/test/files/run/freetypes_false_alarm1.check b/test/files/run/freetypes_false_alarm1.check index 085b3ee50b13..a9df3544acce 100644 --- a/test/files/run/freetypes_false_alarm1.check +++ b/test/files/run/freetypes_false_alarm1.check @@ -1 +1 @@ -scala.List[Int] +List[Int] diff --git a/test/files/run/inferred-type-constructors-hou.check b/test/files/run/inferred-type-constructors-hou.check index 6b0982334189..8b226db10584 100644 --- a/test/files/run/inferred-type-constructors-hou.check +++ b/test/files/run/inferred-type-constructors-hou.check @@ -51,6 +51,6 @@ warning: there were two feature 
warnings; re-run with -feature for details Seq[Int] Array[Int] scala.collection.AbstractSet[Int] - Comparable[java.lang.String] + Comparable[String] scala.collection.immutable.LinearSeq[Int] Iterable[Int] diff --git a/test/files/run/inferred-type-constructors.check b/test/files/run/inferred-type-constructors.check index 4a63853bd985..ee8530d9f97f 100644 --- a/test/files/run/inferred-type-constructors.check +++ b/test/files/run/inferred-type-constructors.check @@ -51,6 +51,6 @@ warning: there were two feature warnings; re-run with -feature for details Seq[Int] Array[Int] scala.collection.AbstractSet[Int] - Comparable[java.lang.String] + Comparable[String] scala.collection.immutable.LinearSeq[Int] Iterable[Int] diff --git a/test/files/run/interop_manifests_are_abstypetags.check b/test/files/run/interop_manifests_are_abstypetags.check index 19a35ad3dbde..c33d7a7dca1c 100644 --- a/test/files/run/interop_manifests_are_abstypetags.check +++ b/test/files/run/interop_manifests_are_abstypetags.check @@ -1,3 +1,3 @@ Int -java.lang.String +String Array[Int] diff --git a/test/files/run/interop_manifests_are_typetags.check b/test/files/run/interop_manifests_are_typetags.check index 19a35ad3dbde..c33d7a7dca1c 100644 --- a/test/files/run/interop_manifests_are_typetags.check +++ b/test/files/run/interop_manifests_are_typetags.check @@ -1,3 +1,3 @@ Int -java.lang.String +String Array[Int] diff --git a/test/files/run/macro-reify-unreify.check b/test/files/run/macro-reify-unreify.check index 7a6d53c47e80..55d61e6068b4 100644 --- a/test/files/run/macro-reify-unreify.check +++ b/test/files/run/macro-reify-unreify.check @@ -1 +1 @@ -hello world = Expr[java.lang.String("hello world")]("hello world") +hello world = Expr[String("hello world")]("hello world") diff --git a/test/files/run/reflection-equality.check b/test/files/run/reflection-equality.check index 09a08586309e..b995e0cfb124 100644 --- a/test/files/run/reflection-equality.check +++ b/test/files/run/reflection-equality.check @@ -20,16 +20,16 @@ cs: reflect.runtime.universe.ClassSymbol = class X scala> val ts: Type = cs.info ts: reflect.runtime.universe.Type = -scala.AnyRef { +AnyRef { def (): X - def methodIntIntInt(x: scala.Int,y: scala.Int): scala.Int + def methodIntIntInt(x: Int,y: Int): Int } scala> val ms: MethodSymbol = ts.decl(TermName("methodIntIntInt")).asMethod ms: reflect.runtime.universe.MethodSymbol = method methodIntIntInt scala> val MethodType( _, t1 ) = ms.info -t1: reflect.runtime.universe.Type = scala.Int +t1: reflect.runtime.universe.Type = Int scala> val t2 = typeOf[scala.Int] t2: reflect.runtime.universe.Type = Int diff --git a/test/files/run/reflection-magicsymbols-invoke.check b/test/files/run/reflection-magicsymbols-invoke.check index f580296ae7f6..80023cd49db5 100644 --- a/test/files/run/reflection-magicsymbols-invoke.check +++ b/test/files/run/reflection-magicsymbols-invoke.check @@ -7,10 +7,10 @@ method ##: ()Int method ==: (x$1: Any)Boolean method asInstanceOf: [T0]=> T0 method equals: (x$1: Any)Boolean -method getClass: ()java.lang.Class[_] +method getClass: ()Class[_] method hashCode: ()Int method isInstanceOf: [T0]=> Boolean -method toString: ()java.lang.String +method toString: ()String testing Any.!=: false testing Any.##: 50 testing Any.==: true @@ -34,25 +34,25 @@ testing AnyVal.getClass: class scala.ScalaReflectionException: expected a member AnyRef it's important to print the list of AnyRef's members if some of them change (possibly, adding and/or removing magic symbols), we must update this test -constructor Object: 
()java.lang.Object +constructor Object: ()Object method !=: (x$1: Any)Boolean method ##: ()Int method $asInstanceOf: [T0]()T0 method $isInstanceOf: [T0]()Boolean method ==: (x$1: Any)Boolean method asInstanceOf: [T0]=> T0 -method clone: ()java.lang.Object +method clone: ()Object method eq: (x$1: AnyRef)Boolean method equals: (x$1: Any)Boolean method finalize: ()Unit -method getClass: ()java.lang.Class[_] +method getClass: ()Class[_] method hashCode: ()Int method isInstanceOf: [T0]=> Boolean method ne: (x$1: AnyRef)Boolean method notify: ()Unit method notifyAll: ()Unit method synchronized: [T0](x$1: T0)T0 -method toString: ()java.lang.String +method toString: ()String method wait: ()Unit method wait: (x$1: Long)Unit method wait: (x$1: Long, x$2: Int)Unit @@ -91,7 +91,7 @@ method clone: ()Array[T] method eq: (x$1: AnyRef)Boolean method equals: (x$1: Any)Boolean method finalize: ()Unit -method getClass: ()java.lang.Class[_] +method getClass: ()Class[_] method hashCode: ()Int method isInstanceOf: [T0]=> Boolean method length: => Int @@ -99,7 +99,7 @@ method ne: (x$1: AnyRef)Boolean method notify: ()Unit method notifyAll: ()Unit method synchronized: [T0](x$1: T0)T0 -method toString: ()java.lang.String +method toString: ()String method update: (i: Int, x: T)Unit method wait: ()Unit method wait: (x$1: Long)Unit diff --git a/test/files/run/reflection-magicsymbols-repl.check b/test/files/run/reflection-magicsymbols-repl.check index a33f41012e0f..67bd3a6e640b 100644 --- a/test/files/run/reflection-magicsymbols-repl.check +++ b/test/files/run/reflection-magicsymbols-repl.check @@ -23,13 +23,13 @@ warning: there was one feature warning; for details, enable `:setting -feature' test: (n: Int)Unit scala> for (i <- 1 to 8) test(i) -scala.Int* -=> scala.Int -scala.Any -scala.AnyRef -scala.AnyVal -scala.Null -scala.Nothing -scala.Singleton +Int* +=> Int +Any +AnyRef +AnyVal +Null +Nothing +Singleton scala> :quit diff --git a/test/files/run/reflection-repl-elementary.check b/test/files/run/reflection-repl-elementary.check index 2a7f5d90fe56..9420c3ed1fb8 100644 --- a/test/files/run/reflection-repl-elementary.check +++ b/test/files/run/reflection-repl-elementary.check @@ -1,5 +1,5 @@ scala> scala.reflect.runtime.universe.typeOf[List[Nothing]] -res0: reflect.runtime.universe.Type = scala.List[Nothing] +res0: reflect.runtime.universe.Type = List[Nothing] scala> :quit diff --git a/test/files/run/reflection-valueclasses-magic.check b/test/files/run/reflection-valueclasses-magic.check index 8ecad3eb9119..2fa09dae690a 100644 --- a/test/files/run/reflection-valueclasses-magic.check +++ b/test/files/run/reflection-valueclasses-magic.check @@ -112,7 +112,7 @@ method toFloat: => Float method toInt: => Int method toLong: => Long method toShort: => Short -method toString: ()java.lang.String +method toString: ()String method unary_+: => Int method unary_-: => Int method unary_~: => Int @@ -320,7 +320,7 @@ method toFloat: => Float method toInt: => Int method toLong: => Long method toShort: => Short -method toString: ()java.lang.String +method toString: ()String method unary_+: => Int method unary_-: => Int method unary_~: => Int @@ -528,7 +528,7 @@ method toFloat: => Float method toInt: => Int method toLong: => Long method toShort: => Short -method toString: ()java.lang.String +method toString: ()String method unary_+: => Int method unary_-: => Int method unary_~: => Int @@ -736,7 +736,7 @@ method toFloat: => Float method toInt: => Int method toLong: => Long method toShort: => Short -method toString: ()java.lang.String 
+method toString: ()String method unary_+: => Int method unary_-: => Int method unary_~: => Int @@ -944,7 +944,7 @@ method toFloat: => Float method toInt: => Int method toLong: => Long method toShort: => Short -method toString: ()java.lang.String +method toString: ()String method unary_+: => Long method unary_-: => Long method unary_~: => Long @@ -1136,7 +1136,7 @@ method toFloat: => Float method toInt: => Int method toLong: => Long method toShort: => Short -method toString: ()java.lang.String +method toString: ()String method unary_+: => Float method unary_-: => Float testing Float.toByte() with receiver = 2.0 and args = List(): [class java.lang.Byte] =======> 2 @@ -1322,7 +1322,7 @@ method toFloat: => Float method toInt: => Int method toLong: => Long method toShort: => Short -method toString: ()java.lang.String +method toString: ()String method unary_+: => Double method unary_-: => Double testing Double.toByte() with receiver = 2.0 and args = List(): [class java.lang.Byte] =======> 2 @@ -1428,7 +1428,7 @@ method equals: (x$1: Any)Boolean method getClass: ()Class[Boolean] method hashCode: ()Int method isInstanceOf: [T0]=> Boolean -method toString: ()java.lang.String +method toString: ()String method unary_!: => Boolean method |: (x: Boolean)Boolean method ||: (x: Boolean)Boolean @@ -1453,4 +1453,4 @@ method equals: (x$1: Any)Boolean method getClass: ()Class[Unit] method hashCode: ()Int method isInstanceOf: [T0]=> Boolean -method toString: ()java.lang.String +method toString: ()String diff --git a/test/files/run/reify_newimpl_26.check b/test/files/run/reify_newimpl_26.check index 099231bf62d5..eb2b8309a085 100644 --- a/test/files/run/reify_newimpl_26.check +++ b/test/files/run/reify_newimpl_26.check @@ -10,6 +10,6 @@ scala> def foo[T]{ foo: [T]=> Unit scala> foo[Int] -WeakTypeTag[scala.List[T]] +WeakTypeTag[List[T]] scala> :quit diff --git a/test/files/run/t5256c.check b/test/files/run/t5256c.check index 3eb7b13a97ad..a37990bbaa1f 100644 --- a/test/files/run/t5256c.check +++ b/test/files/run/t5256c.check @@ -1,6 +1,6 @@ class A$1 Test.A$1 -java.lang.Object { +Object { def foo(): Nothing def (): Test.A$1 } diff --git a/test/files/run/t5256d.check b/test/files/run/t5256d.check index 3cdcb577b084..91b90acef3d8 100644 --- a/test/files/run/t5256d.check +++ b/test/files/run/t5256d.check @@ -18,9 +18,9 @@ scala> println(c.fullName) $line8.$read.$iw.$iw.$iw.$iw.A scala> println(c.info) -scala.AnyRef { +AnyRef { def (): A - def foo: scala.Nothing + def foo: Nothing } scala> :quit diff --git a/test/files/run/t5256h.check b/test/files/run/t5256h.check index dc3e919897e5..61109a453f6a 100644 --- a/test/files/run/t5256h.check +++ b/test/files/run/t5256h.check @@ -1,6 +1,6 @@ $anon Test.$anon$1 -java.lang.Object { +Object { final private val x: Int def x(): Int def (): $anon$1 diff --git a/test/files/run/t5418a.check b/test/files/run/t5418a.check index 527022936d90..0a8916fc7f9c 100644 --- a/test/files/run/t5418a.check +++ b/test/files/run/t5418a.check @@ -1 +1 @@ -Expr[Class[_ <: java.lang.Object]](new Object().getClass()) +Expr[Class[_ <: Object]](new Object().getClass()) diff --git a/test/files/run/t7008.check b/test/files/run/t7008.check index ee077f90ffd4..7590e94b9c79 100644 --- a/test/files/run/t7008.check +++ b/test/files/run/t7008.check @@ -3,7 +3,7 @@ bar: List(throws[Exception](classOf[java.lang.Exception])) baz: List(throws[IllegalStateException](classOf[java.lang.IllegalStateException])) foo: List(throws[Exception](classOf[java.lang.Exception])) ============= -: 
List(throws[java.lang.NullPointerException](classOf[java.lang.NullPointerException])) -bar: List(throws[java.lang.Exception](classOf[java.lang.Exception])) -baz: List(throws[java.lang.IllegalStateException](classOf[java.lang.IllegalStateException])) -foo: List(throws[java.lang.Exception](classOf[java.lang.Exception])) +: List(throws[NullPointerException](classOf[java.lang.NullPointerException])) +bar: List(throws[Exception](classOf[java.lang.Exception])) +baz: List(throws[IllegalStateException](classOf[java.lang.IllegalStateException])) +foo: List(throws[Exception](classOf[java.lang.Exception])) diff --git a/test/files/run/typetags_core.check b/test/files/run/typetags_core.check index 980b4719bf07..d81842b05453 100644 --- a/test/files/run/typetags_core.check +++ b/test/files/run/typetags_core.check @@ -23,7 +23,7 @@ TypeTag[AnyVal] true TypeTag[AnyRef] true -TypeTag[java.lang.Object] +TypeTag[Object] true TypeTag[Null] true From 3d78c35c4dd478352b512933ac9ea631bb19af24 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 26 Feb 2018 20:29:40 +1000 Subject: [PATCH 1281/2793] Use new mode of partest to execute in process --- build.sbt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/build.sbt b/build.sbt index df1c9c65a8d0..1536d45d28ee 100644 --- a/build.sbt +++ b/build.sbt @@ -707,8 +707,8 @@ lazy val test = project // test sources are compiled in partest run, not here sources in IntegrationTest := Seq.empty, fork in IntegrationTest := true, - javaOptions in IntegrationTest ++= "-Xmx2G" :: "-Dfile.encoding=UTF-8" :: Nil, - testOptions in IntegrationTest += Tests.Argument("-Dfile.encoding=UTF-8"), + javaOptions in IntegrationTest ++= List("-Xmx2G", "-Dpartest.exec.in.process=true", "-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), + testOptions in IntegrationTest += Tests.Argument("-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), testFrameworks += new TestFramework("scala.tools.partest.sbt.Framework"), testOptions in IntegrationTest += Tests.Argument("-Dpartest.java_opts=-Xmx1024M -Xms64M"), testOptions in IntegrationTest += Tests.Argument("-Dpartest.scalac_opts=" + (scalacOptions in Compile).value.mkString(" ")), From f6859f28bb49193fde83e6020a6a89ce926a91e8 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 8 Mar 2018 14:07:30 +1000 Subject: [PATCH 1282/2793] Limit exposure to ConcurrentModificationException when sys props are replaced or mutated --- src/compiler/scala/tools/util/PathResolver.scala | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index f845656980b3..97eb9d529c51 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -47,8 +47,17 @@ object PathResolver { /** Values found solely by inspecting environment or property variables. */ object Environment { - private def searchForBootClasspath = - systemProperties collectFirst { case (k, v) if k endsWith ".boot.class.path" => v } getOrElse "" + import scala.collection.JavaConverters._ + + private def searchForBootClasspath: String = { + val props = System.getProperties + // This formulation should be immune to ConcurrentModificationExceptions when system properties + // we're unlucky enough to witness a partially published result of System.setProperty or direct + // mutation of the System property map. 
stringPropertyNames internally uses the Enumeration interface, + // rather than Iterator, and this disables the fail-fast ConcurrentModificationException. + val propNames = props.stringPropertyNames() + propNames.asScala collectFirst { case k if k endsWith ".boot.class.path" => props.getProperty(k) } getOrElse "" + } /** Environment variables which java pays attention to so it * seems we do as well. From 7adc0d0cdb9e63036c4aa60d513e29d60afc702f Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 1 Mar 2018 16:42:48 +1000 Subject: [PATCH 1283/2793] [backport] Add a custom test listener for usable JUnit XML reports MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - Update to partest that emits more detailed TestEvents - Group partest JUnit XML reports in, e.g, test.files.pos.xml - workaround Jenkins dislike of the work "run" Requires a new version of partest to provide some missing metadata. Sample files generated: ``` > ;partest --srcpath scaladoc --grep t7876; partest --grep default ... ``` ``` ⚡ (cd target/test/test-reports/partest && find . ) . ./test.files.jvm.xml ./test.files.neg.xml ./test.files.pos.xml ./test.files.presentation.xml ./test.files.run_.xml ./test.files.scalap.xml ./test.files.specialized.xml ./test.scaladoc.run_.xml ``` --- build.sbt | 9 ++- project/PartestTestListener.scala | 93 +++++++++++++++++++++++++++++++ 2 files changed, 101 insertions(+), 1 deletion(-) create mode 100644 project/PartestTestListener.scala diff --git a/build.sbt b/build.sbt index 1536d45d28ee..775b28b59e6b 100644 --- a/build.sbt +++ b/build.sbt @@ -32,6 +32,11 @@ * - to modularize the Scala compiler or library further */ +import java.io.{PrintWriter, StringWriter} + +import sbt.TestResult +import sbt.testing.TestSelector + import scala.build._ import VersionUtil._ @@ -694,6 +699,7 @@ lazy val partestJavaAgent = Project("partest-javaagent", file(".") / "src" / "pa lazy val test = project .dependsOn(compiler, interactive, replJlineEmbedded, scalap, partestExtras, partestJavaAgent, scaladoc) + .disablePlugins(plugins.JUnitXmlReportPlugin) .configs(IntegrationTest) .settings(commonSettings) .settings(disableDocs) @@ -737,7 +743,8 @@ lazy val test = project result.copy(overall = TestResult.Error) } else result - } + }, + testListeners in IntegrationTest += new PartestTestListener(target.value) ) lazy val manual = configureAsSubproject(project) diff --git a/project/PartestTestListener.scala b/project/PartestTestListener.scala new file mode 100644 index 000000000000..d94f7c8365ed --- /dev/null +++ b/project/PartestTestListener.scala @@ -0,0 +1,93 @@ +package scala.build + +import java.io.{File, PrintWriter, StringWriter} +import java.util.concurrent.TimeUnit + +import sbt.testing.TestSelector +import sbt.{JUnitXmlTestsListener, TestEvent, TestResult, TestsListener, _} + +// The default JUnitXMLListener doesn't play well with partest: we end up clobbering the one-and-only partest.xml +// file on group of tests run by `testAll`, and the test names in the XML file don't seem to show the path to the +// test for tests defined in a single file. +// +// Let's roll our own to try to enable the Jenkins JUnit test reports. 
+class PartestTestListener(target: File) extends TestsListener { + val delegate = new JUnitXmlTestsListener(target.getAbsolutePath) + import java.util.EnumSet + + import sbt.testing.{Status => TStatus} + val errorStatus = EnumSet.of(TStatus.Error) + val failStatus = EnumSet.of(TStatus.Failure) + val skipStatus = EnumSet.of(TStatus.Skipped, TStatus.Ignored) + + override def doInit(): Unit = () + override def doComplete(finalResult: TestResult.Value): Unit = () + override def endGroup(name: String, t: Throwable): Unit = () + override def endGroup(name: String, result: TestResult.Value): Unit = () + override def testEvent(event: TestEvent): Unit = { + // E.g "test.files.pos" or "test.scaladoc.run" + def groupOf(e: sbt.testing.Event) = { + val group = e.fullyQualifiedName().replace('/', '.') + "." + e.selector().asInstanceOf[TestSelector].testName().takeWhile(_ != '/') + // Don't even ask. + // You really want to know? Okay.. https://issues.jenkins-ci.org/browse/JENKINS-49832 + group.replaceAll("""\brun\b""", "run_") + } + + // "t1234.scala" or "t1235" + def testOf(e: sbt.testing.Event) = e.selector().asInstanceOf[TestSelector].testName().dropWhile(_ != '/').drop(1) + + for ((group, events) <- event.detail.groupBy(groupOf(_))) { + val statii = events.map(_.status()) + val errorCount = statii.count(errorStatus.contains) + val failCount = statii.count(failStatus.contains) + val skipCount = statii.count(skipStatus.contains) + val testCount = statii.size + val totalDurationMs = events.iterator.map(_.duration()).sum + val xml = + {delegate.properties}{for (e <- events) yield { + val trace: String = if (e.throwable.isDefined) { + val stringWriter = new StringWriter() + val writer = new PrintWriter(stringWriter) + e.throwable.get.printStackTrace(writer) + writer.flush() + ConsoleLogger.removeEscapeSequences(stringWriter.toString) + } else { + "" + } + + + {e.status match { + case TStatus.Error if e.throwable.isDefined => + + {trace} + + case TStatus.Error => + + case TStatus.Failure if e.throwable.isDefined => + + {trace} + + case TStatus.Failure => + + case TStatus.Ignored | TStatus.Skipped | sbt.testing.Status.Pending => + + case _ => + }} + + + + + + + }} + + val partestTestReports = target / "test-reports" / "partest" + val xmlFile = (partestTestReports / (group + ".xml")) + xmlFile.getParentFile.mkdirs() + scala.xml.XML.save(xmlFile.getAbsolutePath, xml, "UTF-8", true, null) + } + } + override def startGroup(name: String): Unit = () +} From 8c5930ac5dd9687b54aad799f8cb1bfff57bc83d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 7 Mar 2018 12:53:43 +1000 Subject: [PATCH 1284/2793] Update custom test listener to avoid CCE after framework internal error --- project/PartestTestListener.scala | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/project/PartestTestListener.scala b/project/PartestTestListener.scala index d94f7c8365ed..c7aa00641d7f 100644 --- a/project/PartestTestListener.scala +++ b/project/PartestTestListener.scala @@ -3,7 +3,7 @@ package scala.build import java.io.{File, PrintWriter, StringWriter} import java.util.concurrent.TimeUnit -import sbt.testing.TestSelector +import sbt.testing.{SuiteSelector, TestSelector} import sbt.{JUnitXmlTestsListener, TestEvent, TestResult, TestsListener, _} // The default JUnitXMLListener doesn't play well with partest: we end up clobbering the one-and-only partest.xml @@ -27,14 +27,24 @@ class PartestTestListener(target: File) extends TestsListener { override def testEvent(event: TestEvent): Unit = { // E.g 
"test.files.pos" or "test.scaladoc.run" def groupOf(e: sbt.testing.Event) = { - val group = e.fullyQualifiedName().replace('/', '.') + "." + e.selector().asInstanceOf[TestSelector].testName().takeWhile(_ != '/') + val group = e.selector match { + case sel: TestSelector => + e.fullyQualifiedName().replace('/', '.') + "." + sel.testName().takeWhile(_ != '/') + case _: SuiteSelector => + // SBT emits this in the test event when a forked test failed unexpectedly: https://github.com/sbt/sbt/blob/684e2c369269e2aded5861c06aaad6f0b6b70a30/testing/agent/src/main/java/sbt/ForkMain.java#L337-L339 + "" + } // Don't even ask. // You really want to know? Okay.. https://issues.jenkins-ci.org/browse/JENKINS-49832 group.replaceAll("""\brun\b""", "run_") } // "t1234.scala" or "t1235" - def testOf(e: sbt.testing.Event) = e.selector().asInstanceOf[TestSelector].testName().dropWhile(_ != '/').drop(1) + def testOf(e: sbt.testing.Event) = e.selector match { + case sel: TestSelector => sel.testName().dropWhile(_ != '/').drop(1) + case _ => + e.fullyQualifiedName() + } for ((group, events) <- event.detail.groupBy(groupOf(_))) { val statii = events.map(_.status()) From d8ceec44ec2da4da633064fbd6c3854793842010 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 13 Mar 2018 09:02:32 +0100 Subject: [PATCH 1285/2793] Avoid ? in filename, not allowed in Windows --- .../io/{AbstractFileSpec.scala => AbstractFileTest.scala} | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) rename test/junit/scala/reflect/io/{AbstractFileSpec.scala => AbstractFileTest.scala} (89%) diff --git a/test/junit/scala/reflect/io/AbstractFileSpec.scala b/test/junit/scala/reflect/io/AbstractFileTest.scala similarity index 89% rename from test/junit/scala/reflect/io/AbstractFileSpec.scala rename to test/junit/scala/reflect/io/AbstractFileTest.scala index 6440a5cc593e..80e9d40c4b5e 100644 --- a/test/junit/scala/reflect/io/AbstractFileSpec.scala +++ b/test/junit/scala/reflect/io/AbstractFileTest.scala @@ -9,11 +9,11 @@ import org.junit.runners.JUnit4 import scala.tools.testing.TempDir @RunWith(classOf[JUnit4]) -class AbstractFileSpec { +class AbstractFileTest { @Test def handleURLEscapedCharacters(): Unit = { val tempDir = TempDir.createTempDir().toPath - val scalaPath = tempDir.resolve("this is a file?.scala") + val scalaPath = tempDir.resolve("this is a file$.scala") Files.createFile(scalaPath) val scalaFile = scalaPath.toFile From 8e2faf87b32e378cf47366449ed56bc009342c18 Mon Sep 17 00:00:00 2001 From: Ryan Williams Date: Tue, 13 Mar 2018 23:11:51 +0000 Subject: [PATCH 1286/2793] verify that ~/.credentials is not a directory Fixes scala/bug#10775 --- build.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build.sbt b/build.sbt index df1c9c65a8d0..ce43d2c9435c 100644 --- a/build.sbt +++ b/build.sbt @@ -88,7 +88,7 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( }, credentials ++= { val file = Path.userHome / ".credentials" - if (file.exists) List(Credentials(file)) + if (file.exists && !file.isDirectory) List(Credentials(file)) else Nil }, // Add a "default" Ivy configuration because sbt expects the Scala distribution to have one: From d203b870a9fe8464ed8325586e92f1e3540a2dea Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 14 Mar 2018 14:19:19 +0100 Subject: [PATCH 1287/2793] Build scaladoc in quick And extract the `publishToSonatype` env var in travis for later stages. 
--- .travis.yml | 3 +-- project/ScriptCommands.scala | 6 ++++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index be286b6ed08c..1d8f2c0a5776 100644 --- a/.travis.yml +++ b/.travis.yml @@ -42,8 +42,7 @@ jobs: - rm -rf build/ # ensure we resolve from artifactory - buildModules - buildQuick clean publish - - set | grep "^updatedModuleVersions=" > build/env - - set | grep "^SCALA_VER=" >> build/env + - set | grep -E '^updatedModuleVersions=|^SCALA_VER=|^publishToSonatype=' > build/env - cat build/env # this builds the spec using jekyll diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index 4e85d3b95525..5b60126b3521 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -60,14 +60,16 @@ object ScriptCommands { /** Set up the environment for building quick in `validate/bootstrap`. The arguments are: * - Repository URL for publishing - * - Version number to publish */ + * - Version number to publish + * Note that the artifacts produced here are consumed by scala-dist, so the docs have to be built. + */ def setupBootstrapQuick = setup("setupBootstrapQuick") { case Seq(url, ver) => Seq( baseVersion in Global := ver, baseVersionSuffix in Global := "SPLIT", resolvers in Global += "scala-pr" at url, testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) - ) ++ publishTarget(url) ++ noDocs ++ enableOptimizer + ) ++ publishTarget(url) ++ enableOptimizer } /** Set up the environment for publishing in `validate/bootstrap`. The arguments are: From 47eac6312de50556d6f945a03153e1755d4761e4 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 14 Mar 2018 15:54:36 +0100 Subject: [PATCH 1288/2793] Set sbt-pgp settings in the bootstrap script --- project/ScriptCommands.scala | 6 ++---- scripts/bootstrap_fun | 2 ++ 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index 5b60126b3521..2f4438273b38 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -77,15 +77,13 @@ object ScriptCommands { * - Version number to publish * All artifacts are published to Sonatype. 
*/ def setupBootstrapPublish = setup("setupBootstrapPublish") { case Seq(url, ver) => - // Define a copy of the setting key here in case the plugin is not part of the build - val pgpPassphrase = SettingKey[Option[Array[Char]]]("pgp-passphrase", "The passphrase associated with the secret used to sign artifacts.", KeyRanks.BSetting) Seq( baseVersion in Global := ver, baseVersionSuffix in Global := "SPLIT", resolvers in Global += "scala-pr" at url, publishTo in Global := Some("sonatype-releases" at "https://oss.sonatype.org/service/local/staging/deploy/maven2"), - credentials in Global += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", env("SONA_USER"), env("SONA_PASS")), - pgpPassphrase in Global := Some(Array.empty) + credentials in Global += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", env("SONA_USER"), env("SONA_PASS")) + // pgpSigningKey and pgpPassphrase are set externally by travis / the bootstrap script, as the sbt-pgp plugin is not enabled by default ) ++ enableOptimizer } diff --git a/scripts/bootstrap_fun b/scripts/bootstrap_fun index 06850f45bb50..6e699a3eb8ff 100644 --- a/scripts/bootstrap_fun +++ b/scripts/bootstrap_fun @@ -316,6 +316,8 @@ publishSonatype() { -Dstarr.version=$SCALA_VER \ ${updatedModuleVersions[@]} \ "setupBootstrapPublish $integrationRepoUrl $SCALA_VER" \ + 'set pgpSigningKey in Global := Some(new java.math.BigInteger("C03EF1D7D692BCFF", 16).longValue)' \ + 'set pgpPassphrase in Global := Some(Array.empty)' \ $publishSonatypeTaskCore travis_fold_end sona From 9f651ee87114f4503c5d14638c47401deb4c6a31 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 14 Mar 2018 19:01:30 +0100 Subject: [PATCH 1289/2793] sbt fun: don't call `set ...` after `setupBootstrapPublish` MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit After `cp admin/files/gpg.sbt ./project`: scala git:(2.12.x) ✗ sbt ... > setupBootstrapPublish "https://scala-ci.typesafe.com/artifactory/scala-integration" "2.12.5-foobarbaz" ... > version [info] repl-jline/*:version [info] 2.12.5-foobarbaz ... > set pgpSigningKey in Global := Some(new java.math.BigInteger("C03EF1D7D692BCFF", 16).longValue) ... > version [info] repl-jline/*:version [info] 2.12.5-bin-SNAPSHOT So using `set` after `setupBootstrapPublish` resets those settings made by `setupBootstrapPublish`. It seems to work the other way around scala git:(2.12.x) ✗ sbt ... > set pgpSigningKey in Global := Some(new java.math.BigInteger("C03EF1D7D692BCFF", 16).longValue) ... > setupBootstrapPublish "https://scala-ci.typesafe.com/artifactory/scala-integration" "2.12.5-foobarbaz" ... > pgpSigningKey [info] repl-jline/*:pgpSigningKey [info] Some(-4593968660551123713) ... 
> version [info] repl-jline/*:version [info] 2.12.5-foobarbaz --- scripts/bootstrap_fun | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootstrap_fun b/scripts/bootstrap_fun index 6e699a3eb8ff..e18760b98ff0 100644 --- a/scripts/bootstrap_fun +++ b/scripts/bootstrap_fun @@ -315,9 +315,9 @@ publishSonatype() { --warn \ -Dstarr.version=$SCALA_VER \ ${updatedModuleVersions[@]} \ - "setupBootstrapPublish $integrationRepoUrl $SCALA_VER" \ 'set pgpSigningKey in Global := Some(new java.math.BigInteger("C03EF1D7D692BCFF", 16).longValue)' \ 'set pgpPassphrase in Global := Some(Array.empty)' \ + "setupBootstrapPublish $integrationRepoUrl $SCALA_VER" \ $publishSonatypeTaskCore travis_fold_end sona From d8473d2988480e13f986209c79445cbc40cf063a Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 15 Mar 2018 10:30:49 +0100 Subject: [PATCH 1290/2793] Clear scala parts of ivy cache on travis --- .travis.yml | 11 ++++++++--- admin/init.sh | 2 +- scripts/bootstrap_fun | 7 +++++-- scripts/common | 13 ++++++++++--- scripts/jobs/integrate/bootstrap | 30 +++--------------------------- 5 files changed, 27 insertions(+), 36 deletions(-) diff --git a/.travis.yml b/.travis.yml index 1d8f2c0a5776..cc6e9217c5db 100644 --- a/.travis.yml +++ b/.travis.yml @@ -22,9 +22,13 @@ stages: - name: publish if: type != pull_request -# buildQuick needs following env (is that propagated to stages?) -# - PRIVATE_REPO_PASS, integrationRepoUrl, -# computed: SBT_CMD sbtArgs SCALA_VER updatedModuleVersions +# env available in each stage +# - by travis config (see below): secret env vars +# - by `common` script: WORKSPACE, IVY2_DIR, SBT_CMD, integrationRepoUrl +# - by `bootstrap_fun`: publishPrivateTask, ... +# env computed in first stage, passed on to later stages with the `build/env` file +# - by `determineScalaVersion`: SCALA_VER, publishToSonatype +# - by `buildModules` / `constructUpdatedModuleVersions`: updatedModuleVersions jobs: include: - stage: build @@ -37,6 +41,7 @@ jobs: - determineScalaVersion - deriveModuleVersions - removeExistingBuilds $integrationRepoUrl + - clearIvyCache - if [ ! -z "$STARR_REF" ]; then buildStarr; fi - buildLocker - rm -rf build/ # ensure we resolve from artifactory diff --git a/admin/init.sh b/admin/init.sh index f53cc9641fb5..48b3f4bb9a5c 100755 --- a/admin/init.sh +++ b/admin/init.sh @@ -22,4 +22,4 @@ gpg --list-secret-keys mkdir -p ~/.sbt/0.13/plugins cp files/gpg.sbt ~/.sbt/0.13/plugins/ -export SBT_CMD=$(which sbt) +export sbtCmd=$(which sbt) diff --git a/scripts/bootstrap_fun b/scripts/bootstrap_fun index e18760b98ff0..b979024c0962 100644 --- a/scripts/bootstrap_fun +++ b/scripts/bootstrap_fun @@ -308,8 +308,10 @@ testStability() { # and publishes those to sonatype as well # finally, the staging repos are closed publishSonatype() { - # stage to sonatype, along with all modules -Dmaven.version.suffix/-Dbuild.release not necessary, - # since we're just publishing an existing build + # Make sure that "quick" is downloaded when building the modules + clearIvyCache + + # Stage to sonatype. No `clean`, just package and publish the `quick` build. 
travis_fold_start sona "Publishing core to sonatype" $SBT_CMD $sbtArgs \ --warn \ @@ -321,6 +323,7 @@ publishSonatype() { $publishSonatypeTaskCore travis_fold_end sona + # Modules are re-built using quick (the first iteration was built with locker) buildPublishedModules open=$(st_stagingReposOpen) diff --git a/scripts/common b/scripts/common index d65c954b9854..e269f728678f 100644 --- a/scripts/common +++ b/scripts/common @@ -6,6 +6,9 @@ set -e WORKSPACE="${WORKSPACE-`pwd`}" +# On Jenkins, each job needs its own ivy2 cache to avoid conflicts between jobs. On travis, it's ~/.ivy2. +IVY2_DIR="$WORKSPACE/.ivy2" + # Known problems : does not fare well with interrupted, partial # compilations. We should perhaps have a multi-dependency version # of do_i_have below @@ -16,15 +19,13 @@ mkdir -p "$LOGGINGDIR" rm -rf "$WORKSPACE/resolutionScratch_" mkdir -p "$WORKSPACE/resolutionScratch_" -# TODO: do we need to nuke the cache on travis? -# rm -rf $WORKSPACE/.ivy2/cache/org.scala-lang - SBT_CMD=${sbtCmd-sbt} SBT_CMD="$SBT_CMD -sbt-version 0.13.17" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} +# only used on jenkins sbtRepositoryConfig="$WORKSPACE/scripts/sbt-repositories-config" addIntegrationResolver="set resolvers in Global += \"scala-pr\" at \"$integrationRepoUrl\"" @@ -211,6 +212,12 @@ sbtResolve() { travis_fold_end resolve } +clearIvyCache() { + rm -rf $IVY2_DIR/cache/org.scala-lang + if [ -d $IVY2_DIR ]; then find $IVY2_DIR -name "*compiler-interface*$SCALA_VER*" | xargs rm -rf; fi + if [ -d $HOME/.sbt ]; then find $HOME/.sbt -name "*compiler-interface*$SCALA_VER*" | xargs rm -rf; fi +} + #### travis triggerScalaDist() { diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index 4a540b1a5934..f7aad298ef08 100755 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -73,38 +73,20 @@ #### MAIN -# each job has its own ivy2, sharing between jobs would lead to trouble -mkdir -p $WORKSPACE/ivy2 - source scripts/common -# TODO: this is weird for historical reasons, simplify now that we have one version of sbt in use -# we probably don't need to override the sbt dir? just ivy -# -# (WAS: trying to get this to work on multiple versions of sbt-extras... -# the old version (on jenkins, and I don't want to upgrade for risk of breaking other builds) honors -sbt-dir -# the new version of sbt-extras ignores sbt-dir, so we pass it in as -Dsbt.global.base -# need to set sbt-dir to one that has the gpg.sbt plugin config) -# # scripts/common provides sbtRepositoryConfig -sbtArgs="-ivy $WORKSPACE/ivy2 -Dsbt.global.base=$HOME/.sbt/0.13 -sbt-dir $HOME/.sbt/0.13 -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig" - +sbtArgs="-ivy $IVY2_DIR -Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig" source scripts/bootstrap_fun -# On Jenkins, we must change ivy home to get a fresh ivy cache, otherwise we get half-bootstrapped scala -# rm it in case it existed (and there's no ivy2-shadow, which indicates we're running in a TESTING environment)... -# we don't nuke the whole ws since that clobbers the git clones needlessly -[[ -d $WORKSPACE/ivy2-shadow ]] || rm -rf "$WORKSPACE/ivy2" -mkdir -p "$WORKSPACE/ivy2" - determineScalaVersion - deriveModuleVersions generateRepositoriesConfig $integrationRepoUrl removeExistingBuilds $integrationRepoUrl +clearIvyCache if [ ! 
-z "$STARR_REF" ]; then buildStarr @@ -119,14 +101,8 @@ buildModules buildQuick clean testAll publish -if [ "$testStability" == "yes" ] - then testStability -fi - +testStability if [ "$publishToSonatype" == "yes" ]; then - # clear ivy cache so the next round of building modules sees the fresh scala - rm -rf "$WORKSPACE/ivy2/cache/org.scala-lang" - publishSonatype fi From ccf99e7f7af93a8e12f83f7b2679f729cb3c5630 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 15 Mar 2018 13:27:50 +0100 Subject: [PATCH 1291/2793] fix IVY2_DIR, and ensure functions cd back to WORKSPACE --- scripts/bootstrap_fun | 14 +++++++++----- scripts/common | 8 +++++++- 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/scripts/bootstrap_fun b/scripts/bootstrap_fun index b979024c0962..f1a001ba1b13 100644 --- a/scripts/bootstrap_fun +++ b/scripts/bootstrap_fun @@ -83,6 +83,8 @@ buildModules() { buildPartest constructUpdatedModuleVersions + + cd $WORKSPACE } # build/test/publish scala core modules to sonatype (this will start a new staging repo) @@ -96,6 +98,8 @@ buildPublishedModules() { buildTasks=($publishSonatypeTaskModules) buildXML buildPartest + + cd $WORKSPACE } @@ -303,16 +307,16 @@ testStability() { travis_fold_end stab } -# assumes we just bootstrapped, and current directory is $WORKSPACE -# publishes locker to sonatype, then builds modules again (those for which version numbers were provided), -# and publishes those to sonatype as well -# finally, the staging repos are closed +# publishes quick to sonatype, then builds modules again (those for which version numbers were provided), +# and publishes those to sonatype as well. finally, the staging repos are closed. publishSonatype() { + travis_fold_start sona "Publishing core to sonatype" + cd $WORKSPACE + # Make sure that "quick" is downloaded when building the modules clearIvyCache # Stage to sonatype. No `clean`, just package and publish the `quick` build. - travis_fold_start sona "Publishing core to sonatype" $SBT_CMD $sbtArgs \ --warn \ -Dstarr.version=$SCALA_VER \ diff --git a/scripts/common b/scripts/common index e269f728678f..7816751fbb1d 100644 --- a/scripts/common +++ b/scripts/common @@ -4,10 +4,15 @@ trap "exit 1" TERM export TOP_PID=$$ set -e +# The scala/scala checkout directory (set by Jenkins, or `/home/travis/build/scala/scala` on travis) WORKSPACE="${WORKSPACE-`pwd`}" # On Jenkins, each job needs its own ivy2 cache to avoid conflicts between jobs. On travis, it's ~/.ivy2. -IVY2_DIR="$WORKSPACE/.ivy2" +if [ "$TRAVIS" = "true" ]; then + IVY2_DIR="$HOME/.ivy2" +else + IVY2_DIR="$WORKSPACE/.ivy2" +fi # Known problems : does not fare well with interrupted, partial # compilations. We should perhaps have a multi-dependency version @@ -209,6 +214,7 @@ sbtResolve() { "$addIntegrationResolver" \ "set libraryDependencies := Seq(\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross)" \ 'show update' + cd $WORKSPACE travis_fold_end resolve } From 89d79ddadc4de6bc5de2f359eca91aaf38f1daf0 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 15 Mar 2018 14:30:25 +0100 Subject: [PATCH 1292/2793] Clean better. 
--- scripts/bootstrap_fun | 2 +- scripts/common | 10 +++++++--- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/scripts/bootstrap_fun b/scripts/bootstrap_fun index f1a001ba1b13..73db8d2bfbbe 100644 --- a/scripts/bootstrap_fun +++ b/scripts/bootstrap_fun @@ -310,12 +310,12 @@ testStability() { # publishes quick to sonatype, then builds modules again (those for which version numbers were provided), # and publishes those to sonatype as well. finally, the staging repos are closed. publishSonatype() { - travis_fold_start sona "Publishing core to sonatype" cd $WORKSPACE # Make sure that "quick" is downloaded when building the modules clearIvyCache + travis_fold_start sona "Publishing core to sonatype" # Stage to sonatype. No `clean`, just package and publish the `quick` build. $SBT_CMD $sbtArgs \ --warn \ diff --git a/scripts/common b/scripts/common index 7816751fbb1d..b3c90409bae2 100644 --- a/scripts/common +++ b/scripts/common @@ -219,9 +219,13 @@ sbtResolve() { } clearIvyCache() { - rm -rf $IVY2_DIR/cache/org.scala-lang - if [ -d $IVY2_DIR ]; then find $IVY2_DIR -name "*compiler-interface*$SCALA_VER*" | xargs rm -rf; fi - if [ -d $HOME/.sbt ]; then find $HOME/.sbt -name "*compiler-interface*$SCALA_VER*" | xargs rm -rf; fi + travis_fold_start clearIvy "Clearing ivy cache" + rm -fv $IVY2_DIR/exclude_classifiers $IVY2_DIR/exclude_classifiers.lock + rm -rfv $IVY2_DIR/cache/org.scala-lang $IVY2_DIR/cache/org.scala-lang.modules + rm -rfv $IVY2_DIR/local/org.scala-lang $IVY2_DIR/local/org.scala-lang.modules + if [ -d $IVY2_DIR ]; then find $IVY2_DIR -name "*compiler-interface*$SCALA_VER*" | xargs rm -rfv; fi + if [ -d $HOME/.sbt ]; then find $HOME/.sbt -name "*compiler-interface*$SCALA_VER*" | xargs rm -rfv; fi + travis_fold_end clearIvy } #### travis From 415ba063feb59ae63a723cc2683013e0bd104b3e Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 15 Mar 2018 22:31:12 +0100 Subject: [PATCH 1293/2793] Keep sbt's log level at info on travis / jenkins Setting `--warn` hides the `downloading ... scala-compiler-2.12.4.jar` messages (which I was looking for to ensure clearing the caches has the intended effect). Compared to the current state, it doesn't add a significant amount of noise on travis. Note that the travis log has folded sections, so it's easy to look at individual parts. There are still `--warn` arguments in the pr validation and windows scripts that I didn't touch here. --- project/ScriptCommands.scala | 9 ++------- scripts/bootstrap_fun | 7 ++----- scripts/common | 2 +- 3 files changed, 5 insertions(+), 13 deletions(-) diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index 2f4438273b38..539db1ac015f 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -90,21 +90,16 @@ object ScriptCommands { def enableOptimizerCommand = setup("enableOptimizer")(_ => enableOptimizer) private[this] def setup(name: String)(f: Seq[String] => Seq[Setting[_]]) = Command.args(name, name) { case (state, seq) => - // `Project.extract(state).append(f(seq) ++ resetLogLevels, state)` would be simpler, but it + // `Project.extract(state).append(f(seq), state)` would be simpler, but it // takes the project's initial state and discards all changes that were made in the sbt console. 
val session = Project.session(state) val extracted = Project.extract(state) - val settings = f(seq) ++ resetLogLevels + val settings = f(seq) val appendSettings = Load.transformSettings(Load.projectScope(extracted.currentRef), extracted.currentRef.build, extracted.rootProject, settings) val newStructure = Load.reapply(session.mergeSettings ++ appendSettings, extracted.structure)(extracted.showKey) Project.setProject(session, newStructure, state) } - private[this] val resetLogLevels = Seq( - logLevel in ThisBuild := Level.Info, - logLevel in update in ThisBuild := Level.Warn - ) - private[this] val enableOptimizer = Seq( scalacOptions in Compile in ThisBuild ++= Seq("-opt:l:inline", "-opt-inline-from:scala/**") ) diff --git a/scripts/bootstrap_fun b/scripts/bootstrap_fun index 73db8d2bfbbe..0d0ebbb04d4f 100644 --- a/scripts/bootstrap_fun +++ b/scripts/bootstrap_fun @@ -244,7 +244,7 @@ buildStarr() { cd $STARR_DIR git co $STARR_REF travis_fold_start starr "Building starr" - $SBT_CMD -no-colors $sbtArgs --warn "setupBootstrapStarr $integrationRepoUrl $STARR_VER" $clean publish + $SBT_CMD -no-colors $sbtArgs "setupBootstrapStarr $integrationRepoUrl $STARR_VER" $clean publish travis_fold_end starr ) } @@ -261,7 +261,7 @@ buildLocker() { if [ ! -z "$STARR_VER" ]; then SET_STARR=-Dstarr.version=$STARR_VER; fi travis_fold_start locker "Building locker" - $SBT_CMD -no-colors $sbtArgs $SET_STARR --warn "setupBootstrapLocker $integrationRepoUrl $SCALA_VER" $clean publish + $SBT_CMD -no-colors $sbtArgs $SET_STARR "setupBootstrapLocker $integrationRepoUrl $SCALA_VER" $clean publish travis_fold_end locker } @@ -278,7 +278,6 @@ buildQuick() { travis_fold_start quick "Building bootstrapped" $SBT_CMD $sbtArgs \ - --warn \ -Dstarr.version=$SCALA_VER \ ${updatedModuleVersions[@]} \ "setupBootstrapQuick $integrationRepoUrl $SCALA_VER" \ @@ -294,7 +293,6 @@ testStability() { mv build/quick quick1 rm -rf build/ $SBT_CMD $sbtArgs \ - --warn \ -Dstarr.version=$SCALA_VER \ ${updatedModuleVersions[@]} \ "setupBootstrapQuick $integrationRepoUrl $SCALA_VER" \ @@ -318,7 +316,6 @@ publishSonatype() { travis_fold_start sona "Publishing core to sonatype" # Stage to sonatype. No `clean`, just package and publish the `quick` build. $SBT_CMD $sbtArgs \ - --warn \ -Dstarr.version=$SCALA_VER \ ${updatedModuleVersions[@]} \ 'set pgpSigningKey in Global := Some(new java.math.BigInteger("C03EF1D7D692BCFF", 16).longValue)' \ diff --git a/scripts/common b/scripts/common index b3c90409bae2..e79197f440ae 100644 --- a/scripts/common +++ b/scripts/common @@ -213,7 +213,7 @@ sbtResolve() { $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" \ "$addIntegrationResolver" \ "set libraryDependencies := Seq(\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross)" \ - 'show update' + "show update" cd $WORKSPACE travis_fold_end resolve } From 9b3ec8a9421c9ebc647c69ba88feb32651d4dc1a Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 16 Mar 2018 06:56:13 +0100 Subject: [PATCH 1294/2793] Some cleanups in the build. 
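The bootstrap_fun header added below spells out when a module gets rebuilt: the Maven lookup combines the module's own version with the binary version that sbt derives from SCALA_VER. A self-contained sketch of that decision, with made-up data and a deliberately simplified binary-version rule (the real rule lives in sbt's CrossVersionUtil, linked in the header):

```
object ModuleRebuildSketch {
  // Simplified form of the rule documented below: pre-releases of an x.y.0
  // keep the full version, everything else collapses to "x.y".
  def binaryVersion(scalaVer: String): String =
    if (scalaVer.matches("""\d+\.\d+\.0-(M|RC)\d+""")) scalaVer
    else scalaVer.split('.').take(2).mkString(".")

  // A module is (re)built only when no artifact exists for this (artifact id,
  // module version) pair; `published` stands in for the real Maven lookup.
  def needsBuild(published: Set[(String, String)], moduleVer: String, scalaVer: String): Boolean =
    !published(("scala-xml_" + binaryVersion(scalaVer), moduleVer))

  def main(args: Array[String]): Unit = {
    val published = Set(("scala-xml_2.11", "1.0.3"), ("scala-xml_2.12.0-M1", "1.0.3"))
    println(needsBuild(published, "1.0.3", "2.11.7"))    // false: binary version stays 2.11
    println(needsBuild(published, "1.0.3", "2.12.0-M2")) // true:  new binary version 2.12.0-M2
    println(needsBuild(published, "1.0.4", "2.11.7"))    // true:  module version not yet on Maven
  }
}
```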
--- .travis.yml | 1 + admin/init.sh | 8 +--- scripts/bootstrap_fun | 68 +++++++++++++++++++++++++++++ scripts/common | 5 +-- scripts/jobs/integrate/bootstrap | 73 +------------------------------- 5 files changed, 73 insertions(+), 82 deletions(-) diff --git a/.travis.yml b/.travis.yml index cc6e9217c5db..2aa55853b91b 100644 --- a/.travis.yml +++ b/.travis.yml @@ -22,6 +22,7 @@ stages: - name: publish if: type != pull_request +# see comment in `bootstrap_fun` for details on the procedure # env available in each stage # - by travis config (see below): secret env vars # - by `common` script: WORKSPACE, IVY2_DIR, SBT_CMD, integrationRepoUrl diff --git a/admin/init.sh b/admin/init.sh index 48b3f4bb9a5c..9c3723542e9c 100755 --- a/admin/init.sh +++ b/admin/init.sh @@ -1,5 +1,4 @@ -#!/bin/bash - +#!/bin/bash -e sensitive() { perl -p -e 's/\$\{([^}]+)\}/defined $ENV{$1} ? $ENV{$1} : $&/eg' < files/credentials-private-repo > ~/.credentials-private-repo @@ -12,14 +11,9 @@ sensitive() { # don't let anything escape from the sensitive part (e.g. leak environment var by echoing to log on failure) sensitive >/dev/null 2>&1 -# pgp signing doesn't work without public key?? -gpg --keyserver pgp.mit.edu --recv-keys 0xa9052b1b6d92e560 - # just to verify gpg --list-keys gpg --list-secret-keys mkdir -p ~/.sbt/0.13/plugins cp files/gpg.sbt ~/.sbt/0.13/plugins/ - -export sbtCmd=$(which sbt) diff --git a/scripts/bootstrap_fun b/scripts/bootstrap_fun index 0d0ebbb04d4f..9d578094df46 100644 --- a/scripts/bootstrap_fun +++ b/scripts/bootstrap_fun @@ -1,3 +1,71 @@ +# Bootstrap procedure +# - determine scala version +# - determine module versions +# - build minimal core (aka locker) of Scala, use the determined version number, publish to scala-integration +# - build those modules where a binary compatible version doesn't exist, publish to scala-integration +# - build Scala using the previously built core and modules, publish to scala-integration +# - run tests +# - for releases +# - stage Scala on sonatype +# - rebuild modules where no binary compatible version existed, stage them on sonatype +# - the Scala version is serialized to jenkins.properties, which is passed downstream to scala-release jobs + + +# Specifying the Scala version: +# - To build a release (enables publishing to sonatype): +# - Specify SCALA_VER_BASE and optionally SCALA_VER_SUFFIX. The version is SCALA_VER=$SCALA_VER_BASE$SCALA_VER_SUFFIX. +# - After building a release, the jenkins job provides an updated versions.properties file as artifact. +# Put this file in the Scala repo and create a pull request, also update `baseVersion in Global` in build.sbt. +# +# - Otherwise, an integration build is performed: +# - version number is read from the build.sbt, extended with -[bin|pre]-$sha + + +# Specifying module versions. We use release versions for modules. +# - Module versions are read from the versions.properties file. +# - Set _VER to override the default, e.g. XML_VER="1.0.4". +# - The git revision is set to _REF="v$_VER". Make sure the tag exists (you can't override _REF). + + +# Modules are automatically built if necessary. +# - A module is built if it doesn't exist in the maven repository. Note that the lookup uses two versions: +# - The version of the module (see below how it's determined) +# - The binary version of of the SCALA_VER release that is being built +# - sbt computes the binary version when looking up / building modules (*). 
Examples: +# - 2.12.0-M1, 2.12.0-RC3: the full version is used +# - 2.12.0, 2.12.1-M1, 2.12.1-RC3, 2.12.1: the binary version 2.12 is used +# +# - Example: assume that `scala-xml_2.11 % 1.0.3` and `scala-xml_2.12.0-M1 % 1.0.3` both exists +# - XML_VER=1.0.3 and SCALA_VER=2.11.7 => no rebuild (binary version remains 2.11) +# - XML_VER=1.0.3 and SCALA_VER=2.12.0-M2 => rebuild (new binary version 2.12.0-M2) +# - XML_VER=1.0.4 and SCALA_VER=2.11.7 => rebuild (new version for the module, not yet on maven) +# NOTE: this is not the recommended way of publishing a module. Instead, prefer to release `scala-xml_2.11 % 1.0.4` +# using the existing scala 2.11.6 compiler before releasing 2.11.7. Sometimes it's necessary though. One +# example was 2.11.1, which contained a fix in the backend (SerialVersionUID was ignored). All modules needed +# to be re-built using the 2.11.1 release, we could not use 2.11.0. We could also not release the modules +# after 2.11.1 was out, because that way the scala-library-all pom of 2.11.1 would depend on the old modules. +# +# (*) https://github.com/sbt/sbt/blob/v0.13.13/util/cross/src/main/input_sources/CrossVersionUtil.scala#L41 + + +# Binary incompatible changes in Modules: example with Scala 2.11 / 2.12 and scala-parser-combinators +# - The 1.0.x branch on scala-parser-combinators remains binary compatible with 1.0.0 +# - Scala 2.11 will always use 1.0.x releases: we ship scala-parser-combinators with the distribution, +# so we cannot introduce incompatible changes in a minor release. +# - The master branch of scala-parser-combinators contains binary incompatible changes, versioned 1.1.x +# - Scala 2.12 will use 1.1.x releases +# - No changes to the build script required: just put the 1.1.x version number into versions.properties +# +# Note: It's still OK for a module to release a binary incompatible version to maven, for example +# scala-parser-combinators_2.11 % 1.1.0. Users can depend on this in their sbt build. But for the +# distribution (tar/zip archives, scala-library-all) we have to stay on the binary compatible version. + + +# Credentials +# - `PRIVATE_REPO_PASS` password for `scala-ci` user on scala-ci.typesafe.com/artifactory +# - `SONA_USER` / `SONA_PASS` for sonatype + + publishPrivateTask=${publishPrivateTask-"publish"} publishSonatypeTaskCore=${publishSonatypeTaskCore-"publishSigned"} publishSonatypeTaskModules=${publishSonatypeTaskModules-"publishSigned"} diff --git a/scripts/common b/scripts/common index e79197f440ae..b33cd78ad7d7 100644 --- a/scripts/common +++ b/scripts/common @@ -1,5 +1,4 @@ -# This is for forcibly stopping the job from a subshell (see test -# below). +# This is for forcibly stopping the job from a subshell (see test below). 
trap "exit 1" TERM export TOP_PID=$$ set -e @@ -24,7 +23,7 @@ mkdir -p "$LOGGINGDIR" rm -rf "$WORKSPACE/resolutionScratch_" mkdir -p "$WORKSPACE/resolutionScratch_" -SBT_CMD=${sbtCmd-sbt} +SBT_CMD=${SBT_CMD-sbt} SBT_CMD="$SBT_CMD -sbt-version 0.13.17" # repo to publish builds diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index f7aad298ef08..ef691c71c1ad 100755 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -1,77 +1,6 @@ #!/bin/bash -e -# Script Overview -# - determine scala version -# - determine module versions -# - build minimal core (aka locker) of Scala, use the determined version number, publish to scala-integration -# - build those modules where a binary compatible version doesn't exist, publish to scala-integration -# - build Scala using the previously built core and bootstrap modules, publish to scala-integration -# - for releases -# - stage Scala on sonatype -# - rebuild modules that needed a rebuild with this Scala build, and stage them on sonatype -# - the Scala version is serialized to jenkins.properties, which is passed downstream to scala-release jobs - - -# Specifying the Scala version: -# - To build a release (this enables publishing to sonatype): -# - Specify SCALA_VER_BASE. You may also specify SCALA_VER_SUFFIX, the Scala version is SCALA_VER=$SCALA_VER_BASE$SCALA_VER_SUFFIX. -# - Note: After building a release, the jenkins job provides an updated versions.properties file as artifact. -# Put this file in the Scala repo and create a pull request, also update `baseVersion in Global` in build.sbt. -# -# - Otherwise, an integration build is performed: -# - version number is read from the build.sbt, extended with -[bin|pre]-$sha - - -# Specifying module versions. We use release versions for modules. -# - Module versions are read from the versions.properties file. -# - Set _VER to override the default, e.g. XML_VER="1.0.4". -# - The git revision is set to _REF="v$_VER". Make sure the tag exists (you can't override _REF). - - -# Modules are automatically built if necessary. -# - A module is built if it doesn't exist in the maven repository. Note that the lookup uses two versions: -# - The version of the module (see below how it's determined) -# - The binary version of of the SCALA_VER release that is being built -# - sbt computes the binary version when looking up / building modules (*). Examples: -# - 2.12.0-M1, 2.12.0-RC3: the full version is used -# - 2.12.0, 2.12.1-M1, 2.12.1-RC3, 2.12.1: the binary version 2.12 is used -# -# - Example: assume that `scala-xml_2.11 % 1.0.3` and `scala-xml_2.12.0-M1 % 1.0.3` both exists -# - XML_VER=1.0.3 and SCALA_VER=2.11.7 => no rebuild (binary version remains 2.11) -# - XML_VER=1.0.3 and SCALA_VER=2.12.0-M2 => rebuild (new binary version 2.12.0-M2) -# - XML_VER=1.0.4 and SCALA_VER=2.11.7 => rebuild (new version for the module, not yet on maven) -# NOTE: this is not the recommended way of publishing a module. Instead, prefer to release `scala-xml_2.11 % 1.0.4` -# using the existing scala 2.11.6 compiler before releasing 2.11.7. Sometimes it's necessary though. One -# example was 2.11.1, which contained a fix in the backend (SerialVersionUID was ignored). All modules needed -# to be re-built using the 2.11.1 release, we could not use 2.11.0. We could also not release the modules -# after 2.11.1 was out, because that way the scala-library-all pom of 2.11.1 would depend on the old modules. 
-# -# (*) https://github.com/sbt/sbt/blob/v0.13.13/util/cross/src/main/input_sources/CrossVersionUtil.scala#L41 - - -# Binary incompatible changes in Modules: example with Scala 2.11 / 2.12 and scala-parser-combinators -# - The 1.0.x branch on scala-parser-combinators remains binary compatible with 1.0.0 -# - Scala 2.11 will always use 1.0.x releases: we ship scala-parser-combinators with the distribution, -# so we cannot introduce incompatible changes in a minor release. -# - The master branch of scala-parser-combinators contains binary incompatible changes, versioned 1.1.x -# - Scala 2.12 will use 1.1.x releases -# - No changes to the build script required: just put the 1.1.x version number into versions.properties -# -# Note: It's still OK for a module to release a binary incompatible version to maven, for example -# scala-parser-combinators_2.11 % 1.1.0. Users can depend on this in their sbt build. But for the -# distribution (tar/zip archives, scala-library-all) we have to stay on the binary compatible version. - - -# Requirements -# - SBT_CMD must point to sbt from sbt-extras -# - ~/.sonatype-curl, ~/.m2/settings.xml, ~/.credentials, ~/.credentials-sonatype, ~/.credentials-private-repo -# as defined by https://github.com/scala/scala-jenkins-infra/tree/master/templates/default -# - ~/.sbt/0.13/plugins/gpg.sbt with: -# addSbtPlugin("com.typesafe.sbt" % "sbt-pgp" % "0.8.1") - -# Note: private-repo used to be private-repo.typesafe.com. now we're running artifactory on scala-ci.typesafe.com/artifactory - -#### MAIN +# See comment in bootstrap_fun source scripts/common From 3634f78564fd17ccdef4712d253c09921a3735a2 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 16 Mar 2018 14:05:08 +0100 Subject: [PATCH 1295/2793] Run clean before sonatype publishSigned --- scripts/bootstrap_fun | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/scripts/bootstrap_fun b/scripts/bootstrap_fun index 9d578094df46..e4e4b48975a0 100644 --- a/scripts/bootstrap_fun +++ b/scripts/bootstrap_fun @@ -383,12 +383,16 @@ publishSonatype() { travis_fold_start sona "Publishing core to sonatype" # Stage to sonatype. No `clean`, just package and publish the `quick` build. + # TODO: currently we `clean` because everything is re-compiled anyway on travis. Cleaning ensures + # that we compile from a clean state and get identical classfiles (scala-dev#428). Once we figure + # out how to prevent sbt from re-compiling (also needed for test stages), we can remove the `clean`. $SBT_CMD $sbtArgs \ -Dstarr.version=$SCALA_VER \ ${updatedModuleVersions[@]} \ 'set pgpSigningKey in Global := Some(new java.math.BigInteger("C03EF1D7D692BCFF", 16).longValue)' \ 'set pgpPassphrase in Global := Some(Array.empty)' \ "setupBootstrapPublish $integrationRepoUrl $SCALA_VER" \ + clean \ $publishSonatypeTaskCore travis_fold_end sona From 0d0506c3fcb257e9b6eae6d011f87dcf9a0f867c Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sat, 17 Mar 2018 14:37:59 -0700 Subject: [PATCH 1296/2793] Treat JAVA_CONST names as snake for completion The previous behavior is for every uppercase letter to represent a camel hump, so "jon" and "a_o" would complete to "JAVA_CONST". This commit splits the identifier on underscore if the user is not asking for underscore, such as "_local", and if the candidate name looks like an old-style Java constant, uppercase with underscores. 
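A standalone sketch of the splitting rule, mirroring `camelComponents` in the hunk below (the object name and demo values are invented; `allowSnake` is true whenever the query itself contains no underscore):

```
object CamelSnakeSplit {
  private val CamelRegex = "([A-Z][^A-Z]*)".r

  def components(candidate: String, allowSnake: Boolean): List[String] =
    if (allowSnake && candidate.forall(c => c.isUpper || c == '_'))
      candidate.split('_').toList.filterNot(_.isEmpty)      // JAVA_CONST style: split on '_'
    else
      CamelRegex.findAllIn("X" + candidate).toList match {  // ordinary camel humps
        case head :: tail => head.drop(1) :: tail
        case Nil          => Nil
      }

  def main(args: Array[String]): Unit = {
    println(components("DECIMAL_DIGIT_NUMBER", allowSnake = true)) // List(DECIMAL, DIGIT, NUMBER)
    println(components("theCatSatOnTheMat",    allowSnake = true)) // List(the, Cat, Sat, On, The, Mat)
  }
}
```

With the candidate split this way, a query like "tcso" only has to hit the leading letters of those chunks, which is what the new snakeCompletions test exercises.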
--- .../scala/tools/nsc/interactive/Global.scala | 8 +++++--- .../tools/nsc/interpreter/CompletionTest.scala | 13 +++++++++++++ 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index 3ba7fe7b1e40..a65216e920fb 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -1177,15 +1177,17 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") } private val CamelRegex = "([A-Z][^A-Z]*)".r - private def camelComponents(s: String): List[String] = { - CamelRegex.findAllIn("X" + s).toList match { case head :: tail => head.drop(1) :: tail; case Nil => Nil } + private def camelComponents(s: String, allowSnake: Boolean): List[String] = { + if (allowSnake && s.forall(c => c.isUpper || c == '_')) s.split('_').toList.filterNot(_.isEmpty) + else CamelRegex.findAllIn("X" + s).toList match { case head :: tail => head.drop(1) :: tail; case Nil => Nil } } def camelMatch(entered: Name): Name => Boolean = { val enteredS = entered.toString val enteredLowercaseSet = enteredS.toLowerCase().toSet + val allowSnake = !enteredS.contains('_') (candidate: Name) => { - def candidateChunks = camelComponents(candidate.toString) + def candidateChunks = camelComponents(candidate.dropLocal.toString, allowSnake) // Loosely based on IntelliJ's autocompletion: the user can just write everything in // lowercase, as we'll let `isl` match `GenIndexedSeqLike` or `isLovely`. def lenientMatch(entered: String, candidate: List[String], matchCount: Int): Boolean = { diff --git a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala index 83db7079caff..1eb2558880f3 100644 --- a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala +++ b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala @@ -91,6 +91,8 @@ class CompletionTest { checkExact(completer, "object O { def xxxxYyyyyZzzz = 1; def xxxxYyZeee = 1 }; import O._; xYZ")("", "xxxxYyyyyZzzz", "xxxxYyZeee") checkExact(completer, "object O { def xxxxYyyyyZzzz = 1; def xxxxYyyyyZeee = 1 }; import O._; xYZ")("xxxxYyyyyZzzz", "xxxxYyyyyZeee") checkExact(completer, "object O { class AbstractMetaFactoryFactory }; new O.AMFF")("AbstractMetaFactoryFactory") + checkExact(completer, "object O { val DECIMAL_DIGIT_NUMBER = 0 }; import O._; L_")("DECIMAL_DIGIT_NUMBER") + checkExact(completer, "object O { val _unusualIdiom = 0 }; import O._; _ui")("_unusualIdiom") } @Test @@ -99,9 +101,20 @@ class CompletionTest { val completer = new PresentationCompilerCompleter(intp) checkExact(completer, "object O { def theCatSatOnTheMat = 1 }; import O._; tcso")("theCatSatOnTheMat") checkExact(completer, "object O { def theCatSatOnTheMat = 1 }; import O._; sotm")("theCatSatOnTheMat") + checkExact(completer, "object O { def theCatSatOnTheMat = 1 }; import O._; caton")("theCatSatOnTheMat") + checkExact(completer, "object O { def theCatSatOnTheMat = 1; def catOnYoutube = 2 }; import O._; caton")("", "theCatSatOnTheMat", "catOnYoutube") checkExact(completer, "object O { def theCatSatOnTheMat = 1 }; import O._; TCSOTM")() } + @Test + def snakeCompletions(): Unit = { + val intp = newIMain() + val completer = new PresentationCompilerCompleter(intp) + checkExact(completer, "object O { final val THE_CAT_SAT_ON_THE_MAT = 1 }; import O._; TCSO")("THE_CAT_SAT_ON_THE_MAT") + checkExact(completer, "object O { final 
val THE_CAT_SAT_ON_THE_MAT = 1 }; import O._; tcso")("THE_CAT_SAT_ON_THE_MAT") + checkExact(completer, "object C { def isIdentifierIgnorable = ??? ; val DECIMAL_DIGIT_NUMBER = 0 }; import C._; iii")("isIdentifierIgnorable") + } + @Test def previousLineCompletions(): Unit = { val intp = newIMain() From 6b3146bb9889a72e05b5965b3bac6a2686c842c7 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 18 Mar 2018 14:15:18 +1000 Subject: [PATCH 1297/2793] Avoid using STARR on the scalacheck classpath Integrating Scalacheck into our SBT build is all frying pans and fires. We disabled forking to get test failure reporting working, but didn't realise that this put STARR on the classpath of the tests. This commits switches back to forking, but only after customizing the framework to get early access to what hopefully will be part of the next scalacheck release: https://github.com/rickynils/scalacheck/pull/388 --- build.sbt | 13 +- .../scalacheck/CustomScalaCheckRunner.scala | 232 ++++++++++++++++++ test/scalacheck/sanitycheck.scala | 14 ++ 3 files changed, 256 insertions(+), 3 deletions(-) create mode 100644 test/scalacheck/org/scalacheck/CustomScalaCheckRunner.scala create mode 100644 test/scalacheck/sanitycheck.scala diff --git a/build.sbt b/build.sbt index ce43d2c9435c..6c2e78b72b1a 100644 --- a/build.sbt +++ b/build.sbt @@ -612,10 +612,17 @@ lazy val scalacheck = project.in(file("test") / "scalacheck") .settings(disableDocs) .settings(disablePublishing) .settings( - fork in Test := false, + // enable forking to workaround https://github.com/sbt/sbt/issues/4009 + fork in Test := true, + // customise framework for early acess to https://github.com/rickynils/scalacheck/pull/388 + // TODO remove this when we upgrade scalacheck + testFrameworks := Seq(TestFramework("org.scalacheck.CustomScalaCheckFramework")), javaOptions in Test += "-Xss1M", - testOptions += Tests.Cleanup { loader => - ModuleUtilities.getObject("scala.TestCleanup", loader).asInstanceOf[Runnable].run() + testOptions ++= { + if ((fork in Test).value) Nil + else List(Tests.Cleanup { loader => + ModuleUtilities.getObject("scala.TestCleanup", loader).asInstanceOf[Runnable].run() + }) }, libraryDependencies ++= Seq(scalacheckDep), unmanagedSourceDirectories in Compile := Nil, diff --git a/test/scalacheck/org/scalacheck/CustomScalaCheckRunner.scala b/test/scalacheck/org/scalacheck/CustomScalaCheckRunner.scala new file mode 100644 index 000000000000..340940d7cb01 --- /dev/null +++ b/test/scalacheck/org/scalacheck/CustomScalaCheckRunner.scala @@ -0,0 +1,232 @@ +package org.scalacheck + +import java.util.concurrent.atomic.AtomicInteger + +import org.scalacheck.Test.Parameters +import sbt.testing._ + +private abstract class CustomScalaCheckRunner extends Runner { + + val args: Array[String] + val loader: ClassLoader + val applyCmdParams: Parameters => Parameters + + val successCount = new AtomicInteger(0) + val failureCount = new AtomicInteger(0) + val errorCount = new AtomicInteger(0) + val testCount = new AtomicInteger(0) + + def deserializeTask(task: String, deserializer: String => TaskDef) = { + val taskDef = deserializer(task) + val countTestSelectors = taskDef.selectors.toSeq.count { + case _:TestSelector => true + case _ => false + } + if (countTestSelectors == 0) rootTask(taskDef) + else checkPropTask(taskDef, single = true) + } + + def serializeTask(task: Task, serializer: TaskDef => String) = + serializer(task.taskDef) + + def tasks(taskDefs: Array[TaskDef]): Array[Task] = { + val isForked = 
taskDefs.exists(_.fingerprint().getClass.getName.contains("ForkMain")) + taskDefs.map { taskDef => + if (isForked) checkPropTask(taskDef, single = false) + else rootTask(taskDef) + } + } + + abstract class BaseTask(override val taskDef: TaskDef) extends Task { + val tags: Array[String] = Array() + + val props: Seq[(String,Prop)] = { + val fp = taskDef.fingerprint.asInstanceOf[SubclassFingerprint] + val obj = if (fp.isModule) Platform.loadModule(taskDef.fullyQualifiedName,loader) + else Platform.newInstance(taskDef.fullyQualifiedName, loader)(Seq()) + obj match { + case props: Properties => props.properties + case prop: Prop => Seq("" -> prop) + } + } + + // TODO copypasted from props val + val properties: Option[Properties] = { + val fp = taskDef.fingerprint.asInstanceOf[SubclassFingerprint] + val obj = if (fp.isModule) Platform.loadModule(taskDef.fullyQualifiedName,loader) + else Platform.newInstance(taskDef.fullyQualifiedName, loader)(Seq()) + obj match { + case props: Properties => Some(props) + case prop: Prop => None + } + } + + def log(loggers: Array[Logger], ok: Boolean, msg: String) = + loggers foreach { l => + val logstr = + if(!l.ansiCodesSupported) msg + else s"${if (ok) Console.GREEN else Console.RED}$msg${Console.RESET}" + l.info(logstr) + } + + def execute(handler: EventHandler, loggers: Array[Logger], + continuation: Array[Task] => Unit + ): Unit = continuation(execute(handler,loggers)) + } + + def rootTask(td: TaskDef) = { + new BaseTask(td) { + def execute(handler: EventHandler, loggers: Array[Logger]): Array[Task] = { + props.map(_._1).toSet.toArray map { name => + checkPropTask(new TaskDef(td.fullyQualifiedName, td.fingerprint, + td.explicitlySpecified, Array(new TestSelector(name))) + , single = true) + } + } + } + } + + def checkPropTask(taskDef: TaskDef, single: Boolean) = new BaseTask(taskDef) { + def execute(handler: EventHandler, loggers: Array[Logger]): Array[Task] = { + val params = applyCmdParams(properties.foldLeft(Parameters.default)((params, props) => props.overrideParameters(params))) + val propertyFilter = None + + if (single) { + val names = taskDef.selectors flatMap { + case ts: TestSelector => Array(ts.testName) + case _ => Array.empty[String] + } + names foreach { name => + for ((`name`, prop) <- props) + executeInternal(prop, name, handler, loggers, propertyFilter) + } + } else { + for ((name, prop) <- props) + executeInternal(prop, name, handler, loggers, propertyFilter) + } + Array.empty[Task] + } + + def executeInternal(prop: Prop, name: String, handler: EventHandler, loggers: Array[Logger], propertyFilter: Option[scala.util.matching.Regex]): Unit = { + import util.Pretty.{Params, pretty} + val params = applyCmdParams(properties.foldLeft(Parameters.default)((params, props) => props.overrideParameters(params))) + val result = Test.check(params, prop) + + val event = new Event { + val status = result.status match { + case Test.Passed => Status.Success + case _: Test.Proved => Status.Success + case _: Test.Failed => Status.Failure + case Test.Exhausted => Status.Failure + case _: Test.PropException => Status.Error + } + val throwable = result.status match { + case Test.PropException(_, e, _) => new OptionalThrowable(e) + case _: Test.Failed => new OptionalThrowable( + new Exception(pretty(result, Params(0))) + ) + case _ => new OptionalThrowable() + } + val fullyQualifiedName = taskDef.fullyQualifiedName + val selector = new TestSelector(name) + val fingerprint = taskDef.fingerprint + val duration = -1L + } + + handler.handle(event) + + event.status 
match { + case Status.Success => successCount.incrementAndGet() + case Status.Error => errorCount.incrementAndGet() + case Status.Skipped => errorCount.incrementAndGet() + case Status.Failure => failureCount.incrementAndGet() + case _ => failureCount.incrementAndGet() + } + testCount.incrementAndGet() + + // TODO Stack traces should be reported through event + val verbosityOpts = Set("-verbosity", "-v") + val verbosity = + args.grouped(2).filter(twos => verbosityOpts(twos.head)) + .toSeq.headOption.map(_.last).map(_.toInt).getOrElse(0) + val s = if (result.passed) "+" else "!" + val n = if (name.isEmpty) taskDef.fullyQualifiedName else name + val logMsg = s"$s $n: ${pretty(result, Params(verbosity))}" + log(loggers, result.passed, logMsg) + } + } +} + + +final class CustomScalaCheckFramework extends Framework { + + private def mkFP(mod: Boolean, cname: String, noArgCons: Boolean = true) = + new SubclassFingerprint { + def superclassName(): String = cname + val isModule = mod + def requireNoArgConstructor(): Boolean = noArgCons + } + + val name = "ScalaCheck" + + def fingerprints: Array[Fingerprint] = Array( + mkFP(false, "org.scalacheck.Properties"), + mkFP(false, "org.scalacheck.Prop"), + mkFP(true, "org.scalacheck.Properties"), + mkFP(true, "org.scalacheck.Prop") + ) + + def runner(_args: Array[String], _remoteArgs: Array[String], + _loader: ClassLoader + ): Runner = new CustomScalaCheckRunner { + + val args = _args + val remoteArgs = _remoteArgs + val loader = _loader + val (prms,unknownArgs) = Test.cmdLineParser.parseParams(args) + val applyCmdParams = prms.andThen { + p => p.withTestCallback(new Test.TestCallback {}) + .withCustomClassLoader(Some(loader)) + } + + def receiveMessage(msg: String): Option[String] = msg(0) match { + case 'd' => + val Array(t,s,f,e) = msg.tail.split(',') + testCount.addAndGet(t.toInt) + successCount.addAndGet(s.toInt) + failureCount.addAndGet(f.toInt) + errorCount.addAndGet(e.toInt) + None + } + + def done = if (testCount.get > 0) { + val heading = if (testCount.get == successCount.get) "Passed" else "Failed" + s"$heading: Total $testCount, " + + s"Failed $failureCount, Errors $errorCount, Passed $successCount" + + (if(unknownArgs.isEmpty) "" else + s"\nWarning: Unknown ScalaCheck args provided: ${unknownArgs.mkString(" ")}") + } else "" + + } + + def slaveRunner(_args: Array[String], _remoteArgs: Array[String], + _loader: ClassLoader, send: String => Unit + ): Runner = new ScalaCheckRunner { + val args = _args + val remoteArgs = _remoteArgs + val loader = _loader + val applyCmdParams = Test.cmdLineParser.parseParams(args)._1.andThen { + p => p.withTestCallback(new Test.TestCallback {}) + .withCustomClassLoader(Some(loader)) + } + + def receiveMessage(msg: String) = None + + def done = { + send(s"d$testCount,$successCount,$failureCount,$errorCount") + "" + } + + } + +} diff --git a/test/scalacheck/sanitycheck.scala b/test/scalacheck/sanitycheck.scala new file mode 100644 index 000000000000..3b6a7a3d9f2e --- /dev/null +++ b/test/scalacheck/sanitycheck.scala @@ -0,0 +1,14 @@ +import java.io.File + +import org.scalacheck._ + +object SanityCheck extends Properties("SanityCheck") { + property("classpath correct") = { + val codeSource = classOf[Option[_]].getProtectionDomain.getCodeSource.getLocation.toURI + val path = new File(codeSource).getAbsolutePath + if (path.endsWith("quick/classes/library")) + Prop.proved + else + Prop.falsified :| s"Unexpected code source for scala library: $path" + } +} From a111acb1ab8f4a80051ad722c2e1403562f62020 Mon Sep 17 00:00:00 
2001 From: Lukas Rytz Date: Mon, 19 Mar 2018 13:51:40 +0100 Subject: [PATCH 1298/2793] Fix return value of sbtResolve `sbtResolve` is used in an `if` test. The last statement of the function makes the return value, so the `travis_fold_end` made it univerally `true`. For reference, the `errexit` mode (`set -e`, set in `common`) is ignored in the test position of an `if` statement (otherwise the build would have been aborted), see `man bash`. --- scripts/common | 2 ++ 1 file changed, 2 insertions(+) diff --git a/scripts/common b/scripts/common index b33cd78ad7d7..f98ac97cce36 100644 --- a/scripts/common +++ b/scripts/common @@ -213,8 +213,10 @@ sbtResolve() { "$addIntegrationResolver" \ "set libraryDependencies := Seq(\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross)" \ "show update" + res=$? cd $WORKSPACE travis_fold_end resolve + return $res } clearIvyCache() { From 363c377f5eb599c887185cee6f35f599632df9e5 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 19 Mar 2018 14:05:44 +0100 Subject: [PATCH 1299/2793] remove some cruft in bash scripts --- scripts/common | 47 ++--------------------------------------------- 1 file changed, 2 insertions(+), 45 deletions(-) diff --git a/scripts/common b/scripts/common index f98ac97cce36..b9e089b7a195 100644 --- a/scripts/common +++ b/scripts/common @@ -1,6 +1,3 @@ -# This is for forcibly stopping the job from a subshell (see test below). -trap "exit 1" TERM -export TOP_PID=$$ set -e # The scala/scala checkout directory (set by Jenkins, or `/home/travis/build/scala/scala` on travis) @@ -13,13 +10,7 @@ else IVY2_DIR="$WORKSPACE/.ivy2" fi -# Known problems : does not fare well with interrupted, partial -# compilations. We should perhaps have a multi-dependency version -# of do_i_have below - -LOGGINGDIR="$WORKSPACE/logs" -mkdir -p "$LOGGINGDIR" - +# used by `sbtResolve` rm -rf "$WORKSPACE/resolutionScratch_" mkdir -p "$WORKSPACE/resolutionScratch_" @@ -35,45 +26,11 @@ addIntegrationResolver="set resolvers in Global += \"scala-pr\" at \"$integratio jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"} -# temp dir where all 'non-build' operation are performed +# used by `checkAvailability` TMP_ROOT_DIR=$(mktemp -d -t pr-scala.XXXX) TMP_DIR="${TMP_ROOT_DIR}/tmp" mkdir "${TMP_DIR}" - -# detect sed version and how to enable extended regexes -SEDARGS="-n$(if (echo "a" | sed -nE "s/a/b/" &> /dev/null); then echo E; else echo r; fi)" - - - -# :docstring test: -# Usage: test -# Executes , logging the launch of the command to the -# main log file, and kills global script execution with the TERM -# signal if the commands ends up failing. -# DO NOT USE ON FUNCTIONS THAT DECLARE VARIABLES, -# AS YOU'LL BE RUNNING IN A SUBSHELL AND VARIABLE DECLARATIONS WILL BE LOST -# :end docstring: - -function test() { - echo "### $@" - "$@" - status=$? - if [ $status -ne 0 ]; then - say "### ERROR with $1" - kill -s TERM $TOP_PID - fi -} - -# :docstring say: -# Usage: say -# Prints to both console and the main log file. 
-# :end docstring: - -function say(){ - (echo "$@") | tee -a $LOGGINGDIR/compilation-$SCALADATE-$SCALAHASH.log -} - # General debug logging # $* - message function debug () { From 9b152fadbceca7ce6ebd86ec53abebf9527d4a15 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 21 Mar 2018 12:58:26 +1000 Subject: [PATCH 1300/2793] Restore callee line numbers in prologue of inlined code --- .../nsc/backend/jvm/opt/BytecodeUtils.scala | 9 ++++++++ .../tools/nsc/backend/jvm/opt/Inliner.scala | 13 +++++++++++ .../nsc/backend/jvm/opt/InlinerTest.scala | 22 +++++++++++++++++-- 3 files changed, 42 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala index 2e82d024f6ae..5248fb6aae35 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -141,6 +141,15 @@ object BytecodeUtils { else previousExecutableInstruction(prev, stopBefore) } + @tailrec def previousLineNumber(insn: AbstractInsnNode): Option[Int] = { + val prev = insn.getPrevious + prev match { + case null => None + case line: LineNumberNode => Some(line.line) + case _ => previousLineNumber(prev) + } + } + @tailrec def nextExecutableInstruction(insn: AbstractInsnNode, alsoKeep: AbstractInsnNode => Boolean = Set()): Option[AbstractInsnNode] = { val next = insn.getNext if (next == null || isExecutable(next) || alsoKeep(next)) Option(next) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala index b305fbfa3eab..30cff49a2e39 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala @@ -440,6 +440,19 @@ abstract class Inliner { // label for the exit of the inlined functions. xRETURNs are replaced by GOTOs to this label. 
val postCallLabel = newLabelNode clonedInstructions.add(postCallLabel) + if (sameSourceFile) { + BytecodeUtils.previousLineNumber(callsiteInstruction) match { + case Some(line) => + BytecodeUtils.nextExecutableInstruction(callsiteInstruction).flatMap(BytecodeUtils.previousLineNumber) match { + case Some(line1) => + if (line == line1) + // SD-479 code follows on the same line, restore the line number + clonedInstructions.add(new LineNumberNode(line, postCallLabel)) + case None => + } + case None => + } + } // replace xRETURNs: // - store the return value (if any) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index 3688c7aada1d..d430cba1b29e 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -1551,11 +1551,11 @@ class InlinerTest extends BytecodeTesting { assertSameCode(is("t2"), List( Label(0), LineNumber(3, Label(0)), VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "B", "fx", "()V", false), - Label(4), LineNumber(4, Label(4)), Op(ICONST_1), Op(IRETURN), Label(8))) + Label(4), LineNumber(4, Label(4)), Op(ICONST_1), Label(7), LineNumber(13, Label(7)), Op(IRETURN), Label(10))) assertSameCode(is("t3"), List( Label(0), LineNumber(9, Label(0)), VarOp(ALOAD, 0), Invoke(INVOKEVIRTUAL, "C", "fx", "()V", false), - Label(4), LineNumber(10, Label(4)), Op(ICONST_1), Op(IRETURN), Label(8))) + Label(4), LineNumber(10, Label(4)), Op(ICONST_1), Label(7), LineNumber(14, Label(7)), Op(IRETURN), Label(10))) } @Test @@ -1754,4 +1754,22 @@ class InlinerTest extends BytecodeTesting { assertDoesNotInvoke(i, "f") assertInvoke(i, "T", "T$_setter_$x_$eq") } + + @Test + def sd479_same_unit_inlining_line_number(): Unit = { + val code = + """class Test { + | @inline final def foo(b: Boolean): String = { + | "foo" + | } + | + | def bar(a: AnyRef, b: Boolean): AnyRef = { + | foo(b); a.toString // line 7 + | } + |} + """.stripMargin + val List(t) = compileClasses(code) + val i = getMethod(t, "bar") + assertSameCode(i.instructions, List(Label(0), LineNumber(7, Label(0)), VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "java/lang/Object", "toString", "()Ljava/lang/String;", false), Op(ARETURN), Label(5))) + } } From 821305835b060d58d45a41adf24625c7d4a8099e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 20 Mar 2018 15:52:33 +1000 Subject: [PATCH 1301/2793] Don't drop line number nodes in dead frames --- .../tools/nsc/backend/jvm/opt/LocalOpt.scala | 1 + .../backend/jvm/opt/UnreachableCodeTest.scala | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala index 6bf6f48c13ce..3d0da4edd1e3 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala @@ -524,6 +524,7 @@ abstract class LocalOpt { case i: IincInsnNode if isLive => maxLocals = math.max(maxLocals, i.`var` + 1) + case _: LineNumberNode => case _ => if (!isLive || insn.getOpcode == NOP) { // Instruction iterators allow removing during iteration. 
diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala index 2a8753a65a64..bb7aac2876ae 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala @@ -8,6 +8,7 @@ import org.junit.runner.RunWith import org.junit.runners.JUnit4 import scala.tools.asm.Opcodes._ +import scala.tools.asm.tree.ClassNode import scala.tools.partest.ASMConverters._ import scala.tools.testing.AssertUtil._ import scala.tools.testing.BytecodeTesting._ @@ -245,4 +246,21 @@ class UnreachableCodeTest extends ClearAfterClass { assertSameSummary(getMethod(cDCE, "t3"), List(ALOAD, NEW, DUP, LDC, "", ATHROW)) assertSameSummary(getMethod(cDCE, "t4"), List(ALOAD, ALOAD, "nt", ATHROW)) } + + @Test + def patmatDefaultLineNumber(): Unit = { + val code = + """class Test { + | def test = (this: AnyRef) match { + | case _: String => + | "line4" // the synthetic `throw new MatchError` used to be positioned, here, despite the fact that patmat positions it at line 3. + | } + |} + |""".stripMargin + val test: ClassNode = dceCompiler.compileClass(code) + val i = getAsmMethod(test, "test") + val instr = findInstrs(i, "NEW scala/MatchError").head + val lineNumber = BytecodeUtils.previousLineNumber(instr) + assertEquals(Some(2), lineNumber) + } } From 77d866620b3f2e93ff9dae1ad2339f7788117b28 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 21 Mar 2018 21:07:51 +1000 Subject: [PATCH 1302/2793] Windows friendliness for classpath construction --- src/partest-extras/scala/tools/partest/BytecodeTest.scala | 2 +- src/partest-extras/scala/tools/partest/ReplTest.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/partest-extras/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala index 2056f9d8be65..93ac14a98ed6 100644 --- a/src/partest-extras/scala/tools/partest/BytecodeTest.scala +++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala @@ -133,7 +133,7 @@ abstract class BytecodeTest { // logic inspired by scala.tools.util.PathResolver implementation // `Settings` is used to check YdisableFlatCpCaching in ZipArchiveFlatClassPath val factory = new ClassPathFactory(new Settings()) - val containers = factory.classesInExpandedPath(sys.props("partest.output") + ":" + Defaults.javaUserClassPath) + val containers = factory.classesInExpandedPath(sys.props("partest.output") + java.io.File.pathSeparator + Defaults.javaUserClassPath) new AggregateClassPath(containers) } } diff --git a/src/partest-extras/scala/tools/partest/ReplTest.scala b/src/partest-extras/scala/tools/partest/ReplTest.scala index 1538dba394fe..d039f2ec6ab3 100644 --- a/src/partest-extras/scala/tools/partest/ReplTest.scala +++ b/src/partest-extras/scala/tools/partest/ReplTest.scala @@ -22,7 +22,7 @@ abstract class ReplTest extends DirectTest { if (getClass.getClassLoader.getParent != null) { s.classpath.value = s.classpath.value match { case "" => testOutput.toString - case s => s + ":" + testOutput.toString + case s => s + java.io.File.pathSeparator + testOutput.toString } s.usejavacp.value = true } From 9051019cfdc531638b10e24088480c9a12317be3 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 21 Mar 2018 18:09:17 +1000 Subject: [PATCH 1303/2793] Allow compilation of files in empty package to a -d MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Fixes a regression in 
2.12.5 Before: ``` ⚡ mkdir /tmp/foo; ln -s /tmp/foo /tmp/foo-symlink ⚡ qscalac -d /tmp/foo-symlink $(f "package p1; class C") ⚡ qscalac -d /tmp/foo-symlink $(f "class C") error: error writing C: Can't create directory /tmp/foo-symlink; there is an existing (non-directory) file in its path one error found ``` After: ``` ⚡ qscalac -d /tmp/foo-symlink $(f "package p1; class C") ⚡ qscalac -d /tmp/foo-symlink $(f "class C") ⚡ touch /tmp/exists ``` And after, error cases: ``` ⚡ qscalac -d /tmp/exists $(f "class C") scalac error: /tmp/exists does not exist or is not a directory scalac -help gives more information ⚡ rm /tmp/exists ⚡ mkdir -p /tmp/out1/p1 ⚡ qscalac -d /tmp/out1 $(f "class C") ⚡ qscalac -d /tmp/out1 $(f "package p2; class C") ⚡ qscalac -d /tmp/out1 $(f "package p1; class C") ⚡ touch /tmp/out1/p3 ⚡ qscalac -d /tmp/out1 $(f "package p3; class C") error: error writing p3/C: Can't create directory /tmp/out1/p3; there is an existing (non-directory) file in its path one error found ⚡ mkdir -p /tmp/out; echo "" > /tmp/a-file; ln -s /tmp/a-file /tmp/out/p1; qscalac -d /tmp/out $(f "package p1; class C") error: error writing p1/C: Can't create directory /tmp/out/p1; there is an existing (non-directory) file in its path one error found ``` --- .../scala/tools/nsc/backend/jvm/ClassfileWriters.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala index 840a71311ff5..4d9b478c7dc4 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala @@ -143,7 +143,10 @@ abstract class ClassfileWriters { try Files.createDirectories(parent, noAttributes: _*) catch { case e: FileAlreadyExistsException => - throw new FileConflictException(s"Can't create directory $parent; there is an existing (non-directory) file in its path", e) + // `createDirectories` reports this exception if `parent` is an existing symlink to a directory + // but that's fine for us (and common enough, `scalac -d /tmp` on mac targets symlink). + if (!Files.isDirectory(parent)) + throw new FileConflictException(s"Can't create directory $parent; there is an existing (non-directory) file in its path", e) } builtPaths.put(baseDir, TRUE) var current = parent From 0a88b4cfa5127974a2bc506e43e52316546b132f Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 21 Mar 2018 21:26:26 +0100 Subject: [PATCH 1304/2793] Update starr to 2.12.5 --- build.sbt | 2 +- versions.properties | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/build.sbt b/build.sbt index 821771b0cb19..2ab4ef3f6b40 100644 --- a/build.sbt +++ b/build.sbt @@ -106,7 +106,7 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( // should not be set directly. It is the same as the Maven version and derived automatically from `baseVersion` and // `baseVersionSuffix`. globalVersionSettings -baseVersion in Global := "2.12.5" +baseVersion in Global := "2.12.6" baseVersionSuffix in Global := "SNAPSHOT" mimaReferenceVersion in Global := Some("2.12.0") diff --git a/versions.properties b/versions.properties index a12b041e9ff9..ff096b3da227 100644 --- a/versions.properties +++ b/versions.properties @@ -1,5 +1,5 @@ # Scala version used for bootstrapping (see README.md) -starr.version=2.12.4 +starr.version=2.12.5 # The scala.binary.version determines how modules are resolved. 
It is set as follows: # - After 2.x.0 is released, the binary version is 2.x From d5a2defc2094cd7b176c1eab448a4ce61a56debb Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 21 Mar 2018 15:45:14 +1000 Subject: [PATCH 1305/2793] Add a test showing pattern matcher positions status quo --- test/files/run/sd187.check | 100 +++++++++++++++++++++++++++++++++++++ test/files/run/sd187.scala | 42 ++++++++++++++++ 2 files changed, 142 insertions(+) create mode 100644 test/files/run/sd187.check create mode 100644 test/files/run/sd187.scala diff --git a/test/files/run/sd187.check b/test/files/run/sd187.check new file mode 100644 index 000000000000..626c92749a32 --- /dev/null +++ b/test/files/run/sd187.check @@ -0,0 +1,100 @@ +[[syntax trees at end of patmat]] // newSource1.scala +[7]package [7] { + [7]class C extends [9][2302]scala.AnyRef { + [2302]def (): [9]C = [2302]{ + [2302][2302][2302]C.super.(); + [9]() + }; + [107]def commonSubPattern([124]x: [127]): [107]AnyVal = [205]{ + [412] var rc6: [412]Boolean = [412]false; + [412] var x3: [412]String = [412][412][412]null.asInstanceOf[[412]String]; + [205]{ + [205]case val x1: [205]Any = [205]x; + [205]case8(){ + [313]if ([313][313]x1.isInstanceOf[[313]Option[_]]) + [325][325]matchEnd7([325]()) + else + [313][313]case9() + }; + [205]case9(){ + [412]if ([412][412]x1.isInstanceOf[[412]String]) + [412]{ + [412][412]rc6 = [412]true; + [412][412]x3 = [412]([412][412]x1.asInstanceOf[[412]String]: [412]String); + [412]if ([427][427]x3.==([430]"4")) + [512][512]matchEnd7([512][512]x3.hashCode()) + else + [412][412]case10() + } + else + [412][412]case10() + }; + [205]case10(){ + [205]if ([205][205]rc6.&&([627][627]x3.==([630]"6"))) + [712][712]matchEnd7([712][712]x3.hashCode()) + else + [205][205]case11() + }; + [205]case11(){ + [205][205]matchEnd7([205]throw [205][205][205]new [205]MatchError([205]x1)) + }; + [205]matchEnd7(x: [NoPosition]AnyVal){ + [205]x + } + } + }; + [1007]def extractor([1017]x: [1020]): [1007]Any = [1027]{ + [1027]case val x1: [1027]Any = [1027]x; + [1027]case6(){ + [1120]if ([1120][1120]x1.isInstanceOf[[1120]Product2[T1,T2]]) + [1120]{ + [1120] val x2: [1120]Product2[T1,T2] = [1120]([1120][1120]x1.asInstanceOf[[1120]Product2[T1,T2]]: [1120]Product2[T1,T2]); + [1112]{ + [1112] val o8: [1112]Option[Product2[T1,T2]] = [1112][1112][1112]scala.Product2.unapply[[1112]T1, [1112]T2]([1112]x2); + [1112]if ([1112]o8.isEmpty.unary_!) 
+ [1112]{ + [1121]val a: [1121]Any = [1121]o8.get._1; + [1210][1210]matchEnd5([1210]a) + } + else + [1112][1112]case7() + } + } + else + [1120][1120]case7() + }; + [1027]case7(){ + [1027][1027]matchEnd5([1027]throw [1027][1027][1027]new [1027]MatchError([1027]x1)) + }; + [1027]matchEnd5(x: [NoPosition]Any){ + [1027]x + } + }; + [1407]def swatch: [1407]String = [1505]try { + [1607][1607][1607]C.this.toString() + } catch { + [1505]case [1505](ex6 @ [1505]_) => [1505]{ + [1505] val x4: [1505]Throwable = [1505]ex6; + [1505]case9(){ + [1812]if ([1812][1812]x4.ne([1812]null)) + [1812]{ + [1812] val x5: [1812]Throwable = [1812]x4; + [1812]if ([1915][1915][1912]"".isEmpty()) + [2014][2014]matchEnd8([2014][2014]x5.toString()) + else + [1812][1812]case10() + } + else + [1812][1812]case10() + }; + [1505]case10(){ + [1505][1505]matchEnd8([1505]throw [1505]ex6) + }; + [1505]matchEnd8(x: [NoPosition]String){ + [1505]x + } + } + } + } +} + diff --git a/test/files/run/sd187.scala b/test/files/run/sd187.scala new file mode 100644 index 000000000000..91d4d56cde74 --- /dev/null +++ b/test/files/run/sd187.scala @@ -0,0 +1,42 @@ +import scala.tools.partest._ +import java.io.{Console => _, _} + +object Test extends DirectTest { + + override def extraSettings: String = "-usejavacp -Xprint-pos -Xprint:patmat -Ystop-after:patmat -d " + testOutput.path + + override def code = + """ + |class C { // + | def commonSubPattern(x: Any) = { // + | x match { // + | case _: Option[_] => // + | case s: String if s == "4" => // + | s.hashCode // + | case s: String if s == "6" => // + | s.hashCode // + | } // + | } // + | def extractor(x: Any) = x match { // + | case Product2(a, b) => // + | a // + | } // + | def swatch = { // + | try { // + | toString // + | } catch { // + | case t: Throwable // + | if "".isEmpty => // + | t.toString // + | } // + | } // + |} + |""".stripMargin + + + override def show(): Unit = { + Console.withErr(System.out) { + compile() + } + } +} \ No newline at end of file From 2a4af68941759074ba678e44503247041cd0ae54 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 11 Nov 2016 16:52:57 +1000 Subject: [PATCH 1306/2793] Improve positioning of translated patterns - Definitions of temp vars supporting CSE are positioned at the position of the `match` (was at the first pattern that referred to them) - References to these vars are positioned at the pattern that references them (was the position of the `match`) Some of these problems have been worked around in the IntelliJ debugger, as discussed in scala/scala-dev#187. Hopefully fixing the problems here at the source obviates those fixes (but doesn't clash with them.) 
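To make the two bullets concrete, a small example in the spirit of the sd187 test above (illustrative only): the two String cases share one type test, so patmat hoists a stored condition and the casted value into mutable temps.

```
class PositionExample {
  // After this change the synthetic ValDefs for the shared temps (roughly
  // `var rc: Boolean` and `var x: String`) carry the position of the `match`
  // itself, while each read of the temps keeps the position of the case that
  // uses it, so a debugger stepping through the cases sees sensible lines.
  def commonSubPattern(x: Any): AnyVal = x match {
    case _: Option[_]          => ()
    case s: String if s == "4" => s.hashCode
    case s: String if s == "6" => s.hashCode
  }
}
```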
Fixes scala/scala-dev#187 --- .../transform/patmat/MatchOptimization.scala | 17 +++++++++-------- .../nsc/transform/patmat/MatchTranslation.scala | 9 +++++---- .../nsc/transform/patmat/MatchTreeMaking.scala | 11 ++++++----- .../nsc/transform/patmat/PatternMatching.scala | 15 ++++++++------- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- src/reflect/scala/reflect/internal/Trees.scala | 2 ++ test/files/run/sd187.check | 10 +++++----- 7 files changed, 36 insertions(+), 30 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala index 8a546dcaa94f..de41991c90ab 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala @@ -31,7 +31,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { * the variable is floated up so that its scope includes all of the program that shares it * we generalize sharing to implication, where b reuses a if a => b and priors(a) => priors(b) (the priors of a sub expression form the path through the decision tree) */ - def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = { + def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, selectorPos: Position): List[List[TreeMaker]] = { debug.patmat("before CSE:") showTreeMakers(cases) @@ -112,7 +112,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { if (sharedPrefix.isEmpty) None else { // even sharing prefixes of length 1 brings some benefit (overhead-percentage for compiler: 26->24%, lib: 19->16%) for (test <- sharedPrefix; reusedTest <- test.reuses) reusedTest.treeMaker match { - case reusedCTM: CondTreeMaker => reused(reusedCTM) = ReusedCondTreeMaker(reusedCTM) + case reusedCTM: CondTreeMaker => reused(reusedCTM) = ReusedCondTreeMaker(reusedCTM, selectorPos) case _ => } @@ -139,13 +139,14 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { } object ReusedCondTreeMaker { - def apply(orig: CondTreeMaker) = new ReusedCondTreeMaker(orig.prevBinder, orig.nextBinder, orig.cond, orig.res, orig.pos) + def apply(orig: CondTreeMaker, selectorPos: Position) = new ReusedCondTreeMaker(orig.prevBinder, orig.nextBinder, orig.cond, orig.res, selectorPos, orig.pos) } - class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, val pos: Position) extends TreeMaker { + class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, selectorPos: Position, val pos: Position) extends TreeMaker { lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder))) - lazy val storedCond = freshSym(pos, BooleanTpe, "rc") setFlag MUTABLE + lazy val storedCond = freshSym(selectorPos, BooleanTpe, "rc") setFlag MUTABLE lazy val treesToHoist: List[Tree] = { nextBinder setFlag MUTABLE + nextBinder.setPos(selectorPos) List(storedCond, nextBinder) map (b => ValDef(b, codegen.mkZero(b.info))) } @@ -190,7 +191,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { def chainBefore(next: Tree)(casegen: Casegen): Tree = { // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift, // and in its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S) - 
casegen.ifThenElseZero(REF(lastReusedTreeMaker.storedCond), substitution(next).duplicate) + atPos(pos)(casegen.ifThenElseZero(REF(lastReusedTreeMaker.storedCond), substitution(next).duplicate)) } override def toString = "R"+((lastReusedTreeMaker.storedCond.name, substitution)) } @@ -584,9 +585,9 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { trait MatchOptimizer extends OptimizedCodegen with SwitchEmission with CommonSubconditionElimination { - override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) = { + override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, selectorPos: Position): (List[List[TreeMaker]], List[Tree]) = { // TODO: do CSE on result of doDCE(prevBinder, cases, pt) - val optCases = doCSE(prevBinder, cases, pt) + val optCases = doCSE(prevBinder, cases, pt, selectorPos) val toHoist = ( for (treeMakers <- optCases) yield treeMakers.collect{case tm: ReusedCondTreeMaker => tm.treesToHoist} diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 7a84f14942f8..c8e27c2640ee 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -28,6 +28,7 @@ trait MatchTranslation { trait MatchTranslator extends TreeMakers with TreeMakerWarnings { import typer.context + def selectorPos: Position /** A conservative approximation of which patterns do not discern anything. * They are discarded during the translation. @@ -224,7 +225,7 @@ trait MatchTranslation { val selectorSym = freshSym(selector.pos, pureType(selectorTp)) setFlag treeInfo.SYNTH_CASE_FLAGS // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental - val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, matchOwner, defaultOverride) + val combined = combineCases(selector, selectorSym, nonSyntheticCases map translateCase(selectorSym, pt), pt, selectorPos, matchOwner, defaultOverride) if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.patmatNanos, start) combined @@ -246,7 +247,7 @@ trait MatchTranslation { val bindersAndCases = caseDefs.map(_.duplicate) map { caseDef => // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there) // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this) - val caseScrutSym = freshSym(pos, pureType(ThrowableTpe)) + val caseScrutSym = freshSym(caseDef.pat.pos, pureType(ThrowableTpe)) (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution)) } @@ -256,7 +257,7 @@ trait MatchTranslation { } val catches = if (swatches.nonEmpty) swatches else { - val scrutSym = freshSym(pos, pureType(ThrowableTpe)) + val scrutSym = freshSym(caseDefs.head.pat.pos, pureType(ThrowableTpe)) val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))} val exSym = freshSym(pos, pureType(ThrowableTpe), "ex") @@ -266,7 +267,7 @@ trait MatchTranslation { CaseDef( Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping? 
EmptyTree, - combineCasesNoSubstOnly(REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(REF(exSym)))) + combineCasesNoSubstOnly(REF(exSym), scrutSym, casesNoSubstOnly, pt, selectorPos, matchOwner, Some(scrut => Throw(REF(exSym)))) ) }) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index eff6b859b748..9381c8a375ac 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -31,7 +31,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { // the making of the trees /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// trait TreeMakers extends TypedSubstitution with CodegenCore { - def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) + def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, selectorPos: Position): (List[List[TreeMaker]], List[Tree]) def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree], unchecked: Boolean): Option[Tree] = @@ -546,14 +546,15 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { } // calls propagateSubstitution on the treemakers - def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = { + def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, selectorPos: Position, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = { // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution)) - combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, matchFailGenOverride) + combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, selectorPos, owner, matchFailGenOverride) } // pt is the fully defined type of the cases (either pt or the lub of the types of the cases) - def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = + def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, + selectorPos: Position, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = fixerUpper(owner, scrut.pos) { def matchFailGen = matchFailGenOverride orElse Some(Throw(MatchErrorClass.tpe, _: Tree)) @@ -609,7 +610,7 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { analyzeCases(scrutSym, casesNoSubstOnly, pt, suppression) - val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt) + val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt, selectorPos) val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases map combineExtractors, synthCatchAll) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index 2b775113a1f8..3e4fe35395ee 100644 --- 
a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -59,7 +59,7 @@ trait PatternMatching extends Transform case Match(sel, cases) => val origTp = tree.tpe // setType origTp intended for CPS -- TODO: is it necessary? - val translated = translator.translateMatch(treeCopy.Match(tree, transform(sel), transformTrees(cases).asInstanceOf[List[CaseDef]])) + val translated = translator(sel.pos).translateMatch(treeCopy.Match(tree, transform(sel), transformTrees(cases).asInstanceOf[List[CaseDef]])) try { localTyper.typed(translated) setType origTp } catch { @@ -69,24 +69,25 @@ trait PatternMatching extends Transform translated } case Try(block, catches, finalizer) => - treeCopy.Try(tree, transform(block), translator.translateTry(transformTrees(catches).asInstanceOf[List[CaseDef]], tree.tpe, tree.pos), transform(finalizer)) + val selectorPos = catches.headOption.getOrElse(EmptyTree).orElse(finalizer).pos.focusEnd + treeCopy.Try(tree, transform(block), translator(selectorPos).translateTry(transformTrees(catches).asInstanceOf[List[CaseDef]], tree.tpe, tree.pos), transform(finalizer)) case _ => super.transform(tree) } // TODO: only instantiate new match translator when localTyper has changed // override def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A // as this is the only time TypingTransformer changes it - def translator: MatchTranslator with CodegenCore = { - new OptimizingMatchTranslator(localTyper) + def translator(selectorPos: Position): MatchTranslator with CodegenCore = { + new OptimizingMatchTranslator(localTyper, selectorPos) } } - class PureMatchTranslator(val typer: analyzer.Typer, val matchStrategy: Tree) extends MatchTranslator with PureCodegen { - def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type) = (cases, Nil) + class PureMatchTranslator(val typer: analyzer.Typer, val matchStrategy: Tree, val selectorPos: Position) extends MatchTranslator with PureCodegen { + def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, selectorPos: Position) = (cases, Nil) def analyzeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type, suppression: Suppression): Unit = {} } - class OptimizingMatchTranslator(val typer: analyzer.Typer) extends MatchTranslator + class OptimizingMatchTranslator(val typer: analyzer.Typer, val selectorPos: Position) extends MatchTranslator with MatchOptimizer with MatchAnalyzer with Solver diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index ee128b2fc3fb..c8404236b57a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2584,7 +2584,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match)), reportAmbiguousErrors = false) orElse (_ => null) if (matchStrategy ne null) // virtualize - typed((new PureMatchTranslator(this.asInstanceOf[patmat.global.analyzer.Typer] /*TODO*/, matchStrategy)).translateMatch(match_), mode, pt) + typed((new PureMatchTranslator(this.asInstanceOf[patmat.global.analyzer.Typer] /*TODO*/, matchStrategy, match_.selector.pos.focusEnd)).translateMatch(match_), mode, pt) else match_ // will be translated in phase `patmat` } diff --git a/src/reflect/scala/reflect/internal/Trees.scala 
b/src/reflect/scala/reflect/internal/Trees.scala index 7b78fca09b58..288478a9b152 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -545,6 +545,8 @@ trait Trees extends api.Trees { object Select extends SelectExtractor case class Ident(name: Name) extends RefTree with IdentApi { + if (name.string_==("rc6")) + "".reverse def qualifier: Tree = EmptyTree def isBackquoted = this.hasAttachment[BackquotedIdentifierAttachment.type] } diff --git a/test/files/run/sd187.check b/test/files/run/sd187.check index 626c92749a32..f88fbc292333 100644 --- a/test/files/run/sd187.check +++ b/test/files/run/sd187.check @@ -6,8 +6,8 @@ [9]() }; [107]def commonSubPattern([124]x: [127]): [107]AnyVal = [205]{ - [412] var rc6: [412]Boolean = [412]false; - [412] var x3: [412]String = [412][412][412]null.asInstanceOf[[412]String]; + [205] var rc6: [205]Boolean = [205]false; + [205] var x3: [205]String = [205][205][205]null.asInstanceOf[[205]String]; [205]{ [205]case val x1: [205]Any = [205]x; [205]case8(){ @@ -30,10 +30,10 @@ [412][412]case10() }; [205]case10(){ - [205]if ([205][205]rc6.&&([627][627]x3.==([630]"6"))) + [612]if ([612][612]rc6.&&([627][627]x3.==([630]"6"))) [712][712]matchEnd7([712][712]x3.hashCode()) else - [205][205]case11() + [612][612]case11() }; [205]case11(){ [205][205]matchEnd7([205]throw [205][205][205]new [205]MatchError([205]x1)) @@ -74,7 +74,7 @@ [1607][1607][1607]C.this.toString() } catch { [1505]case [1505](ex6 @ [1505]_) => [1505]{ - [1505] val x4: [1505]Throwable = [1505]ex6; + [1812] val x4: [1812]Throwable = [1812]ex6; [1505]case9(){ [1812]if ([1812][1812]x4.ne([1812]null)) [1812]{ From e60b5b01bb80e8672309fb2b1915a8251ad4da5d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 22 Mar 2018 18:16:31 +1000 Subject: [PATCH 1307/2793] Fix problem in Jenkins bootstrap script after recent changes to scripts --- scripts/jobs/integrate/bootstrap | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index ef691c71c1ad..ffd25721aca3 100755 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -9,11 +9,11 @@ sbtArgs="-ivy $IVY2_DIR -Dsbt.override.build.repos=true -Dsbt.repository.config= source scripts/bootstrap_fun +generateRepositoriesConfig $integrationRepoUrl + determineScalaVersion deriveModuleVersions -generateRepositoriesConfig $integrationRepoUrl - removeExistingBuilds $integrationRepoUrl clearIvyCache From 7e51489e2c1b8ff6dd0a4ebb0710e80d1f559aa9 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 22 Mar 2018 14:04:45 +0100 Subject: [PATCH 1308/2793] persist more env across travis stages --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 2aa55853b91b..0125bab34e95 100644 --- a/.travis.yml +++ b/.travis.yml @@ -48,7 +48,7 @@ jobs: - rm -rf build/ # ensure we resolve from artifactory - buildModules - buildQuick clean publish - - set | grep -E '^updatedModuleVersions=|^SCALA_VER=|^publishToSonatype=' > build/env + - set | grep -E '^SCALA_VER=|^SCALA_BINARY_VER=|^scalaVersionTasks=|^XML_VER=|^PARTEST_VER=|^SCALACHECK_VER=|^XML_BUILT=|^PARTEST_BUILT=|^SCALACHECK_REF=|^updatedModuleVersions=|^publishToSonatype=' > build/env - cat build/env # this builds the spec using jekyll From b43b93c137c053f467402dc91528f665ff9d007a Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 22 Mar 2018 15:14:30 +0100 Subject: [PATCH 1309/2793] Select pgp signing key when 
publishing modules --- scripts/bootstrap_fun | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/bootstrap_fun b/scripts/bootstrap_fun index e4e4b48975a0..cfa72f465400 100644 --- a/scripts/bootstrap_fun +++ b/scripts/bootstrap_fun @@ -162,7 +162,7 @@ buildModules() { buildPublishedModules() { echo "### Publishing modules to sonatype" - publishTasks=('set credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", System.getenv("SONA_USER"), System.getenv("SONA_PASS"))' "set pgpPassphrase := Some(Array.empty)") + publishTasks=('set credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", System.getenv("SONA_USER"), System.getenv("SONA_PASS"))' 'set pgpSigningKey := Some(new java.math.BigInteger("C03EF1D7D692BCFF", 16).longValue)' "set pgpPassphrase := Some(Array.empty)") buildTasks=($publishSonatypeTaskModules) buildXML buildPartest From 43181945fbf227bb39aa16ff815588c7ef5a8c15 Mon Sep 17 00:00:00 2001 From: Robert Stoll Date: Thu, 22 Mar 2018 21:10:38 +0100 Subject: [PATCH 1310/2793] fix spelling, which has a parameter type --- spec/03-types.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/03-types.md b/spec/03-types.md index 94b791663464..acb83c8f9f65 100644 --- a/spec/03-types.md +++ b/spec/03-types.md @@ -307,7 +307,7 @@ equivalent to `AnyRef` $\\{ R \\}$. ###### Example -The following example shows how to declare and use a method which +The following example shows how to declare and use a method which has a parameter type that contains a refinement with structural declarations. ```scala From 957780fa254023b99ace03120888fa3d74f15b85 Mon Sep 17 00:00:00 2001 From: jvican Date: Tue, 20 Mar 2018 15:14:59 +0100 Subject: [PATCH 1311/2793] Don't compute `locations` in macroclassloader cache Fixes scala/scala-dev#480. It reports whenever `AbstractFile.getUrl` returns `null` if verbose is enabled. 
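The essence of the change, as a standalone sketch (names and signatures here are illustrative, not the actual Macros.scala code): derive the cache key only once every classpath entry is known to resolve to a real file, and otherwise fall back to a fresh, uncached loader.

```
// Illustrative sketch only -- mirrors the guard logic, not the compiler's implementation.
import java.net.URL
import java.nio.file.{Path, Paths}

def macroLoaderFor(classpath: Seq[URL],
                   newLoader: () => ClassLoader,
                   cachedLoader: Seq[Path] => ClassLoader): ClassLoader = {
  val resolved = classpath.map(u => u -> (if (u.getProtocol == "file") Paths.get(u.toURI) else null))
  val unresolved = resolved.collect { case (u, null) => u }
  if (unresolved.nonEmpty) {
    // cannot build a stable cache key (e.g. jrt:// or a missing file): skip the cache
    newLoader()
  } else {
    cachedLoader(resolved.map(_._2)) // safe to key the cache on the resolved paths
  }
}
```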
--- .../scala/tools/nsc/typechecker/Macros.scala | 33 ++++++++++++------- 1 file changed, 22 insertions(+), 11 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index e5dceb0a477b..faadf07235eb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -69,19 +69,30 @@ trait Macros extends MacroRuntimes with Traces with Helpers { ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader) } - import scala.tools.nsc.io.Jar - import scala.reflect.io.{AbstractFile, Path} - val locations = classpath.map(u => Path(AbstractFile.getURL(u).file)) val disableCache = settings.YcacheMacroClassLoader.value == settings.CachePolicy.None.name - if (disableCache || locations.exists(!Jar.isJarOrZip(_))) { - if (disableCache) macroLogVerbose("macro classloader: caching is disabled by the user.") - else { - val offenders = locations.filterNot(!Jar.isJarOrZip(_)) - macroLogVerbose(s"macro classloader: caching is disabled because the following paths are not supported: ${offenders.mkString(",")}.") + if (disableCache) newLoader() + else { + import scala.tools.nsc.io.Jar + import scala.reflect.io.{AbstractFile, Path} + + val urlsAndFiles = classpath.map(u => u -> AbstractFile.getURL(u)) + val hasNullURL = urlsAndFiles.filter(_._2 eq null) + if (hasNullURL.nonEmpty) { + // TODO if the only null is jrt:// we can still cache + // TODO filter out classpath elements pointing to non-existing files before we get here, that's another source of null + macroLogVerbose(s"macro classloader: caching is disabled because `AbstractFile.getURL` returned `null` for ${hasNullURL.map(_._1).mkString(", ")}.") + newLoader() + } else { + val locations = urlsAndFiles.map(t => Path(t._2.file)) + val nonJarZips = locations.filterNot(Jar.isJarOrZip(_)) + if (nonJarZips.nonEmpty) { + macroLogVerbose(s"macro classloader: caching is disabled because the following paths are not supported: ${nonJarZips.mkString(",")}.") + newLoader() + } else { + macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader) + } } - - newLoader() - } else macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader) + } } /** `MacroImplBinding` and its companion module are responsible for From 109f03e56c37c215b6d910d52e491f209658cc3a Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 5 Mar 2018 23:16:45 -0800 Subject: [PATCH 1312/2793] No warn-unused:params for unimplemented method Cut some slack for `def f(i: Int) = ???`. --- .../scala/tools/nsc/typechecker/TypeDiagnostics.scala | 2 +- test/files/neg/warn-unused-params.scala | 4 ++++ test/files/neg/warn-unused-privates.check | 8 ++++---- test/files/neg/warn-unused-privates.scala | 4 ++-- 4 files changed, 11 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index da3883d10c66..503ead997ce2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -513,7 +513,7 @@ trait TypeDiagnostics { if (sym.isPrimaryConstructor) for (cpa <- sym.owner.constrParamAccessors if cpa.isPrivateLocal) params += cpa else if (sym.isSynthetic && sym.isImplicit) return - else if (!sym.isConstructor) + else if (!sym.isConstructor && rhs.symbol != Predef_???) 
for (vs <- vparamss) params ++= vs.map(_.symbol) defnTrees += m case _ => diff --git a/test/files/neg/warn-unused-params.scala b/test/files/neg/warn-unused-params.scala index b166e8fae699..559e6352434d 100644 --- a/test/files/neg/warn-unused-params.scala +++ b/test/files/neg/warn-unused-params.scala @@ -67,3 +67,7 @@ class Reusing(u: Int) extends Unusing(u) // no warn class Main { def main(args: Array[String]): Unit = println("hello, args") // no warn } + +trait Unimplementation { + def f(u: Int): Int = ??? // no warn for param in unimplementation +} diff --git a/test/files/neg/warn-unused-privates.check b/test/files/neg/warn-unused-privates.check index 8ed83c76d37f..cdb5f21b48d2 100644 --- a/test/files/neg/warn-unused-privates.check +++ b/test/files/neg/warn-unused-privates.check @@ -53,13 +53,13 @@ warn-unused-privates.scala:113: warning: local object HiObject in method l1 is n object HiObject { def f = this } // warn ^ warn-unused-privates.scala:136: warning: private method x_= in class OtherNames is never used - private def x_=(i: Int): Unit = ??? + private def x_=(i: Int): Unit = () ^ warn-unused-privates.scala:137: warning: private method x in class OtherNames is never used private def x: Int = 42 ^ warn-unused-privates.scala:138: warning: private method y_= in class OtherNames is never used - private def y_=(i: Int): Unit = ??? + private def y_=(i: Int): Unit = () ^ warn-unused-privates.scala:97: warning: local var x in method f2 is never updated: consider using immutable val var x = 100 // warn about it being a var @@ -110,10 +110,10 @@ warn-unused-privates.scala:20: warning: parameter value msg0 in class B3 is neve class B3(msg0: String) extends A("msg") ^ warn-unused-privates.scala:136: warning: parameter value i in method x_= is never used - private def x_=(i: Int): Unit = ??? + private def x_=(i: Int): Unit = () ^ warn-unused-privates.scala:138: warning: parameter value i in method y_= is never used - private def y_=(i: Int): Unit = ??? + private def y_=(i: Int): Unit = () ^ error: No warnings can be incurred under -Xfatal-warnings. 39 warnings found diff --git a/test/files/neg/warn-unused-privates.scala b/test/files/neg/warn-unused-privates.scala index 280d6b15a2a2..a061279df2f6 100644 --- a/test/files/neg/warn-unused-privates.scala +++ b/test/files/neg/warn-unused-privates.scala @@ -133,9 +133,9 @@ trait Underwarn { } class OtherNames { - private def x_=(i: Int): Unit = ??? + private def x_=(i: Int): Unit = () private def x: Int = 42 - private def y_=(i: Int): Unit = ??? + private def y_=(i: Int): Unit = () private def y: Int = 42 def f = y From 1069f2483385f6308101b6b6c3dbf0ed4c20ef39 Mon Sep 17 00:00:00 2001 From: David Gregory Date: Mon, 26 Mar 2018 16:44:51 +0100 Subject: [PATCH 1313/2793] Don't suggest using the global EC when an implicit EC cannot be found. Fixes scala/bug#10808. --- src/library/scala/concurrent/ExecutionContext.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala index 5075f6466ae3..a4db70885311 100644 --- a/src/library/scala/concurrent/ExecutionContext.scala +++ b/src/library/scala/concurrent/ExecutionContext.scala @@ -54,8 +54,7 @@ import scala.annotation.implicitNotFound * Application callback execution can be configured separately. */ @implicitNotFound("""Cannot find an implicit ExecutionContext. 
You might pass -an (implicit ec: ExecutionContext) parameter to your method -or import scala.concurrent.ExecutionContext.Implicits.global.""") +an (implicit ec: ExecutionContext) parameter to your method.""") trait ExecutionContext { /** Runs a block of code on this execution context. From 54a706a46520d97cdc6f8bf6147733b24922a2c9 Mon Sep 17 00:00:00 2001 From: David Gregory Date: Mon, 26 Mar 2018 23:09:29 +0100 Subject: [PATCH 1314/2793] Update wording according to discussion on the issue report. --- src/library/scala/concurrent/ExecutionContext.scala | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala index a4db70885311..5cc9aaf96d08 100644 --- a/src/library/scala/concurrent/ExecutionContext.scala +++ b/src/library/scala/concurrent/ExecutionContext.scala @@ -54,7 +54,17 @@ import scala.annotation.implicitNotFound * Application callback execution can be configured separately. */ @implicitNotFound("""Cannot find an implicit ExecutionContext. You might pass -an (implicit ec: ExecutionContext) parameter to your method.""") +an (implicit ec: ExecutionContext) parameter to your method. + +The ExecutionContext is used to configure how and on which +thread pools Futures will run, so the specific ExecutionContext +that is selected is important. + +If your application does not define an ExecutionContext elsewhere, +consider using Scala's global ExecutionContext by defining +the following: + +implicit val ec = ExecutionContext.global""") trait ExecutionContext { /** Runs a block of code on this execution context. From 4053968c5ac07b805492aa6896ca0eff70bc5341 Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Thu, 22 Mar 2018 17:02:54 +0100 Subject: [PATCH 1315/2793] [nomerge] Use temporary repository for bootstrapping Also in the 2.13.x-new-collections branch, therefore [nomerge]. --- project/ScriptCommands.scala | 39 ++++++++++++++++++++++++-------- scripts/bootstrap_fun | 17 ++++++++++---- scripts/common | 14 +++++++++--- scripts/jobs/integrate/bootstrap | 6 +++-- 4 files changed, 56 insertions(+), 20 deletions(-) diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index 539db1ac015f..e92275bb69e2 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -1,7 +1,10 @@ package scala.build +import java.nio.file.Paths + import sbt._ import Keys._ + import BuildSettings.autoImport._ /** Custom commands for use by the Jenkins scripts. This keeps the surface area and call syntax small. */ @@ -40,7 +43,8 @@ object ScriptCommands { /** Set up the environment for building STARR in `validate/bootstrap`. The arguments are: * - Repository URL for publishing * - Version number to publish */ - def setupBootstrapStarr = setup("setupBootstrapStarr") { case Seq(url, ver) => + def setupBootstrapStarr = setup("setupBootstrapStarr") { case Seq(fileOrUrl, ver) => + val url = fileToUrl(fileOrUrl) Seq( baseVersion in Global := ver, baseVersionSuffix in Global := "SPLIT" @@ -48,9 +52,10 @@ object ScriptCommands { } /** Set up the environment for building locker in `validate/bootstrap`. 
The arguments are: - * - Repository URL for publishing locker and resolving STARR + * - Repository file or URL for publishing locker and resolving STARR * - Version number to publish */ - def setupBootstrapLocker = setup("setupBootstrapLocker") { case Seq(url, ver) => + def setupBootstrapLocker = setup("setupBootstrapLocker") { case Seq(fileOrUrl, ver) => + val url = fileToUrl(fileOrUrl) Seq( baseVersion in Global := ver, baseVersionSuffix in Global := "SPLIT", @@ -61,15 +66,24 @@ object ScriptCommands { /** Set up the environment for building quick in `validate/bootstrap`. The arguments are: * - Repository URL for publishing * - Version number to publish + * - Optional: Repository for resolving (same as repository for publishing if not specified) * Note that the artifacts produced here are consumed by scala-dist, so the docs have to be built. */ - def setupBootstrapQuick = setup("setupBootstrapQuick") { case Seq(url, ver) => - Seq( - baseVersion in Global := ver, - baseVersionSuffix in Global := "SPLIT", - resolvers in Global += "scala-pr" at url, - testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) - ) ++ publishTarget(url) ++ enableOptimizer + def setupBootstrapQuick = { + def f(targetFileOrUrl: String, ver: String, resolverFileOrUrl: String): Seq[Setting[_]] = { + val targetUrl = fileToUrl(targetFileOrUrl) + val resolverUrl = fileToUrl(resolverFileOrUrl) + Seq( + baseVersion in Global := ver, + baseVersionSuffix in Global := "SPLIT", + resolvers in Global += "scala-pr" at resolverUrl, + testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) + ) ++ publishTarget(targetUrl) ++ enableOptimizer + } + setup("setupBootstrapQuick") { + case Seq(targetFileOrUrl, ver, resolverFileOrUrl) => f(targetFileOrUrl, ver, resolverFileOrUrl) + case Seq(targetFileOrUrl, ver) => f(targetFileOrUrl, ver, targetFileOrUrl) + } } /** Set up the environment for publishing in `validate/bootstrap`. 
The arguments are: @@ -118,6 +132,11 @@ object ScriptCommands { ) } + // If fileOrUrl is already a file:, http: or https: URL, return it, otherwise treat it as a local file and return a URL for it + private[this] def fileToUrl(fileOrUrl: String): String = + if(fileOrUrl.startsWith("file:") || fileOrUrl.startsWith("http:") || fileOrUrl.startsWith("https:")) fileOrUrl + else Paths.get(fileOrUrl).toUri.toString + /** Like `Def.sequential` but accumulate all results */ def sequence[B](tasks: List[Def.Initialize[Task[B]]]): Def.Initialize[Task[List[B]]] = tasks match { case Nil => Def.task { Nil } diff --git a/scripts/bootstrap_fun b/scripts/bootstrap_fun index cfa72f465400..e6e12c33810d 100644 --- a/scripts/bootstrap_fun +++ b/scripts/bootstrap_fun @@ -142,9 +142,16 @@ buildScalaCheck(){ # we only need to build the modules necessary to build Scala itself # since the version of locker and quick are the same buildModules() { - echo "### Building modules using locker" + if [ "$1" = "bootstrap" ]; then + echo "### Building modules using locker" + addResolvers="$addBootstrapResolver" + publishTasks=("set every publishTo := Some(Resolver.file(\"file\", new File(\"$BOOTSTRAP_REPO_DIR\")))") + else + echo "### Building modules using quick" + addResolvers="$addIntegrationResolver" + publishTasks=('set credentials += Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", System.getenv("PRIVATE_REPO_PASS"))' "set every publishTo := Some(\"publish-repo\" at \"$integrationRepoUrl\")") + fi - publishTasks=('set credentials += Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", System.getenv("PRIVATE_REPO_PASS"))' "set every publishTo := Some(\"publish-repo\" at \"$integrationRepoUrl\")") buildTasks=($publishPrivateTask) buildXML # buildScalaCheck @@ -312,7 +319,7 @@ buildStarr() { cd $STARR_DIR git co $STARR_REF travis_fold_start starr "Building starr" - $SBT_CMD -no-colors $sbtArgs "setupBootstrapStarr $integrationRepoUrl $STARR_VER" $clean publish + $SBT_CMD -no-colors $sbtArgs "setupBootstrapStarr \"$BOOTSTRAP_REPO_DIR\" $STARR_VER" $clean publish travis_fold_end starr ) } @@ -329,7 +336,7 @@ buildLocker() { if [ ! 
-z "$STARR_VER" ]; then SET_STARR=-Dstarr.version=$STARR_VER; fi travis_fold_start locker "Building locker" - $SBT_CMD -no-colors $sbtArgs $SET_STARR "setupBootstrapLocker $integrationRepoUrl $SCALA_VER" $clean publish + $SBT_CMD -no-colors $sbtArgs $SET_STARR "setupBootstrapLocker \"$BOOTSTRAP_REPO_DIR\" $SCALA_VER" $clean publish travis_fold_end locker } @@ -348,7 +355,7 @@ buildQuick() { $SBT_CMD $sbtArgs \ -Dstarr.version=$SCALA_VER \ ${updatedModuleVersions[@]} \ - "setupBootstrapQuick $integrationRepoUrl $SCALA_VER" \ + "setupBootstrapQuick $integrationRepoUrl $SCALA_VER \"$BOOTSTRAP_REPO_DIR\"" \ "$@" travis_fold_end quick } diff --git a/scripts/common b/scripts/common index b9e089b7a195..161147a870c8 100644 --- a/scripts/common +++ b/scripts/common @@ -22,7 +22,6 @@ integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifacto # only used on jenkins sbtRepositoryConfig="$WORKSPACE/scripts/sbt-repositories-config" -addIntegrationResolver="set resolvers in Global += \"scala-pr\" at \"$integrationRepoUrl\"" jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"} @@ -31,6 +30,15 @@ TMP_ROOT_DIR=$(mktemp -d -t pr-scala.XXXX) TMP_DIR="${TMP_ROOT_DIR}/tmp" mkdir "${TMP_DIR}" +# Used for publishing starr and locker +BOOTSTRAP_REPO_DIR="${TMP_ROOT_DIR}/bootstrap-repo" +mkdir "${BOOTSTRAP_REPO_DIR}" + +addIntegrationResolver="set resolvers in Global += \"scala-pr\" at \"$integrationRepoUrl\"" +addBootstrapResolver="set resolvers in Global += Resolver.file(\"scala-bootstrap\", file(\"$BOOTSTRAP_REPO_DIR\"))" +# Gets set to addIntegrationResolver or addBootstrapResolver for use in sbtBuild and sbtResolve: +addResolvers="" + # General debug logging # $* - message function debug () { @@ -155,7 +163,7 @@ st_stagingRepoClose() { sbtBuild() { travis_fold_start build "Building $(basename $PWD) with $@" - $SBT_CMD -no-colors $sbtArgs "$addIntegrationResolver" "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" + $SBT_CMD -no-colors $sbtArgs "$addResolvers" "${scalaVersionTasks[@]}" "${publishTasks[@]}" "$@" travis_fold_end build } @@ -167,7 +175,7 @@ sbtResolve() { # echo "### sbtResolve: $SBT_CMD -no-colors $sbtArgs " "${scalaVersionTasks[@]}" "\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" travis_fold_start resolve "Resolving \"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross" $SBT_CMD -no-colors $sbtArgs "${scalaVersionTasks[@]}" \ - "$addIntegrationResolver" \ + "$addResolvers" \ "set libraryDependencies := Seq(\"$1\" % \"$2\" % \"$3\" cross CrossVersion.$cross)" \ "show update" res=$? diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index ffd25721aca3..c7531ba8a515 100755 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -23,15 +23,17 @@ fi buildLocker -# locker is now published in artifactory -- make sure we resolve from there +# locker is now published in BOOTSTRAP_REPO_DIR -- make sure we resolve from there rm -rf build/ -buildModules +buildModules bootstrap buildQuick clean testAll publish testStability +buildModules + if [ "$publishToSonatype" == "yes" ]; then publishSonatype fi From 41d81b1bf659f4b6c572746471ac4174a3f1e62d Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 27 Mar 2018 08:50:02 -0700 Subject: [PATCH 1316/2793] Apply no warn attachment to binds The warning in TypeDiagnostics checks for an attachment where a variable is introduced. 
--- src/reflect/scala/reflect/internal/TreeGen.scala | 10 +++++++++- test/files/pos/t10763.flags | 2 +- test/files/pos/t10763.scala | 1 + 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index e69829baea69..5c58e2eff517 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -813,11 +813,19 @@ abstract class TreeGen { else ValFrom(pat1, mkCheckIfRefutable(pat1, rhs)).setPos(pos) } + private def unwarnable(pat: Tree): Tree = { + pat foreach { + case b @ Bind(_, _) => b updateAttachment AtBoundIdentifierAttachment + case _ => + } + pat + } + def mkCheckIfRefutable(pat: Tree, rhs: Tree)(implicit fresh: FreshNameCreator) = if (treeInfo.isVarPatternDeep(pat)) rhs else { val cases = List( - CaseDef(pat.duplicate updateAttachment AtBoundIdentifierAttachment, EmptyTree, Literal(Constant(true))), + CaseDef(unwarnable(pat.duplicate), EmptyTree, Literal(Constant(true))), CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false))) ) val visitor = mkVisitor(cases, checkExhaustive = false, nme.CHECK_IF_REFUTABLE_STRING) diff --git a/test/files/pos/t10763.flags b/test/files/pos/t10763.flags index ae548523beb5..23e3c2aaabce 100644 --- a/test/files/pos/t10763.flags +++ b/test/files/pos/t10763.flags @@ -1 +1 @@ --Xfatal-warnings -Xlint:unused +-Xfatal-warnings -Ywarn-unused diff --git a/test/files/pos/t10763.scala b/test/files/pos/t10763.scala index 42c45d2d3dd9..5900986d1d40 100644 --- a/test/files/pos/t10763.scala +++ b/test/files/pos/t10763.scala @@ -4,4 +4,5 @@ class Test { for (refute@1 <- xs) {} } + def f() = for (Some(i: Int) <- List(Option(42))) println(i) } From 0e8cf5286029144b6732e26b2930cd5c1de3b2fb Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 27 Mar 2018 09:05:19 -0700 Subject: [PATCH 1317/2793] Rename attachment to NoWarn Simple rename to `NoWarnAttachment`. It is still used selectively to turn off unused warning for pattern variables. 
--- .../scala/tools/nsc/ast/parser/Parsers.scala | 2 +- .../tools/nsc/typechecker/TypeDiagnostics.scala | 2 +- .../scala/reflect/internal/StdAttachments.scala | 12 ++++++------ src/reflect/scala/reflect/internal/TreeGen.scala | 12 ++++++------ .../scala/reflect/runtime/JavaUniverseForce.scala | 2 +- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 6df212c4503a..3d1eb3530b17 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1969,7 +1969,7 @@ self => atPos(p.pos.start, p.pos.start, body.pos.end) { val t = Bind(name, body) body match { - case Ident(nme.WILDCARD) if settings.warnUnusedPatVars => t updateAttachment AtBoundIdentifierAttachment + case Ident(nme.WILDCARD) if settings.warnUnusedPatVars => t updateAttachment NoWarnAttachment case _ => t } } diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index da3883d10c66..c0b2413a753d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -80,7 +80,7 @@ trait TypeDiagnostics { } // Bind of pattern var was `x @ _` - private def atBounded(t: Tree) = t.hasAttachment[AtBoundIdentifierAttachment.type] + private def atBounded(t: Tree) = t.hasAttachment[NoWarnAttachment.type] // ValDef was a PatVarDef `val P(x) = ???` private def wasPatVarDef(t: Tree) = t.hasAttachment[PatVarDefAttachment.type] diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala index 3c2126813abb..e704632b4991 100644 --- a/src/reflect/scala/reflect/internal/StdAttachments.scala +++ b/src/reflect/scala/reflect/internal/StdAttachments.scala @@ -64,12 +64,12 @@ trait StdAttachments { case object BackquotedIdentifierAttachment extends PlainAttachment /** A pattern binding exempt from unused warning. - * - * Its host `Ident` has been created from a pattern2 binding, `case x @ p`. - * In the absence of named parameters in patterns, allows nuanced warnings for unused variables. - * Hence, `case X(x = _) =>` would not warn; for now, `case X(x @ _) =>` is documentary if x is unused. - */ - case object AtBoundIdentifierAttachment extends PlainAttachment + * + * Its host `Ident` has been created from a pattern2 binding, `case x @ p`. + * In the absence of named parameters in patterns, allows nuanced warnings for unused variables. + * Hence, `case X(x = _) =>` would not warn; for now, `case X(x @ _) =>` is documentary if x is unused. + */ + case object NoWarnAttachment extends PlainAttachment /** Indicates that a `ValDef` was synthesized from a pattern definition, `val P(x)`. 
*/ diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index 5c58e2eff517..6a5d1ca4c4f5 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -732,19 +732,19 @@ abstract class TreeGen { def mkPatDef(pat: Tree, rhs: Tree)(implicit fresh: FreshNameCreator): List[ValDef] = mkPatDef(Modifiers(0), pat, rhs) - private def propagateAtBoundAttachment(from: Tree, to: ValDef): to.type = - if (isPatVarWarnable && from.hasAttachment[AtBoundIdentifierAttachment.type]) to.updateAttachment(AtBoundIdentifierAttachment) + private def propagateNoWarnAttachment(from: Tree, to: ValDef): to.type = + if (isPatVarWarnable && from.hasAttachment[NoWarnAttachment.type]) to.updateAttachment(NoWarnAttachment) else to // Keep marker for `x@_`, add marker for `val C(x) = ???` to distinguish from ordinary `val x = ???`. private def propagatePatVarDefAttachments(from: Tree, to: ValDef): to.type = - propagateAtBoundAttachment(from, to).updateAttachment(PatVarDefAttachment) + propagateNoWarnAttachment(from, to).updateAttachment(PatVarDefAttachment) /** Create tree for pattern definition */ def mkPatDef(mods: Modifiers, pat: Tree, rhs: Tree)(implicit fresh: FreshNameCreator): List[ValDef] = matchVarPattern(pat) match { case Some((name, tpt)) => List(atPos(pat.pos union rhs.pos) { - propagateAtBoundAttachment(pat, ValDef(mods, name.toTermName, tpt, rhs)) + propagateNoWarnAttachment(pat, ValDef(mods, name.toTermName, tpt, rhs)) }) case None => @@ -815,7 +815,7 @@ abstract class TreeGen { private def unwarnable(pat: Tree): Tree = { pat foreach { - case b @ Bind(_, _) => b updateAttachment AtBoundIdentifierAttachment + case b @ Bind(_, _) => b updateAttachment NoWarnAttachment case _ => } pat @@ -917,7 +917,7 @@ abstract class TreeGen { case Ident(name) if treeInfo.isVarPattern(tree) && name != nme.WILDCARD => atPos(tree.pos) { val b = Bind(name, atPos(tree.pos.focus) (Ident(nme.WILDCARD))) - if (forFor && isPatVarWarnable) b updateAttachment AtBoundIdentifierAttachment + if (forFor && isPatVarWarnable) b updateAttachment NoWarnAttachment else b } case Typed(id @ Ident(name), tpt) if treeInfo.isVarPattern(id) && name != nme.WILDCARD => diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index b50eb9814c7d..2926bd4d6946 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -40,7 +40,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.SAMFunction this.DelambdafyTarget this.BackquotedIdentifierAttachment - this.AtBoundIdentifierAttachment + this.NoWarnAttachment this.PatVarDefAttachment this.ForAttachment this.SyntheticUnitAttachment From 1f7468f0f3de00c2e9bf170ea90da2a934c136b3 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Tue, 27 Mar 2018 12:41:22 -0400 Subject: [PATCH 1318/2793] Fix up embarrassing typo. I put this in in cdf74190c442ff60dc6b4ed7c7567fb58448a90e, right before the PR got merged, and moved the condition that used to be in the `else` block into a `devWarning`, as I was pretty sure we'd never get here at or later than erasure (otherwise, there's a chance we'd let a `Constant()` get to the backend. I wasn't willing to wager on it for an assertion, though. In retrospect it would have been better to poke around with `-Xdev` on first, so I would notice the reversed condition. 
(I build with the flag at work so someone sees the warnings, is how I noticed.) --- src/compiler/scala/reflect/reify/package.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala index 591b76727165..8102bd7170c0 100644 --- a/src/compiler/scala/reflect/reify/package.scala +++ b/src/compiler/scala/reflect/reify/package.scala @@ -51,7 +51,7 @@ package object reify { import definitions._ import analyzer.enclosingMacroPosition - if (global.phase.id < global.currentRun.erasurePhase.id) + if (global.phase.id >= global.currentRun.erasurePhase.id) devWarning(enclosingMacroPosition, s"reify Class[$tpe0] during ${global.phase.name}") // scala/bug#7375 @@ -72,7 +72,7 @@ package object reify { } } - // Note: If current context is inside the constructor of an object or otherwise not inside + // Note: If current context is inside the constructor of an object or otherwise not inside // a class/object body, this will return an EmptyTree. def reifyEnclosingRuntimeClass(global: Global)(typer0: global.analyzer.Typer): global.Tree = { import global._ From d078c498ec96ebb8b35e96d4ae5e14b4f9d7df33 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 28 Mar 2018 14:35:26 +1000 Subject: [PATCH 1319/2793] Bump JarJar version to avoid invalid bytecode on Java9 As previously done in Play: https://github.com/playframework/play-ws/pull/174 --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 76fe81fe107d..351c52084c6a 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -3,7 +3,7 @@ scalacOptions ++= Seq("-unchecked", "-feature", /*"-deprecation",*/ libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.3.2" -libraryDependencies += "org.pantsbuild" % "jarjar" % "1.6.3" +libraryDependencies += "org.pantsbuild" % "jarjar" % "1.6.5" libraryDependencies += "biz.aQute.bnd" % "biz.aQute.bnd" % "2.4.1" From 532c56d44683062de92ae668b10fd36dba393bd6 Mon Sep 17 00:00:00 2001 From: Heikki Vesalainen Date: Mon, 5 Mar 2018 19:12:48 +0000 Subject: [PATCH 1320/2793] Add a `completions` command similar to the one found in sbt The command can be used by, for example, emacs to query completions. 
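As a purely hypothetical illustration of how an external tool might drive this, one could pipe commands to a REPL process and scrape the `[completions]` lines; nothing below is part of this change, and it assumes a `scala` launcher on the PATH whose piped REPL accepts colon commands (as the partest-based test below does).

```
// Hypothetical editor-side driver; illustrative only.
import java.io.ByteArrayInputStream
import scala.sys.process._

def queryCompletions(fragment: String): List[String] = {
  val stdin  = new ByteArrayInputStream(s":completions $fragment\n:quit\n".getBytes("UTF-8"))
  val stdout = ("scala" #< stdin).!!
  stdout.split("\n").toList.collect {
    case line if line.startsWith("[completions] ") => line.stripPrefix("[completions] ")
  }
}
```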
--- .../scala/tools/nsc/interpreter/ILoop.scala | 17 +++++++++ test/files/run/repl-completions.check | 35 +++++++++++++++++++ test/files/run/repl-completions.scala | 17 +++++++++ 3 files changed, 69 insertions(+) create mode 100644 test/files/run/repl-completions.check create mode 100644 test/files/run/repl-completions.scala diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index 7883abdea8a4..5cd9bc34c403 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -177,6 +177,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) extend /** Standard commands **/ lazy val standardCommands = List( + cmd("completions", "", "output completions for the given string", completionsCommand), cmd("edit", "|", "edit history", editCommand), cmd("help", "[command]", "print this summary or command-specific help", helpCommand), historyCommand, @@ -539,6 +540,22 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) extend def lineCommand(what: String): Result = editCommand(what, None) + def completionsCommand(what: String): Result = { + val completions = new ReplCompletion(intp).complete(what, what.length) + val prefix = if (completions == NoCandidates) "" else what.substring(0, completions.cursor) + + val completionLines = + completions.candidates.map { c => + s"[completions] $prefix$c" + } + + if (completionLines.nonEmpty) { + echo(completionLines.mkString("\n")) + } + + Result.default // never record completions + } + // :edit id or :edit line def editCommand(what: String): Result = editCommand(what, Properties.envOrNone("EDITOR")) diff --git a/test/files/run/repl-completions.check b/test/files/run/repl-completions.check new file mode 100644 index 000000000000..b86ac0312418 --- /dev/null +++ b/test/files/run/repl-completions.check @@ -0,0 +1,35 @@ + +scala> // completions! + +scala> object O { def x_y_x = 1; def x_y_z = 2; def getFooBarZot = 3} +defined object O + +scala> :completions O.x +[completions] O.x_y_x +[completions] O.x_y_z + +scala> :completions O.x_y_x + +scala> :completions O.x_y_a + +scala> import O._ +import O._ + +scala> :completions x_y_ +[completions] x_y_x +[completions] x_y_z + +scala> :completions x_y_a + +scala> :completions fBZ +[completions] getFooBarZot + +scala> :completions object O2 { val x = O. +[completions] object O2 { val x = O.getFooBarZot +[completions] object O2 { val x = O.x_y_x +[completions] object O2 { val x = O.x_y_z + +scala> :completions :completion +[completions] :completions + +scala> :quit diff --git a/test/files/run/repl-completions.scala b/test/files/run/repl-completions.scala new file mode 100644 index 000000000000..6217efb8e4a3 --- /dev/null +++ b/test/files/run/repl-completions.scala @@ -0,0 +1,17 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + def code = + """|// completions! + |object O { def x_y_x = 1; def x_y_z = 2; def getFooBarZot = 3} + |:completions O.x + |:completions O.x_y_x + |:completions O.x_y_a + |import O._ + |:completions x_y_ + |:completions x_y_a + |:completions fBZ + |:completions object O2 { val x = O. + |:completions :completion + |""".stripMargin +} From 46596b42a5b743b35c72eb4396029e3a36e8c0ad Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 26 Mar 2018 21:57:36 +0200 Subject: [PATCH 1321/2793] [nomerge] Streamline the bootstrap script Ensure that modules are built in each stage (if they need building), including even starr. 
This ensures that the locker scaladoc when building quick runs with a freshly built xml on the classpath, which can be necessary for binary compatibility. Run scaladoc only in the quick stage (not starr or locker), for scala and the modules. Don't use `clone --reference` to build starr, doesn't work with shallow clones (on travis). Set `STARR_VER` to the full `SCALA_VER` + a suffix, so that it gets the same treatment in sbt with respect to cross-versioning (binary vs full). The quick stage either publishes to scala-integration or sonatype (for releases). [nomerge] because this commit is in the 2.13.x-new-collections branch. --- .travis.yml | 29 +--- project/ScriptCommands.scala | 27 ++- scripts/bootstrap_fun | 282 +++++++++++++++++-------------- scripts/common | 15 +- scripts/jobs/integrate/bootstrap | 18 +- 5 files changed, 184 insertions(+), 187 deletions(-) diff --git a/.travis.yml b/.travis.yml index 0125bab34e95..4abdda13c070 100644 --- a/.travis.yml +++ b/.travis.yml @@ -42,14 +42,12 @@ jobs: - determineScalaVersion - deriveModuleVersions - removeExistingBuilds $integrationRepoUrl - - clearIvyCache - if [ ! -z "$STARR_REF" ]; then buildStarr; fi - buildLocker - - rm -rf build/ # ensure we resolve from artifactory - - buildModules - - buildQuick clean publish - - set | grep -E '^SCALA_VER=|^SCALA_BINARY_VER=|^scalaVersionTasks=|^XML_VER=|^PARTEST_VER=|^SCALACHECK_VER=|^XML_BUILT=|^PARTEST_BUILT=|^SCALACHECK_REF=|^updatedModuleVersions=|^publishToSonatype=' > build/env + - buildQuick + - set | grep -E '^SCALA_VER=|^SCALA_BINARY_VER=|^XML_VER=|^PARTEST_VER=|^SCALACHECK_VER=|^XML_BUILT=|^PARTEST_BUILT=|^SCALACHECK_BUILT=|^updatedModuleVersions=|^publishToSonatype=' > build/env - cat build/env + - triggerScalaDist # this builds the spec using jekyll # based on http://www.paperplanes.de/2013/8/13/deploying-your-jekyll-blog-to-s3-with-travis-ci.html @@ -60,7 +58,6 @@ jobs: # the key is restricted using forced commands so that it can only upload to the directory we need here after_success: ./scripts/travis-publish-spec.sh - # be careful to not set any env vars, as this will result in a cache miss - &test stage: test @@ -70,27 +67,13 @@ jobs: - source scripts/common - source scripts/bootstrap_fun # - find build -type f -exec touch {} + # "set antStyle := true" seems to cause really long compiles for the test suite?? - script: buildQuick testRest # shouldn't rebuild, since build/ is cached + script: invokeQuick testRest # shouldn't rebuild, since build/ is cached - <<: *test - script: buildQuick testPosPres + script: invokeQuick testPosPres - <<: *test - script: buildQuick testRun + script: invokeQuick testRun - script: testStability - - stage: publish # note that it's important that this runs on a fresh VM, so we're sure to resolve scala from artifactory, and not out of local cache (which was overwritten during bootstrap, somewhat unorthodoxly) - script: - - source build/env - - if [ -z "$SCALA_VER" ]; then echo "Environment not propagated. 
Caching issue?"; cat build/env ; exit 1; fi - - source scripts/common - - source scripts/bootstrap_fun - - if [ "$publishToSonatype" = "yes" ]; then publishSonatype; fi - - triggerScalaDist - # using bash conditional, because a travis condition on the stage won't work: - # the `env` function only picks stuff up from yaml, not variables set in bash, - # and we can't supply more env vars using a custom build from the web - # It would work using the API according to https://github.com/travis-ci/docs-travis-ci-com/issues/1485#issuecomment-351726416, - # but that's too much right now. - # cat /dev/urandom | head -c 10000 | openssl sha1 > ./secret # openssl aes-256-cbc -pass "file:./secret" -in id_dsa_spec212_b4096 -out spec/id_dsa_travis.enc -a # travis encrypt "PRIV_KEY_SECRET=`cat ./secret`" diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index e92275bb69e2..a5564242ebf3 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -69,28 +69,23 @@ object ScriptCommands { * - Optional: Repository for resolving (same as repository for publishing if not specified) * Note that the artifacts produced here are consumed by scala-dist, so the docs have to be built. */ - def setupBootstrapQuick = { - def f(targetFileOrUrl: String, ver: String, resolverFileOrUrl: String): Seq[Setting[_]] = { - val targetUrl = fileToUrl(targetFileOrUrl) - val resolverUrl = fileToUrl(resolverFileOrUrl) - Seq( - baseVersion in Global := ver, - baseVersionSuffix in Global := "SPLIT", - resolvers in Global += "scala-pr" at resolverUrl, - testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) - ) ++ publishTarget(targetUrl) ++ enableOptimizer - } - setup("setupBootstrapQuick") { - case Seq(targetFileOrUrl, ver, resolverFileOrUrl) => f(targetFileOrUrl, ver, resolverFileOrUrl) - case Seq(targetFileOrUrl, ver) => f(targetFileOrUrl, ver, targetFileOrUrl) - } + def setupBootstrapQuick = setup("setupBootstrapQuick") { case Seq(targetFileOrUrl, ver, resolverFileOrUrl) => + val targetUrl = fileToUrl(targetFileOrUrl) + val resolverUrl = fileToUrl(resolverFileOrUrl) + Seq( + baseVersion in Global := ver, + baseVersionSuffix in Global := "SPLIT", + resolvers in Global += "scala-pr" at resolverUrl, + testOptions in IntegrationTest in LocalProject("test") ++= Seq(Tests.Argument("--show-log"), Tests.Argument("--show-diff")) + ) ++ publishTarget(targetUrl) ++ enableOptimizer } /** Set up the environment for publishing in `validate/bootstrap`. The arguments are: * - Temporary bootstrap repository URL for resolving modules * - Version number to publish * All artifacts are published to Sonatype. 
*/ - def setupBootstrapPublish = setup("setupBootstrapPublish") { case Seq(url, ver) => + def setupBootstrapPublish = setup("setupBootstrapPublish") { case Seq(fileOrUrl, ver) => + val url = fileToUrl(fileOrUrl) Seq( baseVersion in Global := ver, baseVersionSuffix in Global := "SPLIT", diff --git a/scripts/bootstrap_fun b/scripts/bootstrap_fun index e6e12c33810d..510f1fdbf536 100644 --- a/scripts/bootstrap_fun +++ b/scripts/bootstrap_fun @@ -1,14 +1,25 @@ # Bootstrap procedure # - determine scala version # - determine module versions -# - build minimal core (aka locker) of Scala, use the determined version number, publish to scala-integration -# - build those modules where a binary compatible version doesn't exist, publish to scala-integration -# - build Scala using the previously built core and modules, publish to scala-integration +# - optionally build a fresh "starr", publish to BOOTSTRAP_REPO_DIR +# - build minimal core (aka "locker") of Scala, publish to BOOTSTRAP_REPO_DIR +# - build Scala (aka "quick") using locker, publish to scala-integration (or sonatype for releases) # - run tests -# - for releases -# - stage Scala on sonatype -# - rebuild modules where no binary compatible version existed, stage them on sonatype -# - the Scala version is serialized to jenkins.properties, which is passed downstream to scala-release jobs + + +# Modules and stages +# - Each stage (starr, locker quick) builds the modules (if no binary compatible version exists) +# - The reason is: the compiler POM depends on the xml module of the previous stage, i.e., the +# locker compiler uses the starr modules. So the locker scaladoc (building the quick compiler) +# runs with a re-built xml, which may be necessary under binary incompatible changes. +# - In the starr / locker stages, the modules are built using the compiler just built at this stage. +# So the locker modules are built using locker, unlike the locker compiler, which is built by starr. +# - The quick (the actual release) compiler POM depends on the locker xml module. Therefore we need +# to use the same Scala version number in locker and quick, so that the modules built in the quick +# stage can be swapped in (quick compiler and modules are released to scala-integration / sonatype). +# - Final quirk: in the quick stage, the modules are built using the locker compiler. The reason: +# the quick compiler lives in scala-integration / sonatype, but there's no xml module there yet +# (we're just about to build it), which we need to run scaladoc. So we use the locker compiler. # Specifying the Scala version: @@ -73,41 +84,28 @@ publishSonatypeTaskModules=${publishSonatypeTaskModules-"publishSigned"} forceBuildModules=${forceBuildModules-no} clean="clean" # TESTING leave empty to speed up testing (on jenkins/locally; on travis it's a fresh machine every time) -stApi="https://oss.sonatype.org/service/local" +docTask() { + # Build the module docs only in the last (quick) stage. The locker scaladoc may be binary + # incompatible with the starr scala-xml (on which it depends, by the pom file) + if [ "$1" = "quick" ]; then + echo "doc" + else + echo "set publishArtifact in (Compile, packageDoc) in ThisBuild := false" + fi +} # Oh boy... can't use scaladoc to document scala-xml if scaladoc depends on the same version of scala-xml. # Even if that version is available through the project's resolvers, sbt won't look past this project. 
# SOOOOO, we set the version to a dummy (-DOC), generate documentation, # then set the version to the right one and publish (which won't re-gen the docs). # Also tried publish-local without docs using 'set publishArtifact in (Compile, packageDoc) := false' and republishing, no dice. - -# Each buildModule() function is invoked twice: first to build against locker and publish to artifactory, then -# to build against the release and publish to sonatype (or publish-local if publishToSonatype is not "yes"). -# In the second round, sbtResolve is always true: the module will be found in the artifactory! -# Therefore, if MODULE_BUILT is "yes" (in the second round), we know that we need to build (and publish) the -# module again. -# -# Note: we tried an alternative solution in which sbtResolve would not look at artifactory, but that fails. For example, -# scala-xml depends on scala-library, so sbt tries to find the scala-library of the version that we are currently building, -# which exists only in artifactory. - -docTask() { - if [[ "$STARR_REF" != "" && "$1" != "yes" ]]; then - # Don't build module docs on the first round of module builds when bootstrapping - # a binary incompatible compiler change to avoid linkage errors with using the old Scaladoc - echo set publishArtifact in packageDoc in Compile := false - else - echo doc - fi -} - buildXML() { - if [ "$XML_BUILT" != "yes" ] && [ "$forceBuildModules" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-xml" $XML_VER ) + if [ "$XML_BUILT" != "yes" ] && [ "$forceBuildModules" != "yes" ] && ( sbtResolve "org.scala-lang.modules" "scala-xml" $XML_VER ) then echo "Found scala-xml $XML_VER; not building." else update scala scala-xml "$XML_REF" && gfxd - doc="$(docTask $XML_BUILT)" + doc="$(docTask $1)" sbtBuild 'set version := "'$XML_VER'-DOC"' $clean "$doc" 'set version := "'$XML_VER'"' test "${buildTasks[@]}" XML_BUILT="yes" # ensure the module is built and published when buildXML is invoked for the second time, see comment above fi @@ -118,8 +116,8 @@ buildPartest() { then echo "Found scala-partest $PARTEST_VER; not building." else update scala scala-partest "$PARTEST_REF" && gfxd - doc="$(docTask $PARTEST_BUILT)" - # disable -Xfatal-warnings until https://github.com/scala/bug/issues/10763 is fixed + doc="$(docTask $1)" + # disable -Xfatal-warnings until https://github.com/scala/scala-partest/pull/101 is released sbtBuild 'set version :="'$PARTEST_VER'"' 'set VersionKeys.scalaXmlVersion := "'$XML_VER'"' $clean "$doc" 'set scalacOptions := scalacOptions.value.filterNot(_.contains("fatal-warn"))' test "${buildTasks[@]}" PARTEST_BUILT="yes" fi @@ -131,48 +129,42 @@ buildScalaCheck(){ then echo "Found scalacheck $SCALACHECK_VER; not building." 
else update rickynils scalacheck $SCALACHECK_REF && gfxd - doc="$(docTask $SCALACHECK_BUILT)" + doc="$(docTask $1)" sbtBuild 'set version := "'$SCALACHECK_VER'"' 'set VersionKeys.scalaParserCombinatorsVersion := "'$PARSERS_VER'"' $clean "$doc" publish # test times out NOTE: never published to sonatype SCALACHECK_BUILT="yes" fi } -# build modules, using ${buildTasks[@]} (except for ScalaCheck, which is hard-coded to publish to artifactory) -# publish to our internal repo (so we can resolve the modules in the scala build below) -# we only need to build the modules necessary to build Scala itself -# since the version of locker and quick are the same buildModules() { - if [ "$1" = "bootstrap" ]; then - echo "### Building modules using locker" - addResolvers="$addBootstrapResolver" - publishTasks=("set every publishTo := Some(Resolver.file(\"file\", new File(\"$BOOTSTRAP_REPO_DIR\")))") + clearIvyCache + + if [ "$1" = "starr" ]; then + scalaVersionTasks=('set every scalaVersion := "'$STARR_VER'"') else - echo "### Building modules using quick" - addResolvers="$addIntegrationResolver" - publishTasks=('set credentials += Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", System.getenv("PRIVATE_REPO_PASS"))' "set every publishTo := Some(\"publish-repo\" at \"$integrationRepoUrl\")") + scalaVersionTasks=('set every scalaVersion := "'$SCALA_VER'"') fi - buildTasks=($publishPrivateTask) - buildXML - # buildScalaCheck - buildPartest - - constructUpdatedModuleVersions - - cd $WORKSPACE -} + if [[ "$1" = "starr" || "$1" == "locker" ]]; then + addResolvers="$addBootstrapResolver" + publishTasks=("set every publishTo := Some(\"scala-bootstrap\" at \"file://$BOOTSTRAP_REPO_DIR\")") + buildTasks=($publishPrivateTask) + else + if [ "$publishToSonatype" == "yes" ]; then + addResolvers="$addBootstrapResolver" # locker compiler builds quick modules, see comment on top of this file + publishTasks=('set credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", System.getenv("SONA_USER"), System.getenv("SONA_PASS"))' 'set pgpSigningKey := Some(new java.math.BigInteger("C03EF1D7D692BCFF", 16).longValue)' "set pgpPassphrase := Some(Array.empty)") + buildTasks=($publishSonatypeTaskModules) + else + addResolvers="$addBootstrapResolver" # locker compiler builds quick modules, see comment on top of this file + publishTasks=('set credentials += Credentials("Artifactory Realm", "scala-ci.typesafe.com", "scala-ci", System.getenv("PRIVATE_REPO_PASS"))' "set every publishTo := Some(\"publish-repo\" at \"$integrationRepoUrl\")") + buildTasks=($publishPrivateTask) + fi + fi -# build/test/publish scala core modules to sonatype (this will start a new staging repo) -# (was hoping we could make everything go to the same staging repo, but it's not timing that causes two staging repos to be opened) -# NOTE: only publish those for which versions are set -# test and publish to sonatype, assuming you have ~/.sbt/0.13/sonatype.sbt and ~/.sbt/0.13/plugin/gpg.sbt -buildPublishedModules() { - echo "### Publishing modules to sonatype" + buildXML $1 + # buildScalaCheck $1 + buildPartest $1 - publishTasks=('set credentials += Credentials("Sonatype Nexus Repository Manager", "oss.sonatype.org", System.getenv("SONA_USER"), System.getenv("SONA_PASS"))' 'set pgpSigningKey := Some(new java.math.BigInteger("C03EF1D7D692BCFF", 16).longValue)' "set pgpPassphrase := Some(Array.empty)") - buildTasks=($publishSonatypeTaskModules) - buildXML - buildPartest + constructUpdatedModuleVersions $1 cd $WORKSPACE } @@ 
-234,8 +226,6 @@ determineScalaVersion() { echo "version=$SCALA_VER" >> $WORKSPACE/jenkins.properties echo "sbtDistVersionOverride=-Dproject.version=$SCALA_VER" >> $WORKSPACE/jenkins.properties - scalaVersionTasks=('set every scalaVersion := "'$SCALA_VER'"') - echo "Building Scala $SCALA_VER." } @@ -301,118 +291,150 @@ constructUpdatedModuleVersions() { # allow overriding the jline version using a jenkins build parameter if [ ! -z "$JLINE_VER" ] ; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Djline.version=$JLINE_VER"); fi - if [ ! -z "$SCALA_BINARY_VER" ]; then updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala.binary.version=$SCALA_BINARY_VER"); fi + if [ "$SCALA_BINARY_VER" = "$SCALA_VER" ]; then + if [ "$1" = "starr" ]; then + binaryVer=$STARR_VER + else + binaryVer=$SCALA_BINARY_VER + fi + updatedModuleVersions=("${updatedModuleVersions[@]}" "-Dscala.binary.version=$binaryVer") + fi } -# build locker (scala + modules) and quick, publishing everything to artifactory +pollForStagingReposClosed() { + OK=false + + for i in $(seq 1 10); do + OK=true + for repo in $1; do + if [[ "$(st_stagingRepoStatus $repo)" != "closed" ]]; then + echo "Staging repo $repo not yet closed, waiting 30 seconds ($i / 10)" + OK=false + break + fi + done + if [ "$OK" = "true" ]; then break; fi + sleep 30s + done + + if [ "$OK" = "false" ]; then + echo "Failed to close staging repos in 5 minutes: $1" + exit 1 + fi +} + +closeStagingRepos() { + if [ "$publishToSonatype" = "yes" ]; then + open=$(st_stagingReposOpen) + allOpenUrls=$(echo $open | jq '.repositoryURI' | tr -d \") + allOpen=$(echo $open | jq '.repositoryId' | tr -d \") + + echo "Closing open repos: $allOpen" + for repo in $allOpen; do st_stagingRepoClose $repo; done + + # ensure the release is available on sonatype staging before triggering scala-dist + pollForStagingReposClosed "$allOpen" + + echo "Closed sonatype staging repos: $allOpenUrls." + fi +} + +#### STARR (optional) -#### (Optional) STARR. buildStarr() { + clearIvyCache cd $WORKSPACE STARR_DIR=./scala-starr - STARR_VER_SUFFIX="-$(git rev-parse --short $STARR_REF)-starr" - STARR_VER=$SCALA_VER_BASE$STARR_VER_SUFFIX + STARR_VER_SUFFIX="-starr-$(git rev-parse --short $STARR_REF)" + STARR_VER=$SCALA_VER$STARR_VER_SUFFIX rm -rf "$STARR_DIR" ( - git clone --reference $WORKSPACE/.git $WORKSPACE/.git $STARR_DIR + git clone "file://$(pwd)" $STARR_DIR cd $STARR_DIR - git co $STARR_REF + git checkout $STARR_REF travis_fold_start starr "Building starr" $SBT_CMD -no-colors $sbtArgs "setupBootstrapStarr \"$BOOTSTRAP_REPO_DIR\" $STARR_VER" $clean publish travis_fold_end starr ) + SET_STARR=-Dstarr.version=$STARR_VER + + buildModules starr # the locker compiler uses these modules to run scaladoc, see comment on top of this file } #### LOCKER + # for bootstrapping, publish core (or at least smallest subset we can get away with) # so that we can build modules with this version of Scala and publish them locally # must publish under $SCALA_VER so that the modules will depend on this (binary) version of Scala # publish more than just core: partest needs scalap # in sabbus lingo, the resulting Scala build will be used as starr to build the released Scala compiler buildLocker() { + clearIvyCache cd $WORKSPACE - if [ ! 
-z "$STARR_VER" ]; then SET_STARR=-Dstarr.version=$STARR_VER; fi - travis_fold_start locker "Building locker" - $SBT_CMD -no-colors $sbtArgs $SET_STARR "setupBootstrapLocker \"$BOOTSTRAP_REPO_DIR\" $SCALA_VER" $clean publish + $SBT_CMD -no-colors $sbtArgs \ + $SET_STARR \ + ${updatedModuleVersions[@]} \ + "setupBootstrapLocker \"$BOOTSTRAP_REPO_DIR\" $SCALA_VER" \ + $clean publish travis_fold_end locker + + buildModules locker } #### QUICK -buildQuick() { - cd $WORKSPACE - # # TODO: close all open staging repos so that we can be reasonably sure the only open one we see after publishing below is ours - # # the sbt call will create a new one - # - # Rebuild Scala with these modules so that all binary versions are consistent. - # Update versions.properties to new modules. - # Sanity check: make sure the Scala test suite passes / docs can be generated with these modules. +invokeQuickInternal() { + cd $WORKSPACE + setupCmd="$1" + shift travis_fold_start quick "Building bootstrapped" $SBT_CMD $sbtArgs \ -Dstarr.version=$SCALA_VER \ ${updatedModuleVersions[@]} \ - "setupBootstrapQuick $integrationRepoUrl $SCALA_VER \"$BOOTSTRAP_REPO_DIR\"" \ + "$setupCmd" \ "$@" travis_fold_end quick } -testStability() { - travis_fold_start stab "Testing stability" - cd $WORKSPACE - - # Run stability tests using the just built version as "quick" and a new version as "strap" - mv build/quick quick1 - rm -rf build/ - $SBT_CMD $sbtArgs \ - -Dstarr.version=$SCALA_VER \ - ${updatedModuleVersions[@]} \ - "setupBootstrapQuick $integrationRepoUrl $SCALA_VER" \ - $clean \ - library/compile reflect/compile compiler/compile - mv build/quick build/strap - mv quick1 build/quick - scripts/stability-test.sh - - travis_fold_end stab +invokeQuick() { + invokeQuickInternal \ + "setupBootstrapQuick $integrationRepoUrl $SCALA_VER \"$BOOTSTRAP_REPO_DIR\"" \ + "$@" } -# publishes quick to sonatype, then builds modules again (those for which version numbers were provided), -# and publishes those to sonatype as well. finally, the staging repos are closed. -publishSonatype() { - cd $WORKSPACE - - # Make sure that "quick" is downloaded when building the modules +buildQuick() { clearIvyCache - - travis_fold_start sona "Publishing core to sonatype" - # Stage to sonatype. No `clean`, just package and publish the `quick` build. - # TODO: currently we `clean` because everything is re-compiled anyway on travis. Cleaning ensures - # that we compile from a clean state and get identical classfiles (scala-dev#428). Once we figure - # out how to prevent sbt from re-compiling (also needed for test stages), we can remove the `clean`. 
- $SBT_CMD $sbtArgs \ - -Dstarr.version=$SCALA_VER \ - ${updatedModuleVersions[@]} \ + if [ "$publishToSonatype" = "yes" ]; then + invokeQuickInternal \ 'set pgpSigningKey in Global := Some(new java.math.BigInteger("C03EF1D7D692BCFF", 16).longValue)' \ 'set pgpPassphrase in Global := Some(Array.empty)' \ - "setupBootstrapPublish $integrationRepoUrl $SCALA_VER" \ - clean \ - $publishSonatypeTaskCore - travis_fold_end sona + "setupBootstrapPublish \"$BOOTSTRAP_REPO_DIR\" $SCALA_VER" \ + $clean $publishSonatypeTaskCore + else + invokeQuick $clean publish + fi - # Modules are re-built using quick (the first iteration was built with locker) - buildPublishedModules + buildModules quick + + closeStagingRepos +} - open=$(st_stagingReposOpen) - allOpenUrls=$(echo $open | jq '.repositoryURI' | tr -d \") - allOpen=$(echo $open | jq '.repositoryId' | tr -d \") +testStability() { + # Run stability tests using the just built version as "quick" and a new version as "strap" + travis_fold_start stab "Testing stability" + cd $WORKSPACE - echo "Closing open repos: $allOpen" + mv build/quick quick1 + rm -rf build/ - for repo in $allOpen; do st_stagingRepoClose $repo; done + invokeQuick $clean library/compile reflect/compile compiler/compile - echo "Closed sonatype staging repos: $allOpenUrls." + mv build/quick build/strap + mv quick1 build/quick + scripts/stability-test.sh + travis_fold_end stab } diff --git a/scripts/common b/scripts/common index 161147a870c8..c05ddef34144 100644 --- a/scripts/common +++ b/scripts/common @@ -22,7 +22,6 @@ integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifacto # only used on jenkins sbtRepositoryConfig="$WORKSPACE/scripts/sbt-repositories-config" - jcenterCacheUrl=${jcenterCacheUrl-"https://scala-ci.typesafe.com/artifactory/jcenter/"} # used by `checkAvailability` @@ -35,10 +34,12 @@ BOOTSTRAP_REPO_DIR="${TMP_ROOT_DIR}/bootstrap-repo" mkdir "${BOOTSTRAP_REPO_DIR}" addIntegrationResolver="set resolvers in Global += \"scala-pr\" at \"$integrationRepoUrl\"" -addBootstrapResolver="set resolvers in Global += Resolver.file(\"scala-bootstrap\", file(\"$BOOTSTRAP_REPO_DIR\"))" +addBootstrapResolver="set resolvers in Global += \"scala-bootstrap\" at \"file://$BOOTSTRAP_REPO_DIR\"" # Gets set to addIntegrationResolver or addBootstrapResolver for use in sbtBuild and sbtResolve: addResolvers="" +stApi="https://oss.sonatype.org/service/local" + # General debug logging # $* - message function debug () { @@ -143,8 +144,16 @@ st_curl(){ curl -H "Content-Type: application/json" -H "accept: application/json,application/vnd.siesta-error-v1+json,application/vnd.siesta-validation-errors-v1+json" -K ~/.sonatype-curl -s -o - $@ } +st_stagingRepos() { + st_curl "$stApi/staging/profile_repositories" | jq '.data[] | select(.profileName == "org.scala-lang")' +} + st_stagingReposOpen() { - st_curl "$stApi/staging/profile_repositories" | jq '.data[] | select(.profileName == "org.scala-lang") | select(.type == "open")' + st_stagingRepos | jq 'select(.type == "open")' +} + +st_stagingRepoStatus() { + st_stagingRepos | jq -r "select(.repositoryId == \"$1\") | .type" } st_stagingRepoDrop() { diff --git a/scripts/jobs/integrate/bootstrap b/scripts/jobs/integrate/bootstrap index c7531ba8a515..e936f4106f7d 100755 --- a/scripts/jobs/integrate/bootstrap +++ b/scripts/jobs/integrate/bootstrap @@ -3,19 +3,17 @@ # See comment in bootstrap_fun source scripts/common +source scripts/bootstrap_fun # scripts/common provides sbtRepositoryConfig sbtArgs="-ivy $IVY2_DIR 
-Dsbt.override.build.repos=true -Dsbt.repository.config=$sbtRepositoryConfig" -source scripts/bootstrap_fun - generateRepositoriesConfig $integrationRepoUrl determineScalaVersion deriveModuleVersions removeExistingBuilds $integrationRepoUrl -clearIvyCache if [ ! -z "$STARR_REF" ]; then buildStarr @@ -23,17 +21,7 @@ fi buildLocker -# locker is now published in BOOTSTRAP_REPO_DIR -- make sure we resolve from there -rm -rf build/ - -buildModules bootstrap - -buildQuick clean testAll publish +buildQuick +invokeQuick testAll testStability - -buildModules - -if [ "$publishToSonatype" == "yes" ]; then - publishSonatype -fi From 84112e8029d088674b957a23672510446c252969 Mon Sep 17 00:00:00 2001 From: Heikki Vesalainen Date: Tue, 3 Apr 2018 16:19:04 +0300 Subject: [PATCH 1322/2793] Update to Jline 2.14.6 (#6478) This version of Jline fixes three things for Emacs, which means all the special handling of emacs can be removed from scala-code. The things fixed in Jline 2.14.6 are: - ANSI colors are now enabled for Emacs. - Terminal echo is now disabled for Emacs. - History is enabled for all dump terminals. --- src/compiler/scala/tools/ant/templates/tool-unix.tmpl | 2 -- src/compiler/scala/tools/ant/templates/tool-windows.tmpl | 2 +- src/compiler/scala/tools/nsc/Properties.scala | 1 + src/repl/scala/tools/nsc/interpreter/ILoop.scala | 2 +- versions.properties | 2 +- 5 files changed, 4 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl index 634190a31b4c..9045e0547e0b 100755 --- a/src/compiler/scala/tools/ant/templates/tool-unix.tmpl +++ b/src/compiler/scala/tools/ant/templates/tool-unix.tmpl @@ -78,7 +78,6 @@ SEP=":" # Possible additional command line options WINDOWS_OPT="" -EMACS_OPT="-Denv.emacs=$EMACS" # Remove spaces from SCALA_HOME on windows if [[ -n "$cygwin" ]]; then @@ -216,7 +215,6 @@ execCommand \ "${classpath_args[@@]}" \ -Dscala.home="$SCALA_HOME" \ $OVERRIDE_USEJAVACP \ - "$EMACS_OPT" \ $WINDOWS_OPT \ @properties@ @class@ @toolflags@ "$@@" diff --git a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl index 6c6dbbb01cf0..48e1c322237f 100644 --- a/src/compiler/scala/tools/ant/templates/tool-windows.tmpl +++ b/src/compiler/scala/tools/ant/templates/tool-windows.tmpl @@ -134,7 +134,7 @@ if "%_TOOL_CLASSPATH%"=="" ( if not "%_LINE_TOOLCP%"=="" call :add_cpath "%_LINE_TOOLCP%" -set _PROPS=-Dscala.home="!_SCALA_HOME!" -Denv.emacs="%EMACS%" %_OVERRIDE_USEJAVACP% @properties@ +set _PROPS=-Dscala.home="!_SCALA_HOME!" 
%_OVERRIDE_USEJAVACP% @properties@ rem echo "%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %* "%_JAVACMD%" %_JAVA_OPTS% %_PROPS% -cp "%_TOOL_CLASSPATH%" @class@ @toolflags@ %* diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala index 334158982bbb..873f26f51016 100644 --- a/src/compiler/scala/tools/nsc/Properties.scala +++ b/src/compiler/scala/tools/nsc/Properties.scala @@ -29,6 +29,7 @@ object Properties extends scala.util.PropertiesTrait { def shellInterruptedString = scalaPropOrElse("shell.interrupted", f":quit$lineSeparator") // derived values + @deprecated("Emacs support is fully handled by JLine, this will be removed in next release", "2.12.6") def isEmacsShell = propOrEmpty("env.emacs") != "" // Where we keep fsc's state (ports/redirection) diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index 5cd9bc34c403..5ea22049c530 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -926,7 +926,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) extend * supplied as a `() => Completion`; the Completion object provides a concrete Completer. */ def chooseReader(settings: Settings): InteractiveReader = { - if (settings.Xnojline || Properties.isEmacsShell) SimpleReader() + if (settings.Xnojline) SimpleReader() else { type Completer = () => Completion type ReaderMaker = Completer => InteractiveReader diff --git a/versions.properties b/versions.properties index ff096b3da227..ba8a94d56261 100644 --- a/versions.properties +++ b/versions.properties @@ -24,4 +24,4 @@ scala-parser-combinators.version.number=1.0.7 scala-swing.version.number=2.0.0 partest.version.number=1.1.7 scala-asm.version=6.0.0-scala-1 -jline.version=2.14.5 +jline.version=2.14.6 From b83a6479c24e7852b056dd7493b7c99a88c45770 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 11 Apr 2018 11:15:58 -0700 Subject: [PATCH 1323/2793] Check unused function params Both for explicit anon funs and those introduced by for expr desugarings. 
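As a rough illustration of what the check now reports (a sketch in the spirit of the test updated below; the usual `-Ywarn-unused:params` flag is assumed):

```
trait Anonymous {
  // explicit anonymous function: parameter `i` is never used -> warn
  def f = (i: Int) => 42

  // for expression: desugars to List(1).map(i => 42), so `i` is an
  // unused anonymous-function parameter as well -> warn
  def g = for (i <- List(1)) yield 42
}
```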
--- .../scala/tools/nsc/typechecker/TypeDiagnostics.scala | 7 ++++--- test/files/neg/warn-unused-params.check | 8 +++++++- test/files/neg/warn-unused-params.scala | 6 ++++++ 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index fc1cf9acc471..99869d9b3e89 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -524,9 +524,10 @@ trait TypeDiagnostics { case b @ Bind(n, _) if !atBounded(b) && n != nme.DEFAULT_CASE => patvars += b.symbol case _ => } - case _: RefTree if sym ne null => targets += sym - case Assign(lhs, _) if lhs.symbol != null => setVars += lhs.symbol - case _ => + case _: RefTree if sym ne null => targets += sym + case Assign(lhs, _) if lhs.symbol != null => setVars += lhs.symbol + case Function(ps, _) if settings.warnUnusedParams => params ++= ps.map(_.symbol) + case _ => } if (t.tpe ne null) { diff --git a/test/files/neg/warn-unused-params.check b/test/files/neg/warn-unused-params.check index 373417ce0809..0c63b23b8184 100644 --- a/test/files/neg/warn-unused-params.check +++ b/test/files/neg/warn-unused-params.check @@ -13,6 +13,12 @@ case class CaseyAtTheBat(k: Int)(s: String) // warn warn-unused-params.scala:62: warning: parameter value readResolve in method f is never used def f(readResolve: Int) = 42 // warn ^ +warn-unused-params.scala:76: warning: parameter value i in value $anonfun is never used + val f = (i: Int) => 42 + ^ +warn-unused-params.scala:78: warning: parameter value i in value $anonfun is never used + val g = for (i <- List(1)) yield 42 + ^ error: No warnings can be incurred under -Xfatal-warnings. -5 warnings found +7 warnings found one error found diff --git a/test/files/neg/warn-unused-params.scala b/test/files/neg/warn-unused-params.scala index 559e6352434d..486a18d85634 100644 --- a/test/files/neg/warn-unused-params.scala +++ b/test/files/neg/warn-unused-params.scala @@ -71,3 +71,9 @@ class Main { trait Unimplementation { def f(u: Int): Int = ??? // no warn for param in unimplementation } + +trait Anonymous { + val f = (i: Int) => 42 + + val g = for (i <- List(1)) yield 42 +} From c1d7de48d983409ff1d100c175721ee41f6487c4 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 12 Apr 2018 02:46:10 -0700 Subject: [PATCH 1324/2793] No warn underscore in anon funs Placeholder syntax results in synthetic params which are excluded from unused warnings. Underscore function params also get a fresh name, but since they are not synthetic, they receive the `NoWarnAttachment` which is awkwardly propagated. 
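In user code the distinction looks roughly like this (a sketch mirroring the updated test below):

```
trait Anonymous {
  def f  = (i: Int) => 42    // named parameter, never used          -> warn
  def f1 = (_: Int) => 42    // underscore parameter (fresh name)    -> no warn
  def f2: Int => Int = _ + 1 // placeholder syntax (synthetic param) -> no warn
}
```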
--- .../scala/tools/nsc/ast/parser/Parsers.scala | 13 ++++++++----- .../tools/nsc/typechecker/TypeDiagnostics.scala | 7 ++++--- test/files/neg/warn-unused-params.check | 6 +++--- test/files/neg/warn-unused-params.scala | 8 ++++++-- 4 files changed, 21 insertions(+), 13 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 3d1eb3530b17..1a76c229cbdd 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -751,13 +751,16 @@ self => placeholderParams = placeholderParams filter (_.name != name) } def errorParam = makeParam(nme.ERROR, errorTypeTree setPos o2p(tree.pos.end)) + def propagateNoWarnAttachment(from: Tree, to: ValDef): to.type = + if (from.hasAttachment[NoWarnAttachment.type]) to.updateAttachment(NoWarnAttachment) + else to tree match { - case Ident(name) => + case id @ Ident(name) => removeAsPlaceholder(name) - makeParam(name.toTermName, TypeTree() setPos o2p(tree.pos.end)) - case Typed(Ident(name), tpe) if tpe.isType => // get the ident! + propagateNoWarnAttachment(id, makeParam(name.toTermName, TypeTree() setPos o2p(tree.pos.end))) + case Typed(id @ Ident(name), tpe) if tpe.isType => // get the ident! removeAsPlaceholder(name) - makeParam(name.toTermName, tpe) + propagateNoWarnAttachment(id, makeParam(name.toTermName, tpe)) case build.SyntacticTuple(as) => val arity = as.length val example = analyzer.exampleTuplePattern(as map { case Ident(name) => name; case _ => nme.EMPTY }) @@ -1310,7 +1313,7 @@ self => val id = atPos(start)(Ident(pname)) val param = atPos(id.pos.focus)(gen.mkSyntheticParam(pname.toTermName)) placeholderParams = param :: placeholderParams - id + id.updateAttachment(NoWarnAttachment) } private def interpolatedString(inPattern: Boolean): Tree = { diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 99869d9b3e89..35a0e581ac6d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -505,7 +505,7 @@ trait TypeDiagnostics { override def traverse(t: Tree): Unit = if (!t.isErrorTyped) { val sym = t.symbol t match { - case m: MemberDef if qualifies(sym) => + case m: MemberDef if qualifies(sym) => t match { case ValDef(mods@_, name@_, tpt@_, rhs@_) if wasPatVarDef(t) => if (settings.warnUnusedPatVars && !atBounded(t)) patvars += sym @@ -519,14 +519,15 @@ trait TypeDiagnostics { case _ => defnTrees += m } - case CaseDef(pat, guard@_, rhs@_) if settings.warnUnusedPatVars => + case CaseDef(pat, guard@_, rhs@_) if settings.warnUnusedPatVars => pat.foreach { case b @ Bind(n, _) if !atBounded(b) && n != nme.DEFAULT_CASE => patvars += b.symbol case _ => } case _: RefTree if sym ne null => targets += sym case Assign(lhs, _) if lhs.symbol != null => setVars += lhs.symbol - case Function(ps, _) if settings.warnUnusedParams => params ++= ps.map(_.symbol) + case Function(ps, _) if settings.warnUnusedParams => + params ++= ps.filterNot(p => atBounded(p) || p.symbol.isSynthetic).map(_.symbol) case _ => } diff --git a/test/files/neg/warn-unused-params.check b/test/files/neg/warn-unused-params.check index 0c63b23b8184..7a14cb3dc83d 100644 --- a/test/files/neg/warn-unused-params.check +++ b/test/files/neg/warn-unused-params.check @@ -14,10 +14,10 @@ warn-unused-params.scala:62: warning: parameter value readResolve in method f is def 
f(readResolve: Int) = 42 // warn ^ warn-unused-params.scala:76: warning: parameter value i in value $anonfun is never used - val f = (i: Int) => 42 + def f = (i: Int) => 42 // warn ^ -warn-unused-params.scala:78: warning: parameter value i in value $anonfun is never used - val g = for (i <- List(1)) yield 42 +warn-unused-params.scala:82: warning: parameter value i in value $anonfun is never used + def g = for (i <- List(1)) yield 42 // warn map.(i => 42) ^ error: No warnings can be incurred under -Xfatal-warnings. 7 warnings found diff --git a/test/files/neg/warn-unused-params.scala b/test/files/neg/warn-unused-params.scala index 486a18d85634..246098176950 100644 --- a/test/files/neg/warn-unused-params.scala +++ b/test/files/neg/warn-unused-params.scala @@ -73,7 +73,11 @@ trait Unimplementation { } trait Anonymous { - val f = (i: Int) => 42 + def f = (i: Int) => 42 // warn - val g = for (i <- List(1)) yield 42 + def f1 = (_: Int) => 42 // no warn underscore parameter (a fresh name) + + def f2: Int => Int = _ + 1 // no warn placeholder syntax (a fresh name and synthethic parameter) + + def g = for (i <- List(1)) yield 42 // warn map.(i => 42) } From 43653fe789bdf60ba4503b93a99793910e3b2a39 Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Wed, 11 Apr 2018 12:51:29 +0200 Subject: [PATCH 1325/2793] [backport] Remove mentions of named functions in Return Expressions AFAIU Scala doesn't have "named functions". Also made the language around NonLocalReturnException a bit more lenient, so that a conforming implementation may optimize in some cases by not throwing. --- spec/06-expressions.md | 40 ++++++++++++++++++++-------------------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/spec/06-expressions.md b/spec/06-expressions.md index 9e49dfa1991f..71bf73840eb2 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -1057,35 +1057,35 @@ Expr1 ::= ‘return’ [Expr] ``` A _return expression_ `return $e$` must occur inside the body of some -enclosing named method or function. The innermost enclosing named -method or function in a source program, $f$, must have an explicitly declared result type, -and the type of $e$ must conform to it. -The return expression -evaluates the expression $e$ and returns its value as the result of -$f$. The evaluation of any statements or +enclosing method. The innermost enclosing method in a source program, +$f$, must have an explicitly declared result type, and the type of +$e$ must conform to it. + +The return expression evaluates the expression $e$ and returns its +value as the result of $f$. The evaluation of any statements or expressions following the return expression is omitted. The type of a return expression is `scala.Nothing`. -The expression $e$ may be omitted. The return expression -`return` is type-checked and evaluated as if it was `return ()`. +The expression $e$ may be omitted. The return expression +`return` is type-checked and evaluated as if it were `return ()`. -An `apply` method which is generated by the compiler as an -expansion of an anonymous function does not count as a named function -in the source program, and therefore is never the target of a return -expression. +An `apply` method which is generated by the compiler as an expansion +of an anonymous function does not count as a method in the source +program, and therefore is never the target of a return expression. -Returning from a nested anonymous function is implemented by throwing -and catching a `scala.runtime.NonLocalReturnException`. 
Any -exception catches between the point of return and the enclosing -methods might see the exception. A key comparison makes sure that -these exceptions are only caught by the method instance which is -terminated by the return. +Returning from the method from withing a nested function may be +implemented by throwing and catching a +`scala.runtime.NonLocalReturnException`. Any exception catches +between the point of return and the enclosing methods might see +and catch that exception. A key comparison makes sure that this +exception is only caught by the method instance which is terminated +by the return. If the return expression is itself part of an anonymous function, it is possible that the enclosing instance of $f$ has already returned before the return expression is executed. In that case, the thrown -`scala.runtime.NonLocalReturnException` will not be caught, -and will propagate up the call stack. +`scala.runtime.NonLocalReturnException` will not be caught, and will +propagate up the call stack. ## Throw Expressions From 9046d698a3db9ff123006e57c7fb06996d96478d Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Thu, 12 Apr 2018 12:04:54 +0200 Subject: [PATCH 1326/2793] [backport] restrict applicability to user defined methods --- spec/06-expressions.md | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/spec/06-expressions.md b/spec/06-expressions.md index 71bf73840eb2..ed779f0f32dc 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -1057,22 +1057,18 @@ Expr1 ::= ‘return’ [Expr] ``` A _return expression_ `return $e$` must occur inside the body of some -enclosing method. The innermost enclosing method in a source program, -$f$, must have an explicitly declared result type, and the type of -$e$ must conform to it. +enclosing user defined method. The innermost enclosing method in a +source program, $m$, must have an explicitly declared result type, and +the type of $e$ must conform to it. The return expression evaluates the expression $e$ and returns its -value as the result of $f$. The evaluation of any statements or +value as the result of $m$. The evaluation of any statements or expressions following the return expression is omitted. The type of a return expression is `scala.Nothing`. The expression $e$ may be omitted. The return expression `return` is type-checked and evaluated as if it were `return ()`. -An `apply` method which is generated by the compiler as an expansion -of an anonymous function does not count as a method in the source -program, and therefore is never the target of a return expression. - Returning from the method from withing a nested function may be implemented by throwing and catching a `scala.runtime.NonLocalReturnException`. Any exception catches @@ -1082,7 +1078,7 @@ exception is only caught by the method instance which is terminated by the return. If the return expression is itself part of an anonymous function, it -is possible that the enclosing instance of $f$ has already returned +is possible that the enclosing method $m$ has already returned before the return expression is executed. In that case, the thrown `scala.runtime.NonLocalReturnException` will not be caught, and will propagate up the call stack. 
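As an aside on the passage reworked in the two spec patches above, a minimal sketch of the mechanism it describes (illustrative only, not taken from the spec sources):

```
// `return` inside the function literal passed to `foreach` terminates the
// enclosing method `firstNegative`; per the spec text, this may be implemented
// by throwing and catching a non-local-return control throwable.
def firstNegative(xs: List[Int]): Option[Int] = {
  xs.foreach(x => if (x < 0) return Some(x))
  None
}
```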
From 0a8daf257c1bf565b07b80bb4005efadafbf4412 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 12 Apr 2018 11:09:07 +0200 Subject: [PATCH 1327/2793] Spec: say "method" instead of "function" The latter is reserved for function literals, function types, or when we have an application `f(a)`, and we don't know the type of `f` yet. If the type is a method type, it makes more sense to call `f` a method. --- spec/06-expressions.md | 44 +++++++++++++++++++++--------------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/spec/06-expressions.md b/spec/06-expressions.md index ed779f0f32dc..174438670ac3 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -232,7 +232,7 @@ Then we have: (new D).superD == "B" ``` -Note that the `superB` function returns different results +Note that the `superB` method returns different results depending on whether `B` is mixed in with class `Root` or `A`. ## Function Applications @@ -247,9 +247,9 @@ Exprs ::= Expr {‘,’ Expr} An application `$f(e_1 , \ldots , e_m)$` applies the function `$f$` to the argument expressions `$e_1, \ldots , e_m$`. For this expression to be well-typed, the function must be *applicable* to its arguments, which is defined next by case analysis on $f$'s type. -If $f$ has a method type `($p_1$:$T_1 , \ldots , p_n$:$T_n$)$U$`, each argument expression $e_i$ is typed with the corresponding parameter type $T_i$ as expected type. Let $S_i$ be the type of argument $e_i$ $(i = 1 , \ldots , m)$. The function $f$ must be _applicable_ to its arguments $e_1, \ldots , e_n$ of types $S_1 , \ldots , S_n$. We say that an argument expression $e_i$ is a _named_ argument if it has the form `$x_i=e'_i$` and `$x_i$` is one of the parameter names `$p_1, \ldots, p_n$`. +If $f$ has a method type `($p_1$:$T_1 , \ldots , p_n$:$T_n$)$U$`, each argument expression $e_i$ is typed with the corresponding parameter type $T_i$ as expected type. Let $S_i$ be the type of argument $e_i$ $(i = 1 , \ldots , m)$. The method $f$ must be _applicable_ to its arguments $e_1, \ldots , e_n$ of types $S_1 , \ldots , S_n$. We say that an argument expression $e_i$ is a _named_ argument if it has the form `$x_i=e'_i$` and `$x_i$` is one of the parameter names `$p_1, \ldots, p_n$`. -Once the types $S_i$ have been determined, the function $f$ of the above method type is said to be applicable if all of the following conditions hold: +Once the types $S_i$ have been determined, the method $f$ of the above method type is said to be applicable if all of the following conditions hold: - for every named argument $p_j=e_i'$ the type $S_i$ is [compatible](03-types.html#compatibility) with the parameter type $T_j$; - for every positional argument $e_i$ the type $S_i$ is [compatible](03-types.html#compatibility) with $T_i$; - if the expected type is defined, the result type $U$ is [compatible](03-types.html#compatibility) to it. @@ -296,12 +296,12 @@ sequence $e$ with its elements. When the application uses named arguments, the vararg parameter has to be specified exactly once. A function application usually allocates a new frame on the program's -run-time stack. However, if a local function or a final method calls +run-time stack. However, if a local method or a final method calls itself as its last action, the call is executed using the stack-frame of the caller. 
###### Example -Assume the following function which computes the sum of a +Assume the following method which computes the sum of a variable number of arguments: ```scala @@ -330,7 +330,7 @@ arguments, the following conditions must hold. - For every named argument $p_i = e_i$ which appears left of a positional argument in the argument list $e_1 \ldots e_m$, the argument position $i$ coincides with - the position of parameter $p_i$ in the parameter list of the applied function. + the position of parameter $p_i$ in the parameter list of the applied method. - The names $x_i$ of all named arguments are pairwise distinct and no named argument defines a parameter which is already specified by a positional argument. @@ -341,7 +341,7 @@ If the application uses named or default arguments the following transformation is applied to convert it into an application without named or default arguments. -If the function $f$ +If the method $f$ has the form `$p.m$[$\mathit{targs}$]` it is transformed into the block @@ -351,7 +351,7 @@ block } ``` -If the function $f$ is itself an application expression the transformation +If the method $f$ is itself an application expression the transformation is applied recursively on $f$. The result of transforming $f$ is a block of the form @@ -398,7 +398,7 @@ The final result of the transformation is a block of the form ### Signature Polymorphic Methods For invocations of signature polymorphic methods of the target platform `$f$($e_1 , \ldots , e_m$)`, -the invoked function has a different method type `($p_1$:$T_1 , \ldots , p_n$:$T_n$)$U$` at each call +the invoked method has a different method type `($p_1$:$T_1 , \ldots , p_n$:$T_n$)$U$` at each call site. The parameter types `$T_ , \ldots , T_n$` are the types of the argument expressions `$e_1 , \ldots , e_m$` and `$U$` is the expected type at the call site. If the expected type is undefined then `$U$` is `scala.AnyRef`. The parameter names `$p_1 , \ldots , p_n$` are fresh. @@ -460,7 +460,7 @@ $e$. Type applications can be omitted if [local type inference](#local-type-inference) can infer best type parameters -for a polymorphic function from the types of the actual function arguments +for a polymorphic method from the types of the actual method arguments and the expected result type. ## Tuples @@ -625,10 +625,10 @@ equivalent to the postfix method application -Prefix operators are different from normal function applications in +Prefix operators are different from normal method applications in that their operand expression need not be atomic. For instance, the input sequence `-sin(x)` is read as `-(sin(x))`, whereas the -function application `negate sin(x)` would be parsed as the +method application `negate sin(x)` would be parsed as the application of the infix operator `sin` to the operands `negate` and `(x)`. @@ -778,17 +778,17 @@ depends on the definition of $x$. If $x$ denotes a mutable variable, then the assignment changes the current value of $x$ to be the result of evaluating the expression $e$. The type of $e$ is expected to conform to the type of $x$. If $x$ is a parameterless -function defined in some template, and the same template contains a -setter function `$x$_=` as member, then the assignment +method defined in some template, and the same template contains a +setter method `$x$_=` as member, then the assignment `$x$ = $e$` is interpreted as the invocation -`$x$_=($e\,$)` of that setter function. 
Analogously, an -assignment `$f.x$ = $e$` to a parameterless function $x$ +`$x$_=($e\,$)` of that setter method. Analogously, an +assignment `$f.x$ = $e$` to a parameterless method $x$ is interpreted as the invocation `$f.x$_=($e\,$)`. -An assignment `$f$($\mathit{args}\,$) = $e$` with a function application to the +An assignment `$f$($\mathit{args}\,$) = $e$` with a method application to the left of the ‘`=`’ operator is interpreted as `$f.$update($\mathit{args}$, $e\,$)`, i.e. -the invocation of an `update` function defined by $f$. +the invocation of an `update` method defined by $f$. ###### Example Here are some assignment expressions and their equivalent expansions. @@ -885,7 +885,7 @@ Expr1 ::= ‘while’ ‘(’ Expr ‘)’ {nl} Expr The _while loop expression_ `while ($e_1$) $e_2$` is typed and evaluated as if it was an application of `whileLoop ($e_1$) ($e_2$)` where -the hypothetical function `whileLoop` is defined as follows. +the hypothetical method `whileLoop` is defined as follows. ```scala def whileLoop(cond: => Boolean)(body: => Unit): Unit = @@ -1013,7 +1013,7 @@ The for comprehension is translated to: ###### Example For comprehensions can be used to express vector and matrix algorithms concisely. -For instance, here is a function to compute the transpose of a given matrix: +For instance, here is a method to compute the transpose of a given matrix: @@ -1024,7 +1024,7 @@ def transpose[A](xss: Array[Array[A]]) = { } ``` -Here is a function to compute the scalar product of two vectors: +Here is a method to compute the scalar product of two vectors: ```scala def scalprod(xs: Array[Double], ys: Array[Double]) = { @@ -1034,7 +1034,7 @@ def scalprod(xs: Array[Double], ys: Array[Double]) = { } ``` -Finally, here is a function to compute the product of two matrices. +Finally, here is a method to compute the product of two matrices. Compare with the [imperative version](#example-imperative-matrix-multiplication). ```scala From 464df60bb23b034c706b0a08615a7b7bb75924f4 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 12 Apr 2018 11:10:19 +0200 Subject: [PATCH 1328/2793] Spec: update dynamic selection The implementation had diverged --- spec/06-expressions.md | 29 ++++++++++------------------- 1 file changed, 10 insertions(+), 19 deletions(-) diff --git a/spec/06-expressions.md b/spec/06-expressions.md index 174438670ac3..e64a950626d7 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -1742,27 +1742,18 @@ a sub-expression of parameterless method type, is not evaluated in the expanded ### Dynamic Member Selection -The standard Scala library defines a trait `scala.Dynamic` which defines a member -`applyDynamic` as follows: +The standard Scala library defines a marker trait `scala.Dynamic`. Subclasses of this trait are able to intercept selections and applications on their instances by defining methods of the names `applyDynamic`, `applyDynamicNamed`, `selectDynamic`, and `updateDynamic`. Under the conditions of [implicit conversion](#dynamic-member-selection), the following rewrites are performed, assuming $e$ has type `Dynamic`, and the originally expression does not type check under normal rules: -```scala -package scala -trait Dynamic { - def applyDynamic (name: String, args: Any*): Any - ... -} -``` + * `e.m[Ti](xi)` becomes `e.applyDynamic[Ti]("m")(xi)` + * `e.m[Ti]` becomes `e.selectDynamic[Ti]("m")` + * `e.m = x` becomes `e.updateDynamic("m")(x)` -Assume a selection of the form $e.x$ where the type of $e$ conforms to `scala.Dynamic`. 
-Further assuming the selection is not followed by any function arguments, such an expression can be rewritten under the conditions given [here](#implicit-conversions) to: +If any arguments are named in the application (one of the `xi` is of the shape `arg = x`), their name is preserved as the first component of the pair passed to `applyDynamicNamed` (for missing names, `""` is used): -```scala -$e$.applyDynamic("$x$") -``` + * `e.m[Ti](argi = xi)` becomes `e.applyDynamicNamed[Ti]("m")(("argi", xi))` -If the selection is followed by some arguments, e.g. $e.x(\mathit{args})$, then that expression -is rewritten to +Finally: -```scala -$e$.applyDynamic("$x$", $\mathit{args}$) -``` + * `e.m(x) = y` becomes `e.selectDynamic("m").update(x, y)` + +None of these methods are actually defined in the `scala.Dynamic`, so that users are free to define them with or without type parameters, or implicit arguments. \ No newline at end of file From 3c81f1bfb6f942c4c3316309b5852e8d65addae0 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 12 Apr 2018 11:41:40 +0200 Subject: [PATCH 1329/2793] Spec-ify the spec update --- spec/06-expressions.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/spec/06-expressions.md b/spec/06-expressions.md index e64a950626d7..dd267558a89d 100644 --- a/spec/06-expressions.md +++ b/spec/06-expressions.md @@ -1069,7 +1069,7 @@ a return expression is `scala.Nothing`. The expression $e$ may be omitted. The return expression `return` is type-checked and evaluated as if it were `return ()`. -Returning from the method from withing a nested function may be +Returning from the method from within a nested function may be implemented by throwing and catching a `scala.runtime.NonLocalReturnException`. Any exception catches between the point of return and the enclosing methods might see @@ -1742,7 +1742,9 @@ a sub-expression of parameterless method type, is not evaluated in the expanded ### Dynamic Member Selection -The standard Scala library defines a marker trait `scala.Dynamic`. Subclasses of this trait are able to intercept selections and applications on their instances by defining methods of the names `applyDynamic`, `applyDynamicNamed`, `selectDynamic`, and `updateDynamic`. Under the conditions of [implicit conversion](#dynamic-member-selection), the following rewrites are performed, assuming $e$ has type `Dynamic`, and the originally expression does not type check under normal rules: +The standard Scala library defines a marker trait `scala.Dynamic`. Subclasses of this trait are able to intercept selections and applications on their instances by defining methods of the names `applyDynamic`, `applyDynamicNamed`, `selectDynamic`, and `updateDynamic`. + +The following rewrites are performed, assuming $e$'s type conforms to `scala.Dynamic`, and the original expression does not type check under the normal rules, as specified fully in the relevant subsection of [implicit conversion](#dynamic-member-selection): * `e.m[Ti](xi)` becomes `e.applyDynamic[Ti]("m")(xi)` * `e.m[Ti]` becomes `e.selectDynamic[Ti]("m")` From a84cbfb8bfabb9b3f4a167f21760b4adda78039c Mon Sep 17 00:00:00 2001 From: Viktor Klang Date: Mon, 16 Apr 2018 15:14:49 +0100 Subject: [PATCH 1330/2793] Fixes #10810 by tracking concurrent blockinginstead of max number of threads. 
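In user terms, this concerns how the default global ExecutionContext compensates for code wrapped in `blocking`: each such block on a worker thread now counts against a budget of concurrent blockers (the `maxExtraThreads` limit referenced in the diff below) instead of against a cap on the total number of threads; once the budget is exhausted, the block simply runs unmanaged on the current thread. A minimal usage sketch (file path and names are arbitrary):

```
import scala.concurrent.{ Future, blocking }
import scala.concurrent.ExecutionContext.Implicits.global

// Wrapping blocking I/O in `blocking` lets the pool spawn a compensating
// thread, up to the concurrent-blocker budget tracked by this change.
def slowRead(path: String): Future[String] = Future {
  blocking {
    scala.io.Source.fromFile(path).mkString
  }
}
```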
--- .../impl/ExecutionContextImpl.scala | 84 +++++++++---------- 1 file changed, 42 insertions(+), 42 deletions(-) diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala index ae6f9d6fd2b2..4c83a9b8032f 100644 --- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala +++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala @@ -9,7 +9,7 @@ package scala.concurrent.impl import java.util.concurrent.{ ForkJoinPool, ForkJoinWorkerThread, ForkJoinTask, Callable, Executor, ExecutorService, ThreadFactory, TimeUnit } -import java.util.concurrent.atomic.AtomicInteger +import java.util.concurrent.atomic.{AtomicInteger, AtomicReference} import java.util.Collection import scala.concurrent.{ BlockContext, ExecutionContext, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService } import scala.annotation.tailrec @@ -24,26 +24,25 @@ private[scala] class ExecutionContextImpl private[impl] (val executor: Executor, private[concurrent] object ExecutionContextImpl { - // Implement BlockContext on FJP threads final class DefaultThreadFactory( daemonic: Boolean, - maxThreads: Int, + maxBlockers: Int, prefix: String, uncaught: Thread.UncaughtExceptionHandler) extends ThreadFactory with ForkJoinPool.ForkJoinWorkerThreadFactory { require(prefix ne null, "DefaultThreadFactory.prefix must be non null") - require(maxThreads > 0, "DefaultThreadFactory.maxThreads must be greater than 0") + require(maxBlockers >= 0, "DefaultThreadFactory.maxBlockers must be greater-or-equal-to 0") - private final val currentNumberOfThreads = new AtomicInteger(0) + private final val currentNumberOfBlockers = new AtomicInteger(0) - @tailrec private final def reserveThread(): Boolean = currentNumberOfThreads.get() match { - case `maxThreads` | Int.`MaxValue` => false - case other => currentNumberOfThreads.compareAndSet(other, other + 1) || reserveThread() + @tailrec private final def newBlocker(): Boolean = currentNumberOfBlockers.get() match { + case `maxBlockers` | Int.`MaxValue` => false + case other => currentNumberOfBlockers.compareAndSet(other, other + 1) || newBlocker() } - @tailrec private final def deregisterThread(): Boolean = currentNumberOfThreads.get() match { + @tailrec private final def freeBlocker(): Boolean = currentNumberOfBlockers.get() match { case 0 => false - case other => currentNumberOfThreads.compareAndSet(other, other - 1) || deregisterThread() + case other => currentNumberOfBlockers.compareAndSet(other, other - 1) || freeBlocker() } def wire[T <: Thread](thread: T): T = { @@ -53,39 +52,42 @@ private[concurrent] object ExecutionContextImpl { thread } - // As per ThreadFactory contract newThread should return `null` if cannot create new thread. 
- def newThread(runnable: Runnable): Thread = - if (reserveThread()) - wire(new Thread(new Runnable { - // We have to decrement the current thread count when the thread exits - override def run() = try runnable.run() finally deregisterThread() - })) else null + def newThread(runnable: Runnable): Thread = wire(new Thread(runnable)) def newThread(fjp: ForkJoinPool): ForkJoinWorkerThread = - if (reserveThread()) { - wire(new ForkJoinWorkerThread(fjp) with BlockContext { - // We have to decrement the current thread count when the thread exits - final override def onTermination(exception: Throwable): Unit = deregisterThread() - final override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = { - var result: T = null.asInstanceOf[T] - ForkJoinPool.managedBlock(new ForkJoinPool.ManagedBlocker { - @volatile var isdone = false - override def block(): Boolean = { - result = try { - // When we block, switch out the BlockContext temporarily so that nested blocking does not created N new Threads - BlockContext.withBlockContext(BlockContext.defaultBlockContext) { thunk } - } finally { - isdone = true + wire(new ForkJoinWorkerThread(fjp) with BlockContext { + private[this] var isBlocked: Boolean = false // This is only ever read & written if this thread is the current thread + final override def blockOn[T](thunk: =>T)(implicit permission: CanAwait): T = + if ((Thread.currentThread eq this) && !isBlocked && newBlocker()) { + try { + isBlocked = true + val b: ForkJoinPool.ManagedBlocker with (() => T) = + new ForkJoinPool.ManagedBlocker with (() => T) { + private[this] var result: T = null.asInstanceOf[T] + private[this] var done: Boolean = false + final override def block(): Boolean = { + try { + if (!done) + result = thunk + } finally { + done = true + } + + true } - true - } - override def isReleasable = isdone - }) - result - } - }) - } else null + final override def isReleasable = done + + final override def apply(): T = result + } + ForkJoinPool.managedBlock(b) + b() + } finally { + isBlocked = false + freeBlocker() + } + } else thunk // Unmanaged blocking + }) } def createDefaultExecutorService(reporter: Throwable => Unit): ExecutorService = { @@ -99,8 +101,6 @@ private[concurrent] object ExecutionContextImpl { def range(floor: Int, desired: Int, ceiling: Int) = scala.math.min(scala.math.max(floor, desired), ceiling) val numThreads = getInt("scala.concurrent.context.numThreads", "x1") // The hard limit on the number of active threads that the thread factory will produce - // scala/bug#8955 Deadlocks can happen if maxNoOfThreads is too low, although we're currently not sure - // about what the exact threshold is. numThreads + 256 is conservatively high. 
val maxNoOfThreads = getInt("scala.concurrent.context.maxThreads", "x1") val desiredParallelism = range( @@ -116,7 +116,7 @@ private[concurrent] object ExecutionContextImpl { } val threadFactory = new ExecutionContextImpl.DefaultThreadFactory(daemonic = true, - maxThreads = maxNoOfThreads + maxExtraThreads, + maxBlockers = maxExtraThreads, prefix = "scala-execution-context-global", uncaught = uncaughtExceptionHandler) From b0b684e578863a0ff15ee0638431c30a9c00a965 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 23 Apr 2018 11:26:07 +0200 Subject: [PATCH 1331/2793] Update test case to changed JDK behavior --- test/files/run/t2873.check | 1 - test/files/run/t2873.scala | 4 +++- 2 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 test/files/run/t2873.check diff --git a/test/files/run/t2873.check b/test/files/run/t2873.check deleted file mode 100644 index 209b679c0719..000000000000 --- a/test/files/run/t2873.check +++ /dev/null @@ -1 +0,0 @@ -RedBlack.Empty$ diff --git a/test/files/run/t2873.scala b/test/files/run/t2873.scala index 3a3cc59b465c..d8cf21e75303 100644 --- a/test/files/run/t2873.scala +++ b/test/files/run/t2873.scala @@ -5,6 +5,8 @@ abstract class RedBlack[A] extends Serializable { object Test { def main(args: Array[String]): Unit = { - println(classOf[RedBlack[_]].getMethod("Empty").getGenericReturnType) + val r = classOf[RedBlack[_]].getMethod("Empty").getGenericReturnType.toString + // Output changed in JDK 1.8.0_172: https://github.com/scala/bug/issues/10835 + assert(r == "RedBlack.Empty$" || r == "RedBlack$Empty$", r) } } From 340b899536f767ccb6fc49d13879cdcacab3999d Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Mon, 23 Apr 2018 07:54:06 -0400 Subject: [PATCH 1332/2793] Deprecate Float range and Double range Ref scala/bug#10781 This is in preparation for Float range and Double range removal in 2.13.x (scala/scala#6468). 
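The migration this nudges users toward looks roughly as follows (a sketch based on the deprecation messages added below; names are arbitrary):

```
object DoubleRangeMigration {
  // Deprecated by this change:
  val viaProxy = 0.1 until 1.0 by 0.1
  val viaRange = Range.Double(0.1, 1.0, 0.1)

  // Suggested replacements, via BigDecimal ranges:
  val bdOps   = BigDecimal("0.1") until BigDecimal("1.0") by BigDecimal("0.1")
  val bdRange = Range.BigDecimal(BigDecimal("0.1"), BigDecimal("1.0"), BigDecimal("0.1"))
}
```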
--- src/library/scala/collection/immutable/Range.scala | 2 ++ src/library/scala/runtime/ScalaNumberProxy.scala | 8 ++++---- test/files/run/t3518.check | 12 ++++++++++++ test/files/run/t3518.flags | 1 + test/files/run/t4201.check | 3 +++ test/files/run/t4201.flags | 1 + test/files/run/t5857.check | 6 ++++++ test/files/run/t5857.flags | 1 + test/files/run/t9656.check | 6 ++++++ test/files/run/t9656.flags | 1 + 10 files changed, 37 insertions(+), 4 deletions(-) create mode 100644 test/files/run/t3518.check create mode 100644 test/files/run/t3518.flags create mode 100644 test/files/run/t4201.check create mode 100644 test/files/run/t4201.flags create mode 100644 test/files/run/t5857.check create mode 100644 test/files/run/t5857.flags create mode 100644 test/files/run/t9656.flags diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index 82203b3d1a53..9f490f3e86b3 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -497,9 +497,11 @@ object Range { implicit val doubleAsIntegral = scala.math.Numeric.DoubleAsIfIntegral def toBD(x: Double): BigDecimal = scala.math.BigDecimal valueOf x + @deprecated("use Range.BigDecimal instead", "2.12.6") def apply(start: Double, end: Double, step: Double) = BigDecimal(toBD(start), toBD(end), toBD(step)) mapRange (_.doubleValue) + @deprecated("use Range.BigDecimal.inclusive instead", "2.12.6") def inclusive(start: Double, end: Double, step: Double) = BigDecimal.inclusive(toBD(start), toBD(end), toBD(step)) mapRange (_.doubleValue) } diff --git a/src/library/scala/runtime/ScalaNumberProxy.scala b/src/library/scala/runtime/ScalaNumberProxy.scala index 9b4899aef6e7..f54ef8629f11 100644 --- a/src/library/scala/runtime/ScalaNumberProxy.scala +++ b/src/library/scala/runtime/ScalaNumberProxy.scala @@ -64,10 +64,10 @@ trait FractionalProxy[T] extends Any with ScalaNumberProxy[T] with RangedProxy[T type ResultWithoutStep = Range.Partial[T, NumericRange[T]] def isWhole() = false - def until(end: T): ResultWithoutStep = new Range.Partial(NumericRange(self, end, _)) - def until(end: T, step: T): NumericRange.Exclusive[T] = NumericRange(self, end, step) - def to(end: T): ResultWithoutStep = new Range.Partial(NumericRange.inclusive(self, end, _)) - def to(end: T, step: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, step) + @deprecated("use BigDecimal range instead", "2.12.6") def until(end: T): ResultWithoutStep = new Range.Partial(NumericRange(self, end, _)) + @deprecated("use BigDecimal range instead", "2.12.6") def until(end: T, step: T): NumericRange.Exclusive[T] = NumericRange(self, end, step) + @deprecated("use BigDecimal range instead", "2.12.6") def to(end: T): ResultWithoutStep = new Range.Partial(NumericRange.inclusive(self, end, _)) + @deprecated("use BigDecimal range instead", "2.12.6") def to(end: T, step: T): NumericRange.Inclusive[T] = NumericRange.inclusive(self, end, step) } trait OrderedProxy[T] extends Any with Ordered[T] with Typed[T] { diff --git a/test/files/run/t3518.check b/test/files/run/t3518.check new file mode 100644 index 000000000000..1500b22b97c3 --- /dev/null +++ b/test/files/run/t3518.check @@ -0,0 +1,12 @@ +t3518.scala:2: warning: method to in trait FractionalProxy is deprecated (since 2.12.6): use BigDecimal range instead + val r1 = 1.0 to 10.0 by 0.5 + ^ +t3518.scala:3: warning: method to in trait FractionalProxy is deprecated (since 2.12.6): use BigDecimal range instead + val r2 = 1.0 to 
1.0 by 1.0 + ^ +t3518.scala:4: warning: method to in trait FractionalProxy is deprecated (since 2.12.6): use BigDecimal range instead + val r3 = 10.0 to 1.0 by -0.5 + ^ +t3518.scala:5: warning: method until in trait FractionalProxy is deprecated (since 2.12.6): use BigDecimal range instead + val r4 = 1.0 until 1.0 by 1.0 + ^ diff --git a/test/files/run/t3518.flags b/test/files/run/t3518.flags new file mode 100644 index 000000000000..dcc59ebe32ef --- /dev/null +++ b/test/files/run/t3518.flags @@ -0,0 +1 @@ +-deprecation diff --git a/test/files/run/t4201.check b/test/files/run/t4201.check new file mode 100644 index 000000000000..d5258453a611 --- /dev/null +++ b/test/files/run/t4201.check @@ -0,0 +1,3 @@ +t4201.scala:3: warning: method to in trait FractionalProxy is deprecated (since 2.12.6): use BigDecimal range instead + val f = 0.0 to 1.0 by 1.0 / 3.0 + ^ diff --git a/test/files/run/t4201.flags b/test/files/run/t4201.flags new file mode 100644 index 000000000000..dcc59ebe32ef --- /dev/null +++ b/test/files/run/t4201.flags @@ -0,0 +1 @@ +-deprecation diff --git a/test/files/run/t5857.check b/test/files/run/t5857.check new file mode 100644 index 000000000000..2fda7fad3af5 --- /dev/null +++ b/test/files/run/t5857.check @@ -0,0 +1,6 @@ +t5857.scala:25: warning: method to in trait FractionalProxy is deprecated (since 2.12.6): use BigDecimal range instead + val numeric = 1.0 to sz.toDouble by 1 + ^ +t5857.scala:29: warning: method to in trait FractionalProxy is deprecated (since 2.12.6): use BigDecimal range instead + val numdesc = sz.toDouble to 1.0 by -1 + ^ diff --git a/test/files/run/t5857.flags b/test/files/run/t5857.flags new file mode 100644 index 000000000000..dcc59ebe32ef --- /dev/null +++ b/test/files/run/t5857.flags @@ -0,0 +1 @@ +-deprecation diff --git a/test/files/run/t9656.check b/test/files/run/t9656.check index 03e3ff3b5fcb..8cbae611650b 100644 --- a/test/files/run/t9656.check +++ b/test/files/run/t9656.check @@ -1,3 +1,9 @@ +t9656.scala:17: warning: method until in trait FractionalProxy is deprecated (since 2.12.6): use BigDecimal range instead + println(0.1 until 1.0 by 0.1) + ^ +t9656.scala:19: warning: method apply in object Double is deprecated (since 2.12.6): use Range.BigDecimal instead + println(Range.Double(0.1, 1.0, 0.1)) + ^ Range 1 to 10 Range 1 to 10 inexact Range 1 to 10 by 2 diff --git a/test/files/run/t9656.flags b/test/files/run/t9656.flags new file mode 100644 index 000000000000..dcc59ebe32ef --- /dev/null +++ b/test/files/run/t9656.flags @@ -0,0 +1 @@ +-deprecation From 694f620f362b0b7ec18547e4dee48f908798cfa4 Mon Sep 17 00:00:00 2001 From: Jasper Moeys Date: Wed, 25 Apr 2018 11:32:17 +0200 Subject: [PATCH 1333/2793] [backport] Unmangle the output of //print in the REPL The old behavior is available as //printRaw Backport of scala/scala#6553 --- .../nsc/interpreter/PresentationCompilerCompleter.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala index a912ec9749f8..0ae867637421 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala @@ -33,6 +33,7 @@ class PresentationCompilerCompleter(intp: IMain) extends Completion { // secret handshakes val slashPrint = """.*// *print *""".r + val slashPrintRaw = """.*// *printRaw *""".r val slashTypeAt = """.*// *typeAt *(\d+) *(\d+) *""".r val 
Cursor = IMain.DummyCursorFragment + " " @@ -118,7 +119,10 @@ class PresentationCompilerCompleter(intp: IMain) extends Completion { case Left(_) => Completion.NoCandidates case Right(result) => try { buf match { - case slashPrint() if cursor == buf.length => print(result) + case slashPrint() if cursor == buf.length => + val c = print(result) + c.copy(candidates = c.candidates.map(intp.naming.unmangle)) + case slashPrintRaw() if cursor == buf.length => print(result) case slashTypeAt(start, end) if cursor == buf.length => typeAt(result, start.toInt, end.toInt) case _ => candidates(result) } From 70d41ffe633d00f022257b1d21f30a70e236e40d Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 26 Apr 2018 11:29:28 +0200 Subject: [PATCH 1334/2793] Fix tests on windows --- .../files/run/t4841-isolate-plugins/t4841-isolate-plugin.scala | 2 +- test/files/run/t7634.javaopts | 1 + test/scalacheck/sanitycheck.scala | 3 ++- 3 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 test/files/run/t7634.javaopts diff --git a/test/files/run/t4841-isolate-plugins/t4841-isolate-plugin.scala b/test/files/run/t4841-isolate-plugins/t4841-isolate-plugin.scala index 06902755ae5e..91dac2598593 100644 --- a/test/files/run/t4841-isolate-plugins/t4841-isolate-plugin.scala +++ b/test/files/run/t4841-isolate-plugins/t4841-isolate-plugin.scala @@ -33,7 +33,7 @@ object Test extends DirectTest { override def show() = { val dirs = 1 to 2 map (compilePlugin(_)) - val plugins = dirs.map(d => s"$d:${testOutput.path}").mkString(",") + val plugins = dirs.map(d => s"$d${java.io.File.pathSeparator}${testOutput.path}").mkString(",") compile("-Xdev", s"-Xplugin:$plugins", "-usejavacp", "-d", testOutput.path) } } diff --git a/test/files/run/t7634.javaopts b/test/files/run/t7634.javaopts new file mode 100644 index 000000000000..b0c90bb1f73a --- /dev/null +++ b/test/files/run/t7634.javaopts @@ -0,0 +1 @@ +-Dneeds.forked.jvm.for.windows diff --git a/test/scalacheck/sanitycheck.scala b/test/scalacheck/sanitycheck.scala index 3b6a7a3d9f2e..cedd52ffbc28 100644 --- a/test/scalacheck/sanitycheck.scala +++ b/test/scalacheck/sanitycheck.scala @@ -6,7 +6,8 @@ object SanityCheck extends Properties("SanityCheck") { property("classpath correct") = { val codeSource = classOf[Option[_]].getProtectionDomain.getCodeSource.getLocation.toURI val path = new File(codeSource).getAbsolutePath - if (path.endsWith("quick/classes/library")) + val s = java.io.File.separator + if (path.endsWith(s"quick${s}classes${s}library")) Prop.proved else Prop.falsified :| s"Unexpected code source for scala library: $path" From 82a3a2e729a764f958fa85eb803bc555dbdfa1d1 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Fri, 27 Apr 2018 17:17:30 +0200 Subject: [PATCH 1335/2793] bump version to 2.12.7, restarr onto 2.12.6 --- build.sbt | 2 +- versions.properties | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/build.sbt b/build.sbt index 2ab4ef3f6b40..3793e5128284 100644 --- a/build.sbt +++ b/build.sbt @@ -106,7 +106,7 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( // should not be set directly. It is the same as the Maven version and derived automatically from `baseVersion` and // `baseVersionSuffix`. 
globalVersionSettings -baseVersion in Global := "2.12.6" +baseVersion in Global := "2.12.7" baseVersionSuffix in Global := "SNAPSHOT" mimaReferenceVersion in Global := Some("2.12.0") diff --git a/versions.properties b/versions.properties index ba8a94d56261..eefe3afcf1f2 100644 --- a/versions.properties +++ b/versions.properties @@ -1,5 +1,5 @@ # Scala version used for bootstrapping (see README.md) -starr.version=2.12.5 +starr.version=2.12.6 # The scala.binary.version determines how modules are resolved. It is set as follows: # - After 2.x.0 is released, the binary version is 2.x From dabe47c60f0155b3d3686e61ca7867dd21c31deb Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Fri, 27 Apr 2018 19:01:22 +0200 Subject: [PATCH 1336/2793] bundle newer scala-swing version just keeping current. --- versions.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/versions.properties b/versions.properties index eefe3afcf1f2..ed01a92413cf 100644 --- a/versions.properties +++ b/versions.properties @@ -21,7 +21,7 @@ scala.binary.version=2.12 # - partest: used for running the tests scala-xml.version.number=1.0.6 scala-parser-combinators.version.number=1.0.7 -scala-swing.version.number=2.0.0 +scala-swing.version.number=2.0.3 partest.version.number=1.1.7 scala-asm.version=6.0.0-scala-1 jline.version=2.14.6 From f564a80a67448fddc8f7b6645d87ff6fd134806d Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 2 May 2018 19:08:14 +0200 Subject: [PATCH 1337/2793] add code of conduct to root of repository this is a GitHub standard now, see https://help.github.com/articles/adding-a-code-of-conduct-to-your-project/ --- CODE_OF_CONDUCT.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 CODE_OF_CONDUCT.md diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 000000000000..8bef56b65522 --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1 @@ +all repositories in the [scala](https://github.com/scala) and [scalacenter](https://github.com/scalacenter) organizations are covered by the Scala Code of Conduct: https://scala-lang.org/conduct/ From b62fa5d3313906075ed935aefb3fe7594f713c4f Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 2 May 2018 20:09:45 +0200 Subject: [PATCH 1338/2793] Scala hasn't included sizzle for quite a while now --- doc/LICENSE.md | 1 - doc/License.rtf | 1 - doc/licenses/mit_sizzle.txt | 13 ------------- 3 files changed, 15 deletions(-) delete mode 100644 doc/licenses/mit_sizzle.txt diff --git a/doc/LICENSE.md b/doc/LICENSE.md index d50407882738..fd489c64b7ac 100644 --- a/doc/LICENSE.md +++ b/doc/LICENSE.md @@ -56,5 +56,4 @@ This license is used by the following third-party libraries: This license is used by the following third-party libraries: * jquery - * sizzle * tools tooltip diff --git a/doc/License.rtf b/doc/License.rtf index e653960ebb42..30e6912281d5 100644 --- a/doc/License.rtf +++ b/doc/License.rtf @@ -53,5 +53,4 @@ THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \'93AS IS\'9 \fs26 This license is used by the following third-party libraries:\ \'95 jquery\ - \'95 sizzle\ \'95 tools tooltip\ diff --git a/doc/licenses/mit_sizzle.txt b/doc/licenses/mit_sizzle.txt deleted file mode 100644 index d81d30aa0f5c..000000000000 --- a/doc/licenses/mit_sizzle.txt +++ /dev/null @@ -1,13 +0,0 @@ -Scala includes the Sizzle library: - -Copyright (c) 2010 The Dojo Foundation - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation 
files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. From 081573777927267bd0234055c15036de65917d47 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 3 May 2018 11:01:42 +0200 Subject: [PATCH 1339/2793] Split out NOTICE file from LICENSE So that `licensee` detects our license properly. --- LICENSE | 2 -- NOTICE | 1 + 2 files changed, 1 insertion(+), 2 deletions(-) create mode 100644 NOTICE diff --git a/LICENSE b/LICENSE index 01e01d7fa7f4..57f166ceab73 100644 --- a/LICENSE +++ b/LICENSE @@ -1,5 +1,3 @@ -This software includes projects with other licenses -- see `doc/LICENSE.md`. - Copyright (c) 2002-2018 EPFL Copyright (c) 2011-2018 Lightbend, Inc. diff --git a/NOTICE b/NOTICE new file mode 100644 index 000000000000..a15b912aa44f --- /dev/null +++ b/NOTICE @@ -0,0 +1 @@ +This software includes projects with other licenses -- see `doc/LICENSE.md`. From 88e34c2597cc4dbd326572ebb3098d54e1cdf9b4 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 23 Apr 2018 08:35:32 +0100 Subject: [PATCH 1340/2793] Avoid unneeded tree duplicate/reset in default getter, case class synth More of the same as #5875 The change to default getters reduced tree churn by 6x in a real world project. --- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 6 +++--- src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 3 ++- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index d36a91669fdb..806025c026c8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1436,8 +1436,8 @@ trait Namers extends MethodSynthesis { * typechecked, the corresponding param would not yet have the "defaultparam" * flag. 
*/ - private def addDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], tparams: List[TypeDef], overridden: Symbol) { - val DefDef(_, _, rtparams0, rvparamss0, _, _) = resetAttrs(ddef.duplicate) + private def addDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], tparams: List[TypeDef], overridden: Symbol): Unit = { + val DefDef(_, _, rtparams0, rvparamss0, _, _) = resetAttrs(deriveDefDef(ddef)(_ => EmptyTree).duplicate) // having defs here is important to make sure that there's no sneaky tree sharing // in methods with multiple default parameters def rtparams = rtparams0.map(_.duplicate) @@ -1523,7 +1523,7 @@ trait Namers extends MethodSynthesis { return // fix #3649 (prevent crash in erroneous source code) } } - val ClassDef(_, _, rtparams, _) = resetAttrs(cdef.duplicate) + val ClassDef(_, _, rtparams, _) = resetAttrs(deriveClassDef(cdef)(_ => Template(Nil, noSelfType, Nil)).duplicate) defTparams = rtparams.map(rt => copyTypeDef(rt)(mods = rt.mods &~ (COVARIANT | CONTRAVARIANT))) nmr } diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala index 909157212578..0945c68add20 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala @@ -60,7 +60,8 @@ trait Unapplies extends ast.TreeDSL { } private def constrTparamsInvariant(cdef: ClassDef): List[TypeDef] = { - val ClassDef(_, _, tparams, _) = resetAttrs(cdef.duplicate) + val prunedClassDef = deriveClassDef(cdef)(tmpl => Template(Nil, noSelfType, Nil)) + val ClassDef(_, _, tparams, _) = resetAttrs(prunedClassDef.duplicate) val tparamsInvariant = tparams.map(tparam => copyTypeDef(tparam)(mods = tparam.mods &~ (COVARIANT | CONTRAVARIANT))) tparamsInvariant } From 55a08daab2f4fc01a7f2f1a5c7d770c91b17aa04 Mon Sep 17 00:00:00 2001 From: Jasper Moeys Date: Fri, 4 May 2018 13:56:00 +0200 Subject: [PATCH 1341/2793] Support underline markdown in scaladoc Fixes scala/bug#10861 --- src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css index bbff0c937c0d..488bf3b8b56d 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.css @@ -111,6 +111,10 @@ margin: 0px; } +u { + text-decoration: underline; +} + a { cursor: pointer; text-decoration: none; From 7190b76083f77c66e06e76242caf99e7c3769010 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 4 May 2018 17:17:03 +0200 Subject: [PATCH 1342/2793] Revert part of hastily merged #6449 --- src/reflect/scala/reflect/internal/Trees.scala | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 288478a9b152..7b78fca09b58 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -545,8 +545,6 @@ trait Trees extends api.Trees { object Select extends SelectExtractor case class Ident(name: Name) extends RefTree with IdentApi { - if (name.string_==("rc6")) - "".reverse def qualifier: Tree = EmptyTree def isBackquoted = this.hasAttachment[BackquotedIdentifierAttachment.type] } From 80f165a631c675eeb593031f8cff541c7e8ca15e Mon Sep 17 00:00:00 2001 From: Yang Bo Date: Fri, 4 May 2018 23:44:26 +0800 Subject: [PATCH 
1343/2793] Replace UnApply to Apply in resetLocalAttris --- src/compiler/scala/tools/nsc/ast/Trees.scala | 4 ++++ test/files/run/idempotency-extractors.check | 2 +- test/files/run/idempotency-extractors.scala | 5 +++-- 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index c93c3ddca23f..80f4ac9f1c18 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -287,6 +287,10 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => transform(fn) case EmptyTree => tree + // The typer does not accept UnApply. Replace it to Apply, which can be retyped. + case UnApply(Apply(Select(prefix, termNames.unapply | termNames.unapplySeq), + List(Ident(termNames.SELECTOR_DUMMY))), args) => + Apply(prefix, transformTrees(args)) case _ => val dupl = tree.duplicate // Typically the resetAttrs transformer cleans both symbols and types. diff --git a/test/files/run/idempotency-extractors.check b/test/files/run/idempotency-extractors.check index fcd50faa7905..e9bcb720077b 100644 --- a/test/files/run/idempotency-extractors.check +++ b/test/files/run/idempotency-extractors.check @@ -2,4 +2,4 @@ 2 match { case Test.this.Extractor.unapply() ((x @ _)) => x } -error! +2 diff --git a/test/files/run/idempotency-extractors.scala b/test/files/run/idempotency-extractors.scala index 590147cae6a3..c7df2e734121 100644 --- a/test/files/run/idempotency-extractors.scala +++ b/test/files/run/idempotency-extractors.scala @@ -14,9 +14,10 @@ object Test extends App { println(textractor) val rtextractor = tb.untypecheck(textractor) try { + // should print 2 without error println(tb.eval(rtextractor)) } catch { - // this is the current behaviour, rather than the desired behavior; see scala/bug#5465 + // this is the old behaviour, rather than the desired behavior; see scala/bug#5465 case _: ToolBoxError => println("error!") } -} \ No newline at end of file +} From ace992697c018b49483de0d2d1f887efa4077c43 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 8 May 2018 10:28:11 -0700 Subject: [PATCH 1344/2793] Support old info method in limiting reporter The forwarding reporter knows the new API, so let limiting reporter decode calls to info0 via info. `info0` is protected and can't be forwarded. 
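For context, an illustrative sketch (not compiler source, with made-up class names) of why the forwarder cannot simply delegate: Scala only allows a protected member to be selected on `this` or on a prefix whose type conforms to the accessing class, so a wrapper holding a `delegate` typed as the parent class cannot call the delegate's protected `info0` and has to go through the public entry points instead. The override added to `LimitingReporter` below takes that route, decoding the severity into the delegate's public `error`/`warning`/`echo`/`info` calls.

```scala
// Illustration only: a hypothetical minimal reporter hierarchy, not the real scala.tools.nsc API.
abstract class BaseReporter {
  protected def info0(msg: String): Unit    // the protected hook that subclasses implement
  def info(msg: String): Unit = info0(msg)  // public entry point that funnels into it
}

class Forwarder(delegate: BaseReporter) extends BaseReporter {
  // protected def info0(msg: String): Unit = delegate.info0(msg)
  //   ^ does not compile: access to protected method info0 is not permitted,
  //     because the prefix type BaseReporter does not conform to class Forwarder
  protected def info0(msg: String): Unit = delegate.info(msg)  // re-express via the public API
}
```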
--- .../scala/tools/nsc/reporters/LimitingReporter.scala | 10 ++++++++++ src/reflect/scala/reflect/internal/Reporting.scala | 2 +- .../tools/nsc/reporters/ConsoleReporterTest.scala | 7 +++++++ 3 files changed, 18 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala b/src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala index 68a1319b4d4e..46f35d1d7439 100644 --- a/src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala @@ -13,4 +13,14 @@ class LimitingReporter(settings: Settings, override protected val delegate: Inte case WARNING => warningCount < settings.maxwarns.value case _ => true } + // work around fractured API to support `reporters.Reporter.info` + override protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = delegate match { + case r: Reporter => + severity match { + case ERROR => r.error(pos, msg) + case WARNING => r.warning(pos, msg) + case _ => if (force) r.echo(pos, msg) else r.info(pos, msg, force = false) + } + case _ => super.info0(pos, msg, severity, force) + } } diff --git a/src/reflect/scala/reflect/internal/Reporting.scala b/src/reflect/scala/reflect/internal/Reporting.scala index 8238327cc730..56a627f41724 100644 --- a/src/reflect/scala/reflect/internal/Reporting.scala +++ b/src/reflect/scala/reflect/internal/Reporting.scala @@ -126,7 +126,7 @@ trait ForwardingReporter extends Reporter { protected val delegate: Reporter /* Always throws `UnsupportedOperationException`. */ - protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Nothing = + protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = throw new UnsupportedOperationException(s"$msg ($pos)") override def echo(pos: Position, msg: String) = delegate.echo(pos, msg) diff --git a/test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala b/test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala index de907fb9db51..70958c20fbf2 100644 --- a/test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala +++ b/test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala @@ -197,4 +197,11 @@ class ConsoleReporterTest { testHelper(posWithSource, msg = "Testing display for maxerrs to pass", severity = "error: ")(filter.error(_, "Testing display for maxerrs to pass")) testHelper(msg = "")(filter.error(_, "Testing display for maxerrs to fail")) } + + @Test + def filteredInfoTest(): Unit = { + val reporter = new LimitingReporter(new Settings, new StoreReporter) + // test obsolete API, make sure it doesn't throw + reporter.info(NoPosition, "goodbye, cruel world", force = false) + } } From dfaf865ae8e3a14c595f0fa9a6b631d3fb1fb411 Mon Sep 17 00:00:00 2001 From: Kamil Duda Date: Thu, 10 May 2018 20:06:03 +0200 Subject: [PATCH 1345/2793] Fixes scala/bug#10864 Remove `EXPRmode` references from `type TypecheckMode` comments --- src/compiler/scala/tools/reflect/ToolBox.scala | 4 ++-- src/reflect/scala/reflect/macros/Typers.scala | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala index a37dd609f63e..fc3b78e37cf1 100644 --- a/src/compiler/scala/tools/reflect/ToolBox.scala +++ b/src/compiler/scala/tools/reflect/ToolBox.scala @@ -23,7 +23,7 @@ trait ToolBox[U <: scala.reflect.api.Universe] { /** Represents mode of operations of the typechecker underlying `c.typecheck` calls. 
* Is necessary since the shape of the typechecked tree alone is not enough to guess how it should be typechecked. - * Can be EXPRmode (typecheck as a term), TYPEmode (typecheck as a type) or PATTERNmode (typecheck as a pattern). + * Can be TERMmode (typecheck as a term), TYPEmode (typecheck as a type) or PATTERNmode (typecheck as a pattern). */ type TypecheckMode @@ -47,7 +47,7 @@ trait ToolBox[U <: scala.reflect.api.Universe] { typecheck(tree, TERMmode, pt, silent, withImplicitViewsDisabled, withMacrosDisabled) /** Typechecks a tree against the expected type `pt` - * under typechecking mode specified in `mode` with [[EXPRmode]] being default. + * under typechecking mode specified in `mode` with [[TERMmode]] being default. * This populates symbols and types of the tree and possibly transforms it to reflect certain desugarings. * * If the tree has unresolved type variables (represented as instances of `FreeTypeSymbol` symbols), diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala index d242e3a54ac2..37a075dc9c4a 100644 --- a/src/reflect/scala/reflect/macros/Typers.scala +++ b/src/reflect/scala/reflect/macros/Typers.scala @@ -25,7 +25,7 @@ trait Typers { /** Represents mode of operations of the typechecker underlying `c.typecheck` calls. * Is necessary since the shape of the typechecked tree alone is not enough to guess how it should be typechecked. - * Can be EXPRmode (typecheck as a term), TYPEmode (typecheck as a type) or PATTERNmode (typecheck as a pattern). + * Can be TERMmode (typecheck as a term), TYPEmode (typecheck as a type) or PATTERNmode (typecheck as a pattern). */ // I'd very much like to make use of https://github.com/dsl-paradise/dsl-paradise here! type TypecheckMode @@ -58,7 +58,7 @@ trait Typers { typecheck(tree, TERMmode, pt, silent, withImplicitViewsDisabled, withMacrosDisabled) /** Typechecks the provided tree against the expected type `pt` in the macro callsite context - * under typechecking mode specified in `mode` with [[EXPRmode]] being default. + * under typechecking mode specified in `mode` with [[TERMmode]] being default. * This populates symbols and types of the tree and possibly transforms it to reflect certain desugarings. * * If `silent` is false, `TypecheckException` will be thrown in case of a typecheck error. From 83576634d5eca43ab064f4b90d535007723a9362 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Fri, 11 May 2018 15:49:27 +0200 Subject: [PATCH 1346/2793] don't let Travis-CI fail on every PR in order to test #6621 we needed to enable Travis-CI on pull requests, but without this change, every PR failed --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 4abdda13c070..2d2da13b8cc5 100644 --- a/.travis.yml +++ b/.travis.yml @@ -12,7 +12,7 @@ cache: before_script: - - (cd admin && ./init.sh) + - 'if [ "$TRAVIS_PULL_REQUEST" = "false" ]; then (cd admin && ./init.sh); fi' stages: - name: build # also builds the spec using jekyll From e3b2ae9291e757baf861f737627e8eeaa4de1aa1 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Fri, 11 May 2018 11:49:31 +0200 Subject: [PATCH 1347/2793] use Travis for (vastly simpler) PR validation use only sbt, avoid using external shell scripts and environment variables. we just need a few simple commands right here in .travis.yml doesn't publish anything to Artifactory. 
let's discuss the way forward on that at https://github.com/scala/scala-dev/issues/507 --- .travis.yml | 78 +++++++++++----------------------- scripts/travis-publish-spec.sh | 19 +++++---- 2 files changed, 36 insertions(+), 61 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2d2da13b8cc5..e678559fce58 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,42 +1,30 @@ -sudo: required # GCE VMs have better performance (will be upgrading to premium VMs soon) +# GCE VMs have better performance (will be upgrading to premium VMs soon) +sudo: required language: scala jdk: openjdk8 - cache: directories: - $HOME/.ivy2/cache - $HOME/.sbt - - build/ - - -before_script: - - 'if [ "$TRAVIS_PULL_REQUEST" = "false" ]; then (cd admin && ./init.sh); fi' stages: - - name: build # also builds the spec using jekyll - # tests are running into time limits (will re-enable once Jason's partest speedups are in) - - name: test - if: env(bla) = thisVarIsNotSet AND type != pull_request # just disabling tests for now, but commenting the stage here doesn't do the trick - - name: publish - if: type != pull_request + - name: build -# see comment in `bootstrap_fun` for details on the procedure -# env available in each stage -# - by travis config (see below): secret env vars -# - by `common` script: WORKSPACE, IVY2_DIR, SBT_CMD, integrationRepoUrl -# - by `bootstrap_fun`: publishPrivateTask, ... -# env computed in first stage, passed on to later stages with the `build/env` file -# - by `determineScalaVersion`: SCALA_VER, publishToSonatype -# - by `buildModules` / `constructUpdatedModuleVersions`: updatedModuleVersions jobs: include: + + # full bootstrap and publish - stage: build - # currently, not touching PR validation - # (also, we couldn't even, because the password to publish to artifactory is not there :-/) if: type != pull_request script: + # see comment in `bootstrap_fun` for details on the procedure + # env available in each stage + # - by travis config (see below): secret env vars + # - by `common` script: WORKSPACE, IVY2_DIR, SBT_CMD, integrationRepoUrl + # - by `bootstrap_fun`: publishPrivateTask, ... + - (cd admin && ./init.sh) - source scripts/common - source scripts/bootstrap_fun - determineScalaVersion @@ -45,38 +33,26 @@ jobs: - if [ ! 
-z "$STARR_REF" ]; then buildStarr; fi - buildLocker - buildQuick - - set | grep -E '^SCALA_VER=|^SCALA_BINARY_VER=|^XML_VER=|^PARTEST_VER=|^SCALACHECK_VER=|^XML_BUILT=|^PARTEST_BUILT=|^SCALACHECK_BUILT=|^updatedModuleVersions=|^publishToSonatype=' > build/env - - cat build/env - triggerScalaDist - # this builds the spec using jekyll - # based on http://www.paperplanes.de/2013/8/13/deploying-your-jekyll-blog-to-s3-with-travis-ci.html + # pull request validation (w/ mini-bootstrap) + - stage: build + if: type = pull_request + script: + - sbt -warn setupPublishCore generateBuildCharacterPropertiesFile publishLocal + - STARR=`cat buildcharacter.properties | grep ^maven.version.number | cut -d= -f2` && echo $STARR + - sbt -Dstarr.version=$STARR -warn setupValidateTest test:compile info testAll + + # build the spec using jekyll - stage: build - script: bundle exec jekyll build -s spec/ -d build/spec rvm: 2.2 install: bundle install - # the key is restricted using forced commands so that it can only upload to the directory we need here - after_success: ./scripts/travis-publish-spec.sh - - # be careful to not set any env vars, as this will result in a cache miss - - &test - stage: test - before_script: - - source build/env - - if [ -z "$SCALA_VER" ]; then echo "Environment not propagated. Caching issue?"; cat build/env ; exit 1; fi - - source scripts/common - - source scripts/bootstrap_fun - # - find build -type f -exec touch {} + # "set antStyle := true" seems to cause really long compiles for the test suite?? - script: invokeQuick testRest # shouldn't rebuild, since build/ is cached - - <<: *test - script: invokeQuick testPosPres - - <<: *test - script: invokeQuick testRun - - script: testStability + script: + - 'if [ "$TRAVIS_PULL_REQUEST" = "false" ]; then (cd admin && ./init.sh); fi' + - bundle exec jekyll build -s spec/ -d build/spec + after_success: + - 'if [ "$TRAVIS_PULL_REQUEST" = "false" ]; then ./scripts/travis-publish-spec.sh; fi' -# cat /dev/urandom | head -c 10000 | openssl sha1 > ./secret -# openssl aes-256-cbc -pass "file:./secret" -in id_dsa_spec212_b4096 -out spec/id_dsa_travis.enc -a -# travis encrypt "PRIV_KEY_SECRET=`cat ./secret`" env: global: - secure: "TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0TNVaPIXjF8bA8KyNat9xGNzhnWm2/2BMaWpKBJWRF7Jb+zHhijMYCJEbkMtoiE5R/mY=" # PRIV_KEY_SECRET, for scripts/travis-publish-spec.sh @@ -86,10 +62,6 @@ env: - secure: "dbAvl6KEuLwZ0MVQPZihFsPzCdiLbX0EFk3so+hcfEbksrmLQ1tn4X5ZM7Wy1UDR8uN9lxngEwHch7a7lKqpugzmXMew9Wnikr9WBWbJT77Z+XJ/jHI6YuiCRpRo+nvxXGp9Ry80tSIgx5eju0J83IaJL41BWlBkvyAd7YAHORI=" # GPG_SUBKEY_SECRET - secure: "ee0z/1jehBjFa2M2JlBHRjeo6OEn/zmVl72ukBP1ISeKqz18Cswc4gDI5tV9RW9SlYFLkIlGsR2qnRCyJ/pqgQLcNdrpsCRFFc79oyLhfEtmPdAHlWfj4RSP68zINRtDdFuJ8iSy8XYP0NaqpVIYpkNdv9I6q7N85ljmMQpHO+U=" # TRAVIS_TOKEN (login with GitHub as lrytz) - -# using S3 would be simpler, but we want to upload to scala-lang.org -# after_success: bundle exec s3_website push --headless - before_cache: # Cleanup the cached directories to avoid unnecessary cache updates - find $HOME/.ivy2/cache -name "ivydata-*.properties" -print -delete diff --git a/scripts/travis-publish-spec.sh b/scripts/travis-publish-spec.sh index a9f3bcca84f8..fe29ea06783a 100755 --- a/scripts/travis-publish-spec.sh +++ b/scripts/travis-publish-spec.sh @@ -1,11 +1,14 @@ #!/bin/bash -if [ "${PRIV_KEY_SECRET}" != "" -a "${TRAVIS_PULL_REQUEST}" = "false" ] ; then - openssl aes-256-cbc -pass "pass:$PRIV_KEY_SECRET" -in 
spec/id_dsa_travis.enc -out spec/id_dsa_travis -d -a - chmod 600 spec/id_dsa_travis - eval "$(ssh-agent)" - ssh-add -D - ssh-add spec/id_dsa_travis - rsync -e "ssh -o StrictHostKeyChecking=no" -rzv build/spec/ scalatest@chara.epfl.ch:/home/linuxsoft/archives/scala/spec/2.12/ -fi +# based on http://www.paperplanes.de/2013/8/13/deploying-your-jekyll-blog-to-s3-with-travis-ci.html +set -e +openssl aes-256-cbc -pass "pass:$PRIV_KEY_SECRET" -in spec/id_dsa_travis.enc -out spec/id_dsa_travis -d -a +chmod 600 spec/id_dsa_travis +eval "$(ssh-agent)" +ssh-add -D +ssh-add spec/id_dsa_travis + +# the key is restricted using forced commands so that it can only upload to the directory we need here +rsync -e "ssh -o StrictHostKeyChecking=no" -rzv build/spec/ \ + scalatest@chara.epfl.ch:/home/linuxsoft/archives/scala/spec/2.12/ From 64a1a5064b9ae035df4ab448f1a55b1e8e37d39c Mon Sep 17 00:00:00 2001 From: Georgi Chochov Date: Sat, 12 May 2018 20:18:53 +0200 Subject: [PATCH 1348/2793] Improve documentation on BufferLike::remove --- src/library/scala/collection/mutable/BufferLike.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala index c78d59297be9..4b3cad0ba1e3 100644 --- a/src/library/scala/collection/mutable/BufferLike.scala +++ b/src/library/scala/collection/mutable/BufferLike.scala @@ -100,7 +100,7 @@ trait BufferLike[A, +This <: BufferLike[A, This] with Buffer[A]] * * @param n the index which refers to the element to delete. * @return the previous element at index `n` - * @throws IndexOutOfBoundsException if the if the index `n` is not in the valid range + * @throws IndexOutOfBoundsException if the index `n` is not in the valid range * `0 <= n < length`. */ def remove(n: Int): A From b45e01373f2f778d9bcf69699d8fa9097cdb3659 Mon Sep 17 00:00:00 2001 From: Philippus Baalman Date: Tue, 15 May 2018 12:39:42 +0200 Subject: [PATCH 1349/2793] Create png version of diagram and remove ScalaObject from it see scala/docs.scala-lang/issues/615 --- spec/12-the-scala-standard-library.md | 2 +- spec/public/images/classhierarchy.png | Bin 0 -> 117555 bytes 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 spec/public/images/classhierarchy.png diff --git a/spec/12-the-scala-standard-library.md b/spec/12-the-scala-standard-library.md index d17bf757eff7..76165b8a2c45 100644 --- a/spec/12-the-scala-standard-library.md +++ b/spec/12-the-scala-standard-library.md @@ -10,7 +10,7 @@ The Scala standard library consists of the package `scala` with a number of classes and modules. Some of these classes are described in the following. 
-![Class hierarchy of Scala](public/images/classhierarchy.pdf) +![Class hierarchy of Scala](public/images/classhierarchy.png) ## Root Classes diff --git a/spec/public/images/classhierarchy.png b/spec/public/images/classhierarchy.png new file mode 100644 index 0000000000000000000000000000000000000000..3da25ecbf2d579a8330754ff3d6173c4a6dcfa57 GIT binary patch literal 117555 zcmeFZg;$i(_c!_g0*WAtl$02NG=g*(2nYyB%nUW8)KC&bmr?=(5(CoGFf$C@5~6^B zGy~ElHMB}dzR&pm{@(Ylb?;sGAGogNa-KS8?|t@X=Q&2a&{UzJVxa;6fJXJ%Qyl=H z!T|s!#YHmkjv7h~0{#Pg@mx>o{QNv6B?StFMnpu+&dzdja_;W#f)AILmS|{bgoK1F zEG&9^d+X}zo;-PymzO6cC3SRkR8mr+uC9(mBHP;97#SHyM@JC|#DxnN0s;c!$+5%t@*K$Rqm9V{{s8|q$zLVwSB8}`8=OQxnbbMmci_gQEuUI% zHcXoTo@-cvx9}Q|vFo6OHkU=TH~Vm`=bg+)C$}(mwM`}nj{qO}7LgfCKeyPGdG_Pl zgTpLKP-%*c{Wn(;f>z9`H$XLMeKOBmd?gL)vgDf~Kf+hxF*(RDW*D}VmM~+4q;%Lb zDU{{Jry11Nj#7q97rfYPf3pK#6M&qY-6!=w%bsK++*vnvPVy1k)RZCI zncmXD&B%H9?)^n?Y->ATIVjS)+|V1<{Ays<^0z5S1SgEfb+1^5U!~h^u&={(e>K*a z{Lt=!p$}KHvbrkDBQ0*my|EXa5I?IM*MuE!2Of0CrL6<=_(fAId8Wi4UU?dY6K@F!nuti?+IHP7=+ zu4zB@sVz2By4tb2hpk_YI(EQ`R_q>IW`WD^uSiV$jQ$!r7Qz`;*_h&7k&}pdkqxoZ zh7G8A(NB%W%11cTlUqWV+nQLume5Qi4sIu*JB%{>_4l8fMl6v-LksI<*Z0}qF4;TZ z%sowz=ufQy0Ka16)9sf*b@-_1+kp!-u%5grd-4?7k{~>t$b0mVY)Ed;4WIf6CD>j) zTosYt`zG&tPQM|399Y2^qX<7t9AH;us#7fL!fBtmf_CqOxn^b!I$-dm(DRLZ_9kO^ zRHzR;u2}2ctrOqg#>9*;vl5Z%LXmK-?G~?NX8+XBUF~;43kpi!)4f_JH#!ijZe}#j zXEjWz=((A2lphsyh3-Z2*cM2Fd|Ot5o3NThk2-B8Fj(a3TTj?$axRVgLrL>w>=d|6 z0*YSi7IdgmNzD0ft_ysIBb&bFD{1f(Kc{y}VXyDA@CmH{X=9`pr*xbZ?>OOCFaUNy z$%_#>q0$_&p0YAZ-d9rppe0o5(JNmqMe+eJQGMeHRyLRsi2&^|d7Iyaoab$W;sC8f z5{i;Ap zFW(ddyrk=oX1f3tuC%nOVY8v6iLs;Q{3%_@K|DWGbt0xjZBOPFx{?H~n#;b`)&`_2qZVuBtNUXYQIR3I%ahLgeJbl;~4$XGbxGYz92P1hg4* z?f+r^ZTXB62ue=CoU_9>scQ~aywd00+luGQj7N&oig_D+HW8?(yYk*)m2AZnaV#23 z=p~3*tScVXy>>2(Fa*W8%`@xUb?RDsAFhWjs!^$i77)00PnZkt28Ui@lsE^5jf~>H z*@s%wcGUS>GX3ELPGssEczteSK5ob|aU<43=x!=XyRyPHZkI}aHdKarZZg2W5WT2 zpq;^i;e04)R!@I2E)FW)9|?ZKXIW5KcC+DP6E61F^9VBaE&&IfNw#eUS~-(hsgBJh zd4~5_0H8%1=MfL_EK;jFUVEvE+7_zE<)q#%_~b&dDqwVM=fVrCjv_ee#HpeGXyTM} z6`I_~f${JkMt5K&0q>2gz3!KIYo#z9g-&y-eVC*=r0u+FU#@LVpTMYu_)c>*X#?0@ zzQ3iy=exoP1dYXE&NHZAdGNGPl1u$Iz_(!9tz3~B)6>eZ{;?Tg_X!S)n34L(_VOp( zUHqo0fMHYPdC>Mn(C`+1!&krIHg_8eoV&~UT>N*Kl|QHmo)i}>tQ<|SUjwq#XhaN} z6;KG@-yF3!-8h!mWQwg~U$npRXlLxV5lZXUYQYG%kaI!%_O-D6eLYAD)z%6g3- zUi($?vC#JeGd9a?t58$RCMuvs16PCIm0N`rVrI1rm&U_k{TVMx3L&S8uUO&Jnt1I9 zmLH`visR)QZlcTn{+YGmm-F1?IS^_lkL{@I@?`eBZcH8o15s^?KoYP; zZ=kQyAH@`!LV^Uc@`H}rnU5q~2=9uB$H5j`_$+i=OU7#KV zf3<(kT6P=Mpby3j>$jekHwd+0--tA|qQn~?V-w#` zv%Ll9o#lZ^;#jcSm7fdFMSm1ad}*(Je2|~)B&3mbMZ}|823d#}S)(JDsh?1R_5Z?+ zw`w96#y(J&b+OiOXas>`ZkO65+fYNs>0gFL;v4RmJ2RTOH__D?a+^1in=2UVJ1gt6 z!vv>GLj=hl9i9bukTiqdtQGSQ@nz!-g^T4f{XS|DzjwXV3Z{nD53D?G8=4~CP#t)= zH=Y3H)6FfskUzHyFrZ<+`NiDyZ$US=rD~#nC0^iq9WaA$8L*W}{AJ0l51K&QTY*@C_SE)74SK`NIO-Bi$oR;t zkZ%bItb8=htew-0E75*rz3tG%*^NCu%-||;7Gr!WLtu@Cf*+f{$5?QsBTO@xlG^;t zOR5Zw3Qf1~aY2zq*#{V}mt}m<`48J>GJ8&N&$}*G=BCd`Tm|NkKM8*&KhfHVuo3+@ zdp*M8o)tBY%F^ zDG8tedeXyHLdRNg{xBE(V;ri zn_s{V#3i&G+vf^eWiVuZ^l|5k8V7DPjg;EM1Myg%L_ z8=0C4DDz^U z;z#UqcJpy~AQ-AH0l-g!765utu@4P~5qJuXp0C4ozm62R+k=}6n(z0IWUBFDuT7^- zK1YAIPaTlH3H7@Wl9gxoNynHAN`-m;x3(<_!=HpR0f!=bR-5L_L`CC+ZE7eBu1_tb zcg0!s>ddavTHRZ#@Q|qvfiLQ;QQ#Z0Okc@i^-j0*Wmt7xL#6Mx4sb!`SWTNAUInv} zQf^oTjtE9hO<~yP=mEubyuu|jZdfQ+?g)ozb-_0(d$KowY4E)p6nQYCPTM84oNxt6 zA1)2M&miJH*HHf$v-er!;tg=9Z^WeXM6!AX%NjGo|DE^JoJ0d*vM2j%Bn>7zvTMr< zS+X-!`@UW^!{JUzdQ&h`sU;cX*=SU5G>)to1k~l0968^DcvJ6z#FKzJ@Gx%br?wT+ zHkFHHN*NYf;)^LKAbOie*z%hGRv^NW;Y=3(Dk{I}nm#DOR<;YYn~dSRU3B|YeCC78 z2fO%*s6#KJyN2Za!{gsCh&>4WDYJ6|z&MCUy$r%P1eV9&Mtn-L&2ulv2v 
zw==dWozzZPPcGc=nP9}S|3uf-`{e%YqMKlFwskZ6&BUBm#@Ap-(i+wEU*`01EK6&S zc7xKoIuk$9B8}^5?pfnf5CNaE)aN=po0zmzHXJ3_W^(b2%m`ARW8A58Y!#t{vZ`Wd zyMTZfA+hVhrk<2EinYg+a39lSh>t)9I&=21pGS9vnt%aG0X3xxAJM8L8FMudK@|58 zd*%~;^8SJu1mmd;UDhmV+cK$M=yt|TjJb-Z(0HLWGb-HksX8)HI)fh?S+-Hi6sUow z0-4JKy!gTXM5@B6xYzDJ6$qtgxvsuHparXXy=@Vhlo zr?aglX!evfOU`NxEr{Rm{=3!E@8aq1NMBd!JJba%RnVYbY~%4*o-WT{{R>=?SDUIL(5iA`^M zG90;I1l0QeT`IEZC51S9DnQqAUSSe27VmJ~w%_26rOf%r1w`p6w2h-?@?*xwn#r#Z zi!iy6DJN&ydNk=;#=6Y(0i(gugkgz{Qbu~}SrJ1ZCgo-OH!u^^_)j6^D^1ME0 zZ}1pQ00@tb#IW-{ZiSD7hcuRX$#vbUO(T-EXYpu!x<4GsbX$XB7Oz{2@ju8l^vu@@ z*Tkuz*tuJ0{EDzAjIsuQ3kraDWLtNlv3y-#JY z^T>R#sK4_?yO_(UuhD3qvADd)Yg0Vi^LmV;yLd{UMwkq)ptOv|fm9}N)IrKb1;*0? zVOMiJ@X=wX4L^J}5SWp#gE^4AwyovY49r#kS2j0`VeS4wuLir^fB1_WP~CI5QrtFD z`^Q7^$5Cx$zJz#f_}*e}(x8fYKRmkh)G&OK-3{h1sMJUSTjZba5dSzC_2YBd+))y3 z*i5;FwkLzCk{<(LqW)i%c~;*r@Vbq|S)7ZU-gA9V8ChRp9-(2#_kodrt1UL|78>ma zYuDI9uPzb1y3HeTXhdCG`2oeNPObn!SgDWnaLz%QR}bay=vl6bqnv3tgdh-2ZO;&i~e&|LffD9kgbElw+)63dR&65U7UF#$7r zHw`30oF7_icR$#Swc6$qhjXoQ71nW+U7W37#uW?~Hw>L~9<36*c3;?#*_sA5Y}fF0 zZjcP})Hmp&*VPn*{`yB@EUwtLfu4rzzn7!E_6gnz z|L&A+0%$p-aUMN~{-Te>n=Zle!Ud~{!0Lp=lzsAxrE4X*yO^Tm+GM-r*Sn2Zvuf9# zq_F6|qAd)GW%{p)&brHYo!>(H!}`Z%O_EUUA%$%-r00o7)zF+Rm-|;e^6+tpVS90y z4!<;8@t1r>cZm0}PLt$@gEb4Z6R~(YK>4vBj_jC^ISs{tn){Z*6l&hQ{dY2F;Gac^ zrzQpfuM>4@IhuI8Y9U9$=~~BmK^Ar!!ZSBbszVLi;k2W_ju6wUP{6081z)5T`0Q?J zJqGb2CnLS&)W{}i>n$`kW;PUu!N0#@}7Tmhik{`}&!rr>(D7(2pZ|L~q~*;gj#pkO zWaaKxb4wr~!R$=`VYWFyH0W91VDs80j$sf24A3iHhZ>z1)uNtW>QVUJVMu|w&t!Ic zw;{q|XB$>HUj^0?^p;FANc#G0cenAd@F0^#!n%xWQku`e>yh%nWENeDE8`>gTGg@a zmz};~ADEK|jU`}&+2Fm43qM07Pp^OJ=eSNRW$c8O9jm0g#EOJ4v|Feee{eJoZFzP3 zs&xkcI|V~syg&ZkR%!43W*NrB=^2r|T66t1GE9YlUu{D9=Uow#`Kj*dxd$ELrSs#s zghz8c{>M0h`kA6hqg!71&r@xPxSoL-%cqxwfRp?6eCdl_8VN^pEB#sS#f_-*$^F-G zO(Oe8H=*{g_9UY94y`v=Cq*G+^2GwB`jOSoqf4m{R;5%VuSp_yUpNf=cw2T!XoPTe zW!D-uE#nYkSgGlP-I6OM!;adM&aP{Y$V94H2pFi=vv_5FuiCktN za>??meGU6v4;)?^PL^-BtFrlZGc}J2^Y;2#!+nehPalrG!J(P!9lLfiJ7q_*4sVlz z=<6g*+v*d3fj3CrV)8DqS`P~>l)I9@b-MLTTY<_i%ukl&Badm68cR=6NV^i`2T0v& zp}w;kXHvdVTV80pj|Ucy;FQi^*}rt|=Wm`uy`zUAOSo4!LIs?09;+a#Q?Cf7A{~8y zHeLeQky*8N_IU_+Khv35sE)FjzxkCPRB45+y7J{AOH^&?IFV1IvWB4xM{S(2(DE(Q z4+*Bdv$x-2fjQv?1^rnq>&~qh%pip0FUd!nRRouxB|pELbx_$P#oKBf?{emq$MEDb z^s<%OT0(}YvwazuzJ6anEggjH{@{ZiF8hVd+Y{qz`74-f!y7{R53j(kDC|sM00U6? 
zC1Wbz`jOOig3opk!>mqlYPIYaD$BDr|DiW!Tpv5|e<3C<#S+` z7Z{V#t}R|6ylHxS@shSw2OOfCN%sT*0`*H%G3VRuW5MtAcAwt4S8mA4HQHDgDPCXi zO1bd)O_THn-@C^z&97H4OSy89fg#zP^SrZqr6J7_{x`v@10%Egwp+TPc|qE#t%?e4 z+Mt~iEG*#2R5}^Kp6#}z48D!6VvGFFpfYhLkz!l(tJsA&-Lc4HOmX9AMR<^@TGtT+ zaB^!ww@ARdXg-(k{KP%bTBNxO1Mf_$Ad_1$&IK7}kv%u}!W+s|zsI$WK0#EQ7UL7} zHH^;puZq}r@jPj5lVl1h8MhyYYs*3*Dyf(1nhcehz({_vmamp6BUk48NzM8yDh|xmHrbO$t2VXW6z4HB9 z&r-#woy`IqhM=SGQ2O<^0Ywe2JWMR2gtS%I1}3GEg_~fDvO}J|x&zCoeVNKV)`qUu z%U9RJ}%~8m6$nP@7`Z$VMxKcBLnUvB8 zyEcBYN0i4$nxnzMP-)WR8R@R@)J%SD%N*Q{aZdVy*R>%M1gT~N9FO%pdjwG4x zd>kdr!)FgnGWwo^Io$T*oON(LVtFDmJ*)#tXxb~7sFm?vS8e<%SZZ)F+bw1g3$%pF zX;8a7^R+2%40hO6ORElQ3P*|9Q0R+S)*p5&lZRHlUP4ccntrf|;${uhl^W9jU?tmM#2xA!n}#!ZAs;u1 z(6GNs+h0}nTKrR`4li1dWX|(yRz>!;*0Yn4{^IkRsp0-dMsRXn#W8;rQLe$`1srl=jjqKJQ-spS=Krs51J8B^Ol{y%S5?69;Lb0`hes`52UUOPTwU9v1)Xu`S|_(*2Q|5xLo1#`vT}(%HY+ zj-VEuEJCm;pC~mmV76i`fy$DWbL?*($gB&E0l-5b(CEtj?lN?iscv94{=j&$^2#{2 zWI;&l60!_o_UQUL)6a!$N|*8X(Tv?tgYhK%*YYm ztDnE=5>9w*J`N16PhKt{${s+Df76->c30!^<7?3tNCnJF-H%Grle=HfUK+bgLe!>J zZZ7IRjPqWgdE6P!ebP314Rfx=7MAVCdirhx9+72zd|ELUq`zk>u4jE?1M^kQ%u|q} zhfEGQnFG63;u$pw8hR&J9Bb9NeMe^qRKC!ln@>hOtLgSfwYX>XPHl|#Y0*}EqpVaH zF?SiP<#`<7j^;KON{RXnUxBL++WN*ZX+;*ljAs6?Ao8$QhF8I!B&$ct8Ar z2`fDGY^qilHH5D{apYRrjby>izzvpT@FXKERI}R z@1w+r@y2auLHJ>z*^fUrthJO?Q(o=&rupFOhCC-0J818gBB_E1$C$Gr>XR-4dfW5W z6j3vdTp{`(Ip+~trwBE-=TO(Dp?C=hHd5c5YN;?6R5LQgpW4YGH#bxLtzFej>B`>ot*TSmL)}^1ZerRXY8~h zU8^B^bD4~BM0GPdR_?-|!|?Iy$#L$gsOc4DQ{HPv2a1zI z*)kO~4xC;)>Pf(VXid~5r2Wa)cZOFdWt&}IW0Wf@L&uM!0+NRDQDC|jaf&>R^k}C7?=YDSXa*z<8Rj7)TZrs8%nC@C^5Tn|o zww4wX8284sog+tet?Ryke0(-A$MZ@xqnYEecjwPoO=EBW7+ejB$r{m8JlWvTKZ@ns zcs0|Lay`g-^F}qD@!o0SlhQW2FKo{3aT$-AmPt4E_?412JA?b1zCdd>n@46~cIR&< zy-kpJ^3C;AHOw~)O4|tE8om$gsTb?(kp?BN%vtBwqmReNLOk}q6V$w8qVNo@QF1SE zh7mFJioZ@B>4F5W@c9{)pscD*e!R;MDgLdOo#)=^h7@Y8r?p1Ud{blVUL&dBa2i|3 zlKmO<%@?C==!O2_7`u&4N;9mVm&F_MrIwMlMx9eUiwTywoStZF(f<;O=lT^kSYw?} zJ$Jl>9#nImY3Jn-OMSU46*nw)>@4jhJLq?M`eIK7Zn}9s;->1AdQ+5r?XE@s6Vsl4 zhvmyx*?j6{SPa%4J!Z~mu*!H*)p(n?iI;w|-J&t7=nwS%WyL$f^Q;!lZ8NXu3NB%8 zpI!n?lI;T_rBC9f1KlErn^P9BAfo+hjTvZ13B#uYcHSE%g?gqk=&)#) zZ^_yvJ!zw_r2>?_8oF>Rg^ug?n4P`5&yYevSK?y^@iWQQf#v~R_Zcv-mzLiQSuFafv2{~(ZD!TzY6Et2N+XMDFferWyO zdq0)8Kl$~M%k-yu!Xs`FbLV%s7DNkU4cD_nWf=^H)fZ~*9VCpx9N5nn zn61yJ@4+9YTjUR}S@K|NeLf1&RQf%<62@G-PhLd%8?DgbV|$P{EN&QA(rzcvSetUs z+kp%0+&M~qku`#t*9_3r9Ih{H95-6sCJSQ9(IOcb_WHekj0srqbg60}+#b<~llake zX3u>yw0!`xN8-m+eL9b{)$zzEeEkFZSg!So*hkVK>g?k%Pns?ur?h~o@>;2^=ITBB ziF_R6(MZmu9In_JjCMF@UQrRbA)nt0uQT(A{OfU2YOOKlI;z7dlK>&vsW(QK6wjme z1>$G}sS1S;##Bi>`?N4ln2s6f=$)?!Xt9S8L`xzX|3~FM?kABg zf?`hogP939@)^~8OPwHXTuEMLU$~D@1z(bMeojBdkNv2V={{{7BK(s>R(0V1bfEh> zpQ9DSQNYK`RfvpF6PdGE%0S2M)~n{?2honsLNB*SgSKN-!8q-h{A~IRo%Qw}+FvAP z(H6ts+zDj;`r+t-pc`fJ&Y&{f!*KS>@jepZU+L}BS7*IvPSa+Hv>dS_k@BA&nev3B z8FQ(h-nfT+mWZ9$ROZtz!bGEXXzrv=xCpLeLU`$A&n9QsP}IOq2HVv0Js14Hk&uZE6|eR>Pro)8#BdYIP= zrk#JeJW;MDzPg9yMMZa5vjM*b_UoN2vfVl52++fNx{S*-)uVs$=>W|yDl>Bjd7pz} ztrA+KmGad6#ye=}c$;0^l+0anFN;$-``MyN+YKL~R{zI(Tvn@9hTcAo4sAXJS%bfp zX+a8NbDUO;e^SVNbc=Ta*i8@l)z3QO1rq48tfc5g7eT9JswKC7JFkUMd*L;q(yiKN zPmO6zBQPPDAl;AHpHgyiy-8L-M}McKvClX~s5q-*tJS8~s z%KaqsrOJX#XRw}}(_>iZappf}vd=&? 
zMoIr`G~|NGsz@Hdrb=>1Lo6@Yj_wCKFq~M4kEpIx1R#ftb*x3yi-GiZgnS}FZL!Q~}W7Uk{*#;!mh&^Mu z)dqy4fK^0EB>t1O5w1|Wf;aP7tiP9CWh*@e3^JLv16xrrUV;nTWod6Xg8&|9bwWH^PKUzi{u)NE+fHVjB4;4sW#w$hp#)er({ zwJvV^7!=M)LPt7V%XoWYWqc#HoN*-njDzX;(#eq zx#)H+3we5k zv#`~gc=fW0mEjmuv-IAG!xTdM>-wegu!A}}Z@3nPh~Ka%uc`@okmoBYty75BA6RHw zzp~HwH`&=i)K&8Qv)2##v7NH*hP_QYj@+jf_~6-`$Za7Ws8P*?Vxh-ypt%#QTF5oD ziT4e!fIM5K+Kpf1q(QY5R|)okc){3DW=HS7PFfZnO#xiN@*rg@3V$Vrea(>zrkjrt z5r$N5c|#Lc-7k{31Rg`?OKGg$H(4~fV9J_aHllAO-;S@Ct0V`UA0NOre_vlKrJ@yI zkCtCA-uYY5>YR0}wp{S9E;d5KTvfb@7Gg-14jtBOH+%_YK4$(v$2=N;gBl5Md!Nfc zx;>e+m?pu{;sDHfU)c(HMe=R6luD}{)8F?!6E6+IuO%gyifXKn|yyH>6)rmR{7PJ@dS7Z$J#!`@y`knb|)w9qKv4j5FtCyq3nb}pdJFgBOUzN0U zPupa+VBV7HI8^xhN8ziKabUz>=Cxkt;CJ$&I+=&hNLsJ!I@8fa)kP}?LJpa)*r7C1 zA$A!42mB|v2sh7NE!XbLuORC9wTcpxeAkBF+qSg#e2d4c8Y@lo2sg&omC%KG2p^LA zF>n1Q1@}K%niG!Dqc*2n>36q|?vEg;d3!WZkd`4$MH?Nf zVjw1&H8y^j3I7mSs_7F`bohXz zl?)6bM!tnWqCW=Fp8Df94e&2n%Sv@_AM9WT&^no}(WCJ0H3@i==d2?mTrAqDaw~wU zL1Xar>BgVsf@2bC_r}Ju$3u>YcAna4>p!;d+*vGj*D>Oi_l2hSDc@x~&8oPaE6m7+ zy=&;b`y#u?nFG-0Qiwi<9N2_h@Oxv%j>TE0GGO=*m2U31K{9;HQO+2|2;2^m5oUbNpGxUyCDiX|a3|~kIf#D1QnC0Qxvd4Vu zt>BE!NuhX#e>X*Jl9d(p@lha*W-y%20;}&-R?~UTJ7SIO7O(4R2t0EO^ku04&~g1K^{x5U_lp8;DP&2fSRs?Q?K( zGh$T)Y=~iYYpi?gy)65?3t7Q|C@c7LnoIK$IU)4Gp+8ZeI-3uB&-BcY zEztnUQ#|Z4L*i@c-}c!;LHRNLoZEu5h@=jhGMsV#Bkhw2C^BIP*%78KD*&O6aTvks z{KFW?P_5gz%3AN6k>H9Bw|M{Kzq-g4&LC&obexJ zdG)oP?K?yfoaobv&Aq0H%9@8NDqtu94)^X8E{s0tV_pkiYv73h`%o7_t@zPoL?u{_1Sh-6)^~hsS_srk;P6qeVOHrrwqiVsijjDh zY(`CohFk6rjQo&(846SLS>m1Np#hgsRjBxSkr0l+p5UiQ2ecNRmT@_a7iH%L{b6Gw zs(X+M+myq_!wopqW1tymVcs;5odi~lf4{p3;)Yu*W{e(qBL|qiy~hp4@S6tzSgTuk z5(ojoVFcIspP&r@d=_*^8-b!zgPqHCbI0GnT+$keg76ZRVrOieOM93V3a;%Obu$q^$r-kEoxm&WF=UdShqkx_RJ)3as8>!9*mUJg|^t4M-pcg}}*>%@78U(&=_H{zsn8d!9~S@*r@Y z7Pgk6?kW$aeTCKLULK@{!_o%Kdi;CQWdZS_UqtL&bcskSJ%|fYv>Fv zwq!fdI5sDK@t(E8bbsdpRrUK}`^)|bBe*%ykeNOzXja<7%ex$VM8pPi^hV$S2@c`Il@SQ z?6e+>uS-)ceb20J%>=no00P&Ik1-V?l)!nuzGUOkhViCBHJ(A!iwK;rg;+^!4mg}2 z8?pPfkNsD91`kah`O|qfnS;ST$=u>@0vKTb`VTC;dbZ=h#2XsQ&?g4$7=YDHxXOx% zOoAKeXtZ8rg&{72CKfpK16}_=9<@MgSA_eDV6ZLbV)WkWEtgo8@C6Kq`yP7pf@VuChMcKLNESd$j`r{-KP=*Z* zDG2MJSw%l8t69kcIk6V9)_yCP7HEyYS`X0?wf1LCMHQ{Fl03M^3$DuL0q_uqFSOKs zjhzPAak~cAUB5#h&xmpb8`q%zoP7pYc8({0tngKl`aLjq23mf_ePYJq5}kbM{uM!h z0P4mEEXNpSq8Mi1fwr=S{$eaxBLhAuJUM`&`~x9GU8My*+oCNpU+-Ui8t|HyZp4LA zT_7%p=`eQXUV7U-aSKHvp{gt-;0zW9ut349180g~8;5TJ)Gv5p4o8Ngx1x5q9HKAn6Zg#cFT zgA0q5?xmz5C=7I!LH+HsNnl%-Xf}a4-rk1jHq}ZB17a%^R8+C8AErbr0nBM^fNq(n zn>#6Dj`#Orjxpr+2B5~Ls9yIRz@qa%6R3c>FIQJi%VJPOa|!G`GzM*a^h(_&2)z?~Q_*eCv8*RIsyGNokL{R{-bc(nLRvt-D4h=wBIejzMA zqLl0=HQ;3jx~f0JwXi{HZ~$pkqLur*=GFdgek;a+`DpB!9Wkx${<<|{if?BC&q=&7 zhPcbu-%$k1ijN@C|fzQ=*0L@z9&{m8TK}h0Bm)9#kz#(_NnmfvX8G!C` z4MH;4Nx*%$M1rKqfL68~jNBMm>p?e2_V+fRObFHmUm}k3kHBuoXdPfC1XA7j z1Ah=FYXEnRce}|0ei-5p0crqq_!Q=iqYQFJVV=TLLP^XR;z_)~Yp~w;6vp)8F#v8j zCe8~sTn8c6=74@O=I8$z66Dn07!V1b`uC16@c=sUozRy6@tgm-e)R7HVS)ccgC~l} z928^@Ny2ae5DEW(9{`_kY!!VOc>j;t9^xAP-yV$w7cg0RRa%8tbcFZD|Lg^DR*FeN%irw+4}oJ{NLs1?^BpL;UrIj_1q>^@8cZ!H zOa((;@m{UQaZYV)Rx^VFGa!;?{~vBVyYWmBxc#BZu31-?LBzn9GXDsz*NLy4r60t( zp8u%^mjkFLuGXg*WB<5SLa}@qKCR102*>5*cSki}P+J03VF8kS(Di>InFAi3DJ^th zxOAhd_Ec?WNxVVk+xHRgUM|z z&CVjVQ;actE~c@{e@V~y9g}7r^`7ZxrwQF!rfCLoup`MQBo-R?AXf8~OY&CETB@ro+4xfhOSD_lkR7M`X+B=tWY zsbOuRvHFXo;FfYCmPB3mXa@03j6o(mw`!+j8W;**Uth~P!$uj)7&1y^GFH#zCzJ*o z_wL;v8YV!kdN@~v-gmdq#MZd(@Ev_Y>b%lO@pYzitdI`wk${Hc_CG*ockb8o7< zGY3`N=*a(EcR0W2%#WA59-ZiFRG#wSOVnmj||{qc6Olu8yX5X z;WP#M%jPYfzAn<4lWJWi$}uQCwJ^8(iEn192gw)&rTB-V=5L`I1hB zC?N#c+Nvh>XnJbaV;Xzvu4XYWpCa^9_wcI7Bm`m4b&nH8=$O6I0C_%*_356z7f^UC 
GIT binary patch data (base85-encoded binary blob from this patch series; contents not reproducible as text)
zzXn3qDaKy*z@-HxTFICf4R2m5AcLN5^>VebCiSBo)p_vOuyRh+m&5`ft3Zf@mb;l4 z;;bmU4a^qxK3dsEvUEW(Z*^0DUgSNOW(gPEL$`r6LaHUmHLsRIGL1fDt)l4GySlkk zsE0_m=|{)IiIM7w#Fs$z9O#MsPJG$TG|(U3f}XAQ%u)vlp-|s-qaTi( zbgx(XGO`~O3~4>UNXVFrVhXmFV|~Bf=WMe$+N>q2&rq^J2tZ^SliutBaC!AFy(p4G zPQdCT#PeSVnb`?TO^Z|3GkaGSIYtNG`EIStogDySSo`=XaGM`Sd&Ez>*lZzRTj5O4 zCim{^rC+a>WXhg)H#>*~1xijuSrCHF9V@B^2xN|frca9`GbORE7agU*-J*wx9(W*k z1KP2_C{Cv(AheGM%GioqPoT$6V+r|a*lCdObqp*yijnAHd*iHHnZ^KNB+|}68?<-= zh>sNC`hhCB$D$Zog#9rsb`=%D>K5-#Y}<>3%pAaTnOX2dohpbUF%nvzqbFKUt{ZKs zu*XO5eAkuRw6iWZxW_t5VQN{4?`!yVkiAEG;};R~A=(!b&8Fu60nwBp?|d7-#=^rj^4=E|+rHT}uGPkM3K78BsBZFVXU4M`JF7G_ir% z=TD@t)$Ct@lv0MJXYb_O7$2>AC^vG^TddD(1a`eG5(*0P)0m=ct>=~qs-c2EdJTz( z5xX%C{n={K&aT4lejyoIN~Dk6i+sE2%Hiyncnql_f8L@tv+!LELdpBYqa~sdC7^<} zbFWc%zhAiEXkQkfPBTfm2#O&(2Fd^Fh#N{;erk+#jhSsu;`R8O=hME!MukoXP@szw zF$I9%@I?^T%#Dd;lm@EjQsbaeEMSX6{eTWL{XpKE=`iMJV#kg!qU|3{#}#LX3I}|D zq=OffsLr&TdQC#d%a*QoBcUyfFC?}|fpdGKJOt<@kT|ub3E)Z}u+-64;0+f3nWq7X zdH9r%QK)U!`Q+SVDO<^43n#O@w6U6L-H~`AoL46w{xUv%p?$g|ttk`H9V5aY_lMPJ zZ#k)TD`^>w^VrF=5RezTMF#(43WF5GvwOe;^o-0-p_E(|9)0QN+m&{iFT+jwfDN?~ zKgKMc`~(d$*r~w}Rmc}lcc*b~a`60gg^CrjGEUhDO;Jx1q(P9Yvg-Uq-+GpVk7#7jO!T!_6=rLV|AwlwwqY2!RknY% z8wYkuOfI+%%`M=gozwy8F`#R$(B61=BDQ;d(4{InmnthvzCCw>*4BO@Z&lX?C!$0m z#gk${7Ms`w5rp4-a9*s4K6nx9n20E@69 zqrc;3@<>_yCNf8NJ5HHP+FfgNTJ%hf3K;}Nb`C(Zz~i~s(2n$p39-@^DCbhZG#pxo zD852=e1dleyW7${gS* zaphVhQ-|b;)kR4r0Nx-2kh|HRL3w=^zi#xFuLdpUBUawP^CZJN)~Ua>m?w^5FQfwv z0$~I8vz~_$gSeLu(U@hB4G(NSG$>o`O9Cim4glbXlcUlsLr6n#)Xs!U!uH`^M`_Vc z$>ic-oEo<)f0U1_sKq@v%8A@BtgmvZOaNBi_=x2k74qOITdjwhB=!d2uT?;7l#U2F zYF}XcTa61ej0#Pvp9wX*YPtZYWdj%>>tee_vKp6Tn9}2tH{|vVd#e0(4Mvx zeguD&K>O^#Rm=xCP!h}%>U|;VtgrBB9vXu&x4#ypf>Z#gU<)yv1eQzs@h8nICLOn= z8BPT25?_1HM36BwrC7rF7JVtSs;p0x6_7c-JWTOdQX>&ga^!#h4+9AODhGj`rds9mQrY z4*e6wi3dovfuTU@$-BTHkY#P&Z`hP9wBC)om@t;&toadJtl%ni^dirYXw;q7#D_&j zh$PNabe;4%_Y^FoV`V055v60bfYCh@&ij;h89)hpYY1{R;#zF9{Vv6VMi`uUflz0( zfv5vaTg!+MZ+?tV>Ta;Yqv6)J{IBePuzH>79kx7G7aSmm4`2UwqP4zH8oeF%V~Jc& z>7H6Gv7dp6I)8&9>LDYM^wXW5cybudXgIl+aMV7=Nv6K{DL+QsP8u7VS#$?D+Pl5P zYWN4dx}y*zsx7}S`niR|V7fSDb zJaDvhx-ZGa0siIvG~_mSRgwwt{x^~J6pE<$Vyd9d6G$JjJ4(Oo=fwN z^USu6po7#!pt-u-@nPFOQGU>WwFskMO8xd(fja`LkUNoevWK_VerxJ_4cd{{MRE?% z32;Tt$Kb_28HZO2=qba@vXV5aqPYX&8Jeup~aB`dq{hI%-0AnmdnlF375 z|6sn>f+v=Z2hD==4-;iPY5ZNGRnEFu0J(WKK_LVhZ{nJ zCoz_lCIgk{o0;Hv5Lgj8|LJ}%OYa$SMOMI0EV@o7&tdL+1o;uTss|OcKh8Rsh)o?z*+vJRKP|8a=Zi8k&c`k+QRRB71>|-Jq~ha;(vR!nx=~SJqS2D z^BU}mqt3=62g{VVBLXan{$C3a29AONxQqlFCKbPDWBOkz^7$WL(T<%KG&$ev3^PC;Ao?b#yu7Tf86iXhD zKuo_90S!>8;G(5w8}PgKr7W;H1H=zubHK5KqdTJAeTaMupB;*5L&cpkTv4QsMMVa+ zZyICyG1EWdt&^Hdq-uws))xMp8Y#MbLbDDIA`;`)$B0r-!uRV_E=kodm1xnYPrt&7 z=p?sB+P03U-i9%&SK_Mm^rO`5DQl}>myYjq^GdN;Y%x__e!SmIY_YDkwELCCQ1=6i zz#J(LlR<*oEy(cK&!NF~zla9Tb}U4Pp5Xh-J{|IpnZsjzh_+wSRr`cmoT2#0x#Gjh zK64S>O%q+(SwAmdU1;nQtkv{O;zTP*h`$xyeUz%d^>k$#E& z!f4dXgiWF>8Gh`wuYu>ipWwCo>f!uSsr3tU+5>YfRnvWsc?YQ1hPN3XGtT6dB;t!p z-rh}gJ=r>)Yh$NACHI)W@53L5Or>%kyR|J&6!~i>V~Z8F7EN``;>5E2743!w9x)y0 zegrve&w_E$6HUX3Jple8DM66GmPzDI&1P@W?{iwRBUn~U{ZRj~U&&IG zb8%^viya0wTRkvrAe+(ZH+_I?EqkNqo638Bgayk}&*1ENX!5kFKdP{`YTFhQjfK?d z5bJ%x?~EL?5;_YTDk(HKTKu&@$oH_mYhnT#mHEJ7S0& zR`AKnPSlPGcJPSn`8!z{yX5DadcNQfLaTPM5GL9;@nKiv4LgdLc+QW^OOz!cJ5eOU zg4@%R1)dM>>=KOIJ%?|{URzqdn!l!X*<_G&wRj*?7mg|+p@n1072K>~%tevjId2Pf zDybgk{keKs47jqlY}!<`&b@7DOYQ%){xMjvmKIuyRN4Ff_c zeDz{e{3xY~1Neo9;N(E}iSXxsIceS9C8V;r5-FGj4Z7GRzkEb`_=wXD_Q=3b;<7oa zU>|%WyYLsW^zg~E$*ikR=3@i8QPAkj%|>gxt+Tui%i4%p7`A@5r^p;*Hwwd5aV3jA zd$1ar1+Wi?5ec@zP>hLM_~8<@En(>}xYgp;Q9`QypHP6YKU7enWzZZ$kyFkO3?k1&0yIf$_FDS_d)9=?R@uXc!rWsxc7#n 
z_5#1_G28e~L)=mcVPwt0YtZM8xE@qbvr1~K^@ve8n&TeNG8a@zX#5fbty&{4Mi(o( zwj|CsDmWZ9sE30ZN)_8m+K0#oVasMKfzAPcJBnu`9&cY}^VVi7#NiZr=wYPrUUaP} z$V`t2??@I?XRchr6^w`S=j9^%OL%*&Pf&9g`@ogV#1TeqUIu4gje2cK6BiOPMQ=l) zzqS@+43ToE7pSu7LsreM`Rz&@4uh$brIzrqE|u#@R3{}T4@4e|JcV=B>W&OaxL#iE z%|e4jF&&(CLZuwU1M`jo&DR94-z87Wp*JsOXtGs%GBy=efsjZuEG8_^zG)vo{6G) z7t6A;MT$s>nuhJ;D58KWl7JZA{zVfUW3z#{h4D9QoBxxONKOB&aoKwtNV+C=MA@<( z{G6@pgV`X1?Xe-gKhakbLjjKr0m-X&VjtSu_ZRmqCS@v&gOkr7azHBk1jEiJ!VZ}1 zv+&VVK&9 zv44}2s{e_Uto!%F|IcJXSxb=5R{|b$IO~zEdDP6||9d7S{Qu6LHtR2c97!G2t(4;@ zp~Zc%*Rn0t*9`f}5{e3C?R`0&1Yo{Hq)g?Q{;~ryvx_ylS!>|MfB!MIuLvfXA3iT2 ST-$`aYk)Swf|ItD{i> literal 0 HcmV?d00001 From e62cdc6e12ff3b3ab191aa88dbd4cdade27aeecd Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Wed, 16 May 2018 07:20:53 -0400 Subject: [PATCH 1350/2793] Revert error message refinement in NotAValueError. I refined it further during PR review in `symbolKind`, and apparently didn't notice that those changes weren't being used in favor of the half-baked implementation in `NotAValueError`. Fixes scala/bug#10888. --- .../tools/nsc/typechecker/ContextErrors.scala | 9 ++------- test/files/neg/object-not-a-value.check | 2 +- test/files/neg/t0673.check | 2 +- test/files/neg/t10888.check | 14 ++++++++++++++ test/files/neg/t10888.scala | 9 +++++++++ test/files/neg/t7251.check | 2 +- test/files/run/t6814.check | 2 +- 7 files changed, 29 insertions(+), 11 deletions(-) create mode 100644 test/files/neg/t10888.check create mode 100644 test/files/neg/t10888.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 84f0e0cd2519..4b8268102615 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -567,7 +567,7 @@ trait ContextErrors { val unknowns = (namelessArgs zip args) collect { case (_: Assign, AssignOrNamedArg(Ident(name), _)) => name } - val suppl = + val suppl = unknowns.size match { case 0 => "" case 1 => s"\nNote that '${unknowns.head}' is not a parameter name of the invoked method." @@ -752,12 +752,7 @@ trait ContextErrors { // def stabilize def NotAValueError(tree: Tree, sym: Symbol) = { - /* Give a better error message for `val thread = java.lang.Thread`. 
*/ - val betterKindString = - if (sym.isJavaDefined && sym.isTrait) "Java interface" - else if (sym.isJavaDefined && (sym.isClass || sym.isModule)) "Java class" - else sym.kindString - issueNormalTypeError(tree, s"$betterKindString ${sym.fullName} is not a value") + issueNormalTypeError(tree, sym.kindString + " " + sym.fullName + " is not a value") setError(tree) } diff --git a/test/files/neg/object-not-a-value.check b/test/files/neg/object-not-a-value.check index b181210877fe..afe61298e9c4 100644 --- a/test/files/neg/object-not-a-value.check +++ b/test/files/neg/object-not-a-value.check @@ -1,4 +1,4 @@ -object-not-a-value.scala:5: error: Java class java.util.List is not a value +object-not-a-value.scala:5: error: class java.util.List is not a value List(1) map (_ + 1) ^ one error found diff --git a/test/files/neg/t0673.check b/test/files/neg/t0673.check index 2d11d0ef9f3b..af35a5a5fd0d 100644 --- a/test/files/neg/t0673.check +++ b/test/files/neg/t0673.check @@ -1,4 +1,4 @@ -Test.scala:2: error: Java class JavaClass.InnerClass is not a value +Test.scala:2: error: class JavaClass.InnerClass is not a value val x = JavaClass.InnerClass ^ one error found diff --git a/test/files/neg/t10888.check b/test/files/neg/t10888.check new file mode 100644 index 000000000000..371eaa959031 --- /dev/null +++ b/test/files/neg/t10888.check @@ -0,0 +1,14 @@ +t10888.scala:3: error: package java.lang is not a value + val v = java.lang // package java.lang is not a value + ^ +t10888.scala:4: error: class java.lang.Thread is not a value + val w = java.lang.Thread // class java.lang.Thread is not a value + ^ +t10888.scala:5: error: package scala.collection is not a value + val x = scala.collection // package scala.collection is not a value + ^ +t10888.scala:7: error: object App is not a member of package scala +Note: trait App exists, but it has no companion object. + val z = scala.App // object App is not a member of package scala + ^ +four errors found diff --git a/test/files/neg/t10888.scala b/test/files/neg/t10888.scala new file mode 100644 index 000000000000..742d9b3f1f62 --- /dev/null +++ b/test/files/neg/t10888.scala @@ -0,0 +1,9 @@ +object t10888 { + + val v = java.lang // package java.lang is not a value + val w = java.lang.Thread // class java.lang.Thread is not a value + val x = scala.collection // package scala.collection is not a value + val y = scala.collection.`package` + val z = scala.App // object App is not a member of package scala + +} \ No newline at end of file diff --git a/test/files/neg/t7251.check b/test/files/neg/t7251.check index a904804e4357..33fdafc2ee1e 100644 --- a/test/files/neg/t7251.check +++ b/test/files/neg/t7251.check @@ -1,4 +1,4 @@ -B_2.scala:5: error: Java class s.Outer$Triple$ is not a value +B_2.scala:5: error: class s.Outer$Triple$ is not a value println( s.Outer$Triple$ ) ^ one error found diff --git a/test/files/run/t6814.check b/test/files/run/t6814.check index 74f1ba114364..bf261d48e41a 100644 --- a/test/files/run/t6814.check +++ b/test/files/run/t6814.check @@ -1,6 +1,6 @@ List[Int] scala.collection.immutable.List.type -Java class java.lang.RuntimeException is not a value +class java.lang.RuntimeException is not a value List[Int] List scala.collection.immutable.List.type From a65a68747c052d08fc1d3bba928ec64a93cb0683 Mon Sep 17 00:00:00 2001 From: "Aaron S. Hawley" Date: Wed, 2 May 2018 12:19:23 -0400 Subject: [PATCH 1351/2793] Drop old @version refs in docs A lot of the @version entries in the scaladoc comments for the scala library are no longer maintained. 
There was probably a goal to keep them updated over time. Since it's bundled with the compiler, the Scala library version is based on the compiler's version. That's what determines Scala library compatibility. Since the version field is not maintained, the values just look like unnecessary cruft when the library API docs are published on the web. The @since version number seems like it's worth preserving. I've changed the @version to a @since entry when it was appropriate. --- src/compiler/scala/reflect/reify/Reifier.scala | 1 - src/library/scala/App.scala | 2 +- src/library/scala/Array.scala | 4 ++-- src/library/scala/Console.scala | 2 +- src/library/scala/Function.scala | 2 +- src/library/scala/MatchError.scala | 1 - src/library/scala/Option.scala | 6 +++--- src/library/scala/PartialFunction.scala | 2 +- src/library/scala/Product.scala | 1 - src/library/scala/Proxy.scala | 2 +- src/library/scala/Responder.scala | 2 -- src/library/scala/Symbol.scala | 2 +- src/library/scala/annotation/Annotation.scala | 1 - src/library/scala/annotation/ClassfileAnnotation.scala | 1 - src/library/scala/annotation/StaticAnnotation.scala | 1 - src/library/scala/annotation/TypeConstraint.scala | 1 - src/library/scala/annotation/strictfp.scala | 1 - src/library/scala/collection/BitSetLike.scala | 1 - src/library/scala/collection/BufferedIterator.scala | 1 - src/library/scala/collection/IndexedSeqLike.scala | 1 - src/library/scala/collection/IterableLike.scala | 1 - src/library/scala/collection/IterableProxy.scala | 1 - src/library/scala/collection/IterableProxyLike.scala | 1 - src/library/scala/collection/IterableViewLike.scala | 1 - src/library/scala/collection/Iterator.scala | 2 -- src/library/scala/collection/LinearSeqLike.scala | 1 - src/library/scala/collection/MapLike.scala | 1 - src/library/scala/collection/MapProxy.scala | 1 - src/library/scala/collection/MapProxyLike.scala | 1 - src/library/scala/collection/SeqLike.scala | 1 - src/library/scala/collection/SeqProxy.scala | 1 - src/library/scala/collection/SeqProxyLike.scala | 1 - src/library/scala/collection/SeqViewLike.scala | 1 - src/library/scala/collection/SetLike.scala | 1 - src/library/scala/collection/SetProxy.scala | 2 +- src/library/scala/collection/SetProxyLike.scala | 2 +- src/library/scala/collection/SortedMap.scala | 1 - src/library/scala/collection/SortedMapLike.scala | 1 - src/library/scala/collection/SortedSet.scala | 1 - src/library/scala/collection/SortedSetLike.scala | 1 - src/library/scala/collection/TraversableLike.scala | 1 - src/library/scala/collection/TraversableOnce.scala | 1 - src/library/scala/collection/TraversableProxy.scala | 1 - src/library/scala/collection/TraversableProxyLike.scala | 1 - src/library/scala/collection/TraversableViewLike.scala | 1 - src/library/scala/collection/generic/BitSetFactory.scala | 2 +- src/library/scala/collection/generic/Clearable.scala | 5 ++--- src/library/scala/collection/generic/GenMapFactory.scala | 1 - src/library/scala/collection/generic/GenSetFactory.scala | 3 +-- .../scala/collection/generic/GenTraversableFactory.scala | 1 - src/library/scala/collection/generic/Growable.scala | 1 - .../scala/collection/generic/ImmutableMapFactory.scala | 1 - .../collection/generic/ImmutableSortedMapFactory.scala | 1 - .../collection/generic/ImmutableSortedSetFactory.scala | 1 - .../scala/collection/generic/IterableForwarder.scala | 1 - src/library/scala/collection/generic/MapFactory.scala | 1 - .../scala/collection/generic/MutableMapFactory.scala | 1 - src/library/scala/collection/generic/SeqForwarder.scala | 1
- src/library/scala/collection/generic/Shrinkable.scala | 1 - src/library/scala/collection/generic/Subtractable.scala | 1 - .../scala/collection/generic/TraversableFactory.scala | 1 - .../scala/collection/generic/TraversableForwarder.scala | 1 - src/library/scala/collection/immutable/HashMap.scala | 1 - src/library/scala/collection/immutable/HashSet.scala | 1 - src/library/scala/collection/immutable/List.scala | 3 --- src/library/scala/collection/immutable/ListMap.scala | 1 - src/library/scala/collection/immutable/ListSet.scala | 1 - src/library/scala/collection/immutable/MapLike.scala | 1 - src/library/scala/collection/immutable/MapProxy.scala | 1 - src/library/scala/collection/immutable/NumericRange.scala | 1 - src/library/scala/collection/immutable/Queue.scala | 1 - src/library/scala/collection/immutable/Range.scala | 1 - src/library/scala/collection/immutable/SortedMap.scala | 1 - src/library/scala/collection/immutable/SortedSet.scala | 1 - src/library/scala/collection/immutable/Stack.scala | 1 - src/library/scala/collection/immutable/Stream.scala | 2 -- src/library/scala/collection/immutable/TreeMap.scala | 1 - src/library/scala/collection/immutable/TreeSet.scala | 1 - src/library/scala/collection/mutable/ArrayBuffer.scala | 1 - src/library/scala/collection/mutable/ArrayLike.scala | 1 - src/library/scala/collection/mutable/ArraySeq.scala | 1 - src/library/scala/collection/mutable/Buffer.scala | 1 - src/library/scala/collection/mutable/BufferLike.scala | 1 - src/library/scala/collection/mutable/BufferProxy.scala | 1 - src/library/scala/collection/mutable/DefaultMapModel.scala | 1 - src/library/scala/collection/mutable/DoubleLinkedList.scala | 1 - .../scala/collection/mutable/DoubleLinkedListLike.scala | 1 - src/library/scala/collection/mutable/GrowingBuilder.scala | 1 - src/library/scala/collection/mutable/HashSet.scala | 1 - src/library/scala/collection/mutable/HashTable.scala | 1 - src/library/scala/collection/mutable/History.scala | 1 - .../scala/collection/mutable/ImmutableMapAdaptor.scala | 1 - .../scala/collection/mutable/ImmutableSetAdaptor.scala | 1 - src/library/scala/collection/mutable/IndexedSeqLike.scala | 1 - src/library/scala/collection/mutable/IndexedSeqView.scala | 1 - src/library/scala/collection/mutable/LinkedHashSet.scala | 1 - src/library/scala/collection/mutable/LinkedList.scala | 1 - src/library/scala/collection/mutable/LinkedListLike.scala | 1 - src/library/scala/collection/mutable/ListBuffer.scala | 1 - src/library/scala/collection/mutable/MapProxy.scala | 1 - src/library/scala/collection/mutable/MultiMap.scala | 1 - src/library/scala/collection/mutable/MutableList.scala | 1 - src/library/scala/collection/mutable/ObservableBuffer.scala | 1 - src/library/scala/collection/mutable/ObservableMap.scala | 1 - src/library/scala/collection/mutable/ObservableSet.scala | 1 - src/library/scala/collection/mutable/PriorityQueue.scala | 3 --- src/library/scala/collection/mutable/Publisher.scala | 1 - src/library/scala/collection/mutable/Queue.scala | 1 - src/library/scala/collection/mutable/QueueProxy.scala | 1 - src/library/scala/collection/mutable/RedBlackTree.scala | 1 - src/library/scala/collection/mutable/ResizableArray.scala | 1 - .../scala/collection/mutable/RevertibleHistory.scala | 1 - src/library/scala/collection/mutable/SetLike.scala | 1 - src/library/scala/collection/mutable/SetProxy.scala | 1 - src/library/scala/collection/mutable/SortedMap.scala | 1 - src/library/scala/collection/mutable/Stack.scala | 1 - src/library/scala/collection/mutable/StackProxy.scala | 1 
- src/library/scala/collection/mutable/StringBuilder.scala | 1 - src/library/scala/collection/mutable/Subscriber.scala | 1 - .../scala/collection/mutable/SynchronizedBuffer.scala | 1 - src/library/scala/collection/mutable/SynchronizedMap.scala | 1 - .../scala/collection/mutable/SynchronizedQueue.scala | 1 - src/library/scala/collection/mutable/SynchronizedSet.scala | 1 - .../scala/collection/mutable/SynchronizedStack.scala | 1 - src/library/scala/collection/mutable/TreeMap.scala | 1 - src/library/scala/collection/mutable/TreeSet.scala | 1 - src/library/scala/collection/mutable/Undoable.scala | 1 - src/library/scala/collection/mutable/WrappedArray.scala | 1 - src/library/scala/collection/script/Location.scala | 1 - src/library/scala/collection/script/Message.scala | 6 ------ src/library/scala/collection/script/Scriptable.scala | 1 - src/library/scala/concurrent/Channel.scala | 1 - src/library/scala/concurrent/DelayedLazyVal.scala | 2 +- src/library/scala/concurrent/Lock.scala | 1 - src/library/scala/concurrent/SyncChannel.scala | 2 +- src/library/scala/concurrent/SyncVar.scala | 1 - src/library/scala/inline.scala | 1 - src/library/scala/io/Source.scala | 1 - src/library/scala/math/BigDecimal.scala | 2 -- src/library/scala/math/BigInt.scala | 2 -- src/library/scala/math/Equiv.scala | 1 - src/library/scala/math/Ordered.scala | 1 - src/library/scala/math/Ordering.scala | 1 - src/library/scala/math/PartialOrdering.scala | 1 - src/library/scala/math/PartiallyOrdered.scala | 1 - src/library/scala/noinline.scala | 1 - src/library/scala/runtime/ScalaNumberProxy.scala | 1 - src/library/scala/sys/Prop.scala | 1 - src/library/scala/sys/ShutdownHookThread.scala | 1 - src/library/scala/sys/SystemProperties.scala | 1 - src/library/scala/sys/package.scala | 1 - src/library/scala/throws.scala | 1 - src/library/scala/util/DynamicVariable.scala | 2 +- src/library/scala/util/Either.scala | 5 ----- src/library/scala/util/MurmurHash.scala | 1 - src/library/scala/util/Sorting.scala | 1 - src/library/scala/util/matching/Regex.scala | 1 - src/manual/scala/man1/Command.scala | 1 - src/manual/scala/man1/fsc.scala | 1 - src/manual/scala/man1/scala.scala | 1 - src/manual/scala/man1/scaladoc.scala | 1 - src/manual/scala/man1/scalap.scala | 1 - 162 files changed, 20 insertions(+), 186 deletions(-) diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala index 322153fe35b2..e6c2dd1e6274 100644 --- a/src/compiler/scala/reflect/reify/Reifier.scala +++ b/src/compiler/scala/reflect/reify/Reifier.scala @@ -9,7 +9,6 @@ import scala.reflect.reify.utils.Utils * See more info in the comments to `reify` in scala.reflect.api.Universe. * * @author Martin Odersky - * @version 2.10 * @since 2.10 */ abstract class Reifier extends States diff --git a/src/library/scala/App.scala b/src/library/scala/App.scala index 52ef9ca60f2e..663bef28cd40 100644 --- a/src/library/scala/App.scala +++ b/src/library/scala/App.scala @@ -34,7 +34,7 @@ import scala.collection.mutable.ListBuffer * Future versions of this trait will no longer extend `DelayedInit`. * * @author Martin Odersky - * @version 2.1, 15/02/2011 + * @since 2.1 */ trait App extends DelayedInit { diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala index 5d1c25732cce..0e51cd98bba3 100644 --- a/src/library/scala/Array.scala +++ b/src/library/scala/Array.scala @@ -44,7 +44,7 @@ class FallbackArrayBuilding { * `Array(1, 2)`, `Array(0, 0)` and `Array(1, 2, 0, 0)`. 
* * @author Martin Odersky - * @version 1.0 + * @since 1.0 */ object Array extends FallbackArrayBuilding { val emptyBooleanArray = new Array[Boolean](0) @@ -481,7 +481,7 @@ object Array extends FallbackArrayBuilding { * `WrappedArray`. * * @author Martin Odersky - * @version 1.0 + * @since 1.0 * @see [[http://www.scala-lang.org/files/archive/spec/2.11/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.) * @see [[http://docs.scala-lang.org/sips/completed/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8. * @see [[http://docs.scala-lang.org/overviews/collections/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information. diff --git a/src/library/scala/Console.scala b/src/library/scala/Console.scala index bc702cfaad41..47826467a207 100644 --- a/src/library/scala/Console.scala +++ b/src/library/scala/Console.scala @@ -106,7 +106,7 @@ import scala.util.DynamicVariable * * * @author Matthias Zenger - * @version 1.0, 03/09/2003 + * @since 1.0 * * @groupname console-output Console Output * @groupprio console-output 30 diff --git a/src/library/scala/Function.scala b/src/library/scala/Function.scala index f28897c20bd3..f96fab410467 100644 --- a/src/library/scala/Function.scala +++ b/src/library/scala/Function.scala @@ -13,7 +13,7 @@ package scala /** A module defining utility methods for higher-order functional programming. * * @author Martin Odersky - * @version 1.0, 29/11/2006 + * @since 1.0 */ object Function { /** Given a sequence of functions `f,,1,,`, ..., `f,,n,,`, return the diff --git a/src/library/scala/MatchError.scala b/src/library/scala/MatchError.scala index 0ab7f13c7e4b..5286fa42f4f0 100644 --- a/src/library/scala/MatchError.scala +++ b/src/library/scala/MatchError.scala @@ -16,7 +16,6 @@ package scala * * @author Matthias Zenger * @author Martin Odersky - * @version 1.1, 05/03/2004 * @since 2.0 */ final class MatchError(@transient obj: Any) extends RuntimeException { diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala index 30c9e685652c..ba8baf2c56a1 100644 --- a/src/library/scala/Option.scala +++ b/src/library/scala/Option.scala @@ -76,7 +76,7 @@ object Option { * * @author Martin Odersky * @author Matthias Zenger - * @version 1.1, 16/01/2007 + * @since 1.1 * @define none `None` * @define some [[scala.Some]] * @define option [[scala.Option]] @@ -327,7 +327,7 @@ sealed abstract class Option[+A] extends Product with Serializable { * `A`. * * @author Martin Odersky - * @version 1.0, 16/07/2003 + * @since 1.0 */ @SerialVersionUID(1234815782226070388L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 final case class Some[+A](@deprecatedName('x, "2.12.0") value: A) extends Option[A] { @@ -341,7 +341,7 @@ final case class Some[+A](@deprecatedName('x, "2.12.0") value: A) extends Option /** This case object represents non-existent values. 
* * @author Martin Odersky - * @version 1.0, 16/07/2003 + * @since 1.0 */ @SerialVersionUID(5066590221178148012L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 case object None extends Option[Nothing] { diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala index c054e001d418..d2458d428d6c 100644 --- a/src/library/scala/PartialFunction.scala +++ b/src/library/scala/PartialFunction.scala @@ -50,7 +50,7 @@ package scala * * * @author Martin Odersky, Pavel Pavlov, Adriaan Moors - * @version 1.0, 16/07/2003 + * @since 1.0 */ trait PartialFunction[-A, +B] extends (A => B) { self => import PartialFunction._ diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala index f3a96fb333b2..78f6c153200b 100644 --- a/src/library/scala/Product.scala +++ b/src/library/scala/Product.scala @@ -14,7 +14,6 @@ package scala * all case classes implement `Product` with synthetically generated methods. * * @author Burak Emir - * @version 1.0 * @since 2.3 */ trait Product extends Any with Equals { diff --git a/src/library/scala/Proxy.scala b/src/library/scala/Proxy.scala index 7c28e6ea2859..d77fd9910404 100644 --- a/src/library/scala/Proxy.scala +++ b/src/library/scala/Proxy.scala @@ -20,7 +20,7 @@ package scala * an asymmetric equals method, which is not generally recommended. * * @author Matthias Zenger - * @version 1.0, 26/04/2004 + * @since 1.0 */ trait Proxy extends Any { def self: Any diff --git a/src/library/scala/Responder.scala b/src/library/scala/Responder.scala index eb8260dc9ab8..d6517742f991 100644 --- a/src/library/scala/Responder.scala +++ b/src/library/scala/Responder.scala @@ -13,7 +13,6 @@ package scala * * @author Martin Odersky * @author Burak Emir - * @version 1.0 * * @see class Responder * @since 2.1 @@ -56,7 +55,6 @@ object Responder { * * @author Martin Odersky * @author Burak Emir - * @version 1.0 * @since 2.1 */ @deprecated("this class will be removed", "2.11.0") diff --git a/src/library/scala/Symbol.scala b/src/library/scala/Symbol.scala index a10da86da7f9..306a10f0d827 100644 --- a/src/library/scala/Symbol.scala +++ b/src/library/scala/Symbol.scala @@ -18,7 +18,7 @@ package scala * `Symbol("mysym")`. * * @author Martin Odersky, Iulian Dragos - * @version 1.8 + * @since 1.7 */ final class Symbol private (val name: String) extends Serializable { /** Converts this symbol to a string. diff --git a/src/library/scala/annotation/Annotation.scala b/src/library/scala/annotation/Annotation.scala index c821344cfa92..52c8cc6ef576 100644 --- a/src/library/scala/annotation/Annotation.scala +++ b/src/library/scala/annotation/Annotation.scala @@ -15,7 +15,6 @@ package scala.annotation * [[scala.annotation.ClassfileAnnotation]]. * * @author Martin Odersky - * @version 1.1, 2/02/2007 * @since 2.4 */ abstract class Annotation {} diff --git a/src/library/scala/annotation/ClassfileAnnotation.scala b/src/library/scala/annotation/ClassfileAnnotation.scala index bf9cf8ba8f5d..1cb13dff5454 100644 --- a/src/library/scala/annotation/ClassfileAnnotation.scala +++ b/src/library/scala/annotation/ClassfileAnnotation.scala @@ -13,7 +13,6 @@ package scala.annotation * in classfiles. 
* * @author Martin Odersky - * @version 1.1, 2/02/2007 * @since 2.4 */ trait ClassfileAnnotation extends StaticAnnotation diff --git a/src/library/scala/annotation/StaticAnnotation.scala b/src/library/scala/annotation/StaticAnnotation.scala index 3e7e7f26af62..2ccbbc66ccde 100644 --- a/src/library/scala/annotation/StaticAnnotation.scala +++ b/src/library/scala/annotation/StaticAnnotation.scala @@ -12,7 +12,6 @@ package scala.annotation * to the Scala type checker, even across different compilation units. * * @author Martin Odersky - * @version 1.1, 2/02/2007 * @since 2.4 */ trait StaticAnnotation extends Annotation diff --git a/src/library/scala/annotation/TypeConstraint.scala b/src/library/scala/annotation/TypeConstraint.scala index d80569b84580..2192a3d879eb 100644 --- a/src/library/scala/annotation/TypeConstraint.scala +++ b/src/library/scala/annotation/TypeConstraint.scala @@ -20,7 +20,6 @@ package scala.annotation * would rewrite a type constraint. * * @author Lex Spoon - * @version 1.1, 2007-11-5 * @since 2.6 */ trait TypeConstraint extends Annotation diff --git a/src/library/scala/annotation/strictfp.scala b/src/library/scala/annotation/strictfp.scala index dd8659aa06e9..3b67ffacbb1c 100644 --- a/src/library/scala/annotation/strictfp.scala +++ b/src/library/scala/annotation/strictfp.scala @@ -12,7 +12,6 @@ package scala.annotation * the strictfp flag will be emitted. * * @author Paul Phillips - * @version 2.9 * @since 2.9 */ class strictfp extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala index f0a70170c2f5..3c451ccdc412 100644 --- a/src/library/scala/collection/BitSetLike.scala +++ b/src/library/scala/collection/BitSetLike.scala @@ -27,7 +27,6 @@ import mutable.StringBuilder * variable-size arrays of bits packed into 64-bit words. The memory footprint of a bitset is * determined by the largest number stored in it. * @author Martin Odersky - * @version 2.8 * @since 2.8 * @define coll bitset * @define Coll `BitSet` diff --git a/src/library/scala/collection/BufferedIterator.scala b/src/library/scala/collection/BufferedIterator.scala index 1424ef2fd049..584df7f0edb1 100644 --- a/src/library/scala/collection/BufferedIterator.scala +++ b/src/library/scala/collection/BufferedIterator.scala @@ -15,7 +15,6 @@ package collection * that inspects the next element without discarding it. * * @author Martin Odersky - * @version 2.8 * @since 2.8 */ trait BufferedIterator[+A] extends Iterator[A] { diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala index f0cede224dfd..5f6a127c7955 100644 --- a/src/library/scala/collection/IndexedSeqLike.scala +++ b/src/library/scala/collection/IndexedSeqLike.scala @@ -29,7 +29,6 @@ package collection * @tparam A the element type of the $coll * @tparam Repr the type of the actual $coll containing the elements. * @author Martin Odersky - * @version 2.8 * @since 2.8 * @define willNotTerminateInf * @define mayNotTerminateInf diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala index 419206c226bb..eb1d30f2c4e3 100644 --- a/src/library/scala/collection/IterableLike.scala +++ b/src/library/scala/collection/IterableLike.scala @@ -39,7 +39,6 @@ import immutable.Stream * `TraversableLike` by an iterator version. 
* * @author Martin Odersky - * @version 2.8 * @since 2.8 * @tparam A the element type of the collection * @tparam Repr the type of the actual collection containing the elements. diff --git a/src/library/scala/collection/IterableProxy.scala b/src/library/scala/collection/IterableProxy.scala index 5f4d69c4117c..1977994b040f 100644 --- a/src/library/scala/collection/IterableProxy.scala +++ b/src/library/scala/collection/IterableProxy.scala @@ -13,7 +13,6 @@ package collection * to a different iterable object. * * @author Martin Odersky - * @version 2.8 * @since 2.8 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") diff --git a/src/library/scala/collection/IterableProxyLike.scala b/src/library/scala/collection/IterableProxyLike.scala index f87089cba832..3e2d26605265 100644 --- a/src/library/scala/collection/IterableProxyLike.scala +++ b/src/library/scala/collection/IterableProxyLike.scala @@ -19,7 +19,6 @@ import generic._ * all calls to a different Iterable object. * * @author Martin Odersky - * @version 2.8 * @since 2.8 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala index c254ed748008..306afecb6127 100644 --- a/src/library/scala/collection/IterableViewLike.scala +++ b/src/library/scala/collection/IterableViewLike.scala @@ -21,7 +21,6 @@ import scala.language.implicitConversions * All views for iterable collections are defined by re-interpreting the `iterator` method. * * @author Martin Odersky - * @version 2.8 * @since 2.8 * @tparam A the element type of the view * @tparam Coll the type of the underlying collection containing the elements. diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 3e865e851273..080b674f9daa 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -18,7 +18,6 @@ import immutable.Stream * * @author Martin Odersky * @author Matthias Zenger - * @version 2.8 * @since 2.8 */ object Iterator { @@ -323,7 +322,6 @@ import Iterator.empty * }}} * * @author Martin Odersky, Matthias Zenger - * @version 2.8 * @since 1 * @define willNotTerminateInf * Note: will not terminate for infinite iterators. diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala index 4dba52dc743c..a4dd4afaf0c9 100644 --- a/src/library/scala/collection/LinearSeqLike.scala +++ b/src/library/scala/collection/LinearSeqLike.scala @@ -21,7 +21,6 @@ import scala.annotation.tailrec * Linear sequences do not add any new methods to `Seq`, but promise efficient implementations * of linear access patterns. * @author Martin Odersky - * @version 2.8 * @since 2.8 * * @tparam A the element type of the $coll diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala index a087cb0f4542..863b3fd97a2c 100644 --- a/src/library/scala/collection/MapLike.scala +++ b/src/library/scala/collection/MapLike.scala @@ -47,7 +47,6 @@ import parallel.ParMap * @tparam This the type of the map itself. 
* * @author Martin Odersky - * @version 2.8 * * @define coll map * @define Coll Map diff --git a/src/library/scala/collection/MapProxy.scala b/src/library/scala/collection/MapProxy.scala index 2faf6899734f..441bb5525b74 100644 --- a/src/library/scala/collection/MapProxy.scala +++ b/src/library/scala/collection/MapProxy.scala @@ -14,7 +14,6 @@ package collection * dynamically using object composition and forwarding. * * @author Matthias Zenger - * @version 1.0, 21/07/2003 * @since 1 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") diff --git a/src/library/scala/collection/MapProxyLike.scala b/src/library/scala/collection/MapProxyLike.scala index 73a69357882d..0ff51132b32d 100644 --- a/src/library/scala/collection/MapProxyLike.scala +++ b/src/library/scala/collection/MapProxyLike.scala @@ -15,7 +15,6 @@ package collection * all calls to a different Map object. * * @author Martin Odersky - * @version 2.8 * @since 2.8 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala index f15419e54a26..dbbf9d42628d 100644 --- a/src/library/scala/collection/SeqLike.scala +++ b/src/library/scala/collection/SeqLike.scala @@ -42,7 +42,6 @@ import scala.math.Ordering * * @author Martin Odersky * @author Matthias Zenger - * @version 1.0, 16/07/2003 * @since 2.8 * * @define Coll `Seq` diff --git a/src/library/scala/collection/SeqProxy.scala b/src/library/scala/collection/SeqProxy.scala index f2b39c7b55ff..d1f8432f1835 100644 --- a/src/library/scala/collection/SeqProxy.scala +++ b/src/library/scala/collection/SeqProxy.scala @@ -15,7 +15,6 @@ package collection * all calls to a different sequence object. * * @author Martin Odersky - * @version 2.8 * @since 2.8 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala index b493c707968e..2db0b27e08c5 100644 --- a/src/library/scala/collection/SeqProxyLike.scala +++ b/src/library/scala/collection/SeqProxyLike.scala @@ -20,7 +20,6 @@ import generic._ * all calls to a different sequence. * * @author Martin Odersky - * @version 2.8 * @since 2.8 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala index 1fbcb6531e1d..b6a12bc1ca20 100644 --- a/src/library/scala/collection/SeqViewLike.scala +++ b/src/library/scala/collection/SeqViewLike.scala @@ -21,7 +21,6 @@ import Seq.fill * `apply` methods. * * @author Martin Odersky - * @version 2.8 * @since 2.8 * @tparam A the element type of the view * @tparam Coll the type of the underlying collection containing the elements. diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala index 440452ce990c..dca877560e76 100644 --- a/src/library/scala/collection/SetLike.scala +++ b/src/library/scala/collection/SetLike.scala @@ -48,7 +48,6 @@ import parallel.ParSet * @tparam This the type of the set itself. 
* * @author Martin Odersky - * @version 2.8 * * @define coll set * @define Coll Set diff --git a/src/library/scala/collection/SetProxy.scala b/src/library/scala/collection/SetProxy.scala index 4a3fc17a78b1..8b6e9d007fb8 100644 --- a/src/library/scala/collection/SetProxy.scala +++ b/src/library/scala/collection/SetProxy.scala @@ -15,7 +15,7 @@ package collection * * @author Matthias Zenger * @author Martin Odersky - * @version 2.0, 01/01/2007 + * @since 2.0 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") trait SetProxy[A] extends Set[A] with SetProxyLike[A, Set[A]] diff --git a/src/library/scala/collection/SetProxyLike.scala b/src/library/scala/collection/SetProxyLike.scala index fa23fe545026..e191d1fe67ff 100644 --- a/src/library/scala/collection/SetProxyLike.scala +++ b/src/library/scala/collection/SetProxyLike.scala @@ -15,7 +15,7 @@ package collection * all calls to a different set. * * @author Martin Odersky - * @version 2.8 + * @since 2.8 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") trait SetProxyLike[A, +This <: SetLike[A, This] with Set[A]] extends SetLike[A, This] with IterableProxyLike[A, This] { diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala index 36e7eae79c77..b8f50f2725b6 100644 --- a/src/library/scala/collection/SortedMap.scala +++ b/src/library/scala/collection/SortedMap.scala @@ -16,7 +16,6 @@ import mutable.Builder * * @author Sean McDirmid * @author Martin Odersky - * @version 2.8 * @since 2.4 */ trait SortedMap[A, +B] extends Map[A, B] with SortedMapLike[A, B, SortedMap[A, B]] { diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala index cf5e9c36c759..900d3b8608f1 100644 --- a/src/library/scala/collection/SortedMapLike.scala +++ b/src/library/scala/collection/SortedMapLike.scala @@ -17,7 +17,6 @@ import generic._ * * @author Sean McDirmid * @author Martin Odersky - * @version 2.8 * @since 2.8 */ trait SortedMapLike[A, +B, +This <: SortedMapLike[A, B, This] with SortedMap[A, B]] extends Sorted[A, This] with MapLike[A, B, This] { diff --git a/src/library/scala/collection/SortedSet.scala b/src/library/scala/collection/SortedSet.scala index 0fa5ce09666a..2618dc5d1ebd 100644 --- a/src/library/scala/collection/SortedSet.scala +++ b/src/library/scala/collection/SortedSet.scala @@ -15,7 +15,6 @@ import generic._ * * @author Sean McDirmid * @author Martin Odersky - * @version 2.8 * @since 2.4 */ trait SortedSet[A] extends Set[A] with SortedSetLike[A, SortedSet[A]] { diff --git a/src/library/scala/collection/SortedSetLike.scala b/src/library/scala/collection/SortedSetLike.scala index c38ea1f3ce6b..24e285b6475a 100644 --- a/src/library/scala/collection/SortedSetLike.scala +++ b/src/library/scala/collection/SortedSetLike.scala @@ -15,7 +15,6 @@ import generic._ * * @author Sean McDirmid * @author Martin Odersky - * @version 2.8 * @since 2.8 */ trait SortedSetLike[A, +This <: SortedSet[A] with SortedSetLike[A, This]] extends Sorted[A, This] with SetLike[A, This] { diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala index bf6c9401374d..0bb4c6c9c726 100644 --- a/src/library/scala/collection/TraversableLike.scala +++ b/src/library/scala/collection/TraversableLike.scala @@ -58,7 +58,6 @@ import scala.language.higherKinds * order they were inserted into the `HashMap`. 
* * @author Martin Odersky - * @version 2.8 * @since 2.8 * @tparam A the element type of the collection * @tparam Repr the type of the actual collection containing the elements. diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala index b87fcd166e75..f65eb877866e 100644 --- a/src/library/scala/collection/TraversableOnce.scala +++ b/src/library/scala/collection/TraversableOnce.scala @@ -21,7 +21,6 @@ import scala.reflect.ClassTag * * @author Martin Odersky * @author Paul Phillips - * @version 2.8 * @since 2.8 * * @define coll traversable or iterator diff --git a/src/library/scala/collection/TraversableProxy.scala b/src/library/scala/collection/TraversableProxy.scala index 0c7219c5f943..1d0fdfcb4495 100644 --- a/src/library/scala/collection/TraversableProxy.scala +++ b/src/library/scala/collection/TraversableProxy.scala @@ -18,7 +18,6 @@ package collection * all calls to a different traversable object * * @author Martin Odersky - * @version 2.8 * @since 2.8 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.3") diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala index c8b641f88bab..2a6e3c29bdfb 100644 --- a/src/library/scala/collection/TraversableProxyLike.scala +++ b/src/library/scala/collection/TraversableProxyLike.scala @@ -21,7 +21,6 @@ import scala.reflect.ClassTag * all calls to a different Traversable object. * * @author Martin Odersky - * @version 2.8 * @since 2.8 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala index 5bc117ecdf40..25122d6186ab 100644 --- a/src/library/scala/collection/TraversableViewLike.scala +++ b/src/library/scala/collection/TraversableViewLike.scala @@ -61,7 +61,6 @@ trait ViewMkString[+A] { * All views for traversable collections are defined by creating a new `foreach` method. * * @author Martin Odersky - * @version 2.8 * @since 2.8 * @tparam A the element type of the view * @tparam Coll the type of the underlying collection containing the elements. diff --git a/src/library/scala/collection/generic/BitSetFactory.scala b/src/library/scala/collection/generic/BitSetFactory.scala index 2e3aae31ac9d..e44075f655a8 100644 --- a/src/library/scala/collection/generic/BitSetFactory.scala +++ b/src/library/scala/collection/generic/BitSetFactory.scala @@ -20,7 +20,7 @@ import mutable.Builder * @define factoryInfo * This object provides a set of operations to create `$Coll` values. * @author Martin Odersky - * @version 2.8 + * @since 2.8 * @define canBuildFromInfo * The standard `CanBuildFrom` instance for $Coll objects. * @see CanBuildFrom diff --git a/src/library/scala/collection/generic/Clearable.scala b/src/library/scala/collection/generic/Clearable.scala index 3c496051c4cb..e3922f791f6e 100644 --- a/src/library/scala/collection/generic/Clearable.scala +++ b/src/library/scala/collection/generic/Clearable.scala @@ -13,9 +13,8 @@ package generic /** This trait forms part of collections that can be cleared * with a clear() call. 
* - * @author Paul Phillips - * @version 2.10 - * @since 2.10 + * @author Paul Phillips + * @since 2.10 * @define coll clearable collection * @define Coll `Clearable` */ diff --git a/src/library/scala/collection/generic/GenMapFactory.scala b/src/library/scala/collection/generic/GenMapFactory.scala index ae3150115fd2..0d27e980aa16 100644 --- a/src/library/scala/collection/generic/GenMapFactory.scala +++ b/src/library/scala/collection/generic/GenMapFactory.scala @@ -20,7 +20,6 @@ import scala.language.higherKinds * @define factoryInfo * This object provides a set of operations needed to create `$Coll` values. * @author Martin Odersky - * @version 2.8 * @since 2.8 * @define canBuildFromInfo * The standard `CanBuildFrom` instance for `$Coll` objects. diff --git a/src/library/scala/collection/generic/GenSetFactory.scala b/src/library/scala/collection/generic/GenSetFactory.scala index 65404a49918c..d26cc20db2da 100644 --- a/src/library/scala/collection/generic/GenSetFactory.scala +++ b/src/library/scala/collection/generic/GenSetFactory.scala @@ -22,8 +22,7 @@ import scala.language.higherKinds * @define factoryInfo * This object provides a set of operations needed to create `$Coll` values. * @author Martin Odersky - * @version 2.8 - * @since 2.8 + * @since 2.8 * @define canBuildFromInfo * The standard `CanBuildFrom` instance for `$Coll` objects. * @see CanBuildFrom diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala index 7c2aa5615c2e..65528bdbb326 100644 --- a/src/library/scala/collection/generic/GenTraversableFactory.scala +++ b/src/library/scala/collection/generic/GenTraversableFactory.scala @@ -24,7 +24,6 @@ import scala.language.higherKinds * @define factoryInfo * This object provides a set of operations to create `$Coll` values. * @author Martin Odersky - * @version 2.8 * @define canBuildFromInfo * The standard `CanBuildFrom` instance for $Coll objects. * @see CanBuildFrom diff --git a/src/library/scala/collection/generic/Growable.scala b/src/library/scala/collection/generic/Growable.scala index a223c0c8a81f..a3f27c806f62 100644 --- a/src/library/scala/collection/generic/Growable.scala +++ b/src/library/scala/collection/generic/Growable.scala @@ -17,7 +17,6 @@ import scala.annotation.tailrec * a `clear` method. * * @author Martin Odersky - * @version 2.8 * @since 2.8 * @define coll growable collection * @define Coll `Growable` diff --git a/src/library/scala/collection/generic/ImmutableMapFactory.scala b/src/library/scala/collection/generic/ImmutableMapFactory.scala index 7d857bf1b4f6..87a1f0c6f1bb 100644 --- a/src/library/scala/collection/generic/ImmutableMapFactory.scala +++ b/src/library/scala/collection/generic/ImmutableMapFactory.scala @@ -15,7 +15,6 @@ import scala.language.higherKinds /** A template for companion objects of `immutable.Map` and subclasses thereof. 
* @author Martin Odersky - * @version 2.8 * @since 2.8 */ abstract class ImmutableMapFactory[CC[A, +B] <: immutable.Map[A, B] with immutable.MapLike[A, B, CC[A, B]]] extends MapFactory[CC] diff --git a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala index 730e58a5275c..61ab647b7817 100644 --- a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala +++ b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala @@ -22,7 +22,6 @@ import scala.language.higherKinds * @define factoryInfo * This object provides a set of operations needed to create sorted maps of type `$Coll`. * @author Martin Odersky - * @version 2.8 * @define sortedMapCanBuildFromInfo * The standard `CanBuildFrom` instance for sorted maps */ diff --git a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala index 1fd4a8c99d93..fd41d17b7294 100644 --- a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala +++ b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala @@ -22,7 +22,6 @@ import scala.language.higherKinds * @define factoryInfo * This object provides a set of operations needed to create sorted sets of type `$Coll`. * @author Martin Odersky - * @version 2.8 * @define sortedSetCanBuildFromInfo * The standard `CanBuildFrom` instance for sorted sets */ diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala index 7f6eb6e131be..f97215fbf9d1 100644 --- a/src/library/scala/collection/generic/IterableForwarder.scala +++ b/src/library/scala/collection/generic/IterableForwarder.scala @@ -23,7 +23,6 @@ import scala.collection._ * target="ContentFrame">`IterableProxy`. * * @author Martin Odersky - * @version 2.8 * @since 2.8 */ @deprecated("forwarding is inherently unreliable since it is not automated and methods can be forgotten", "2.11.0") diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala index 255d6953030e..7c2d660de2f3 100644 --- a/src/library/scala/collection/generic/MapFactory.scala +++ b/src/library/scala/collection/generic/MapFactory.scala @@ -19,7 +19,6 @@ import scala.language.higherKinds * @define factoryInfo * This object provides a set of operations needed to create `$Coll` values. * @author Martin Odersky - * @version 2.8 * @since 2.8 * @define canBuildFromInfo * The standard `CanBuildFrom` instance for `$Coll` objects. diff --git a/src/library/scala/collection/generic/MutableMapFactory.scala b/src/library/scala/collection/generic/MutableMapFactory.scala index 14c5b6bac3ce..70d03035949a 100644 --- a/src/library/scala/collection/generic/MutableMapFactory.scala +++ b/src/library/scala/collection/generic/MutableMapFactory.scala @@ -17,7 +17,6 @@ import scala.language.higherKinds /** A template for companion objects of `mutable.Map` and subclasses thereof. 
* @author Martin Odersky - * @version 2.8 * @since 2.8 */ abstract class MutableMapFactory[CC[A, B] <: mutable.Map[A, B] with mutable.MapLike[A, B, CC[A, B]]] diff --git a/src/library/scala/collection/generic/SeqForwarder.scala b/src/library/scala/collection/generic/SeqForwarder.scala index cee93d2ddbcf..a7d4912bf701 100644 --- a/src/library/scala/collection/generic/SeqForwarder.scala +++ b/src/library/scala/collection/generic/SeqForwarder.scala @@ -22,7 +22,6 @@ import scala.collection.immutable.Range * The above methods are forwarded by subclass `SeqProxy`. * * @author Martin Odersky - * @version 2.8 * @since 2.8 */ @deprecated("forwarding is inherently unreliable since it is not automated and new methods can be forgotten", "2.11.0") diff --git a/src/library/scala/collection/generic/Shrinkable.scala b/src/library/scala/collection/generic/Shrinkable.scala index dea5bb7217c5..682d7d3ed66b 100644 --- a/src/library/scala/collection/generic/Shrinkable.scala +++ b/src/library/scala/collection/generic/Shrinkable.scala @@ -14,7 +14,6 @@ package generic * using a `-=` operator. * * @author Martin Odersky - * @version 2.8 * @since 2.8 * @define coll shrinkable collection * @define Coll `Shrinkable` diff --git a/src/library/scala/collection/generic/Subtractable.scala b/src/library/scala/collection/generic/Subtractable.scala index 32a900029654..9365de7949b5 100644 --- a/src/library/scala/collection/generic/Subtractable.scala +++ b/src/library/scala/collection/generic/Subtractable.scala @@ -18,7 +18,6 @@ package generic * @tparam A the type of the elements of the $coll. * @tparam Repr the type of the $coll itself * @author Martin Odersky - * @version 2.8 * @since 2.8 * @define coll collection * @define Coll Subtractable diff --git a/src/library/scala/collection/generic/TraversableFactory.scala b/src/library/scala/collection/generic/TraversableFactory.scala index ad6d8fd1982f..c56865e429f3 100644 --- a/src/library/scala/collection/generic/TraversableFactory.scala +++ b/src/library/scala/collection/generic/TraversableFactory.scala @@ -24,7 +24,6 @@ import scala.language.higherKinds * @define factoryInfo * This object provides a set of operations to create `$Coll` values. * @author Martin Odersky - * @version 2.8 * @define canBuildFromInfo * The standard `CanBuildFrom` instance for $Coll objects. * @see CanBuildFrom diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala index b94507d6ef5b..2bf995750b31 100644 --- a/src/library/scala/collection/generic/TraversableForwarder.scala +++ b/src/library/scala/collection/generic/TraversableForwarder.scala @@ -24,7 +24,6 @@ import scala.reflect.ClassTag * All calls creating a new traversable of the same kind. 
* * @author Martin Odersky - * @version 2.8 * @since 2.8 */ @deprecated("forwarding is inherently unreliable since it is not automated and new methods can be forgotten", "2.11.0") diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index dad24c172c69..c3217385d067 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -23,7 +23,6 @@ import parallel.immutable.ParHashMap * * @author Martin Odersky * @author Tiark Rompf - * @version 2.8 * @since 2.3 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#hash-tries "Scala's Collection Library overview"]] * section on `Hash Tries` for more information. diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index 9db79c911da6..c6ee0e152ebe 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -25,7 +25,6 @@ import scala.annotation.tailrec * * @author Martin Odersky * @author Tiark Rompf - * @version 2.8 * @since 2.3 * @define Coll `immutable.HashSet` * @define coll immutable hash set diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index 550b987cb606..0f13e34358e2 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -63,7 +63,6 @@ import java.io.{ObjectOutputStream, ObjectInputStream} * each reference to it. I.e. structural sharing is lost after serialization/deserialization. * * @author Martin Odersky and others - * @version 2.8 * @since 1.0 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#lists "Scala's Collection Library overview"]] * section on `Lists` for more information. @@ -418,7 +417,6 @@ sealed abstract class List[+A] extends AbstractSeq[A] /** The empty list. * * @author Martin Odersky - * @version 1.0, 15/07/2003 * @since 2.8 */ @SerialVersionUID(0 - 8256821097970055419L) @@ -440,7 +438,6 @@ case object Nil extends List[Nothing] { * @param tl the list containing the remaining elements of this list after the first one. * @tparam B the type of the list elements. 
* @author Martin Odersky - * @version 1.0, 15/07/2003 * @since 2.8 */ @SerialVersionUID(509929039250432923L) // value computed by serialver for 2.11.2, annotation added in 2.11.4 diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala index ffad47878516..2e6325c027c4 100644 --- a/src/library/scala/collection/immutable/ListMap.scala +++ b/src/library/scala/collection/immutable/ListMap.scala @@ -57,7 +57,6 @@ object ListMap extends ImmutableMapFactory[ListMap] { * * @author Matthias Zenger * @author Martin Odersky - * @version 2.0, 01/01/2007 * @since 1 * @define Coll ListMap * @define coll list map diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala index d9795e9161f0..b63f575a0fbd 100644 --- a/src/library/scala/collection/immutable/ListSet.scala +++ b/src/library/scala/collection/immutable/ListSet.scala @@ -52,7 +52,6 @@ object ListSet extends ImmutableSetFactory[ListSet] { * @tparam A the type of the elements contained in this list set * * @author Matthias Zenger - * @version 1.0, 09/07/2003 * @since 1 * @define Coll ListSet * @define coll list set diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala index 5867383b522e..56c412ed3d5f 100644 --- a/src/library/scala/collection/immutable/MapLike.scala +++ b/src/library/scala/collection/immutable/MapLike.scala @@ -41,7 +41,6 @@ import parallel.immutable.ParMap * @tparam This The type of the actual map implementation. * * @author Martin Odersky - * @version 2.8 * @since 2.8 * @define Coll immutable.Map * @define coll immutable map diff --git a/src/library/scala/collection/immutable/MapProxy.scala b/src/library/scala/collection/immutable/MapProxy.scala index 0d1c17d4b337..9538dfbea521 100644 --- a/src/library/scala/collection/immutable/MapProxy.scala +++ b/src/library/scala/collection/immutable/MapProxy.scala @@ -20,7 +20,6 @@ package immutable * dynamically using object composition and forwarding. * * @author Matthias Zenger, Martin Odersky - * @version 2.0, 31/12/2006 * @since 2.8 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala index f1b831bf7594..36491c9404c1 100644 --- a/src/library/scala/collection/immutable/NumericRange.scala +++ b/src/library/scala/collection/immutable/NumericRange.scala @@ -30,7 +30,6 @@ package immutable * }}} * * @author Paul Phillips - * @version 2.8 * @define Coll `NumericRange` * @define coll numeric range * @define mayNotTerminateInf diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala index aae80cf148e7..67d5c8ef7501 100644 --- a/src/library/scala/collection/immutable/Queue.scala +++ b/src/library/scala/collection/immutable/Queue.scala @@ -25,7 +25,6 @@ import mutable.{ Builder, ListBuffer } * `n` remove operations with `O(1)` cost are guaranteed. Removing an item is on average `O(1)`. * * @author Erik Stenman - * @version 1.0, 08/07/2003 * @since 1 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable-queues "Scala's Collection Library overview"]] * section on `Immutable Queues` for more information. 
diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index 9f490f3e86b3..eb8a484a81b3 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -44,7 +44,6 @@ import scala.collection.parallel.immutable.ParRange * * @author Martin Odersky * @author Paul Phillips - * @version 2.8 * @since 2.5 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#ranges "Scala's Collection Library overview"]] * section on `Ranges` for more information. diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala index 0f3bd2e195b7..2a954cd63fe9 100644 --- a/src/library/scala/collection/immutable/SortedMap.scala +++ b/src/library/scala/collection/immutable/SortedMap.scala @@ -22,7 +22,6 @@ import mutable.Builder * * @author Sean McDirmid * @author Martin Odersky - * @version 2.8 * @since 2.4 * @define Coll immutable.SortedMap * @define coll immutable sorted map diff --git a/src/library/scala/collection/immutable/SortedSet.scala b/src/library/scala/collection/immutable/SortedSet.scala index 75b2b1f4dca2..0607e5a557fb 100644 --- a/src/library/scala/collection/immutable/SortedSet.scala +++ b/src/library/scala/collection/immutable/SortedSet.scala @@ -19,7 +19,6 @@ import generic._ * * @author Sean McDirmid * @author Martin Odersky - * @version 2.8 * @since 2.4 * @define Coll `immutable.SortedSet` * @define coll immutable sorted set diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala index a4f75ea41911..51a59174697a 100644 --- a/src/library/scala/collection/immutable/Stack.scala +++ b/src/library/scala/collection/immutable/Stack.scala @@ -33,7 +33,6 @@ object Stack extends SeqFactory[Stack] { * @tparam A the type of the elements contained in this stack. * * @author Matthias Zenger - * @version 1.0, 10/07/2003 * @since 1 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#immutable-stacks "Scala's Collection Library overview"]] * section on `Immutable stacks` for more information. diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index 49e919cd916a..4900cd9c20d7 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -186,7 +186,6 @@ import scala.language.implicitConversions * @tparam A the type of the elements contained in this stream. * * @author Martin Odersky, Matthias Zenger - * @version 1.1 08/08/03 * @since 2.8 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#streams "Scala's Collection Library overview"]] * section on `Streams` for more information. @@ -1070,7 +1069,6 @@ final class StreamIterator[+A] private() extends AbstractIterator[A] with Iterat * The object `Stream` provides helper functions to manipulate streams. 
* * @author Martin Odersky, Matthias Zenger - * @version 1.1 08/08/03 * @since 2.8 */ object Stream extends SeqFactory[Stream] { diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 05e04bb514f1..be7d705f5db7 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -32,7 +32,6 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] { * * @author Erik Stenman * @author Matthias Zenger - * @version 1.1, 03/05/2004 * @since 1 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] * section on `Red-Black Trees` for more information. diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index af3deb50a29c..a70599621d19 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -36,7 +36,6 @@ object TreeSet extends ImmutableSortedSetFactory[TreeSet] { * @param ordering the implicit ordering used to compare objects of type `A` * * @author Martin Odersky - * @version 2.0, 02/01/2007 * @since 1 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-immutable-collection-classes.html#red-black-trees "Scala's Collection Library overview"]] * section on `Red-Black Trees` for more information. diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala index 2e7feaa37e9b..382da333c216 100644 --- a/src/library/scala/collection/mutable/ArrayBuffer.scala +++ b/src/library/scala/collection/mutable/ArrayBuffer.scala @@ -22,7 +22,6 @@ import parallel.mutable.ParArray * * @author Matthias Zenger * @author Martin Odersky - * @version 2.8 * @since 1 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#array-buffers "Scala's Collection Library overview"]] * section on `Array Buffers` for more information. diff --git a/src/library/scala/collection/mutable/ArrayLike.scala b/src/library/scala/collection/mutable/ArrayLike.scala index 80b38a847a55..d923065c4b4d 100644 --- a/src/library/scala/collection/mutable/ArrayLike.scala +++ b/src/library/scala/collection/mutable/ArrayLike.scala @@ -18,7 +18,6 @@ package mutable * @tparam Repr the type of the actual collection containing the elements. * * @define Coll `ArrayLike` - * @version 2.8 * @since 2.8 */ trait ArrayLike[A, +Repr] extends Any with IndexedSeqOptimized[A, Repr] { self => diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala index 22c98cd3c339..99afcd8c8164 100644 --- a/src/library/scala/collection/mutable/ArraySeq.scala +++ b/src/library/scala/collection/mutable/ArraySeq.scala @@ -20,7 +20,6 @@ import parallel.mutable.ParArray * primitive types are boxed. * * @author Martin Odersky - * @version 2.8 * @since 2.8 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#array-sequences "Scala's Collection Library overview"]] * section on `Array Sequences` for more information. 
diff --git a/src/library/scala/collection/mutable/Buffer.scala b/src/library/scala/collection/mutable/Buffer.scala index 7ec7b0633363..d2d1b1b907ae 100644 --- a/src/library/scala/collection/mutable/Buffer.scala +++ b/src/library/scala/collection/mutable/Buffer.scala @@ -21,7 +21,6 @@ import generic._ * * @author Matthias Zenger * @author Martin Odersky - * @version 2.8 * @since 1 * * @tparam A type of the elements contained in this buffer. diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala index 4b3cad0ba1e3..d96182d12413 100644 --- a/src/library/scala/collection/mutable/BufferLike.scala +++ b/src/library/scala/collection/mutable/BufferLike.scala @@ -30,7 +30,6 @@ import scala.annotation.migration * * @author Martin Odersky * @author Matthias Zenger - * @version 2.8 * @since 2.8 * @define buffernote @note * This trait provides most of the operations of a `Buffer` independently of its representation. diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala index 60f0e297466d..6af0256e2d95 100644 --- a/src/library/scala/collection/mutable/BufferProxy.scala +++ b/src/library/scala/collection/mutable/BufferProxy.scala @@ -18,7 +18,6 @@ import script._ * dynamically using object composition and forwarding. * * @author Matthias Zenger - * @version 1.0, 16/04/2004 * @since 1 * * @tparam A type of the elements the buffer proxy contains. diff --git a/src/library/scala/collection/mutable/DefaultMapModel.scala b/src/library/scala/collection/mutable/DefaultMapModel.scala index 7f832c0766a9..ef6904ea0955 100644 --- a/src/library/scala/collection/mutable/DefaultMapModel.scala +++ b/src/library/scala/collection/mutable/DefaultMapModel.scala @@ -16,7 +16,6 @@ package mutable * class in terms of three functions: `findEntry`, `addEntry`, and `entries`. * * @author Matthias Zenger - * @version 1.0, 08/07/2003 * @since 1 */ @deprecated("this trait will be removed", "2.11.0") diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala index 141468e17a49..5af84983d7e3 100644 --- a/src/library/scala/collection/mutable/DoubleLinkedList.scala +++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala @@ -19,7 +19,6 @@ import generic._ * * @author Matthias Zenger * @author Martin Odersky - * @version 2.8 * @since 1 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#double-linked-lists "Scala's Collection Library overview"]] * section on `Double Linked Lists` for more information. 
diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala index e85ef05319ca..212569804003 100644 --- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala +++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala @@ -47,7 +47,6 @@ import scala.annotation.migration * }}} * * @author Matthias Zenger - * @version 1.0, 08/07/2003 * @since 2.8 * * @tparam A type of the elements contained in the double linked list diff --git a/src/library/scala/collection/mutable/GrowingBuilder.scala b/src/library/scala/collection/mutable/GrowingBuilder.scala index 27d554d98e44..3354a1978f92 100644 --- a/src/library/scala/collection/mutable/GrowingBuilder.scala +++ b/src/library/scala/collection/mutable/GrowingBuilder.scala @@ -18,7 +18,6 @@ import generic._ * GrowableBuilders can produce only a single instance of the collection they are growing. * * @author Paul Phillips - * @version 2.8 * @since 2.8 * * @define Coll `GrowingBuilder` diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala index 05f078098adb..41ceeceeca3e 100644 --- a/src/library/scala/collection/mutable/HashSet.scala +++ b/src/library/scala/collection/mutable/HashSet.scala @@ -19,7 +19,6 @@ import scala.collection.parallel.mutable.ParHashSet * * @author Matthias Zenger * @author Martin Odersky - * @version 2.0, 31/12/2006 * @since 1 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#hash-tables "Scala's Collection Library overview"]] * section on `Hash Tables` for more information. diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala index 7ee1987e4621..bb95f476f50a 100644 --- a/src/library/scala/collection/mutable/HashTable.scala +++ b/src/library/scala/collection/mutable/HashTable.scala @@ -30,7 +30,6 @@ import scala.util.hashing.byteswap32 * * @author Matthias Zenger * @author Martin Odersky - * @version 2.0, 31/12/2006 * @since 1 * * @tparam A type of the elements contained in this hash table. diff --git a/src/library/scala/collection/mutable/History.scala b/src/library/scala/collection/mutable/History.scala index 13e2f32225e5..776806a0dcab 100644 --- a/src/library/scala/collection/mutable/History.scala +++ b/src/library/scala/collection/mutable/History.scala @@ -19,7 +19,6 @@ package mutable * up to maximum number of `maxHistory` events. * * @author Matthias Zenger - * @version 1.0, 08/07/2003 * @since 1 * * @tparam Evt Type of events. 
diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala index 7ab4dd2d9df2..355d5092738f 100644 --- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala +++ b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala @@ -22,7 +22,6 @@ import scala.annotation.migration * * @author Matthias Zenger * @author Martin Odersky - * @version 2.0, 01/01/2007 * @since 1 */ @deprecated("adaptors are inherently unreliable and prone to performance problems", "2.11.0") diff --git a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala index aa21c4cc112d..93131d12c985 100644 --- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala +++ b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala @@ -17,7 +17,6 @@ package mutable * return the representation of an empty set. * * @author Matthias Zenger - * @version 1.0, 21/07/2003 * @since 1 */ @deprecated("adaptors are inherently unreliable and prone to performance problems", "2.11.0") diff --git a/src/library/scala/collection/mutable/IndexedSeqLike.scala b/src/library/scala/collection/mutable/IndexedSeqLike.scala index 4cf794c32fac..f902e10a5c8a 100644 --- a/src/library/scala/collection/mutable/IndexedSeqLike.scala +++ b/src/library/scala/collection/mutable/IndexedSeqLike.scala @@ -29,7 +29,6 @@ package mutable * @define coll mutable indexed sequence * @define indexedSeqInfo * @author Martin Odersky - * @version 2.8 * @since 2.8 * @define willNotTerminateInf * @define mayNotTerminateInf diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala index b525baaf5f87..91079b937807 100644 --- a/src/library/scala/collection/mutable/IndexedSeqView.scala +++ b/src/library/scala/collection/mutable/IndexedSeqView.scala @@ -22,7 +22,6 @@ import TraversableView.NoBuilder * others will just yield a plain indexed sequence of type `collection.IndexedSeq`. * Because this is a leaf class there is no associated `Like` class. * @author Martin Odersky - * @version 2.8 * @since 2.8 * @tparam A the element type of the view * @tparam Coll the type of the underlying collection containing the elements. diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala index f00cbd90dc7a..fb91e1629a1f 100644 --- a/src/library/scala/collection/mutable/LinkedHashSet.scala +++ b/src/library/scala/collection/mutable/LinkedHashSet.scala @@ -19,7 +19,6 @@ import generic._ * @author Matthias Zenger * @author Martin Odersky * @author Pavel Pavlov - * @version 2.0, 31/12/2006 * @since 1 * * @tparam A the type of the elements contained in this set. diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala index d21a7a5446af..9b815d0bbc93 100644 --- a/src/library/scala/collection/mutable/LinkedList.scala +++ b/src/library/scala/collection/mutable/LinkedList.scala @@ -32,7 +32,6 @@ import generic._ * * @author Matthias Zenger * @author Martin Odersky - * @version 2.8 * @since 1 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#linked-lists "Scala's Collection Library overview"]] * section on `Linked Lists` for more information. 
diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala index 27c4466c9968..2caef41dcbb1 100644 --- a/src/library/scala/collection/mutable/LinkedListLike.scala +++ b/src/library/scala/collection/mutable/LinkedListLike.scala @@ -21,7 +21,6 @@ import scala.annotation.tailrec * * @author Matthias Zenger * @author Martin Odersky - * @version 1.0, 08/07/2003 * @since 2.8 * * @tparam A type of the elements contained in the linked list diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala index 3f7b7ab16e37..145431db25ed 100644 --- a/src/library/scala/collection/mutable/ListBuffer.scala +++ b/src/library/scala/collection/mutable/ListBuffer.scala @@ -19,7 +19,6 @@ import java.io.{ObjectOutputStream, ObjectInputStream} * * @author Matthias Zenger * @author Martin Odersky - * @version 2.8 * @since 1 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#list-buffers "Scala's Collection Library overview"]] * section on `List Buffers` for more information. diff --git a/src/library/scala/collection/mutable/MapProxy.scala b/src/library/scala/collection/mutable/MapProxy.scala index 63b14d328a94..a43cca6e0ec4 100644 --- a/src/library/scala/collection/mutable/MapProxy.scala +++ b/src/library/scala/collection/mutable/MapProxy.scala @@ -17,7 +17,6 @@ package mutable * dynamically using object composition and forwarding. * * @author Matthias Zenger, Martin Odersky - * @version 2.0, 31/12/2006 * @since 1 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala index ac2ebf31d8bd..b2789041bcc5 100644 --- a/src/library/scala/collection/mutable/MultiMap.scala +++ b/src/library/scala/collection/mutable/MultiMap.scala @@ -51,7 +51,6 @@ package mutable * @define Coll `MultiMap` * @author Matthias Zenger * @author Martin Odersky - * @version 2.8 * @since 1 */ trait MultiMap[A, B] extends Map[A, Set[B]] { diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala index 384b7c3eedae..6ed9c730967b 100644 --- a/src/library/scala/collection/mutable/MutableList.scala +++ b/src/library/scala/collection/mutable/MutableList.scala @@ -19,7 +19,6 @@ import immutable.List * * @author Matthias Zenger * @author Martin Odersky - * @version 2.8 * @since 1 * @define Coll `mutable.MutableList` * @define coll mutable list diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala index 53d26f4c6f00..5bc03c2eff4f 100644 --- a/src/library/scala/collection/mutable/ObservableBuffer.scala +++ b/src/library/scala/collection/mutable/ObservableBuffer.scala @@ -20,7 +20,6 @@ import script._ * events of the type `Message`. 
* * @author Matthias Zenger - * @version 1.0, 08/07/2003 * @since 1 */ @deprecated("observables are deprecated because scripting is deprecated", "2.11.0") diff --git a/src/library/scala/collection/mutable/ObservableMap.scala b/src/library/scala/collection/mutable/ObservableMap.scala index 421302b70036..38f7ed2d76a4 100644 --- a/src/library/scala/collection/mutable/ObservableMap.scala +++ b/src/library/scala/collection/mutable/ObservableMap.scala @@ -22,7 +22,6 @@ import script._ * * @author Matthias Zenger * @author Martin Odersky - * @version 2.0, 31/12/2006 * @since 1 */ @deprecated("observables are deprecated because scripting is deprecated", "2.11.0") diff --git a/src/library/scala/collection/mutable/ObservableSet.scala b/src/library/scala/collection/mutable/ObservableSet.scala index eb55a1f822a5..ea23426f327e 100644 --- a/src/library/scala/collection/mutable/ObservableSet.scala +++ b/src/library/scala/collection/mutable/ObservableSet.scala @@ -20,7 +20,6 @@ import script._ * events of the type `Message`. * * @author Matthias Zenger - * @version 1.0, 08/07/2003 * @since 1 */ @deprecated("observables are deprecated because scripting is deprecated", "2.11.0") diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala index ed43ef6db96c..ce8bb1a3c425 100644 --- a/src/library/scala/collection/mutable/PriorityQueue.scala +++ b/src/library/scala/collection/mutable/PriorityQueue.scala @@ -36,7 +36,6 @@ import generic._ * @param ord implicit ordering used to compare the elements of type `A`. * * @author Matthias Zenger - * @version 1.0, 03/05/2004 * @since 1 * * @define Coll PriorityQueue @@ -357,7 +356,6 @@ object PriorityQueue extends OrderedTraversableFactory[PriorityQueue] { * `Ordered[T]` class. * * @author Matthias Zenger - * @version 1.0, 03/05/2004 * @since 1 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") @@ -442,7 +440,6 @@ sealed abstract class PriorityQueueProxy[A](implicit ord: Ordering[A]) extends P * @param ord implicit ordering used to compared elements of type `A` * * @author Matthias Zenger - * @version 1.0, 03/05/2004 * @since 1 * @define Coll `SynchronizedPriorityQueue` * @define coll synchronized priority queue diff --git a/src/library/scala/collection/mutable/Publisher.scala b/src/library/scala/collection/mutable/Publisher.scala index 22bbea16efc3..883effb8b1e3 100644 --- a/src/library/scala/collection/mutable/Publisher.scala +++ b/src/library/scala/collection/mutable/Publisher.scala @@ -24,7 +24,6 @@ package mutable * * @author Matthias Zenger * @author Martin Odersky - * @version 2.8 * @since 1 */ trait Publisher[Evt] { diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala index fd5fe9aecc9c..9a3b4215d571 100644 --- a/src/library/scala/collection/mutable/Queue.scala +++ b/src/library/scala/collection/mutable/Queue.scala @@ -19,7 +19,6 @@ import generic._ * * @author Matthias Zenger * @author Martin Odersky - * @version 2.8 * @since 1 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#queues "Scala's Collection Library overview"]] * section on `Queues` for more information. 
diff --git a/src/library/scala/collection/mutable/QueueProxy.scala b/src/library/scala/collection/mutable/QueueProxy.scala index e780cc2cf05f..d19942e0d1d9 100644 --- a/src/library/scala/collection/mutable/QueueProxy.scala +++ b/src/library/scala/collection/mutable/QueueProxy.scala @@ -18,7 +18,6 @@ package mutable * @tparam A type of the elements in this queue proxy. * * @author Matthias Zenger - * @version 1.1, 03/05/2004 * @since 1 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") diff --git a/src/library/scala/collection/mutable/RedBlackTree.scala b/src/library/scala/collection/mutable/RedBlackTree.scala index e4793242bfa3..953c0435177b 100644 --- a/src/library/scala/collection/mutable/RedBlackTree.scala +++ b/src/library/scala/collection/mutable/RedBlackTree.scala @@ -9,7 +9,6 @@ import scala.collection.Iterator * The trees implemented in this object are *not* thread safe. * * @author Rui Gonçalves - * @version 2.12 * @since 2.12 */ private[collection] object RedBlackTree { diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala index 50d3513784ad..eb4c2042ed1f 100644 --- a/src/library/scala/collection/mutable/ResizableArray.scala +++ b/src/library/scala/collection/mutable/ResizableArray.scala @@ -19,7 +19,6 @@ import generic._ * * @author Matthias Zenger, Burak Emir * @author Martin Odersky - * @version 2.8 * @since 1 */ trait ResizableArray[A] extends IndexedSeq[A] diff --git a/src/library/scala/collection/mutable/RevertibleHistory.scala b/src/library/scala/collection/mutable/RevertibleHistory.scala index 725a8113ec5e..a8713ace33fa 100644 --- a/src/library/scala/collection/mutable/RevertibleHistory.scala +++ b/src/library/scala/collection/mutable/RevertibleHistory.scala @@ -22,7 +22,6 @@ package mutable * @tparam Pub type of the publisher * * @author Matthias Zenger - * @version 1.0, 08/07/2003 * @since 2.8 */ class RevertibleHistory[Evt <: Undoable, Pub] extends History[Evt, Pub] with Undoable with Serializable { diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala index 81c94133520e..1fde3c3feced 100644 --- a/src/library/scala/collection/mutable/SetLike.scala +++ b/src/library/scala/collection/mutable/SetLike.scala @@ -26,7 +26,6 @@ import parallel.mutable.ParSet * @tparam This the type of the set itself. * * @author Martin Odersky - * @version 2.8 * @since 2.8 * * @define setNote diff --git a/src/library/scala/collection/mutable/SetProxy.scala b/src/library/scala/collection/mutable/SetProxy.scala index 43b6aa57af68..ffed1b775e41 100644 --- a/src/library/scala/collection/mutable/SetProxy.scala +++ b/src/library/scala/collection/mutable/SetProxy.scala @@ -15,7 +15,6 @@ package mutable * dynamically using object composition and forwarding. * * @author Matthias Zenger - * @version 1.1, 09/05/2004 * @since 1 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") diff --git a/src/library/scala/collection/mutable/SortedMap.scala b/src/library/scala/collection/mutable/SortedMap.scala index 806b30e79a52..c7f21a67f86f 100644 --- a/src/library/scala/collection/mutable/SortedMap.scala +++ b/src/library/scala/collection/mutable/SortedMap.scala @@ -11,7 +11,6 @@ import generic._ * @tparam B the type of the values associated with the keys. 
* * @author Rui Gonçalves - * @version 2.12 * @since 2.12 * * @define Coll mutable.SortedMap diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala index 28d50af1f979..ad117762155e 100644 --- a/src/library/scala/collection/mutable/Stack.scala +++ b/src/library/scala/collection/mutable/Stack.scala @@ -43,7 +43,6 @@ object Stack extends SeqFactory[Stack] { * * @author Matthias Zenger * @author Martin Odersky - * @version 2.8 * @since 1 * @see [[http://docs.scala-lang.org/overviews/collections/concrete-mutable-collection-classes.html#stacks "Scala's Collection Library overview"]] * section on `Stacks` for more information. diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala index ac52bbba2197..b8bfa3d3ecb9 100644 --- a/src/library/scala/collection/mutable/StackProxy.scala +++ b/src/library/scala/collection/mutable/StackProxy.scala @@ -16,7 +16,6 @@ package mutable * @tparam A type of the elements in this stack proxy. * * @author Matthias Zenger - * @version 1.0, 10/05/2004 * @since 1 */ @deprecated("proxying is deprecated due to lack of use and compiler-level support", "2.11.0") diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala index d60ae47a5d41..6bfda879555a 100644 --- a/src/library/scala/collection/mutable/StringBuilder.scala +++ b/src/library/scala/collection/mutable/StringBuilder.scala @@ -20,7 +20,6 @@ import immutable.StringLike * * @author Stephane Micheloud * @author Martin Odersky - * @version 2.8 * @since 2.7 * @define Coll `mutable.IndexedSeq` * @define coll string builder diff --git a/src/library/scala/collection/mutable/Subscriber.scala b/src/library/scala/collection/mutable/Subscriber.scala index c2aa9be72ddb..929f44ab3f36 100644 --- a/src/library/scala/collection/mutable/Subscriber.scala +++ b/src/library/scala/collection/mutable/Subscriber.scala @@ -16,7 +16,6 @@ package mutable * * @author Matthias Zenger * @author Martin Odersky - * @version 2.8 * @since 1 */ trait Subscriber[-Evt, -Pub] { diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala index 9c27f8b003f9..7d1984052907 100644 --- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala +++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala @@ -20,7 +20,6 @@ import script._ * @tparam A type of the elements contained in this buffer. * * @author Matthias Zenger - * @version 1.0, 08/07/2003 * @since 1 * @define Coll `SynchronizedBuffer` * @define coll synchronized buffer diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala index 8618798dbd95..0c5f0d969fcb 100644 --- a/src/library/scala/collection/mutable/SynchronizedMap.scala +++ b/src/library/scala/collection/mutable/SynchronizedMap.scala @@ -19,7 +19,6 @@ import scala.annotation.migration * @tparam B type of the values associated with keys. 
* * @author Matthias Zenger, Martin Odersky - * @version 2.0, 31/12/2006 * @since 1 * @define Coll `SynchronizedMap` * @define coll synchronized map diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala index ee44f07df214..f626aa99176b 100644 --- a/src/library/scala/collection/mutable/SynchronizedQueue.scala +++ b/src/library/scala/collection/mutable/SynchronizedQueue.scala @@ -20,7 +20,6 @@ package mutable * @tparam A type of elements contained in this synchronized queue. * * @author Matthias Zenger - * @version 1.0, 03/05/2004 * @since 1 * @define Coll `SynchronizedQueue` * @define coll synchronized queue diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala index 399630eb3c72..399d2112bff1 100644 --- a/src/library/scala/collection/mutable/SynchronizedSet.scala +++ b/src/library/scala/collection/mutable/SynchronizedSet.scala @@ -19,7 +19,6 @@ import script._ * @tparam A type of the elements contained in this synchronized set. * * @author Matthias Zenger - * @version 1.0, 08/07/2003 * @since 1 * @define Coll `SynchronizedSet` * @define coll synchronized set diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala index 2954a1f768b6..1eec10fb124d 100644 --- a/src/library/scala/collection/mutable/SynchronizedStack.scala +++ b/src/library/scala/collection/mutable/SynchronizedStack.scala @@ -20,7 +20,6 @@ package mutable * @tparam A type of the elements contained in this stack. * * @author Matthias Zenger - * @version 1.0, 03/05/2004 * @since 1 * @define Coll `SynchronizedStack` * @define coll synchronized stack diff --git a/src/library/scala/collection/mutable/TreeMap.scala b/src/library/scala/collection/mutable/TreeMap.scala index 14ae7c9c8cc8..ce0db0c40800 100644 --- a/src/library/scala/collection/mutable/TreeMap.scala +++ b/src/library/scala/collection/mutable/TreeMap.scala @@ -28,7 +28,6 @@ object TreeMap extends MutableSortedMapFactory[TreeMap] { * @tparam B the type of the values associated with the keys. * * @author Rui Gonçalves - * @version 2.12 * @since 2.12 * * @define Coll mutable.TreeMap diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala index ada6f145ad42..843bdae45bb9 100644 --- a/src/library/scala/collection/mutable/TreeSet.scala +++ b/src/library/scala/collection/mutable/TreeSet.scala @@ -40,7 +40,6 @@ object TreeSet extends MutableSortedSetFactory[TreeSet] { * @tparam A the type of the keys contained in this tree set. * * @author Rui Gonçalves - * @version 2.12 * @since 2.10 * * @define Coll mutable.TreeSet diff --git a/src/library/scala/collection/mutable/Undoable.scala b/src/library/scala/collection/mutable/Undoable.scala index 482d61816530..cadc87c08516 100644 --- a/src/library/scala/collection/mutable/Undoable.scala +++ b/src/library/scala/collection/mutable/Undoable.scala @@ -17,7 +17,6 @@ package mutable * `undo` which can be used to undo the last operation. 
* * @author Matthias Zenger - * @version 1.0, 08/07/2003 * @since 1 */ trait Undoable { diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala index 0b5ebe7e9a85..5b6ec970b7d2 100644 --- a/src/library/scala/collection/mutable/WrappedArray.scala +++ b/src/library/scala/collection/mutable/WrappedArray.scala @@ -26,7 +26,6 @@ import java.util.Arrays * @tparam T type of the elements in this wrapped array. * * @author Martin Odersky, Stephane Micheloud - * @version 1.0 * @since 2.8 * @define Coll `WrappedArray` * @define coll wrapped array diff --git a/src/library/scala/collection/script/Location.scala b/src/library/scala/collection/script/Location.scala index 8a0b10c331ee..0797b355ec10 100644 --- a/src/library/scala/collection/script/Location.scala +++ b/src/library/scala/collection/script/Location.scala @@ -14,7 +14,6 @@ package script * class [[scala.collection.script.Message]]. * * @author Matthias Zenger - * @version 1.0, 10/05/2004 * @since 2.8 */ diff --git a/src/library/scala/collection/script/Message.scala b/src/library/scala/collection/script/Message.scala index a6ba9d95233e..8912084f6ac6 100644 --- a/src/library/scala/collection/script/Message.scala +++ b/src/library/scala/collection/script/Message.scala @@ -18,7 +18,6 @@ import mutable.ArrayBuffer * `Remove`, `Include`, `Reset`, and `Script`. * * @author Matthias Zenger - * @version 1.0, 08/07/2003 * @since 2.8 */ @deprecated("scripting is deprecated", "2.11.0") @@ -28,7 +27,6 @@ trait Message[+A] * to collection classes. * * @author Matthias Zenger - * @version 1.0, 08/07/2003 */ @deprecated("scripting is deprecated", "2.11.0") case class Include[+A](location: Location, elem: A) extends Message[A] { @@ -39,7 +37,6 @@ case class Include[+A](location: Location, elem: A) extends Message[A] { * of elements from collection classes. * * @author Matthias Zenger - * @version 1.0, 08/07/2003 */ @deprecated("scripting is deprecated", "2.11.0") case class Update[+A](location: Location, elem: A) extends Message[A] { @@ -50,7 +47,6 @@ case class Update[+A](location: Location, elem: A) extends Message[A] { * from collection classes. * * @author Matthias Zenger - * @version 1.0, 08/07/2003 */ @deprecated("scripting is deprecated", "2.11.0") case class Remove[+A](location: Location, elem: A) extends Message[A] { @@ -60,7 +56,6 @@ case class Remove[+A](location: Location, elem: A) extends Message[A] { /** This command refers to reset operations. * * @author Matthias Zenger - * @version 1.0, 08/07/2003 */ @deprecated("scripting is deprecated", "2.11.0") case class Reset[+A]() extends Message[A] @@ -69,7 +64,6 @@ case class Reset[+A]() extends Message[A] * of a sequence of other messages. * * @author Matthias Zenger - * @version 1.0, 10/05/2004 */ @deprecated("scripting is deprecated", "2.11.0") class Script[A] extends ArrayBuffer[Message[A]] with Message[A] { diff --git a/src/library/scala/collection/script/Scriptable.scala b/src/library/scala/collection/script/Scriptable.scala index 8965286b0db0..840f2b980368 100644 --- a/src/library/scala/collection/script/Scriptable.scala +++ b/src/library/scala/collection/script/Scriptable.scala @@ -14,7 +14,6 @@ package script * objects of that class. 
* * @author Matthias Zenger - * @version 1.0, 09/05/2004 * @since 2.8 */ @deprecated("scripting is deprecated", "2.11.0") diff --git a/src/library/scala/concurrent/Channel.scala b/src/library/scala/concurrent/Channel.scala index 89ad7d8c0e93..8a2e69192f62 100644 --- a/src/library/scala/concurrent/Channel.scala +++ b/src/library/scala/concurrent/Channel.scala @@ -15,7 +15,6 @@ package scala.concurrent * * @tparam A type of data exchanged * @author Martin Odersky - * @version 1.0, 10/03/2003 */ class Channel[A] { class LinkedList[A] { diff --git a/src/library/scala/concurrent/DelayedLazyVal.scala b/src/library/scala/concurrent/DelayedLazyVal.scala index 595d411e2a1e..476fa88d44fa 100644 --- a/src/library/scala/concurrent/DelayedLazyVal.scala +++ b/src/library/scala/concurrent/DelayedLazyVal.scala @@ -21,7 +21,7 @@ package scala.concurrent * @param body the computation to run to completion in another thread * * @author Paul Phillips - * @version 2.8 + * @since 2.8 */ class DelayedLazyVal[T](f: () => T, body: => Unit)(implicit exec: ExecutionContext){ @volatile private[this] var _isDone = false diff --git a/src/library/scala/concurrent/Lock.scala b/src/library/scala/concurrent/Lock.scala index 757fb94cc774..06938c7e4b94 100644 --- a/src/library/scala/concurrent/Lock.scala +++ b/src/library/scala/concurrent/Lock.scala @@ -13,7 +13,6 @@ package scala.concurrent /** This class ... * * @author Martin Odersky - * @version 1.0, 10/03/2003 */ @deprecated("use java.util.concurrent.locks.Lock", "2.11.2") class Lock { diff --git a/src/library/scala/concurrent/SyncChannel.scala b/src/library/scala/concurrent/SyncChannel.scala index 735598935c11..6aae1fbe0702 100644 --- a/src/library/scala/concurrent/SyncChannel.scala +++ b/src/library/scala/concurrent/SyncChannel.scala @@ -13,7 +13,7 @@ package scala.concurrent * data to be written has been read by a corresponding reader thread. * * @author Philipp Haller - * @version 2.0, 04/17/2008 + * @since 2.0 */ class SyncChannel[A] { diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala index 4b42582c0898..e1370471e559 100644 --- a/src/library/scala/concurrent/SyncVar.scala +++ b/src/library/scala/concurrent/SyncVar.scala @@ -15,7 +15,6 @@ import java.util.concurrent.TimeUnit * * @tparam A type of the contained value * @author Martin Odersky - * @version 1.0, 10/03/2003 */ class SyncVar[A] { private var isDefined: Boolean = false diff --git a/src/library/scala/inline.scala b/src/library/scala/inline.scala index f188ccab07c1..98e5f140525a 100644 --- a/src/library/scala/inline.scala +++ b/src/library/scala/inline.scala @@ -37,6 +37,5 @@ package scala * }}} * * @author Lex Spoon - * @version 1.0, 2007-5-21 */ class inline extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala index b4f542a25209..17260b5b1e44 100644 --- a/src/library/scala/io/Source.scala +++ b/src/library/scala/io/Source.scala @@ -17,7 +17,6 @@ import java.net.{ URI, URL } * representation of a source file. 
* * @author Burak Emir, Paul Phillips - * @version 1.0, 19/08/2004 */ object Source { val DefaultBufSize = 2048 diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala index 4bc0c0cf950d..cb6af7553386 100644 --- a/src/library/scala/math/BigDecimal.scala +++ b/src/library/scala/math/BigDecimal.scala @@ -18,7 +18,6 @@ import scala.collection.immutable.NumericRange /** * @author Stephane Micheloud * @author Rex Kerr - * @version 1.1 * @since 2.7 */ object BigDecimal { @@ -394,7 +393,6 @@ object BigDecimal { * * @author Stephane Micheloud * @author Rex Kerr - * @version 1.1 */ final class BigDecimal(val bigDecimal: BigDec, val mc: MathContext) extends ScalaNumber with ScalaNumericConversions with Serializable with Ordered[BigDecimal] { diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index 707a5c076967..9bf0dc331821 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -14,7 +14,6 @@ import scala.language.implicitConversions /** * @author Martin Odersky - * @version 1.0, 15/07/2003 * @since 2.1 */ object BigInt { @@ -107,7 +106,6 @@ object BigInt { /** * @author Martin Odersky - * @version 1.0, 15/07/2003 */ final class BigInt(val bigInteger: BigInteger) extends ScalaNumber diff --git a/src/library/scala/math/Equiv.scala b/src/library/scala/math/Equiv.scala index 45b2b3629de1..49b60653fb3c 100644 --- a/src/library/scala/math/Equiv.scala +++ b/src/library/scala/math/Equiv.scala @@ -26,7 +26,6 @@ import java.util.Comparator * `equiv(x, z) == true` for any `x`, `y`, and `z` of type `T`. * * @author Geoffrey Washburn, Paul Phillips - * @version 1.0, 2008-04-03 * @since 2.7 */ diff --git a/src/library/scala/math/Ordered.scala b/src/library/scala/math/Ordered.scala index 51f2765a63c2..1f3d10e083bd 100644 --- a/src/library/scala/math/Ordered.scala +++ b/src/library/scala/math/Ordered.scala @@ -52,7 +52,6 @@ import scala.language.implicitConversions * * @see [[scala.math.Ordering]], [[scala.math.PartiallyOrdered]] * @author Martin Odersky - * @version 1.1, 2006-07-24 */ trait Ordered[A] extends Any with java.lang.Comparable[A] { diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala index 4a1c01881cf2..a0a2ea77adc0 100644 --- a/src/library/scala/math/Ordering.scala +++ b/src/library/scala/math/Ordering.scala @@ -61,7 +61,6 @@ import scala.language.{implicitConversions, higherKinds} * implicit orderings. * * @author Geoffrey Washburn - * @version 0.9.5, 2008-04-15 * @since 2.7 * @see [[scala.math.Ordered]], [[scala.util.Sorting]] */ diff --git a/src/library/scala/math/PartialOrdering.scala b/src/library/scala/math/PartialOrdering.scala index d8ab265f7c7b..5c9f0877bf6e 100644 --- a/src/library/scala/math/PartialOrdering.scala +++ b/src/library/scala/math/PartialOrdering.scala @@ -34,7 +34,6 @@ package math * [[scala.math.Equiv Equiv]] trait. * * @author Geoffrey Washburn - * @version 1.0, 2008-04-0-3 * @since 2.7 */ diff --git a/src/library/scala/math/PartiallyOrdered.scala b/src/library/scala/math/PartiallyOrdered.scala index f58210d6a7f7..6f09a1d5a737 100644 --- a/src/library/scala/math/PartiallyOrdered.scala +++ b/src/library/scala/math/PartiallyOrdered.scala @@ -14,7 +14,6 @@ package math /** A class for partially ordered data. 
* * @author Martin Odersky - * @version 1.0, 23/04/2004 */ trait PartiallyOrdered[+A] { diff --git a/src/library/scala/noinline.scala b/src/library/scala/noinline.scala index 6c21ed667d35..b4b0b2727bf2 100644 --- a/src/library/scala/noinline.scala +++ b/src/library/scala/noinline.scala @@ -37,7 +37,6 @@ package scala * }}} * * @author Lex Spoon - * @version 1.0, 2007-5-21 * @since 2.5 */ class noinline extends scala.annotation.StaticAnnotation diff --git a/src/library/scala/runtime/ScalaNumberProxy.scala b/src/library/scala/runtime/ScalaNumberProxy.scala index f54ef8629f11..16ad26582323 100644 --- a/src/library/scala/runtime/ScalaNumberProxy.scala +++ b/src/library/scala/runtime/ScalaNumberProxy.scala @@ -18,7 +18,6 @@ import Proxy.Typed * As with all classes in scala.runtime.*, this is not a supported API. * * @author Paul Phillips - * @version 2.9 * @since 2.9 */ trait ScalaNumberProxy[T] extends Any with ScalaNumericAnyConversions with Typed[T] with OrderedProxy[T] { diff --git a/src/library/scala/sys/Prop.scala b/src/library/scala/sys/Prop.scala index 52a3d89ecba3..bad3f32713ee 100644 --- a/src/library/scala/sys/Prop.scala +++ b/src/library/scala/sys/Prop.scala @@ -16,7 +16,6 @@ package sys * See `scala.sys.SystemProperties` for an example usage. * * @author Paul Phillips - * @version 2.9 * @since 2.9 */ trait Prop[+T] { diff --git a/src/library/scala/sys/ShutdownHookThread.scala b/src/library/scala/sys/ShutdownHookThread.scala index 6018ac852b12..9de15387f049 100644 --- a/src/library/scala/sys/ShutdownHookThread.scala +++ b/src/library/scala/sys/ShutdownHookThread.scala @@ -13,7 +13,6 @@ package sys * how to unregister itself. * * @author Paul Phillips - * @version 2.9 * @since 2.9 */ class ShutdownHookThread private (name: String) extends Thread(name) { diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala index e5606f3c3b74..8142d01fb812 100644 --- a/src/library/scala/sys/SystemProperties.scala +++ b/src/library/scala/sys/SystemProperties.scala @@ -25,7 +25,6 @@ import scala.language.implicitConversions * @define coll mutable map * * @author Paul Phillips - * @version 2.9 * @since 2.9 */ class SystemProperties diff --git a/src/library/scala/sys/package.scala b/src/library/scala/sys/package.scala index e493603bc24e..1d0687b887e6 100644 --- a/src/library/scala/sys/package.scala +++ b/src/library/scala/sys/package.scala @@ -16,7 +16,6 @@ import scala.collection.JavaConverters._ * world outside of it. * * @author Paul Phillips - * @version 2.9 * @since 2.9 */ package object sys { diff --git a/src/library/scala/throws.scala b/src/library/scala/throws.scala index 5a5dd9a1f56b..5de4b8edd30a 100644 --- a/src/library/scala/throws.scala +++ b/src/library/scala/throws.scala @@ -20,7 +20,6 @@ package scala * }}} * * @author Nikolay Mihaylov - * @version 1.0, 19/05/2006 * @since 2.1 */ class throws[T <: Throwable](cause: String = "") extends scala.annotation.StaticAnnotation { diff --git a/src/library/scala/util/DynamicVariable.scala b/src/library/scala/util/DynamicVariable.scala index 963fe1c49773..4b2d1a542a14 100644 --- a/src/library/scala/util/DynamicVariable.scala +++ b/src/library/scala/util/DynamicVariable.scala @@ -35,7 +35,7 @@ import java.lang.InheritableThreadLocal * are independent of those for the original thread. 
* * @author Lex Spoon - * @version 1.1, 2007-5-21 + * @since 2.6 */ class DynamicVariable[T](init: T) { private val tl = new InheritableThreadLocal[T] { diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala index 5833cbf6828d..09d1de71cf1e 100644 --- a/src/library/scala/util/Either.scala +++ b/src/library/scala/util/Either.scala @@ -114,7 +114,6 @@ package util * }}} * * @author Tony Morris, Workingmouse - * @version 2.0, 2016-07-15 * @since 2.7 */ sealed abstract class Either[+A, +B] extends Product with Serializable { @@ -420,7 +419,6 @@ sealed abstract class Either[+A, +B] extends Product with Serializable { /** The left side of the disjoint union, as opposed to the [[scala.util.Right]] side. * * @author Tony Morris, Workingmouse - * @version 1.0, 11/10/2008 */ final case class Left[+A, +B](@deprecatedName('a, "2.12.0") value: A) extends Either[A, B] { def isLeft = true @@ -432,7 +430,6 @@ final case class Left[+A, +B](@deprecatedName('a, "2.12.0") value: A) extends Ei /** The right side of the disjoint union, as opposed to the [[scala.util.Left]] side. * * @author Tony Morris, Workingmouse - * @version 1.0, 11/10/2008 */ final case class Right[+A, +B](@deprecatedName('b, "2.12.0") value: B) extends Either[A, B] { def isLeft = false @@ -477,7 +474,6 @@ object Either { /** Projects an `Either` into a `Left`. * * @author Tony Morris, Workingmouse - * @version 1.0, 11/10/2008 * @see [[scala.util.Either#left]] */ final case class LeftProjection[+A, +B](e: Either[A, B]) { @@ -622,7 +618,6 @@ object Either { * 2.11 and 2.12.) * * @author Tony Morris, Workingmouse - * @version 1.0, 11/10/2008 */ final case class RightProjection[+A, +B](e: Either[A, B]) { diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala index b8df29ef7679..6cf445b9ac2b 100644 --- a/src/library/scala/util/MurmurHash.scala +++ b/src/library/scala/util/MurmurHash.scala @@ -16,7 +16,6 @@ package util * tuples). * * @author Rex Kerr - * @version 2.9 * @since 2.9 */ diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala index 3bda7c0d3919..7005a892fb00 100644 --- a/src/library/scala/util/Sorting.scala +++ b/src/library/scala/util/Sorting.scala @@ -34,7 +34,6 @@ import scala.math.Ordering * @author Ross Judson * @author Adriaan Moors * @author Rex Kerr - * @version 1.1 */ object Sorting { /** Sort an array of Doubles using `java.util.Arrays.sort`. 
*/ diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index 8d357a478a1c..8423d3a11968 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -182,7 +182,6 @@ import java.util.regex.{ Pattern, Matcher } * @author Thibaud Hottelier * @author Philipp Haller * @author Martin Odersky - * @version 1.1, 29/01/2008 * * @param pattern The compiled pattern * @param groupNames A mapping from names to indices in capture groups diff --git a/src/manual/scala/man1/Command.scala b/src/manual/scala/man1/Command.scala index c71440bfb59e..4f061d334691 100644 --- a/src/manual/scala/man1/Command.scala +++ b/src/manual/scala/man1/Command.scala @@ -7,7 +7,6 @@ package scala.man1 /** * @author Stephane Micheloud - * @version 1.0 */ trait Command { import _root_.scala.tools.docutil.ManPage._ diff --git a/src/manual/scala/man1/fsc.scala b/src/manual/scala/man1/fsc.scala index c7399da635a1..bb16a53a87d7 100644 --- a/src/manual/scala/man1/fsc.scala +++ b/src/manual/scala/man1/fsc.scala @@ -7,7 +7,6 @@ package scala.man1 /** * @author Lex Spoon - * @version 1.0 */ object fsc extends Command { import _root_.scala.tools.docutil.ManPage._ diff --git a/src/manual/scala/man1/scala.scala b/src/manual/scala/man1/scala.scala index 3cfa9f8cb160..f7a0e7f61da2 100644 --- a/src/manual/scala/man1/scala.scala +++ b/src/manual/scala/man1/scala.scala @@ -7,7 +7,6 @@ package scala.man1 /** * @author Stephane Micheloud - * @version 1.0 */ object scala extends Command { import _root_.scala.tools.docutil.ManPage._ diff --git a/src/manual/scala/man1/scaladoc.scala b/src/manual/scala/man1/scaladoc.scala index 1737c5efa06b..922b3d242aba 100644 --- a/src/manual/scala/man1/scaladoc.scala +++ b/src/manual/scala/man1/scaladoc.scala @@ -7,7 +7,6 @@ package scala.man1 /** * @author Gilles Dubochet - * @version 1.0 */ object scaladoc extends Command { import _root_.scala.tools.docutil.ManPage._ diff --git a/src/manual/scala/man1/scalap.scala b/src/manual/scala/man1/scalap.scala index b58fe6a81ffc..34d60dbc8f0b 100644 --- a/src/manual/scala/man1/scalap.scala +++ b/src/manual/scala/man1/scalap.scala @@ -7,7 +7,6 @@ package scala.man1 /** * @author Stephane Micheloud - * @version 1.0 */ object scalap extends Command { import _root_.scala.tools.docutil.ManPage._ From 487472bb69adf947d8b212f5fa8c471942d15849 Mon Sep 17 00:00:00 2001 From: Dan Skells Date: Sat, 17 Mar 2018 20:50:40 +0000 Subject: [PATCH 1352/2793] remove duplicated code --- src/reflect/scala/reflect/internal/Symbols.scala | 13 +++---------- 1 file changed, 3 insertions(+), 10 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index c892db898724..4a9d571e7ee9 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -737,19 +737,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => flags & mask } /** Does symbol have ANY flag in `mask` set? */ - final def hasFlag(mask: Long): Boolean = { - // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method. - if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize - (flags & mask) != 0 - } + final def hasFlag(mask: Long): Boolean = getFlag(mask) != 0 + def hasFlag(mask: Int): Boolean = hasFlag(mask.toLong) /** Does symbol have ALL the flags in `mask` set? 
*/ - final def hasAllFlags(mask: Long): Boolean = { - // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method. - if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize - (flags & mask) == mask - } + final def hasAllFlags(mask: Long): Boolean = getFlag(mask) == mask def setFlag(mask: Long): this.type = { _rawflags |= mask ; this } def resetFlag(mask: Long): this.type = { _rawflags &= ~mask ; this } From e4811535e29d2512f56d24666e8fe2d22b5a249d Mon Sep 17 00:00:00 2001 From: Dan Skells Date: Mon, 26 Mar 2018 21:17:27 +0100 Subject: [PATCH 1353/2793] avoid unneeded operations on some Flag access, e.g. outer reference for phase and some bit twiddling adjusted mima filters for additional methods --- src/reflect/mima-filters/2.12.0.backwards.excludes | 1 + src/reflect/mima-filters/2.12.0.forwards.excludes | 1 + src/reflect/scala/reflect/internal/Flags.scala | 8 +++++++- src/reflect/scala/reflect/internal/Symbols.scala | 7 +++---- .../scala/reflect/runtime/SynchronizedSymbols.scala | 5 +++++ 5 files changed, 17 insertions(+), 5 deletions(-) diff --git a/src/reflect/mima-filters/2.12.0.backwards.excludes b/src/reflect/mima-filters/2.12.0.backwards.excludes index c476274834f4..c8b8112caa95 100644 --- a/src/reflect/mima-filters/2.12.0.backwards.excludes +++ b/src/reflect/mima-filters/2.12.0.backwards.excludes @@ -6,6 +6,7 @@ ProblemFilters.exclude[IncompatibleMethTypeProblem]("scala.reflect.runtime.Symbo ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedOps#SynchronizedBaseTypeSeq.lateMap") ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.scala$reflect$runtime$SynchronizedSymbols$SynchronizedSymbol$$super$exists") +ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.scala$reflect$runtime$SynchronizedSymbols$SynchronizedSymbol$$super$getFlag") ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.IOStats") ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.IOStats$") diff --git a/src/reflect/mima-filters/2.12.0.forwards.excludes b/src/reflect/mima-filters/2.12.0.forwards.excludes index eaf76f7a435f..18ee15084253 100644 --- a/src/reflect/mima-filters/2.12.0.forwards.excludes +++ b/src/reflect/mima-filters/2.12.0.forwards.excludes @@ -15,6 +15,7 @@ ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive. 
ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.FileZipArchive$LeakyEntry") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.exists") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.getFlag") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.Settings.isScala213") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.FileZipArchive.this") diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index 1ccd499f220c..9a0849cbde06 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -201,12 +201,18 @@ class Flags extends ModifierFlags { final val LateShift = 47 final val AntiShift = 56 + /** all of the flags that are unaffected by phase */ + // (-1L & ~LateFlags & ~AntiFlags & ~(LateFlags >>> LateShift) & ~(AntiFlags >>> AntiShift)) + // will revert to a formula before commit, but currently constant folder does not fold this to a constant + // but we need this to be a constant now for benchmarking + final val PhaseIndependentFlags = 0xF807FFFFFFFFFE08L + // Flags which sketchily share the same slot // 16: BYNAMEPARAM/M CAPTURED COVARIANT/M // 17: CONTRAVARIANT/M INCONSTRUCTOR LABEL // 25: DEFAULTPARAM/M TRAIT/M // 35: EXISTENTIAL MIXEDIN - val OverloadedFlagsMask = 0L | BYNAMEPARAM | CONTRAVARIANT | DEFAULTPARAM | EXISTENTIAL + final val OverloadedFlagsMask = 0L | BYNAMEPARAM | CONTRAVARIANT | DEFAULTPARAM | EXISTENTIAL // ------- late flags (set by a transformer phase) --------------------------------- // diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 4a9d571e7ee9..82e339399bd5 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -732,9 +732,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => * we'd like to expose to reflection users. Therefore a proposed solution is to check whether we're in a * runtime reflection universe, and if yes and if we've not yet loaded the requested info, then to commence initialization. */ - final def getFlag(mask: Long): Long = { - if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize - flags & mask + def getFlag(mask: Long): Long = { + mask & (if ((mask & PhaseIndependentFlags) == mask) rawflags else flags) } /** Does symbol have ANY flag in `mask` set? */ final def hasFlag(mask: Long): Boolean = getFlag(mask) != 0 @@ -746,7 +745,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def setFlag(mask: Long): this.type = { _rawflags |= mask ; this } def resetFlag(mask: Long): this.type = { _rawflags &= ~mask ; this } - def resetFlags() { rawflags = 0 } + def resetFlags() { rawflags = 0L } /** Default implementation calls the generic string function, which * will print overloaded flags as . 
Subclasses diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala index da34ff20048b..2bbb4cc959a1 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala @@ -125,6 +125,11 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb gilSynchronized { body } } + override final def getFlag(mask: Long): Long = { + if (!isCompilerUniverse && !isThreadsafe(purpose = FlagOps(mask))) initialize + super.getFlag(mask) + } + override def validTo = gilSynchronizedIfNotThreadsafe { super.validTo } override def info = gilSynchronizedIfNotThreadsafe { super.info } override def rawInfo: Type = gilSynchronizedIfNotThreadsafe { super.rawInfo } From 4e246a50f13169d363987084ccd212a6c2e2427b Mon Sep 17 00:00:00 2001 From: Dan Skells Date: Mon, 26 Mar 2018 21:28:32 +0100 Subject: [PATCH 1354/2793] use inheritance to avoid initialisation checks for privateWithin avoid repeated calls to privateWithin from the same method adjust mima filters --- .../scala/tools/nsc/typechecker/SuperAccessors.scala | 5 +++-- src/reflect/mima-filters/2.12.0.backwards.excludes | 1 + src/reflect/mima-filters/2.12.0.forwards.excludes | 1 + src/reflect/scala/reflect/internal/Symbols.scala | 12 +++++++----- .../scala/reflect/runtime/SynchronizedSymbols.scala | 6 ++++++ 5 files changed, 18 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index edd95007c604..6ffd8820192d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -225,11 +225,12 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT checkCompanionNameClashes(sym) val decls = sym.info.decls for (s <- decls) { - if (s.privateWithin.isClass && !s.isProtected && !s.privateWithin.isModuleClass && + val privateWithin = s.privateWithin + if (privateWithin.isClass && !s.isProtected && !privateWithin.isModuleClass && !s.hasFlag(EXPANDEDNAME) && !s.isConstructor) { val savedName = s.name decls.unlink(s) - s.expandName(s.privateWithin) + s.expandName(privateWithin) decls.enter(s) log("Expanded '%s' to '%s' in %s".format(savedName, s.name, sym)) } diff --git a/src/reflect/mima-filters/2.12.0.backwards.excludes b/src/reflect/mima-filters/2.12.0.backwards.excludes index c8b8112caa95..45f5696a8087 100644 --- a/src/reflect/mima-filters/2.12.0.backwards.excludes +++ b/src/reflect/mima-filters/2.12.0.backwards.excludes @@ -7,6 +7,7 @@ ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.Synchr ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.scala$reflect$runtime$SynchronizedSymbols$SynchronizedSymbol$$super$exists") ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.scala$reflect$runtime$SynchronizedSymbols$SynchronizedSymbol$$super$getFlag") +ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.scala$reflect$runtime$SynchronizedSymbols$SynchronizedSymbol$$super$privateWithin") ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.IOStats") ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.IOStats$") diff --git 
a/src/reflect/mima-filters/2.12.0.forwards.excludes b/src/reflect/mima-filters/2.12.0.forwards.excludes index 18ee15084253..8e5c6d7b62e9 100644 --- a/src/reflect/mima-filters/2.12.0.forwards.excludes +++ b/src/reflect/mima-filters/2.12.0.forwards.excludes @@ -16,6 +16,7 @@ ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.FileZipArchive$Lea ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.exists") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.getFlag") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.privateWithin") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.Settings.isScala213") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.FileZipArchive.this") diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 82e339399bd5..da7adf9d2163 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1421,15 +1421,16 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ private[this] var _privateWithin: Symbol = _ def privateWithin = { - // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method. - if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize _privateWithin } def privateWithin_=(sym: Symbol) { _privateWithin = sym } def setPrivateWithin(sym: Symbol): this.type = { privateWithin_=(sym) ; this } /** Does symbol have a private or protected qualifier set? */ - final def hasAccessBoundary = (privateWithin != null) && (privateWithin != NoSymbol) + final def hasAccessBoundary = { + val pw = privateWithin + (pw ne null) && (pw ne NoSymbol) + } // ------ info and type ------------------------------------------------------------------- @@ -2476,8 +2477,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ final def caseModule: Symbol = { var modname = name.toTermName - if (privateWithin.isClass && !privateWithin.isModuleClass && !hasFlag(EXPANDEDNAME)) - modname = nme.expandedName(modname, privateWithin) + val pw = privateWithin + if (pw.isClass && !pw.isModuleClass && !hasFlag(EXPANDEDNAME)) + modname = nme.expandedName(modname, pw) initialize.owner.info.decl(modname).suchThat(_.isModule) } diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala index 2bbb4cc959a1..a8416758ee5c 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala @@ -92,6 +92,12 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb else purpose.isFlagRelated && (_initializationMask & purpose.mask & TopLevelPickledFlags) == 0 } + override final def privateWithin: Symbol = { + // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method. + if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize + super.privateWithin + } + /** Communicates with completers declared in scala.reflect.runtime.SymbolLoaders * about the status of initialization of the underlying symbol. 
* From 0d3e383bbb844ca5c8431095e4504a444480ef76 Mon Sep 17 00:00:00 2001 From: Dan Skells Date: Mon, 2 Apr 2018 15:46:19 +0100 Subject: [PATCH 1355/2793] use inheritance to avoid initialisation checks avoid varags in helper methods to reduce memory pressure make annotation helper functions final where appropriate adjust mima filters --- .../mima-filters/2.12.0.backwards.excludes | 1 + .../mima-filters/2.12.0.forwards.excludes | 1 + .../scala/reflect/internal/Symbols.scala | 33 +++++++++++++++---- .../reflect/runtime/SynchronizedSymbols.scala | 6 ++++ 4 files changed, 34 insertions(+), 7 deletions(-) diff --git a/src/reflect/mima-filters/2.12.0.backwards.excludes b/src/reflect/mima-filters/2.12.0.backwards.excludes index 45f5696a8087..6064fc88b800 100644 --- a/src/reflect/mima-filters/2.12.0.backwards.excludes +++ b/src/reflect/mima-filters/2.12.0.backwards.excludes @@ -8,6 +8,7 @@ ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.Synchr ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.scala$reflect$runtime$SynchronizedSymbols$SynchronizedSymbol$$super$exists") ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.scala$reflect$runtime$SynchronizedSymbols$SynchronizedSymbol$$super$getFlag") ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.scala$reflect$runtime$SynchronizedSymbols$SynchronizedSymbol$$super$privateWithin") +ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.scala$reflect$runtime$SynchronizedSymbols$SynchronizedSymbol$$super$annotations") ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.IOStats") ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.IOStats$") diff --git a/src/reflect/mima-filters/2.12.0.forwards.excludes b/src/reflect/mima-filters/2.12.0.forwards.excludes index 8e5c6d7b62e9..fcac3f3749b7 100644 --- a/src/reflect/mima-filters/2.12.0.forwards.excludes +++ b/src/reflect/mima-filters/2.12.0.forwards.excludes @@ -17,6 +17,7 @@ ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.FileZipArchive$Lea ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.exists") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.getFlag") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.privateWithin") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol.annotations") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.Settings.isScala213") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.FileZipArchive.this") diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index da7adf9d2163..9111bc650f80 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1842,8 +1842,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => * the annotations attached to member a definition (class, method, type, field). */ def annotations: List[AnnotationInfo] = { - // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method. 
- if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize _annotations } @@ -1864,21 +1862,42 @@ trait Symbols extends api.Symbols { self: SymbolTable => def addAnnotation(annot: AnnotationInfo): this.type = setAnnotations(annot :: annotations) - // Convenience for the overwhelmingly common case - def addAnnotation(sym: Symbol, args: Tree*): this.type = { + // Convenience for the overwhelmingly common cases, and avoid varags and listbuilders + final def addAnnotation(sym: Symbol): this.type = { + addAnnotation(sym, Nil) + } + final def addAnnotation(sym: Symbol, arg: Tree): this.type = { + addAnnotation(sym, arg :: Nil) + } + final def addAnnotation(sym: Symbol, arg1: Tree, arg2: Tree): this.type = { + addAnnotation(sym, arg1 :: arg2 :: Nil) + } + final def addAnnotation(sym: Symbol, args: Tree*): this.type = { + addAnnotation(sym, args.toList) + } + final def addAnnotation(sym: Symbol, args: List[Tree]): this.type = { // The assertion below is meant to prevent from issues like scala/bug#7009 but it's disabled // due to problems with cycles while compiling Scala library. It's rather shocking that // just checking if sym is monomorphic type introduces nasty cycles. We are definitively // forcing too much because monomorphism is a local property of a type that can be checked // syntactically // assert(sym.initialize.isMonomorphicType, sym) - addAnnotation(AnnotationInfo(sym.tpe, args.toList, Nil)) + addAnnotation(AnnotationInfo(sym.tpe, args, Nil)) } /** Use that variant if you want to pass (for example) an applied type */ - def addAnnotation(tp: Type, args: Tree*): this.type = { + final def addAnnotation(tp: Type): this.type = { + addAnnotation(tp, Nil) + } + final def addAnnotation(tp: Type, arg: Tree): this.type = { + addAnnotation(tp, arg:: Nil) + } + final def addAnnotation(tp: Type, arg1: Tree, arg2: Tree): this.type = { + addAnnotation(tp, arg1 :: arg2 :: Nil) + } + final def addAnnotation(tp: Type, args: List[Tree]): this.type = { assert(tp.typeParams.isEmpty, tp) - addAnnotation(AnnotationInfo(tp, args.toList, Nil)) + addAnnotation(AnnotationInfo(tp, args, Nil)) } // ------ comparisons ---------------------------------------------------------------- diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala index a8416758ee5c..aa9aab93d52d 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala @@ -98,6 +98,12 @@ private[reflect] trait SynchronizedSymbols extends internal.Symbols { self: Symb super.privateWithin } + override def annotations: List[AnnotationInfo] = { + // See `getFlag` to learn more about the `isThreadsafe` call in the body of this method. + if (!isCompilerUniverse && !isThreadsafe(purpose = AllOps)) initialize + super.annotations + } + /** Communicates with completers declared in scala.reflect.runtime.SymbolLoaders * about the status of initialization of the underlying symbol. 
* From da62c70b1c3fd946e61e017a2d364a31921561a7 Mon Sep 17 00:00:00 2001 From: Dan Skells Date: Wed, 4 Apr 2018 01:56:20 +0100 Subject: [PATCH 1356/2793] reduce number of getFlag and related calls, combine call to check multiple flags in a single call where applicable --- src/compiler/scala/tools/nsc/transform/Fields.scala | 9 ++++----- .../scala/tools/nsc/typechecker/SuperAccessors.scala | 4 ++-- .../scala/reflect/internal/ReificationSupport.scala | 6 +++--- src/reflect/scala/reflect/internal/Symbols.scala | 6 +++--- 4 files changed, 12 insertions(+), 13 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Fields.scala b/src/compiler/scala/tools/nsc/transform/Fields.scala index c07d6b954db0..029b7b951b4d 100644 --- a/src/compiler/scala/tools/nsc/transform/Fields.scala +++ b/src/compiler/scala/tools/nsc/transform/Fields.scala @@ -125,11 +125,10 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor def checkAndClearOverriddenTraitSetter(setter: Symbol) = checkAndClear(OVERRIDDEN_TRAIT_SETTER)(setter) def checkAndClearNeedsTrees(setter: Symbol) = checkAndClear(NEEDS_TREES)(setter) def checkAndClear(flag: Long)(sym: Symbol) = - sym.hasFlag(flag) match { - case overridden => - sym resetFlag flag - overridden - } + if (sym.hasFlag(flag)) { + sym resetFlag flag + true + } else false private def isOverriddenAccessor(member: Symbol, site: Symbol): Boolean = { diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index 6ffd8820192d..6ba13fd56b70 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -226,8 +226,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT val decls = sym.info.decls for (s <- decls) { val privateWithin = s.privateWithin - if (privateWithin.isClass && !s.isProtected && !privateWithin.isModuleClass && - !s.hasFlag(EXPANDEDNAME) && !s.isConstructor) { + if (privateWithin.isClass && !s.hasFlag(EXPANDEDNAME | PROTECTED) && !privateWithin.isModuleClass && + !s.isConstructor) { val savedName = s.name decls.unlink(s) s.expandName(privateWithin) diff --git a/src/reflect/scala/reflect/internal/ReificationSupport.scala b/src/reflect/scala/reflect/internal/ReificationSupport.scala index e8c117c80852..28b01eb59906 100644 --- a/src/reflect/scala/reflect/internal/ReificationSupport.scala +++ b/src/reflect/scala/reflect/internal/ReificationSupport.scala @@ -666,7 +666,7 @@ trait ReificationSupport { self: SymbolTable => def transformStats(trees: List[Tree]): List[Tree] = trees match { case Nil => Nil case ValDef(mods, _, SyntacticEmptyTypeTree(), Match(MaybeTyped(MaybeUnchecked(value), tpt), CaseDef(pat, EmptyTree, SyntacticTuple(ids)) :: Nil)) :: tail - if mods.hasFlag(SYNTHETIC) && mods.hasFlag(ARTIFACT) => + if mods.hasAllFlags(SYNTHETIC | ARTIFACT) => ids match { case Nil => ValDef(NoMods, nme.QUASIQUOTE_PAT_DEF, Typed(pat, tpt), transform(value)) :: transformStats(tail) @@ -704,7 +704,7 @@ trait ReificationSupport { self: SymbolTable => protected object UnSyntheticParam { def unapply(tree: Tree): Option[TermName] = tree match { case ValDef(mods, name, _, EmptyTree) - if mods.hasFlag(SYNTHETIC) && mods.hasFlag(PARAM) => + if mods.hasAllFlags(SYNTHETIC | PARAM) => Some(name) case _ => None } @@ -899,7 +899,7 @@ trait ReificationSupport { self: SymbolTable => if pf.tpe != null && pf.tpe.typeSymbol.eq(PartialFunctionClass) && abspf.tpe != null 
&& abspf.tpe.typeSymbol.eq(AbstractPartialFunctionClass) && ser.tpe != null && ser.tpe.typeSymbol.eq(SerializableClass) && - clsMods.hasFlag(FINAL) && clsMods.hasFlag(SYNTHETIC) => + clsMods.hasAllFlags(FINAL | SYNTHETIC) => Some(cases) case _ => None } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 9111bc650f80..c5cee9c72398 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -104,7 +104,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // `isByNameParam` is only true for a call-by-name parameter of a *method*, // an argument of the primary constructor seen in the class body is excluded by `isValueParameter` def isByNameParam: Boolean = this.isValueParameter && (this hasFlag BYNAMEPARAM) - def isImplementationArtifact: Boolean = (this hasFlag BRIDGE) || (this hasFlag VBRIDGE) || (this hasFlag ARTIFACT) + def isImplementationArtifact: Boolean = this hasFlag (BRIDGE | VBRIDGE | ARTIFACT) def isJava: Boolean = isJavaDefined def isField: Boolean = isTerm && !isModule && (!isMethod || owner.isTrait && isAccessor) @@ -113,8 +113,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => def isVar: Boolean = isField && !isLazy && isMutableVal def isAbstract: Boolean = isAbstractClass || isDeferred || isAbstractType - def isPrivateThis = (this hasFlag PRIVATE) && (this hasFlag LOCAL) - def isProtectedThis = (this hasFlag PROTECTED) && (this hasFlag LOCAL) + def isPrivateThis = this hasAllFlags (PRIVATE | LOCAL) + def isProtectedThis = this hasAllFlags (PROTECTED | LOCAL) def isJavaEnum: Boolean = hasJavaEnumFlag def isJavaAnnotation: Boolean = hasJavaAnnotationFlag From 688e7cbc92bce5f3649a4e69d1ed7ca443fd679d Mon Sep 17 00:00:00 2001 From: Dan Skells Date: Wed, 16 May 2018 22:00:21 +0100 Subject: [PATCH 1357/2793] add an assertion for PhaseIndependentFlags --- src/reflect/scala/reflect/internal/Flags.scala | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index 9a0849cbde06..77b733098d77 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -202,10 +202,12 @@ class Flags extends ModifierFlags { final val AntiShift = 56 /** all of the flags that are unaffected by phase */ - // (-1L & ~LateFlags & ~AntiFlags & ~(LateFlags >>> LateShift) & ~(AntiFlags >>> AntiShift)) - // will revert to a formula before commit, but currently constant folder does not fold this to a constant - // but we need this to be a constant now for benchmarking final val PhaseIndependentFlags = 0xF807FFFFFFFFFE08L + //this should be + // final val PhaseIndependentFlags = (-1L & ~LateFlags & ~AntiFlags & ~(LateFlags >>> LateShift) & ~(AntiFlags >>> AntiShift))) + // but the constant folder doesnt optimise this! Good news is that is expected to be fixed soon :-) + assert (PhaseIndependentFlags == (-1L & ~LateFlags & ~AntiFlags & ~(LateFlags >>> LateShift) & ~(AntiFlags >>> AntiShift))) + // Flags which sketchily share the same slot // 16: BYNAMEPARAM/M CAPTURED COVARIANT/M From 3d27db33c184a67e20c4881252e6ac32d0b3621d Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Wed, 16 May 2018 07:11:18 -0400 Subject: [PATCH 1358/2793] Setter rewrite uses symbol's name .. not the name that was given, because it may have been introduced by a renaming import. Fixes scala/bug#10886. 
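For illustration, a minimal sketch of the renaming-import scenario (the object and names below are illustrative, not taken from the ticket): the assignment must be rewritten against the setter of the underlying symbol, not against the local alias introduced by the import.

```
object A {
  var x: Int = 0
}

object Client {
  import A.{x => y}     // renaming import: `y` aliases the var `A.x`

  def demo(): Unit = {
    // The typer must rewrite this assignment to the setter of the
    // symbol, A.x_=(1); using the written name would look for a
    // nonexistent setter `y_=` instead.
    y = 1
  }
}
```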
--- .../scala/tools/nsc/typechecker/Typers.scala | 6 +++--- test/files/neg/t10886.check | 15 +++++++++++++++ test/files/neg/t10886.scala | 13 +++++++++++++ test/files/pos/t10886.scala | 14 ++++++++++++++ 4 files changed, 45 insertions(+), 3 deletions(-) create mode 100644 test/files/neg/t10886.check create mode 100644 test/files/neg/t10886.scala create mode 100644 test/files/pos/t10886.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index c8404236b57a..3f90ee9afcfd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4393,8 +4393,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (treeInfo.mayBeVarGetter(varsym)) { lhs1 match { - case treeInfo.Applied(Select(qual, name), _, _) => - val sel = Select(qual, name.setterName) setPos lhs.pos + case treeInfo.Applied(Select(qual, _), _, _) => + val sel = Select(qual, varsym.name.setterName) setPos lhs.pos val app = Apply(sel, List(rhs)) setPos tree.pos return typed(app, mode, pt) @@ -4845,7 +4845,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case Select(qualqual, vname) => gen.evalOnce(qualqual, context.owner, context.unit) { qq => val qq1 = qq() - mkAssign(Select(qq1, vname) setPos qual.pos) + mkAssign(Select(qq1, qual.symbol) setPos qual.pos) } case Apply(fn, extra) if qual.isInstanceOf[ApplyToImplicitArgs] => diff --git a/test/files/neg/t10886.check b/test/files/neg/t10886.check new file mode 100644 index 000000000000..824f80b8713d --- /dev/null +++ b/test/files/neg/t10886.check @@ -0,0 +1,15 @@ +t10886.scala:9: error: reassignment to val + y = 1 + ^ +t10886.scala:10: error: value ~~_= is not a member of object Test.A + !! = 2 + ^ +t10886.scala:11: error: value += is not a member of Int + Expression does not convert to assignment because receiver is not assignable. + y += 3 + ^ +t10886.scala:12: error: value -= is not a member of Int + Expression does not convert to assignment because receiver is not assignable. + !! -= 4 + ^ +four errors found diff --git a/test/files/neg/t10886.scala b/test/files/neg/t10886.scala new file mode 100644 index 000000000000..fc660adf6af6 --- /dev/null +++ b/test/files/neg/t10886.scala @@ -0,0 +1,13 @@ +object Test { + object A { + val x: Int = 0 + def ~~ : Int = 0 + } + + import A.{x => y, ~~ => !!} + + y = 1 + !! = 2 + y += 3 + !! -= 4 +} diff --git a/test/files/pos/t10886.scala b/test/files/pos/t10886.scala new file mode 100644 index 000000000000..554714e0a3d8 --- /dev/null +++ b/test/files/pos/t10886.scala @@ -0,0 +1,14 @@ +object Test { + object A { + var x: Int = 0 + var ~~ : Int = 0 + } + + import A.{x => y, ~~ => !!} + + y = 1 + !! = 2 + y += 3 + !! -= 4 +} + From 390552d379a6bc34cf770cc164ba46bf17c08f67 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Mon, 21 May 2018 23:19:41 -0700 Subject: [PATCH 1359/2793] Avoid extra hasNext in trailing Once trailing was advanced, hasNext was needlessly delegated to the underlying iterator on every invocation. 
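As a rough usage sketch (not the library internals), the change affects the trailing iterator returned by `span`: once it has established whether an element is pending, repeated `hasNext` calls are answered from a remembered status rather than re-querying the underlying iterator.

```
val (leading, trailing) = Iterator(1, 2, 3, 4, 5).span(_ < 3)

leading.toList      // List(1, 2): drains the leading half
trailing.hasNext    // true; may consult the underlying iterator once
trailing.hasNext    // true; answered from the cached status
trailing.next()     // 3
```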
--- src/library/scala/collection/Iterator.scala | 39 +++++++++---------- .../junit/scala/collection/IteratorTest.scala | 24 ++++++++++++ 2 files changed, 42 insertions(+), 21 deletions(-) diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 3e865e851273..f3293301fd90 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -290,8 +290,6 @@ object Iterator { } } -import Iterator.empty - /** Iterators are data structures that allow to iterate over a sequence * of elements. They have a `hasNext` method for checking * if there is a next element available, and a `next` method @@ -357,6 +355,8 @@ import Iterator.empty trait Iterator[+A] extends TraversableOnce[A] { self => + import Iterator.empty + def seq: Iterator[A] = this /** Tests whether this iterator can provide another element. @@ -760,34 +760,31 @@ trait Iterator[+A] extends TraversableOnce[A] { * -1 not yet accessed * 0 single element waiting in leading * 1 defer to self + * 2 self.hasNext already + * 3 exhausted */ private[this] var status = -1 - def hasNext = { - if (status > 0) self.hasNext - else { - if (status == 0) true - else if (myLeading.finish()) { - status = 0 - true - } - else { - status = 1 - myLeading = null - self.hasNext - } - } + def hasNext = status match { + case 3 => false + case 2 => true + case 1 => if (self.hasNext) { status = 2 ; true } else { status = 3 ; false } + case 0 => true + case _ => + if (myLeading.finish()) { status = 0 ; true } else { status = 1 ; myLeading = null ; hasNext } } def next() = { if (hasNext) { - if (status > 0) self.next() - else { + if (status == 0) { status = 1 - val ans = myLeading.trailer + val res = myLeading.trailer myLeading = null - ans + res + } else { + status = 1 + self.next() } } - else Iterator.empty.next() + else empty.next() } override def toString = "unknown-if-empty iterator" diff --git a/test/junit/scala/collection/IteratorTest.scala b/test/junit/scala/collection/IteratorTest.scala index 37b5092cb590..191db83c3f57 100644 --- a/test/junit/scala/collection/IteratorTest.scala +++ b/test/junit/scala/collection/IteratorTest.scala @@ -325,4 +325,28 @@ class IteratorTest { assertSameElements(List(10,11,13), scan) assertSameElements(List(10,-1,-1,-11,11,-2,-2,-13,13,-3), results) } + @Test def `scan trailing avoids extra hasNext`(): Unit = { + val it = new AbstractIterator[Int] { + var i = 0 + var checkedAt = -1 + def hasNext = + if (checkedAt == i) false + else { + checkedAt = i + true + } + def next() = { + i += 1 + i + } + } + val (lo, hi) = it.span(_ < 3) + assertTrue(lo.hasNext) + assertEquals(1, lo.next()) + assertTrue(hi.hasNext) + assertEquals(3, hi.next()) + assertTrue(hi.hasNext) + assertTrue(hi.hasNext) // no longer delegated + assertTrue(hi.hasNext) + } } From 8d392b3d3b5a5b33755265ce74d73916729cb5e9 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 22 May 2018 10:43:46 -0700 Subject: [PATCH 1360/2793] Avoid side-effects in Iterator.toString Don't probe hasNext in Iterator.toString. 
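A small sketch of the observable difference, assuming an iterator whose `hasNext` has a visible side effect (as buffered I/O sources do): previously, merely printing the iterator invoked `hasNext` to choose between "empty iterator" and "non-empty iterator"; with a constant `toString`, printing no longer touches the iterator.

```
val it = new Iterator[Int] {
  private var n = 0
  def hasNext: Boolean = { println("hasNext probed"); n < 3 }
  def next(): Int = { n += 1; n }
}

// Before this change, this line also printed "hasNext probed",
// because toString itself called hasNext; afterwards it does not.
println(it)
```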
--- src/library/scala/collection/Iterator.scala | 6 +-- test/files/run/t4671.check | 4 +- test/files/run/t8690.check | 2 +- test/files/run/view-iterator-stream.check | 48 ++++++++++----------- 4 files changed, 29 insertions(+), 31 deletions(-) diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index f3293301fd90..ff707273dd96 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -786,8 +786,6 @@ trait Iterator[+A] extends TraversableOnce[A] { } else empty.next() } - - override def toString = "unknown-if-empty iterator" } (leading, trailing) @@ -1418,11 +1416,11 @@ trait Iterator[+A] extends TraversableOnce[A] { /** Converts this iterator to a string. * - * @return `"empty iterator"` or `"non-empty iterator"`, depending on + * @return `""` * whether or not the iterator is empty. * @note Reuse: $preservesIterator */ - override def toString = (if (hasNext) "non-empty" else "empty")+" iterator" + override def toString = "" } /** Explicit instantiation of the `Iterator` trait to reduce class file size in subclasses. */ diff --git a/test/files/run/t4671.check b/test/files/run/t4671.check index b267befee997..b6d050c2cd5f 100644 --- a/test/files/run/t4671.check +++ b/test/files/run/t4671.check @@ -3,7 +3,7 @@ scala> object o { val file = sys.props("partest.cwd") + "/t4671.scala" } defined object o scala> val s = scala.io.Source.fromFile(o.file) -s: scala.io.BufferedSource = non-empty iterator +s: scala.io.BufferedSource = scala> println(s.getLines.mkString("\n")) import scala.tools.partest.ReplTest @@ -23,7 +23,7 @@ println(s.mkString("")) scala> scala> val s = scala.io.Source.fromFile(o.file) -s: scala.io.BufferedSource = non-empty iterator +s: scala.io.BufferedSource = scala> println(s.mkString("")) import scala.tools.partest.ReplTest diff --git a/test/files/run/t8690.check b/test/files/run/t8690.check index 72f076c4d88e..d37e36a53853 100644 --- a/test/files/run/t8690.check +++ b/test/files/run/t8690.check @@ -1,2 +1,2 @@ -non-empty iterator + abcdef diff --git a/test/files/run/view-iterator-stream.check b/test/files/run/view-iterator-stream.check index 2da02c865c89..39de54a67e23 100644 --- a/test/files/run/view-iterator-stream.check +++ b/test/files/run/view-iterator-stream.check @@ -4,9 +4,9 @@ ------------------- toIndexedSeq -> toIterator -> toStream Stream(22, ?) 22 23 24 25 toIndexedSeq -> toIterator -> view StreamView(...) 22 23 24 25 -toIndexedSeq -> toStream -> toIterator non-empty iterator 22 23 24 25 +toIndexedSeq -> toStream -> toIterator 22 23 24 25 toIndexedSeq -> toStream -> view StreamView(...) 22 23 24 25 -toIndexedSeq -> view -> toIterator non-empty iterator 22 23 24 25 +toIndexedSeq -> view -> toIterator 22 23 24 25 toIndexedSeq -> view -> toStream Stream(22, ?) 22 23 24 25 toIterator -> toIndexedSeq -> toStream Stream(22, ?) 22 23 24 25 toIterator -> toIndexedSeq -> view SeqView(...) 22 23 24 25 @@ -14,27 +14,27 @@ toIterator -> toStream -> toIndexedSeq Vector(22, 23, 24, 25) 22 toIterator -> toStream -> view StreamView(...) 22 23 24 25 toIterator -> view -> toIndexedSeq Vector(22, 23, 24, 25) 22 23 24 25 toIterator -> view -> toStream Stream(22, ?) 22 23 24 25 -toStream -> toIndexedSeq -> toIterator non-empty iterator 22 23 24 25 +toStream -> toIndexedSeq -> toIterator 22 23 24 25 toStream -> toIndexedSeq -> view SeqView(...) 22 23 24 25 toStream -> toIterator -> toIndexedSeq Vector(22, 23, 24, 25) 22 23 24 25 toStream -> toIterator -> view StreamView(...) 
22 23 24 25 toStream -> view -> toIndexedSeq Vector(22, 23, 24, 25) 22 23 24 25 -toStream -> view -> toIterator non-empty iterator 22 23 24 25 -view -> toIndexedSeq -> toIterator non-empty iterator 22 23 24 25 +toStream -> view -> toIterator 22 23 24 25 +view -> toIndexedSeq -> toIterator 22 23 24 25 view -> toIndexedSeq -> toStream Stream(22, ?) 22 23 24 25 view -> toIterator -> toIndexedSeq Vector(22, 23, 24, 25) 22 23 24 25 view -> toIterator -> toStream Stream(22, ?) 22 23 24 25 view -> toStream -> toIndexedSeq Vector(22, 23, 24, 25) 22 23 24 25 -view -> toStream -> toIterator non-empty iterator 22 23 24 25 +view -> toStream -> toIterator 22 23 24 25 ** take 20 -> drop 10 -> slice(1, 5) ** ------------------- toIndexedSeq -> toIterator -> toStream Stream(12, ?) 12 13 14 15 toIndexedSeq -> toIterator -> view StreamView(...) 12 13 14 15 -toIndexedSeq -> toStream -> toIterator non-empty iterator 12 13 14 15 +toIndexedSeq -> toStream -> toIterator 12 13 14 15 toIndexedSeq -> toStream -> view StreamView(...) 12 13 14 15 -toIndexedSeq -> view -> toIterator non-empty iterator 12 13 14 15 +toIndexedSeq -> view -> toIterator 12 13 14 15 toIndexedSeq -> view -> toStream Stream(12, ?) 12 13 14 15 toIterator -> toIndexedSeq -> toStream Stream(12, ?) 12 13 14 15 toIterator -> toIndexedSeq -> view SeqView(...) 12 13 14 15 @@ -42,27 +42,27 @@ toIterator -> toStream -> toIndexedSeq Vector(12, 13, 14, 15) 12 toIterator -> toStream -> view StreamView(...) 12 13 14 15 toIterator -> view -> toIndexedSeq Vector(12, 13, 14, 15) 12 13 14 15 toIterator -> view -> toStream Stream(12, ?) 12 13 14 15 -toStream -> toIndexedSeq -> toIterator non-empty iterator 12 13 14 15 +toStream -> toIndexedSeq -> toIterator 12 13 14 15 toStream -> toIndexedSeq -> view SeqView(...) 12 13 14 15 toStream -> toIterator -> toIndexedSeq Vector(12, 13, 14, 15) 12 13 14 15 toStream -> toIterator -> view StreamView(...) 12 13 14 15 toStream -> view -> toIndexedSeq Vector(12, 13, 14, 15) 12 13 14 15 -toStream -> view -> toIterator non-empty iterator 12 13 14 15 -view -> toIndexedSeq -> toIterator non-empty iterator 12 13 14 15 +toStream -> view -> toIterator 12 13 14 15 +view -> toIndexedSeq -> toIterator 12 13 14 15 view -> toIndexedSeq -> toStream Stream(12, ?) 12 13 14 15 view -> toIterator -> toIndexedSeq Vector(12, 13, 14, 15) 12 13 14 15 view -> toIterator -> toStream Stream(12, ?) 12 13 14 15 view -> toStream -> toIndexedSeq Vector(12, 13, 14, 15) 12 13 14 15 -view -> toStream -> toIterator non-empty iterator 12 13 14 15 +view -> toStream -> toIterator 12 13 14 15 ** slice(20, 40) -> drop 10 -> take 5 ** ------------------- toIndexedSeq -> toIterator -> toStream Stream(31, ?) 31 32 33 34 35 toIndexedSeq -> toIterator -> view StreamView(...) 31 32 33 34 35 -toIndexedSeq -> toStream -> toIterator non-empty iterator 31 32 33 34 35 +toIndexedSeq -> toStream -> toIterator 31 32 33 34 35 toIndexedSeq -> toStream -> view StreamView(...) 31 32 33 34 35 -toIndexedSeq -> view -> toIterator non-empty iterator 31 32 33 34 35 +toIndexedSeq -> view -> toIterator 31 32 33 34 35 toIndexedSeq -> view -> toStream Stream(31, ?) 31 32 33 34 35 toIterator -> toIndexedSeq -> toStream Stream(31, ?) 31 32 33 34 35 toIterator -> toIndexedSeq -> view SeqView(...) 31 32 33 34 35 @@ -70,27 +70,27 @@ toIterator -> toStream -> toIndexedSeq Vector(31, 32, 33, 34, 35) 31 toIterator -> toStream -> view StreamView(...) 31 32 33 34 35 toIterator -> view -> toIndexedSeq Vector(31, 32, 33, 34, 35) 31 32 33 34 35 toIterator -> view -> toStream Stream(31, ?) 
31 32 33 34 35 -toStream -> toIndexedSeq -> toIterator non-empty iterator 31 32 33 34 35 +toStream -> toIndexedSeq -> toIterator 31 32 33 34 35 toStream -> toIndexedSeq -> view SeqView(...) 31 32 33 34 35 toStream -> toIterator -> toIndexedSeq Vector(31, 32, 33, 34, 35) 31 32 33 34 35 toStream -> toIterator -> view StreamView(...) 31 32 33 34 35 toStream -> view -> toIndexedSeq Vector(31, 32, 33, 34, 35) 31 32 33 34 35 -toStream -> view -> toIterator non-empty iterator 31 32 33 34 35 -view -> toIndexedSeq -> toIterator non-empty iterator 31 32 33 34 35 +toStream -> view -> toIterator 31 32 33 34 35 +view -> toIndexedSeq -> toIterator 31 32 33 34 35 view -> toIndexedSeq -> toStream Stream(31, ?) 31 32 33 34 35 view -> toIterator -> toIndexedSeq Vector(31, 32, 33, 34, 35) 31 32 33 34 35 view -> toIterator -> toStream Stream(31, ?) 31 32 33 34 35 view -> toStream -> toIndexedSeq Vector(31, 32, 33, 34, 35) 31 32 33 34 35 -view -> toStream -> toIterator non-empty iterator 31 32 33 34 35 +view -> toStream -> toIterator 31 32 33 34 35 ** slice(20, 40) -> take 10 -> drop 5 ** ------------------- toIndexedSeq -> toIterator -> toStream Stream(26, ?) 26 27 28 29 30 toIndexedSeq -> toIterator -> view StreamView(...) 26 27 28 29 30 -toIndexedSeq -> toStream -> toIterator non-empty iterator 26 27 28 29 30 +toIndexedSeq -> toStream -> toIterator 26 27 28 29 30 toIndexedSeq -> toStream -> view StreamView(...) 26 27 28 29 30 -toIndexedSeq -> view -> toIterator non-empty iterator 26 27 28 29 30 +toIndexedSeq -> view -> toIterator 26 27 28 29 30 toIndexedSeq -> view -> toStream Stream(26, ?) 26 27 28 29 30 toIterator -> toIndexedSeq -> toStream Stream(26, ?) 26 27 28 29 30 toIterator -> toIndexedSeq -> view SeqView(...) 26 27 28 29 30 @@ -98,15 +98,15 @@ toIterator -> toStream -> toIndexedSeq Vector(26, 27, 28, 29, 30) 26 toIterator -> toStream -> view StreamView(...) 26 27 28 29 30 toIterator -> view -> toIndexedSeq Vector(26, 27, 28, 29, 30) 26 27 28 29 30 toIterator -> view -> toStream Stream(26, ?) 26 27 28 29 30 -toStream -> toIndexedSeq -> toIterator non-empty iterator 26 27 28 29 30 +toStream -> toIndexedSeq -> toIterator 26 27 28 29 30 toStream -> toIndexedSeq -> view SeqView(...) 26 27 28 29 30 toStream -> toIterator -> toIndexedSeq Vector(26, 27, 28, 29, 30) 26 27 28 29 30 toStream -> toIterator -> view StreamView(...) 26 27 28 29 30 toStream -> view -> toIndexedSeq Vector(26, 27, 28, 29, 30) 26 27 28 29 30 -toStream -> view -> toIterator non-empty iterator 26 27 28 29 30 -view -> toIndexedSeq -> toIterator non-empty iterator 26 27 28 29 30 +toStream -> view -> toIterator 26 27 28 29 30 +view -> toIndexedSeq -> toIterator 26 27 28 29 30 view -> toIndexedSeq -> toStream Stream(26, ?) 26 27 28 29 30 view -> toIterator -> toIndexedSeq Vector(26, 27, 28, 29, 30) 26 27 28 29 30 view -> toIterator -> toStream Stream(26, ?) 26 27 28 29 30 view -> toStream -> toIndexedSeq Vector(26, 27, 28, 29, 30) 26 27 28 29 30 -view -> toStream -> toIterator non-empty iterator 26 27 28 29 30 +view -> toStream -> toIterator 26 27 28 29 30 From 537a9f5830d34ec7d2d5e5ba068c14ab90af149c Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Tue, 22 May 2018 17:39:02 -0400 Subject: [PATCH 1361/2793] Outer parameters have the ACC_SYNTHETIC flag in bytecode. Apparently the generic signature for constructors is not expected to mention the outer accessor, but the descriptor obviously must. 
This discrepancy must be handled by the Java reflection method `Parameter#getParameterizedType`, which knows to ignore synthetic (or "mandated") method parameters that it sees in the descriptor while parsing the signature. This relies heavily on the `MethodParameters` classfile attribute, and experimentation shows that stripping that information from the classfile causes `getParameterizedType` to report only the erased types that it sees in the descriptor. Javac, with `-parameters`, emits the outer accessor with the `ACC_MANDATED` flag, which we don't emit (and doesn't appear to be a public API yet). However, it interprets `ACC_SYNTHETIC` in the same way, and we do emit that (now). This should be a one-liner, but GenBCode reads the parameter symbols off the `DefDef`, not the method's symbol's info. This shouldn't matter, but I did notice that we make another, *different* symbol for the parameter to use in the method's info. (It's also got a different name: `arg$outer` rather than `$outer`.) To be safe, I marked them both `ARTIFACT`. Fixes scala/bug#10880. --- .../tools/nsc/transform/ExplicitOuter.scala | 4 ++-- test/files/jvm/t10880.check | 2 ++ test/files/jvm/t10880.scala | 18 ++++++++++++++++++ 3 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 test/files/jvm/t10880.check create mode 100644 test/files/jvm/t10880.scala diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 94dcb8405f3b..85a6fa220088 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -160,7 +160,7 @@ abstract class ExplicitOuter extends InfoTransform val paramsWithOuter = if (sym.isClassConstructor && isInner(sym.owner)) // 1 - sym.newValueParameter(nme.OUTER_ARG, sym.pos).setInfo(sym.owner.outerClass.thisType) :: params + sym.newValueParameter(nme.OUTER_ARG, sym.pos, ARTIFACT).setInfo(sym.owner.outerClass.thisType) :: params else params if ((resTpTransformed ne resTp) || (paramsWithOuter ne params)) MethodType(paramsWithOuter, resTpTransformed) @@ -399,7 +399,7 @@ abstract class ExplicitOuter extends InfoTransform reporter.error(tree.pos, s"Implementation restriction: ${clazz.fullLocationString} requires premature access to ${clazz.outerClass}.") } val outerParam = - sym.newValueParameter(nme.OUTER, sym.pos) setInfo clazz.outerClass.thisType + sym.newValueParameter(nme.OUTER, sym.pos, ARTIFACT) setInfo clazz.outerClass.thisType ((ValDef(outerParam) setType NoType) :: vparamss.head) :: vparamss.tail } else vparamss super.transform(copyDefDef(tree)(vparamss = vparamss1)) diff --git a/test/files/jvm/t10880.check b/test/files/jvm/t10880.check new file mode 100644 index 000000000000..87f09c43838a --- /dev/null +++ b/test/files/jvm/t10880.check @@ -0,0 +1,2 @@ +List(class Provides, Provides) +List(Provides) diff --git a/test/files/jvm/t10880.scala b/test/files/jvm/t10880.scala new file mode 100644 index 000000000000..6edc0a62dcce --- /dev/null +++ b/test/files/jvm/t10880.scala @@ -0,0 +1,18 @@ +trait Provider[T] { + def provide: T +} + +class Provides[T] { + def provide(t: T): Provider[T] = new Provider[T] { def provide = t } +} + +object Test extends App { + + val ctor = Class.forName("Provides$$anon$1") + .getDeclaredConstructors + .head + + println(ctor.getParameters.map(_.getParameterizedType).toList) + println(ctor.getGenericParameterTypes.toList) + +} \ No newline at end of file From 427c09cfab0e6d600b05e4ca8a2cbe66fce2bb74 Mon Sep 17 
00:00:00 2001 From: Som Snytt Date: Mon, 21 May 2018 14:22:15 -0700 Subject: [PATCH 1362/2793] More noise when going silent Report `silent` status with an interactive reader or in friendly verbose `info` mode. ``` $ skala Welcome to Scala 2.12.7 (OpenJDK 64-Bit Server VM 1.8.0_171) scala> :load sc.sc Loading sc.sc... res0: String = hello res2: String = goodbye scala> :quit $ skala -Dscala.repl.info Welcome to Scala 2.12.7 (OpenJDK 64-Bit Server VM 1.8.0_171) [info] started at Mon May 21 14:33:00 PDT 2018 scala 2.12.7-20180521-212215-2f5c49c> :load sc.sc Loading sc.sc... res0: String = hello Result printing is off. Result printing is on. res2: String = goodbye scala 2.12.7-20180521-212215-2f5c49c> :q amarki@amarki-462836:~/projects/scala$ cat sc.sc "hello" :silent "ha, no way" :silent "goodbye" ``` --- src/repl/scala/tools/nsc/interpreter/ILoop.scala | 2 +- test/files/run/t6507.check | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index 5ea22049c530..804915dd7a91 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -742,7 +742,7 @@ class ILoop(in0: Option[BufferedReader], protected val out: JPrintWriter) extend def verbosity() = { intp.printResults = !intp.printResults - replinfo(s"Result printing is ${ if (intp.printResults) "on" else "off" }.") + if (in.interactive || isReplInfo) echo(s"Result printing is ${ if (intp.printResults) "on" else "off" }.") } /** Run one command submitted by the user. Two values are returned: diff --git a/test/files/run/t6507.check b/test/files/run/t6507.check index 75cf39230422..03a9f2d49e48 100644 --- a/test/files/run/t6507.check +++ b/test/files/run/t6507.check @@ -1,5 +1,6 @@ scala> :silent +Result printing is off. scala> class A { override def toString() = { println("!"); "A" } } @@ -12,6 +13,7 @@ scala> b = new A scala> new A scala> :silent +Result printing is on. scala> res0 ! From 7e13e4721922224fd0f4fd0aa0785059024ead28 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 29 Mar 2018 10:45:49 +1000 Subject: [PATCH 1363/2793] Honour Thread.interrupt detected by NIO operations NIO file operations check for thread interruption and throw an exception. If we see one of these, all subsequent operations will fail in the same way, so we should fail fast. Since the change to use NIO for file writing, and prior to this change, hitting CTRL-C during `sbt compile` would spew out a stack trace for each classfile. 
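A hedged sketch of the handling (the helper name below is illustrative): NIO channel operations throw `ClosedByInterruptException` once the thread has been interrupted, and every subsequent operation on that thread fails the same way, so the first such failure is converted into an `InterruptedException` to stop the run instead of reporting an error per classfile.

```
import java.nio.channels.ClosedByInterruptException

def writeClassfile(write: () => Unit): Unit =
  try write()
  catch {
    // Fail fast: an interrupted thread (e.g. CTRL-C in sbt) should stop
    // compilation rather than log a stack trace for each remaining file.
    case _: ClosedByInterruptException => throw new InterruptedException
  }
```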
--- .../scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala | 3 +++ src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala | 2 ++ 2 files changed, 5 insertions(+) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala index c4350e2ca052..b9d5a98658a2 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala @@ -1,6 +1,7 @@ package scala.tools.nsc package backend.jvm +import java.nio.channels.ClosedByInterruptException import java.nio.file.Path import java.util.concurrent.ThreadPoolExecutor.CallerRunsPolicy import java.util.concurrent._ @@ -153,6 +154,8 @@ private[jvm] object GeneratedClassHandler { // We know the future is complete, throw the exception if it completed with a failure unitInPostProcess.task.value.get.get } catch { + case _: ClosedByInterruptException => throw new InterruptedException() + case ex: InterruptedException => throw ex case NonFatal(t) => t.printStackTrace() frontendAccess.backendReporting.error(NoPosition, s"unable to write ${unitInPostProcess.paths.sourceFile} $t") diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index c4f8233de09c..78b65dedcc03 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -1,6 +1,7 @@ package scala.tools.nsc package backend.jvm +import java.nio.channels.ClosedByInterruptException import java.util.concurrent.ConcurrentHashMap import scala.reflect.internal.util.{NoPosition, Position, StringContextStripMarginOps} @@ -63,6 +64,7 @@ abstract class PostProcessor extends PerRunInit { backendReporting.error(NoPosition, s"Could not write class ${internalName} because it exceeds JVM code size limits. 
${e.getMessage}") null + case ex: ClosedByInterruptException => throw new InterruptedException case ex: Throwable => ex.printStackTrace() backendReporting.error(NoPosition, s"Error while emitting ${internalName}\n${ex.getMessage}") From 8fe05cc85331fdaea9d43952c00232817ed8c163 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 29 Mar 2018 15:35:32 +1000 Subject: [PATCH 1364/2793] Delete empty classfile if thread is interrupted during writing --- .../tools/nsc/backend/jvm/ClassfileWriters.scala | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala index 4d9b478c7dc4..639f79bd5c2c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala @@ -2,7 +2,7 @@ package scala.tools.nsc.backend.jvm import java.io.{BufferedOutputStream, DataOutputStream, FileOutputStream, IOException} import java.nio.ByteBuffer -import java.nio.channels.FileChannel +import java.nio.channels.{ClosedByInterruptException, FileChannel} import java.nio.charset.StandardCharsets import java.nio.file._ import java.nio.file.attribute.FileAttribute @@ -179,7 +179,17 @@ abstract class ClassfileWriters { case _: FileAlreadyExistsException => FileChannel.open(path, fallbackOpenOptions) } - os.write(ByteBuffer.wrap(bytes), 0L) + try { + os.write(ByteBuffer.wrap(bytes), 0L) + } catch { + case ex: ClosedByInterruptException => + try { + Files.deleteIfExists(path) // don't leave a empty of half-written classfile around after an interrupt + } catch { + case _: Throwable => + } + throw ex + } os.close() } catch { case e: FileConflictException => From c3249a46eff5377f1d0172917eceedb8c7f371b6 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 29 Mar 2018 14:03:49 +1000 Subject: [PATCH 1365/2793] Honour interrupts in source file reading --- src/compiler/scala/tools/nsc/io/SourceReader.scala | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala index 89964003ab24..5ac79f357b04 100644 --- a/src/compiler/scala/tools/nsc/io/SourceReader.scala +++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala @@ -7,10 +7,11 @@ package scala.tools.nsc package io -import java.io.{ FileInputStream, IOException } +import java.io.{FileInputStream, IOException} import java.nio.{ByteBuffer, CharBuffer} -import java.nio.channels.{ ReadableByteChannel, Channels } +import java.nio.channels.{AsynchronousCloseException, Channels, ClosedByInterruptException, ReadableByteChannel} import java.nio.charset.{CharsetDecoder, CoderResult} + import scala.tools.nsc.reporters._ /** This class implements methods to read and decode source files. 
*/ @@ -38,7 +39,11 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) { val c = new FileInputStream(file).getChannel try read(c) - catch { case e: Exception => reportEncodingError("" + file, e) ; Array() } + catch { + case ex: InterruptedException => throw ex + case _: ClosedByInterruptException => throw new InterruptedException + case e: Exception => reportEncodingError("" + file, e) ; Array() + } finally c.close() } @@ -51,6 +56,8 @@ class SourceReader(decoder: CharsetDecoder, reporter: Reporter) { case _ => read(ByteBuffer.wrap(file.toByteArray)) } catch { + case ex: InterruptedException => throw ex + case _: ClosedByInterruptException => throw new InterruptedException case e: Exception => reportEncodingError("" + file, e) ; Array() } } From 0549be58619324ca0904ca486273da64be67631d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 29 Mar 2018 10:11:24 +1000 Subject: [PATCH 1366/2793] Support cancellation by checking Thread.interrupted A common means of cancelling a task is to shutdown the thread pool executing it. That's what SBT's CTRL-C handler does, for example. Typically, thread pools call `Thread.interrupt()` to cooperatively stop the workload. We need to do our part by checking `interrupted()` from time to time, and translating this into an exception that will stop compilation. --- src/compiler/scala/tools/nsc/Global.scala | 5 ++++- src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala | 1 + src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala | 5 ++++- src/compiler/scala/tools/nsc/typechecker/Macros.scala | 1 + src/compiler/scala/tools/nsc/typechecker/Typers.scala | 1 + 5 files changed, 11 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 2638bd54ba65..a59f13c4faad 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -423,7 +423,10 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } } - final def applyPhase(unit: CompilationUnit) = withCurrentUnit(unit)(apply(unit)) + final def applyPhase(unit: CompilationUnit) = { + if (Thread.interrupted()) throw new InterruptedException + withCurrentUnit(unit)(apply(unit)) + } } // phaseName = "parser" diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala index 0b01bbaab6aa..8c186bb8e44f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala @@ -43,6 +43,7 @@ abstract class CodeGen[G <: Global](val global: G) extends PerRunInit { generatedClasses += GeneratedClass(beanClassNode, fullSymbolName, position, isArtifact = true) } } catch { + case ex: InterruptedException => throw ex case ex: Throwable => ex.printStackTrace() error(s"Error while emitting ${unit.source}\n${ex.getMessage}") diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index 78b65dedcc03..4a3f4dab24ac 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -1,6 +1,7 @@ package scala.tools.nsc package backend.jvm +import java.nio.channels.ClosedByInterruptException import java.nio.channels.ClosedByInterruptException import java.util.concurrent.ConcurrentHashMap @@ -64,8 +65,10 @@ abstract class PostProcessor extends PerRunInit { backendReporting.error(NoPosition, s"Could not write class 
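A minimal sketch of the cooperative-cancellation pattern described above (the method below is illustrative, not the compiler's API): poll the interrupt flag at coarse-grained points, such as once per compilation unit, and turn it into an exception that unwinds the run.

```
def runOverUnits(units: List[String])(process: String => Unit): Unit =
  for (unit <- units) {
    // A pool shutdown (e.g. sbt's CTRL-C handler) interrupts the worker
    // thread; checking the flag here lets compilation stop promptly.
    if (Thread.interrupted()) throw new InterruptedException
    process(unit)
  }
```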
${internalName} because it exceeds JVM code size limits. ${e.getMessage}") null - case ex: ClosedByInterruptException => throw new InterruptedException + case ex: InterruptedException => throw ex case ex: Throwable => + // TODO hide this stack trace behind -Ydebug? + // TODO fail fast rather than continuing to write the rest of the class files? ex.printStackTrace() backendReporting.error(NoPosition, s"Error while emitting ${internalName}\n${ex.getMessage}") null diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index faadf07235eb..4cb9c2ca39d7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -843,6 +843,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { if (openMacros.nonEmpty) popMacroContext() // weirdly we started popping on an empty stack when refactoring fatalWarnings logic val realex = ReflectionUtils.unwrapThrowable(ex) realex match { + case ex: InterruptedException => throw ex case ex: AbortMacroException => MacroGeneratedAbort(expandee, ex) case ex: ControlThrowable => throw ex case ex: TypeError => MacroGeneratedTypeError(expandee, ex) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index c8404236b57a..28dbb46f4a05 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5615,6 +5615,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val statsEnabled = StatisticsStatics.areSomeHotStatsEnabled() && statistics.areHotStatsLocallyEnabled val startByType = if (statsEnabled) statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null if (statsEnabled) statistics.incCounter(visitsByType, tree.getClass) + if (Thread.interrupted()) throw new InterruptedException try body finally if (statsEnabled) statistics.popTimer(byTypeStack, startByType) } From 4ac59fc15fda608eaed264b115bcf9aa7e3da15a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 3 Apr 2018 10:04:02 +1000 Subject: [PATCH 1367/2793] Combine thread interrupt handling with Reporter.cancelled Also remove the fine grained cancellation checking within typechecking a compilation unit. 
--- src/compiler/scala/tools/nsc/Global.scala | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index a59f13c4faad..24a2831454c9 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -396,6 +396,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def cancelled(unit: CompilationUnit) = { // run the typer only if in `createJavadoc` mode val maxJavaPhase = if (createJavadoc) currentRun.typerPhase.id else currentRun.namerPhase.id + if (Thread.interrupted()) reporter.cancelled = true reporter.cancelled || unit.isJava && this.id > maxJavaPhase } @@ -423,10 +424,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } } - final def applyPhase(unit: CompilationUnit) = { - if (Thread.interrupted()) throw new InterruptedException - withCurrentUnit(unit)(apply(unit)) - } + final def applyPhase(unit: CompilationUnit) = withCurrentUnit(unit)(apply(unit)) } // phaseName = "parser" @@ -1447,6 +1445,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val profileBefore=profiler.beforePhase(phase) try globalPhase.run() + catch { case _: InterruptedException => reporter.cancelled = true } finally if (timePhases) statistics.stopTimer(phaseTimer, startPhase) else () profiler.afterPhase(phase, profileBefore) From 80d6c3001db4aed98a02d82f5851e3f4575aa561 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 4 Apr 2018 09:03:35 +1000 Subject: [PATCH 1368/2793] Also convert interruption during source file reading into cancellation --- src/compiler/scala/tools/nsc/Global.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 24a2831454c9..3edac10cf026 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1525,7 +1525,10 @@ class Global(var currentSettings: Settings, reporter0: Reporter) profiler.afterPhase(Global.InitPhase, snap) compileSources(sources) } - catch { case ex: IOException => globalError(ex.getMessage()) } + catch { + case ex: InterruptedException => reporter.cancelled = true + case ex: IOException => globalError(ex.getMessage()) + } } /** Compile list of files given by their names */ From 1a3a0aaba1192e9a5149be30427a350da68339c3 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 28 May 2018 14:10:20 +1000 Subject: [PATCH 1369/2793] Address review comments --- src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala | 2 +- .../scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala | 1 - src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala | 3 +-- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 1 - 4 files changed, 2 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala index 8c186bb8e44f..743d3ebe8754 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala @@ -45,7 +45,7 @@ abstract class CodeGen[G <: Global](val global: G) extends PerRunInit { } catch { case ex: InterruptedException => throw ex case ex: Throwable => - ex.printStackTrace() + if (settings.debug) ex.printStackTrace() error(s"Error while emitting ${unit.source}\n${ex.getMessage}") } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala 
b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala index b9d5a98658a2..a5284611dad7 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala @@ -155,7 +155,6 @@ private[jvm] object GeneratedClassHandler { unitInPostProcess.task.value.get.get } catch { case _: ClosedByInterruptException => throw new InterruptedException() - case ex: InterruptedException => throw ex case NonFatal(t) => t.printStackTrace() frontendAccess.backendReporting.error(NoPosition, s"unable to write ${unitInPostProcess.paths.sourceFile} $t") diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index 4a3f4dab24ac..c3b249ad2b93 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -67,9 +67,8 @@ abstract class PostProcessor extends PerRunInit { null case ex: InterruptedException => throw ex case ex: Throwable => - // TODO hide this stack trace behind -Ydebug? // TODO fail fast rather than continuing to write the rest of the class files? - ex.printStackTrace() + if (frontendAccess.compilerSettings.debug) ex.printStackTrace() backendReporting.error(NoPosition, s"Error while emitting ${internalName}\n${ex.getMessage}") null } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 28dbb46f4a05..c8404236b57a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5615,7 +5615,6 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val statsEnabled = StatisticsStatics.areSomeHotStatsEnabled() && statistics.areHotStatsLocallyEnabled val startByType = if (statsEnabled) statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null if (statsEnabled) statistics.incCounter(visitsByType, tree.getClass) - if (Thread.interrupted()) throw new InterruptedException try body finally if (statsEnabled) statistics.popTimer(byTypeStack, startByType) } From ebd1dd854ae41f68d534085eb9fa6fae4ea6d298 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 30 May 2018 12:15:57 +0200 Subject: [PATCH 1370/2793] Scaladoc: style `code` elements with monospace font The 2.12 scaladoc redesign changed the css for `code` elements from monospace to bold. I wonder why?
https://github.com/scala/scala/commit/260661d16afe2266aecf9980476e386003cd50d1#diff-73c862a5ee9e3b9afafaba1a5a42e62eR699 --- src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css index d5f89b15ac65..bb48b1a639c5 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css @@ -666,7 +666,7 @@ div#definition > h4#signature > span.modifier_kind > i.unfold-arrow, } .cmt code { - font-weight: bold; + font-family: "Source Code Pro", "Monaco", "Ubuntu Mono Regular", "Lucida Console", monospace; } .cmt a { From 8ae50c164565332d2059e2718a901dd4a591617b Mon Sep 17 00:00:00 2001 From: sh0hei Date: Sat, 3 Mar 2018 23:19:37 +0900 Subject: [PATCH 1371/2793] Deprecate bit shifting by Long value --- project/GenerateAnyVals.scala | 11 +++++++---- src/library/scala/Byte.scala | 3 +++ src/library/scala/Char.scala | 3 +++ src/library/scala/Function0.scala | 2 +- src/library/scala/Int.scala | 3 +++ src/library/scala/Short.scala | 3 +++ test/files/run/t9516.check | 1 + 7 files changed, 21 insertions(+), 5 deletions(-) create mode 100644 test/files/run/t9516.check diff --git a/project/GenerateAnyVals.scala b/project/GenerateAnyVals.scala index f349bfd16b96..b8078c607b6b 100644 --- a/project/GenerateAnyVals.scala +++ b/project/GenerateAnyVals.scala @@ -148,8 +148,10 @@ import scala.language.implicitConversions""" def mkUnaryOps = unaryOps map (x => "%s\n def unary_%s : %s".format(x.doc, x.op, this opType I)) def mkStringOps = List("def +(x: String): String") def mkShiftOps = ( - for (op <- shiftOps ; arg <- List(I, L)) yield - "%s\n def %s(x: %s): %s".format(op.doc, op.op, arg, this opType I) + for (op <- shiftOps ; arg <- List(I, L)) yield { + val doc = op.doc + (if (this == L || arg == I) "" else "\n @deprecated(\"shifting a value by a `Long` argument is deprecated (except when the value is a `Long`).\\nCall `toInt` on the argument to maintain the current behavior and avoid the deprecation warning.\", \"2.12.7\")") + "%s\n def %s(x: %s): %s".format(doc, op.op, arg, this opType I) + } ) def clumps: List[List[String]] = { @@ -232,6 +234,7 @@ import scala.language.implicitConversions""" "@unboxImpl@" -> "???" ) def interpolations = Map( + "@article@" -> (if (this == I) "an" else "a"), "@name@" -> name, "@representation@" -> representation, "@javaequiv@" -> javaEquiv, @@ -319,10 +322,10 @@ override def toString = "object scala.@name@" def nonUnitCompanions = "" // todo def cardinalCompanion = """ -/** The smallest value representable as a @name@. */ +/** The smallest value representable as @article@ @name@. */ final val MinValue = @boxed@.MIN_VALUE -/** The largest value representable as a @name@. */ +/** The largest value representable as @article@ @name@. */ final val MaxValue = @boxed@.MAX_VALUE """ diff --git a/src/library/scala/Byte.scala b/src/library/scala/Byte.scala index 3709586f2ec2..9a51e9e45dad 100644 --- a/src/library/scala/Byte.scala +++ b/src/library/scala/Byte.scala @@ -55,6 +55,7 @@ final abstract class Byte private extends AnyVal { * filling in the new right bits with zeroes. 
* @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}} */ + @deprecated("shifting a value by a `Long` argument is deprecated (except when the value is a `Long`).\nCall `toInt` on the argument to maintain the current behavior and avoid the deprecation warning.", "2.12.7") def <<(x: Long): Int /** * Returns this value bit-shifted right by the specified number of bits, @@ -77,6 +78,7 @@ final abstract class Byte private extends AnyVal { * // 00011111 11111111 11111111 11111101 * }}} */ + @deprecated("shifting a value by a `Long` argument is deprecated (except when the value is a `Long`).\nCall `toInt` on the argument to maintain the current behavior and avoid the deprecation warning.", "2.12.7") def >>>(x: Long): Int /** * Returns this value bit-shifted right by the specified number of bits, @@ -99,6 +101,7 @@ final abstract class Byte private extends AnyVal { * // 11111111 11111111 11111111 11111101 * }}} */ + @deprecated("shifting a value by a `Long` argument is deprecated (except when the value is a `Long`).\nCall `toInt` on the argument to maintain the current behavior and avoid the deprecation warning.", "2.12.7") def >>(x: Long): Int /** Returns `true` if this value is equal to x, `false` otherwise. */ diff --git a/src/library/scala/Char.scala b/src/library/scala/Char.scala index 7dbb0209c38d..ff3246f7d605 100644 --- a/src/library/scala/Char.scala +++ b/src/library/scala/Char.scala @@ -55,6 +55,7 @@ final abstract class Char private extends AnyVal { * filling in the new right bits with zeroes. * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}} */ + @deprecated("shifting a value by a `Long` argument is deprecated (except when the value is a `Long`).\nCall `toInt` on the argument to maintain the current behavior and avoid the deprecation warning.", "2.12.7") def <<(x: Long): Int /** * Returns this value bit-shifted right by the specified number of bits, @@ -77,6 +78,7 @@ final abstract class Char private extends AnyVal { * // 00011111 11111111 11111111 11111101 * }}} */ + @deprecated("shifting a value by a `Long` argument is deprecated (except when the value is a `Long`).\nCall `toInt` on the argument to maintain the current behavior and avoid the deprecation warning.", "2.12.7") def >>>(x: Long): Int /** * Returns this value bit-shifted right by the specified number of bits, @@ -99,6 +101,7 @@ final abstract class Char private extends AnyVal { * // 11111111 11111111 11111111 11111101 * }}} */ + @deprecated("shifting a value by a `Long` argument is deprecated (except when the value is a `Long`).\nCall `toInt` on the argument to maintain the current behavior and avoid the deprecation warning.", "2.12.7") def >>(x: Long): Int /** Returns `true` if this value is equal to x, `false` otherwise. */ diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala index 15d0f1493875..cfcc7b3726fe 100644 --- a/src/library/scala/Function0.scala +++ b/src/library/scala/Function0.scala @@ -6,7 +6,7 @@ ** |/ ** \* */ // GENERATED CODE: DO NOT EDIT. -// genprod generated these sources at: Mon Jun 08 18:05:40 CEST 2015 +// genprod generated these sources at: Wed May 30 22:17:36 CEST 2018 package scala diff --git a/src/library/scala/Int.scala b/src/library/scala/Int.scala index 491094cfde4a..cda015063a17 100644 --- a/src/library/scala/Int.scala +++ b/src/library/scala/Int.scala @@ -55,6 +55,7 @@ final abstract class Int private extends AnyVal { * filling in the new right bits with zeroes. 
* @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}} */ + @deprecated("shifting a value by a `Long` argument is deprecated (except when the value is a `Long`).\nCall `toInt` on the argument to maintain the current behavior and avoid the deprecation warning.", "2.12.7") def <<(x: Long): Int /** * Returns this value bit-shifted right by the specified number of bits, @@ -77,6 +78,7 @@ final abstract class Int private extends AnyVal { * // 00011111 11111111 11111111 11111101 * }}} */ + @deprecated("shifting a value by a `Long` argument is deprecated (except when the value is a `Long`).\nCall `toInt` on the argument to maintain the current behavior and avoid the deprecation warning.", "2.12.7") def >>>(x: Long): Int /** * Returns this value bit-shifted right by the specified number of bits, @@ -99,6 +101,7 @@ final abstract class Int private extends AnyVal { * // 11111111 11111111 11111111 11111101 * }}} */ + @deprecated("shifting a value by a `Long` argument is deprecated (except when the value is a `Long`).\nCall `toInt` on the argument to maintain the current behavior and avoid the deprecation warning.", "2.12.7") def >>(x: Long): Int /** Returns `true` if this value is equal to x, `false` otherwise. */ diff --git a/src/library/scala/Short.scala b/src/library/scala/Short.scala index 136d745f167d..94dea784d3af 100644 --- a/src/library/scala/Short.scala +++ b/src/library/scala/Short.scala @@ -55,6 +55,7 @@ final abstract class Short private extends AnyVal { * filling in the new right bits with zeroes. * @example {{{ 6 << 3 == 48 // in binary: 0110 << 3 == 0110000 }}} */ + @deprecated("shifting a value by a `Long` argument is deprecated (except when the value is a `Long`).\nCall `toInt` on the argument to maintain the current behavior and avoid the deprecation warning.", "2.12.7") def <<(x: Long): Int /** * Returns this value bit-shifted right by the specified number of bits, @@ -77,6 +78,7 @@ final abstract class Short private extends AnyVal { * // 00011111 11111111 11111111 11111101 * }}} */ + @deprecated("shifting a value by a `Long` argument is deprecated (except when the value is a `Long`).\nCall `toInt` on the argument to maintain the current behavior and avoid the deprecation warning.", "2.12.7") def >>>(x: Long): Int /** * Returns this value bit-shifted right by the specified number of bits, @@ -99,6 +101,7 @@ final abstract class Short private extends AnyVal { * // 11111111 11111111 11111111 11111101 * }}} */ + @deprecated("shifting a value by a `Long` argument is deprecated (except when the value is a `Long`).\nCall `toInt` on the argument to maintain the current behavior and avoid the deprecation warning.", "2.12.7") def >>(x: Long): Int /** Returns `true` if this value is equal to x, `false` otherwise. 
*/ diff --git a/test/files/run/t9516.check b/test/files/run/t9516.check new file mode 100644 index 000000000000..fc338cd903b5 --- /dev/null +++ b/test/files/run/t9516.check @@ -0,0 +1 @@ +warning: there were 12 deprecation warnings (since 2.12.7); re-run with -deprecation for details From a97cefd9d3e0f82deac9cbf497115fe9b43b9f91 Mon Sep 17 00:00:00 2001 From: Mark Petruska Date: Fri, 24 Nov 2017 18:09:12 +0100 Subject: [PATCH 1372/2793] Backport #5640 to 2.11.x 2.11 fix for scala/bug#9881 --- .../tools/nsc/typechecker/Contexts.scala | 7 ++-- .../scala/reflect/internal/Names.scala | 6 ++-- .../tools/nsc/interpreter/ExprTyper.scala | 14 +++++--- .../nsc/interpreter/MemberHandlers.scala | 33 +++++++++++------ test/files/run/t9880-9881.check | 36 +++++++++++++++++++ test/files/run/t9880-9881.scala | 29 +++++++++++++++ .../scala/reflect/internal/NamesTest.scala | 28 +++++++++++++++ 7 files changed, 134 insertions(+), 19 deletions(-) create mode 100644 test/files/run/t9880-9881.check create mode 100644 test/files/run/t9880-9881.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 5ec16e84bb19..55831b1abcc0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -64,9 +64,8 @@ trait Contexts { self: Analyzer => for (imps <- allImportInfos.remove(unit)) { for (imp <- imps.reverse.distinct) { val used = allUsedSelectors(imp) - def isMask(s: ImportSelector) = s.name != nme.WILDCARD && s.rename == nme.WILDCARD - imp.tree.selectors filterNot (s => isMask(s) || used(s)) foreach { sel => + imp.tree.selectors filterNot (s => isMaskImport(s) || used(s)) foreach { sel => reporter.warning(imp posOf sel, "Unused import") } } @@ -74,6 +73,10 @@ trait Contexts { self: Analyzer => } } + def isMaskImport(s: ImportSelector): Boolean = s.name != nme.WILDCARD && s.rename == nme.WILDCARD + def isIndividualImport(s: ImportSelector): Boolean = s.name != nme.WILDCARD && s.rename != nme.WILDCARD + def isWildcardImport(s: ImportSelector): Boolean = s.name == nme.WILDCARD + var lastAccessCheckDetails: String = "" /** List of symbols to import from in a root context. Typically that diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index 32d12d305ee4..ee80e507f719 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -296,11 +296,13 @@ trait Names extends api.Names { */ final def pos(s: String, start: Int): Int = { var i = pos(s.charAt(0), start) - while (i + s.length() <= len) { + val sLen = s.length() + if (sLen == 1) return i + while (i + sLen <= len) { var j = 1 while (s.charAt(j) == chrs(index + i + j)) { j += 1 - if (j == s.length()) return i + if (j == sLen) return i } i = pos(s.charAt(0), i + 1) } diff --git a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala index 8a6a40581023..a6271ed52567 100644 --- a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala +++ b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala @@ -15,6 +15,12 @@ trait ExprTyper { import global.{ reporter => _, Import => _, _ } import naming.freshInternalVarName + private def doInterpret(code: String): IR.Result = { + // interpret/interpretSynthetic may change the phase, which would have unintended effects on types. 
+ val savedPhase = phase + try interpretSynthetic(code) finally phase = savedPhase + } + def symbolOfLine(code: String): Symbol = { def asExpr(): Symbol = { val name = freshInternalVarName() @@ -23,7 +29,7 @@ trait ExprTyper { // behind a def and strip the NullaryMethodType which wraps the expr. val line = "def " + name + " = " + code - interpretSynthetic(line) match { + doInterpret(line) match { case IR.Success => val sym0 = symbolOfTerm(name) // drop NullaryMethodType @@ -34,7 +40,7 @@ trait ExprTyper { def asDefn(): Symbol = { val old = repl.definedSymbolList.toSet - interpretSynthetic(code) match { + doInterpret(code) match { case IR.Success => repl.definedSymbolList filterNot old match { case Nil => NoSymbol @@ -45,7 +51,7 @@ trait ExprTyper { } } def asError(): Symbol = { - interpretSynthetic(code) + doInterpret(code) NoSymbol } beSilentDuring(asExpr()) orElse beSilentDuring(asDefn()) orElse asError() @@ -74,7 +80,7 @@ trait ExprTyper { def asProperType(): Option[Type] = { val name = freshInternalVarName() val line = "def %s: %s = ???" format (name, typeString) - interpretSynthetic(line) match { + doInterpret(line) match { case IR.Success => val sym0 = symbolOfTerm(name) Some(sym0.asMethod.returnType) diff --git a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala index 4e45f6d61524..0a3d402a8544 100644 --- a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala +++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala @@ -212,29 +212,40 @@ trait MemberHandlers { class ImportHandler(imp: Import) extends MemberHandler(imp) { val Import(expr, selectors) = imp + def targetType = intp.global.rootMirror.getModuleIfDefined("" + expr) match { case NoSymbol => intp.typeOfExpression("" + expr) - case sym => sym.thisType + case sym => sym.tpe } - private def importableTargetMembers = importableMembers(targetType).toList - // wildcard imports, e.g. import foo._ - private def selectorWild = selectors filter (_.name == nme.USCOREkw) - // renamed imports, e.g. 
import foo.{ bar => baz } - private def selectorRenames = selectors map (_.rename) filterNot (_ == null) + + private def isFlattenedSymbol(sym: Symbol) = + sym.owner.isPackageClass && + sym.name.containsName(nme.NAME_JOIN_STRING) && + sym.owner.info.member(sym.name.take(sym.name.indexOf(nme.NAME_JOIN_STRING))) != NoSymbol + + private def importableTargetMembers = + importableMembers(exitingTyper(targetType)).filterNot(isFlattenedSymbol).toList + + // non-wildcard imports + private def individualSelectors = selectors filter analyzer.isIndividualImport /** Whether this import includes a wildcard import */ - val importsWildcard = selectorWild.nonEmpty + val importsWildcard = selectors exists analyzer.isWildcardImport def implicitSymbols = importedSymbols filter (_.isImplicit) def importedSymbols = individualSymbols ++ wildcardSymbols - private val selectorNames = selectorRenames filterNot (_ == nme.USCOREkw) flatMap (_.bothNames) toSet - lazy val individualSymbols: List[Symbol] = exitingTyper(importableTargetMembers filter (m => selectorNames(m.name))) - lazy val wildcardSymbols: List[Symbol] = exitingTyper(if (importsWildcard) importableTargetMembers else Nil) + lazy val importableSymbolsWithRenames = { + val selectorRenameMap = individualSelectors.flatMap(x => x.name.bothNames zip x.rename.bothNames).toMap + importableTargetMembers flatMap (m => selectorRenameMap.get(m.name) map (m -> _)) + } + + lazy val individualSymbols: List[Symbol] = importableSymbolsWithRenames map (_._1) + lazy val wildcardSymbols: List[Symbol] = if (importsWildcard) importableTargetMembers else Nil /** Complete list of names imported by a wildcard */ lazy val wildcardNames: List[Name] = wildcardSymbols map (_.name) - lazy val individualNames: List[Name] = individualSymbols map (_.name) + lazy val individualNames: List[Name] = importableSymbolsWithRenames map (_._2) /** The names imported by this statement */ override lazy val importedNames: List[Name] = wildcardNames ++ individualNames diff --git a/test/files/run/t9880-9881.check b/test/files/run/t9880-9881.check new file mode 100644 index 000000000000..d600b9895b29 --- /dev/null +++ b/test/files/run/t9880-9881.check @@ -0,0 +1,36 @@ + +scala> // import in various ways + +scala> import java.util.Date +import java.util.Date + +scala> import scala.util._ +import scala.util._ + +scala> import scala.reflect.runtime.{universe => ru} +import scala.reflect.runtime.{universe=>ru} + +scala> import ru.TypeTag +import ru.TypeTag + +scala> + +scala> // show the imports + +scala> :imports + 1) import java.lang._ (...) + 2) import scala._ (...) + 3) import scala.Predef._ (...) + 4) import java.util.Date (...) + 5) import scala.util._ (...) + 6) import scala.reflect.runtime.{universe=>ru} (...) + 7) import ru.TypeTag (...) 
+ +scala> + +scala> // should be able to define this class with the imports above + +scala> class C[T](date: Date, rand: Random, typeTag: TypeTag[T]) +defined class C + +scala> :quit diff --git a/test/files/run/t9880-9881.scala b/test/files/run/t9880-9881.scala new file mode 100644 index 000000000000..0268c8c32c61 --- /dev/null +++ b/test/files/run/t9880-9881.scala @@ -0,0 +1,29 @@ +import scala.tools.partest.ReplTest +import scala.tools.nsc.Settings + +object Test extends ReplTest { + + override def transformSettings(s: Settings): Settings = { + s.Yreplclassbased.value = true + s + } + + lazy val normalizeRegex = """(import\s.*)\(.*\)""".r + + override def normalize(s: String): String = normalizeRegex.replaceFirstIn(s, "$1(...)") + + def code = + """ + |// import in various ways + |import java.util.Date + |import scala.util._ + |import scala.reflect.runtime.{universe => ru} + |import ru.TypeTag + | + |// show the imports + |:imports + | + |// should be able to define this class with the imports above + |class C[T](date: Date, rand: Random, typeTag: TypeTag[T]) + """.stripMargin +} diff --git a/test/junit/scala/reflect/internal/NamesTest.scala b/test/junit/scala/reflect/internal/NamesTest.scala index 549c10abedbc..fdec32d31f71 100644 --- a/test/junit/scala/reflect/internal/NamesTest.scala +++ b/test/junit/scala/reflect/internal/NamesTest.scala @@ -92,4 +92,32 @@ class NamesTest { assert(h1 string_== h2) assert(h1 string_== h1y) } + + @Test + def pos(): Unit = { + def check(nameString: String, sub: String) = { + val name = TermName(nameString) + val javaResult = name.toString.indexOf(sub) match { + case -1 => name.length + case x => x + } + val nameResult = name.pos(sub) + assertEquals(javaResult, nameResult) + if (sub.length == 1) { + val nameResultChar = name.pos(sub.head) + assertEquals(javaResult, nameResultChar) + } + } + + check("a", "a") // was "String index out of range: 1 + check("a", "b") + check("a", "ab") + check("a", "ba") + check("ab", "a") + check("ab", "b") + check("ab", "ab") + check("ab", "ba") + check("", "x") + check("", "xy") + } } From 48c337b9f44d1fb9c8e5f40f4a280e918d777c28 Mon Sep 17 00:00:00 2001 From: exoego Date: Sun, 3 Jun 2018 20:35:28 +0900 Subject: [PATCH 1373/2793] Make scaladoc member filter easier to read --- .../tools/nsc/doc/html/resource/lib/template.css | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css index bb48b1a639c5..412cc51bc652 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css @@ -847,6 +847,19 @@ div.fullcomment dl.paramcmts > dd { font-family: "Open Sans"; } +#memberfilter > .input > input::-webkit-input-placeholder { + color: #fff; + opacity: 0.6; +} +#memberfilter > .input > input:-ms-input-placeholder { + color: #fff; + opacity: 0.6; +} +#memberfilter > .input > input::placeholder { + color: #fff; + opacity: 0.6; +} + #memberfilter > .clear { display: none; position: absolute; From b1dc183944d4a05269b430ced8ec321f299dd42a Mon Sep 17 00:00:00 2001 From: exoego Date: Mon, 4 Jun 2018 21:56:50 +0900 Subject: [PATCH 1374/2793] Add version string in page and title so that readers and crawlers knows the page is for what version. 
--- spec/_config.yml | 2 ++ spec/_layouts/default.yml | 13 +++++++++++-- spec/_layouts/toc.yml | 6 +++--- 3 files changed, 16 insertions(+), 5 deletions(-) diff --git a/spec/_config.yml b/spec/_config.yml index 1a67f7de6324..22bccafc90bd 100644 --- a/spec/_config.yml +++ b/spec/_config.yml @@ -1,4 +1,6 @@ baseurl: /files/archive/spec/2.12 +latestScalaVersion: 2.12 +thisScalaVersion: 2.12 safe: true lsi: false highlighter: false diff --git a/spec/_layouts/default.yml b/spec/_layouts/default.yml index aa79e5ddab2d..36b3dc88c249 100644 --- a/spec/_layouts/default.yml +++ b/spec/_layouts/default.yml @@ -26,12 +26,21 @@ - {{ page.title }} + {{ page.title }} | Scala {{ site.thisScalaVersion }}

- +
diff --git a/spec/_layouts/toc.yml b/spec/_layouts/toc.yml index dfd92eb11471..41750130ccc1 100644 --- a/spec/_layouts/toc.yml +++ b/spec/_layouts/toc.yml @@ -7,7 +7,7 @@ - {{ page.title }} + {{ page.title }} | Scala {{ site.thisScalaVersion }} @@ -19,9 +19,9 @@
Scala Language Specification - Edit at GitHub + Edit at GitHub
-
Version 2.12
+
Version {{ site.thisScalaVersion }}
{{ content }} From 2f91930fc53ab95e7e1d74e9d284f7bd309f4b82 Mon Sep 17 00:00:00 2001 From: exoego Date: Mon, 4 Jun 2018 21:57:32 +0900 Subject: [PATCH 1375/2793] Add version notice with error-ish style, since consulting an older spec is usually an error. --- spec/_includes/version-notice.yml | 3 +++ spec/_layouts/default.yml | 1 + spec/_layouts/toc.yml | 1 + spec/public/stylesheets/screen.css | 13 +++++++++++++ 4 files changed, 18 insertions(+) create mode 100644 spec/_includes/version-notice.yml diff --git a/spec/_includes/version-notice.yml b/spec/_includes/version-notice.yml new file mode 100644 index 000000000000..31669682eb4c --- /dev/null +++ b/spec/_includes/version-notice.yml @@ -0,0 +1,3 @@ +{% if site.thisScalaVersion != site.latestScalaVersion %} +
This is the specification of a previous version of Scala. See the Scala {{ site.latestScalaVersion }} spec.
+{% endif %} diff --git a/spec/_layouts/default.yml b/spec/_layouts/default.yml index 36b3dc88c249..70e2a69b3192 100644 --- a/spec/_layouts/default.yml +++ b/spec/_layouts/default.yml @@ -45,6 +45,7 @@
+{% include version-notice.yml %} {{ content }}
diff --git a/spec/_layouts/toc.yml b/spec/_layouts/toc.yml index 41750130ccc1..69c908e3cb36 100644 --- a/spec/_layouts/toc.yml +++ b/spec/_layouts/toc.yml @@ -24,6 +24,7 @@
Version {{ site.thisScalaVersion }}
+{% include version-notice.yml %} {{ content }}
diff --git a/spec/public/stylesheets/screen.css b/spec/public/stylesheets/screen.css index b7babaf5bf42..36f4a5a18126 100644 --- a/spec/public/stylesheets/screen.css +++ b/spec/public/stylesheets/screen.css @@ -502,3 +502,16 @@ header { /* proper rendering of MathJax into highlighted code blocks */ .fixws { white-space: pre; } .fixws .math { white-space: nowrap; } + +.version-notice { + background-color: #C93A3A; + color: #f2f2f2; + border:1px solid #ccc; + padding: 1em; + margin-bottom: 1em; +} +.version-notice a { + color: #f2f2f2; + font-weight: bold; + text-decoration: underline; +} From 17fb78c1bfa34777cfdaeca0ec65c485a84f96c1 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 4 Dec 2017 15:36:30 +1000 Subject: [PATCH 1376/2793] Optimize IndexedSeqOptimized.toList Notably, this will be used in `List(a, b, c)`. --- src/library/scala/collection/IndexedSeqOptimized.scala | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala index 320725c30e63..0a9a65516d94 100644 --- a/src/library/scala/collection/IndexedSeqOptimized.scala +++ b/src/library/scala/collection/IndexedSeqOptimized.scala @@ -276,5 +276,15 @@ trait IndexedSeqOptimized[+A, +Repr] extends Any with IndexedSeqLike[A, Repr] { case _ => super.endsWith(that) } + + override def toList: List[A] = { + var i = length - 1 + var result: List[A] = Nil + while (i >= 0) { + result ::= apply(i) + i -= 1 + } + result + } } From 2062b3e63c70d6fad0c87a3c8d317525577441b2 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 4 Dec 2017 19:36:30 +1000 Subject: [PATCH 1377/2793] Use AnyRefMap in hot parts of the compiler. --- src/compiler/scala/tools/nsc/CompilationUnits.scala | 2 +- src/compiler/scala/tools/nsc/Global.scala | 4 ++-- src/reflect/scala/reflect/internal/Symbols.scala | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index d9f8acf7c59b..5c84748b9509 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -80,7 +80,7 @@ trait CompilationUnits { global: Global => /** Synthetic definitions generated by namer, eliminated by typer. 
*/ object synthetics { - private val map = mutable.HashMap[Symbol, Tree]() + private val map = mutable.AnyRefMap[Symbol, Tree]() def update(sym: Symbol, tree: Tree) { debuglog(s"adding synthetic ($sym, $tree) to $self") map.update(sym, tree) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 3edac10cf026..81ca512f20d9 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1122,10 +1122,10 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val compiledFiles = new mutable.HashSet[String] /** A map from compiled top-level symbols to their source files */ - val symSource = new mutable.HashMap[Symbol, AbstractFile] + val symSource = new mutable.AnyRefMap[Symbol, AbstractFile] /** A map from compiled top-level symbols to their picklers */ - val symData = new mutable.HashMap[Symbol, PickleBuffer] + val symData = new mutable.AnyRefMap[Symbol, PickleBuffer] private var phasec: Int = 0 // phases completed private var unitc: Int = 0 // units completed this phase diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index c5cee9c72398..bc16fd79679b 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -70,7 +70,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * The original owner of a symbol is needed in some places in the backend. Ideally, owners should * be versioned like the type history. */ - private val originalOwnerMap = perRunCaches.newMap[Symbol, Symbol]() + private val originalOwnerMap = perRunCaches.newAnyRefMap[Symbol, Symbol]() // TODO - don't allow the owner to be changed without checking invariants, at least // when under some flag. 
Define per-phase invariants for owner/owned relationships, From 833cf7ef13bf65877c3730c41f3fea63468ff863 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 4 Dec 2017 20:36:30 +1000 Subject: [PATCH 1378/2793] Optimize nested class collection --- .../tools/nsc/backend/jvm/analysis/BackendUtils.scala | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index d4d49b0ca0cf..789865d78c7e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -328,8 +328,11 @@ abstract class BackendUtils extends PerRunInit { bTypesFromClassfile.classBTypeFromParsedClassfile(internalName).info.get.nestedClasses.force def getClassIfNested(internalName: InternalName): Option[ClassBType] = { - val c = bTypesFromClassfile.classBTypeFromParsedClassfile(internalName) - if (c.isNestedClass.get) Some(c) else None + if (internalName.indexOf('$') < 0) None + else { + val c = bTypesFromClassfile.classBTypeFromParsedClassfile(internalName) + if (c.isNestedClass.get) Some(c) else None + } } def raiseError(msg: String, sig: String, e: Option[Throwable]): Unit = { From 8398ca5c23a79ac7175e2e0382a1e3e74f695546 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 4 Dec 2017 21:36:30 +1000 Subject: [PATCH 1379/2793] Optimize generic sig parser --- .../nsc/backend/jvm/analysis/BackendUtils.scala | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index 789865d78c7e..c71ead09a691 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -774,6 +774,15 @@ object BackendUtils { private def skipUntil(isDelimiter: CharBooleanFunction): Unit = { while (!isDelimiter(current)) { index += 1 } } + private def skipUntilDelimiter(delimiter: Char): Unit = { + sig.indexOf(delimiter, index) match { + case -1 => + raiseError(s"Out of bounds", sig) + abort() // Don't continue, even if `notifyInvalidSignature` returns + case i => + index = i + } + } private def appendUntil(builder: java.lang.StringBuilder, isDelimiter: CharBooleanFunction): Unit = { val start = index @@ -817,7 +826,7 @@ object BackendUtils { accept(';') case 'T' => - skipUntil(_ == ';') + skipUntilDelimiter(';') skip() case '[' => @@ -828,7 +837,7 @@ object BackendUtils { private def typeParameters(): Unit = if (current == '<') { skip() while (current != '>') { - skipUntil(_ == ':'); skip() + skipUntilDelimiter(':'); skip() val c = current // The ClassBound can be missing, but only if there's an InterfaceBound after. 
// This is an assumption that's not in the spec, see https://stackoverflow.com/q/44284928 From 39a567b8defa7eb3f7593774b0dbdd68023825fd Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 4 Dec 2017 18:36:30 +1000 Subject: [PATCH 1380/2793] Avoid nonEmpty in hot paths --- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 +- src/reflect/scala/reflect/internal/Definitions.scala | 8 ++++---- src/reflect/scala/reflect/internal/Types.scala | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index f167a653ffb9..eb958512fe11 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -583,7 +583,7 @@ trait Implicits { var ps = params var as = args if (fast) { - while (ps.nonEmpty && as.nonEmpty) { + while (!(ps.isEmpty || as.isEmpty)) { if (!isPlausiblySubType(as.head, ps.head.tpe)) return false ps = ps.tail diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 3f90ee9afcfd..52a7cb5f5a6d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -826,7 +826,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * and should not be used otherwise. TODO: can it be replaced with a tree attachment? */ protected def adapt(tree: Tree, mode: Mode, pt: Type, original: Tree = EmptyTree): Tree = { - def hasUndets = context.undetparams.nonEmpty + def hasUndets = !context.undetparams.isEmpty def hasUndetsInMonoMode = hasUndets && !mode.inPolyMode def adaptToImplicitMethod(mt: MethodType): Tree = { diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 20c330a56f2f..64fb2562b658 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -395,10 +395,10 @@ trait Definitions extends api.StandardDefinitions { def isCastSymbol(sym: Symbol) = sym == Any_asInstanceOf || sym == Object_asInstanceOf def isJavaVarArgsMethod(m: Symbol) = m.isMethod && isJavaVarArgs(m.info.params) - def isJavaVarArgs(params: Seq[Symbol]) = params.nonEmpty && isJavaRepeatedParamType(params.last.tpe) - def isScalaVarArgs(params: Seq[Symbol]) = params.nonEmpty && isScalaRepeatedParamType(params.last.tpe) - def isVarArgsList(params: Seq[Symbol]) = params.nonEmpty && isRepeatedParamType(params.last.tpe) - def isVarArgTypes(formals: Seq[Type]) = formals.nonEmpty && isRepeatedParamType(formals.last) + def isJavaVarArgs(params: Seq[Symbol]) = !params.isEmpty && isJavaRepeatedParamType(params.last.tpe) + def isScalaVarArgs(params: Seq[Symbol]) = !params.isEmpty && isScalaRepeatedParamType(params.last.tpe) + def isVarArgsList(params: Seq[Symbol]) = !params.isEmpty && isRepeatedParamType(params.last.tpe) + def isVarArgTypes(formals: Seq[Type]) = !formals.isEmpty && isRepeatedParamType(formals.last) def firstParamType(tpe: Type): Type = tpe.paramTypes match { case p :: _ => p diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index bec839b85606..d706842913b7 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -410,7 +410,7 @@ trait Types 
/** For a class with nonEmpty parents, the first parent. * Otherwise some specific fixed top type. */ - def firstParent = if (parents.nonEmpty) parents.head else ObjectTpe + def firstParent = if (!parents.isEmpty) parents.head else ObjectTpe /** For a typeref or single-type, the prefix of the normalized type (@see normalize). * NoType for all other types. */ @@ -3906,7 +3906,7 @@ trait Types def typeParamsToExistentials(clazz: Symbol): List[Symbol] = typeParamsToExistentials(clazz, clazz.typeParams) - def isRawIfWithoutArgs(sym: Symbol) = sym.isClass && sym.typeParams.nonEmpty && sym.isJavaDefined + def isRawIfWithoutArgs(sym: Symbol) = sym.isClass && !sym.typeParams.isEmpty && sym.isJavaDefined /** Is type tp a ''raw type''? */ // note: it's important to write the two tests in this order, // as only typeParams forces the classfile to be read. See #400 From 3485981bdda0f7a49e946ea8405b6d071a7856a8 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 5 Dec 2017 19:32:28 +1000 Subject: [PATCH 1381/2793] Simplify specialization transformer --- .../tools/nsc/transform/SpecializeTypes.scala | 59 +++++++------------ 1 file changed, 22 insertions(+), 37 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index cc062a44798a..d53b02a3f423 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -8,7 +8,7 @@ package tools.nsc package transform import scala.tools.nsc.symtab.Flags -import scala.collection.{ mutable, immutable } +import scala.collection.{immutable, mutable} import scala.annotation.tailrec /** Specialize code on types. @@ -1454,12 +1454,23 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { && originalClass(clazz).parentSymbols.exists(p => hasSpecializedParams(p) && !p.isTrait) ) - def specializeCalls(unit: CompilationUnit) = new TypingTransformer(unit) { + class SpecializationTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { + + override def transformUnit(unit: CompilationUnit): Unit = if (!settings.nospecialization) { + informProgress("specializing " + unit) + try { + exitingSpecialize(super.transformUnit(unit)) + } catch { + case te: TypeError => + reporter.error(te.pos, te.msg) + } + } + /** Map a specializable method to its rhs, when not deferred. */ - val body = perRunCaches.newMap[Symbol, Tree]() + val body = new mutable.AnyRefMap[Symbol, Tree]() /** Map a specializable method to its value parameter symbols. */ - val parameters = perRunCaches.newMap[Symbol, List[Symbol]]() + val parameters = new mutable.AnyRefMap[Symbol, List[Symbol]]() /** Collect method bodies that are concrete specialized methods. 
*/ @@ -1502,18 +1513,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } - def reportError[T](body: =>T)(handler: TypeError => T): T = - try body - catch { - case te: TypeError => - reporter.error(te.pos, te.msg) - handler(te) - } - - override def transform(tree: Tree): Tree = - reportError { transform1(tree) } {_ => tree} - - def transform1(tree: Tree) = { + override def transform(tree: Tree): Tree = { val symbol = tree.symbol /* The specialized symbol of 'tree.symbol' for tree.tpe, if there is one */ def specSym(qual: Tree): Symbol = { @@ -1602,7 +1602,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val found = specializedType(tpt.tpe) if (found.typeSymbol ne tpt.tpe.typeSymbol) { // the ctor can be specialized val inst = New(found, transformTrees(args): _*) - reportError(localTyper.typedPos(tree.pos)(inst))(_ => super.transform(tree)) + localTyper.typedPos(tree.pos)(inst) } else super.transform(tree) @@ -1693,13 +1693,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { if (symbol.isPrimaryConstructor) localTyper.typedPos(symbol.pos)(deriveDefDef(tree)(_ => Block(List(t), Literal(Constant(()))))) else // duplicate the original constructor - reportError(duplicateBody(ddef, info(symbol).target))(_ => ddef) + duplicateBody(ddef, info(symbol).target) } else info(symbol) match { case Implementation(target) => assert(body.isDefinedAt(target), "sym: " + symbol.fullName + " target: " + target.fullName) // we have an rhs, specialize it - val tree1 = reportError(duplicateBody(ddef, target))(_ => ddef) + val tree1 = duplicateBody(ddef, target) debuglog("implementation: " + tree1) deriveDefDef(tree1)(transform) @@ -1707,7 +1707,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { logResult("constraints")(satisfiabilityConstraints(typeEnv(symbol))) match { case Some(constraint) if !target.isDeferred => // we have an rhs, specialize it - val tree1 = reportError(duplicateBody(ddef, target, constraint))(_ => ddef) + val tree1 = duplicateBody(ddef, target, constraint) debuglog("implementation: " + tree1) deriveDefDef(tree1)(transform) case _ => @@ -1738,21 +1738,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { }) debuglog("created special overload tree " + t) debuglog("created " + t) - reportError { - localTyper.typed(t) - } { - _ => super.transform(tree) - } + localTyper.typed(t) case fwd @ Forward(_) => debuglog("forward: " + fwd + ", " + ddef) val rhs1 = forwardCall(tree.pos, gen.mkAttributedRef(symbol.owner.thisType, fwd.target), vparamss) debuglog("-->d completed forwarder to specialized overload: " + fwd.target + ": " + rhs1) - reportError { - localTyper.typed(deriveDefDef(tree)(_ => rhs1)) - } { - _ => super.transform(tree) - } + localTyper.typed(deriveDefDef(tree)(_ => rhs1)) case SpecializedAccessor(target) => val rhs1 = if (symbol.isGetter) @@ -2037,12 +2029,5 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { map2(fun.info.paramTypes, vparams)((tp, arg) => gen.maybeMkAsInstanceOf(Ident(arg), tp, arg.tpe)) ) - class SpecializationTransformer(unit: CompilationUnit) extends Transformer { - informProgress("specializing " + unit) - override def transform(tree: Tree) = { - if (settings.nospecialization) tree - else exitingSpecialize(specializeCalls(unit).transform(tree)) - } - } object SpecializedSuperConstructorCallArgument } From 4381845a1354f1547d9b1ad69958a140bdc4f63f Mon Sep 17 00:00:00 2001 
From: Jason Zaugg Date: Tue, 5 Dec 2017 19:46:26 +1000 Subject: [PATCH 1382/2793] Tree convervation in specialization --- .../scala/tools/nsc/transform/SpecializeTypes.scala | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index d53b02a3f423..36fb2addc1b6 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1676,8 +1676,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val specMembers = makeSpecializedMembers(tree.symbol.enclClass) ::: (implSpecClasses(body) map localTyper.typed) if (!symbol.isPackageClass) (new CollectMethodBodies)(tree) - val parents1 = map2(currentOwner.info.parents, parents)((tpe, parent) => - TypeTree(tpe) setPos parent.pos) + val parents1 = map2Conserve(parents, currentOwner.info.parents)((parent, tpe) => + parent match { + case tt @ TypeTree() if tpe eq tt.tpe => tt + case _ => TypeTree(tpe) setPos parent.pos + }) treeCopy.Template(tree, parents1 /*currentOwner.info.parents.map(tpe => TypeTree(tpe) setPos parents.head.pos)*/ , From fb66b4f4c7d96eea59c81eeb252738a4438a0add Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 8 Dec 2017 16:03:22 +1000 Subject: [PATCH 1383/2793] Optimize bookkeeping in specialization transform --- .../tools/nsc/backend/jvm/CoreBTypes.scala | 6 ++--- .../tools/nsc/transform/SpecializeTypes.scala | 24 ++++++++++++------- 2 files changed, 18 insertions(+), 12 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala index cd601970e17b..c30ef7cd7ba2 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala @@ -312,9 +312,9 @@ abstract class CoreBTypesFromSymbols[G <: Global] extends CoreBTypes { private def specializedSubclasses(cls: Symbol): List[Symbol] = { exitingSpecialize(cls.info) // the `transformInfo` method of specialization adds specialized subclasses to the `specializedClass` map - specializeTypes.specializedClass.collect({ - case ((`cls`, _), specCls) => specCls - }).toList + val map = specializeTypes.specializedClass.getOrNull(cls) + if (map == null) Nil + else map.values.toList } // scala/Tuple3 -> MethodNameAndType(,(Ljava/lang/Object;Ljava/lang/Object;Ljava/lang/Object;)V) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 36fb2addc1b6..695b8143f12b 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -75,7 +75,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { */ /** For a given class and concrete type arguments, give its specialized class */ - val specializedClass = perRunCaches.newMap[(Symbol, TypeEnv), Symbol] + val specializedClass = perRunCaches.newAnyRefMap[Symbol, mutable.AnyRefMap[TypeEnv, Symbol]] /** Map a method symbol to a list of its specialized overloads in the same class. 
*/ private val overloads = perRunCaches.newMap[Symbol, List[Overload]]() withDefaultValue Nil @@ -329,7 +329,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { if (isSpecializedAnyRefSubtype(tp, orig)) AnyRefTpe else tp ) - specializedClass.get((sym, TypeEnv.fromSpecialization(sym, args1))) match { + specializedClass.getOrElse(sym, Map.empty[TypeEnv, Symbol]).get(TypeEnv.fromSpecialization(sym, args1)) match { case Some(sym1) => typeRef(pre1, sym1, survivingArgs(sym, args)) case None => typeRef(pre1, sym, args) } @@ -340,7 +340,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def specializedFunctionName(sym: Symbol, args: List[Type]) = exitingSpecialize { require(isFunctionSymbol(sym), sym) val env: TypeEnv = TypeEnv.fromSpecialization(sym, args) - specializedClass.get((sym, env)) match { + specializedClass.getOrElse(sym, Map.empty[TypeEnv, Symbol]).get(env) match { case Some(x) => x.name case None => @@ -615,7 +615,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val env = mapAnyRefsInSpecSym(env0, clazz, sClass) typeEnv(sClass) = env - this.specializedClass((clazz, env0)) = sClass + this.specializedClass.getOrElseUpdate(clazz, new mutable.AnyRefMap()).update(env0, sClass) val decls1 = newScope // declarations of the newly specialized class 'sClass' var oldClassTParams: List[Symbol] = Nil // original unspecialized type parameters @@ -1949,11 +1949,17 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { trees flatMap { case tree @ ClassDef(_, _, _, impl) => tree.symbol.info // force specialization - for (((sym1, env), specCls) <- specializedClass if sym1 == tree.symbol) yield { - debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env)) - val parents = specCls.info.parents.map(TypeTree) - ClassDef(specCls, atPos(impl.pos)(Template(parents, noSelfType, List())) - .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos + specializedClass.getOrNull(tree.symbol) match { + case null => Nil + case map => + val sym1 = tree.symbol + map.iterator.map { + case (env, specCls) => + debuglog("created synthetic class: " + specCls + " of " + sym1 + " in " + pp(env)) + val parents = specCls.info.parents.map(TypeTree) + ClassDef(specCls, atPos(impl.pos)(Template(parents, noSelfType, List())) + .setSymbol(specCls.newLocalDummy(sym1.pos))) setPos tree.pos + }.toList } case _ => Nil } sortBy (_.name.decoded) From 88f4b37b77cc103f7ad2db8d6ec514fed6b7f84a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 22 Mar 2018 13:16:44 +1000 Subject: [PATCH 1384/2793] Avoid wasteful loading of unreferenced, high arity function and tuple classes --- .../tools/nsc/transform/SpecializeTypes.scala | 4 +- .../scala/reflect/internal/Definitions.scala | 2 + .../nsc/transform/SpecializationTest.scala | 45 +++++++++++++++++++ 3 files changed, 49 insertions(+), 2 deletions(-) create mode 100644 test/junit/scala/tools/nsc/transform/SpecializationTest.scala diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 695b8143f12b..c7458a9ef38e 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -201,8 +201,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { override def run(): Unit = { super.run() exitingSpecialize { - FunctionClass.seq.map(_.info) - 
TupleClass.seq.map(_.info) + FunctionClass.seq.take(MaxFunctionAritySpecialized + 1).foreach(_.info) + TupleClass.seq.take(MaxTupleAritySpecialized).foreach(_.info) } // Remove the final modifier and @inline annotation from anything in the diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 20c330a56f2f..d69eb9903938 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -577,6 +577,8 @@ trait Definitions extends api.StandardDefinitions { object VarArityClass val MaxTupleArity, MaxProductArity, MaxFunctionArity = 22 + // A unit test checks these are kept in synch with the library. + val MaxTupleAritySpecialized, MaxProductAritySpecialized, MaxFunctionAritySpecialized = 2 lazy val ProductClass = new VarArityClass("Product", MaxProductArity, countFrom = 1, init = Some(UnitClass)) lazy val TupleClass = new VarArityClass("Tuple", MaxTupleArity, countFrom = 1) diff --git a/test/junit/scala/tools/nsc/transform/SpecializationTest.scala b/test/junit/scala/tools/nsc/transform/SpecializationTest.scala new file mode 100644 index 000000000000..02dff1983094 --- /dev/null +++ b/test/junit/scala/tools/nsc/transform/SpecializationTest.scala @@ -0,0 +1,45 @@ +package scala.tools.nsc.transform + +import org.junit.Assert.assertEquals +import org.junit.{Assert, Test} + +import scala.tools.nsc.symtab.SymbolTableForUnitTesting + +class SpecializationTest { + object symbolTable extends SymbolTableForUnitTesting + + @Test def testHardCodedAssumptionsAboutTupleAndFunction(): Unit = { + // The specialization phase always runs its info transform on the specialized Function and Tuple types + // so that the later phases can see them, even with the optimization in the specialization info transform + // that makes it a no-op after the global phase has passed specialize. + // + // Initially, we just called `exitingSpecialize { TupleClass.seq.map(_.info); Function.seq.map(_.info) }` + // but this was wasteful, as it loaded the seldom used, high-arity Tuple and Function classes, some of which + // are pretty big in bytecode! + // + // So we know bake the knowledge about the max arity for which specialization is used into that code. + // This test asserts the assumption still holds. 
+ import symbolTable.definitions._ + + for (i <- (0 to MaxFunctionArity)) { + val cls = FunctionClass.apply(i) + val actual = cls.typeParams.exists(_.isSpecialized) + val expected = i <= MaxFunctionAritySpecialized + assertEquals(cls.toString, expected, actual) + } + + for (i <- (1 to MaxTupleArity)) { + val cls = TupleClass.apply(i) + val actual = cls.typeParams.exists(_.isSpecialized) + val expected = i <= MaxTupleAritySpecialized + assertEquals(cls.toString, expected, actual) + } + + for (i <- (1 to MaxProductArity)) { + val cls = ProductClass.apply(i) + val actual = cls.typeParams.exists(_.isSpecialized) + val expected = i <= MaxProductAritySpecialized + assertEquals(cls.toString, expected, actual) + } + } +} From a24d7c0a21d9a7ec09eb88f0b3f7a13f3e3d1da9 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 19 Dec 2017 22:40:50 +1000 Subject: [PATCH 1385/2793] Reduce overhead of enabling -Ystatistics MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The implementation trick of using an AlmostFinalValue to have zero cost for the "isEnabled" check in the common case has a small flaw: the switchpoint is tripped _every_ time stats is enabled, rather than just on the first time. This discards a swathe of JIT compiled code each time a Global is started with `-Ystatistics`. This commit avoids tripping the switchpoint redundantly. Performance: ``` ⚡ for extra in "-Ystatistics:_" ""; do for v in 2.12.5-bin-91649d1-SNAPSHOT 2.12.4; do echo $v $extra; sbt 'set scalaVersion in compilation := "'$v'"' 'hot -psource=scalap -f1 -wi 5 -i 3 -pextraArgs='$extra | egrep 'HotScalacBenchmark.compile\s'; done; done 2.12.5-bin-91649d1-SNAPSHOT -Ystatistics:_ [info] HotScalacBenchmark.compile a8c43dc -Ystatistics:_ false scalap sample 33 973.523 ± 23.389 ms/op 2.12.4 -Ystatistics:_ [info] HotScalacBenchmark.compile a8c43dc -Ystatistics:_ false scalap sample 12 2921.333 ± 177.831 ms/op 2.12.5-bin-91649d1-SNAPSHOT [info] HotScalacBenchmark.compile a8c43dc false scalap sample 38 811.846 ± 13.436 ms/op 2.12.4 [info] HotScalacBenchmark.compile a8c43dc false scalap sample 38 820.814 ± 17.809 ms/op ``` There is still more overhead than I would like, and it might still make sense to move a few stats back into the "hot" category. From 19abe85e1ab9a14d04c8032c0156e8d52d7bf4c5 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 31 Jan 2018 10:41:19 +1000 Subject: [PATCH 1386/2793] Avoid allocation of ClassTags in hot code Cherry pick of 94c2d4a82a1d044b4eb59b20d35ada72e9cc7ca7 --- .../scala/tools/nsc/typechecker/StdAttachments.scala | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala index 6c2ac8f301bc..524f27559772 100644 --- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala +++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala @@ -145,12 +145,13 @@ trait StdAttachments { * typechecks to be a macro application. Then we need to unmark it, expand it and try to treat * its expansion as a macro impl reference. */ - def unmarkMacroImplRef(tree: Tree): Tree = tree.removeAttachment[MacroImplRefAttachment.type] + def unmarkMacroImplRef(tree: Tree): Tree = tree.removeAttachment[MacroImplRefAttachment.type](MacroImplRefAttachmentTag) /** Determines whether a tree should or should not be adapted, * because someone has put MacroImplRefAttachment on it. 
*/ - def isMacroImplRef(tree: Tree): Boolean = tree.hasAttachment[MacroImplRefAttachment.type] + def isMacroImplRef(tree: Tree): Boolean = tree.hasAttachment[MacroImplRefAttachment.type](MacroImplRefAttachmentTag) + private[this] val MacroImplRefAttachmentTag: reflect.ClassTag[MacroImplRefAttachment.type] = reflect.classTag[MacroImplRefAttachment.type] /** Since mkInvoke, the applyDynamic/selectDynamic/etc desugarer, is disconnected * from typedNamedApply, the applyDynamicNamed argument rewriter, the latter @@ -163,8 +164,9 @@ trait StdAttachments { */ case object DynamicRewriteAttachment def markDynamicRewrite(tree: Tree): Tree = tree.updateAttachment(DynamicRewriteAttachment) - def unmarkDynamicRewrite(tree: Tree): Tree = tree.removeAttachment[DynamicRewriteAttachment.type] - def isDynamicRewrite(tree: Tree): Boolean = tree.attachments.get[DynamicRewriteAttachment.type].isDefined + def unmarkDynamicRewrite(tree: Tree): Tree = tree.removeAttachment[DynamicRewriteAttachment.type](DynamicRewriteAttachmentTag) + def isDynamicRewrite(tree: Tree): Boolean = tree.attachments.get[DynamicRewriteAttachment.type](DynamicRewriteAttachmentTag).isDefined + private[this] val DynamicRewriteAttachmentTag: reflect.ClassTag[DynamicRewriteAttachment.type] = reflect.classTag[DynamicRewriteAttachment.type] /** * Marks a tree that has been adapted by typer and sets the original tree that was in place before. From b10e25529d1783c7dc47ac92243b1b96750f3380 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 31 Jan 2018 11:03:52 +1000 Subject: [PATCH 1387/2793] Refactor typechecking of array instantiation for performance Defining the extractor object `ArrayInstantation` in `typed1` meant that we needed to thread a `LazyRef` through to `typedApply` (even if we were dealing with a different AST node!). This commit moves the extractor to `TreeInfo` after extracting the core part that relies on `Typer.this`. 
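For readers unfamiliar with the cost being removed, here is a minimal, self-contained sketch (hypothetical code; the names `LocalExtractorCost`, `Doubler` and `handle` are invented and this is not the compiler's `Typers.scala`) of why a local extractor object that is referenced from a nested method forces a per-call `LazyRef`-backed capture in Scala 2, and how hoisting the object to an enclosing scope avoids it:

```
object LocalExtractorCost {
  // Local object referenced from a nested def: the compiler captures it via a
  // heap-allocated LazyRef on every call of `typecheckLike`, even for inputs
  // that never reach the extractor.
  def typecheckLike(input: Int): Int = {
    object Doubler {
      def unapply(n: Int): Option[Int] = if (n % 2 == 0) Some(n / 2) else None
    }
    def handle(n: Int): Int = n match {
      case Doubler(half) => half
      case other         => other
    }
    handle(input)
  }

  // Hoisted extractor: initialized once, no per-call lazy holder.
  object Doubler {
    def unapply(n: Int): Option[Int] = if (n % 2 == 0) Some(n / 2) else None
  }
  def typecheckLikeHoisted(input: Int): Int = input match {
    case Doubler(half) => half
    case other         => other
  }

  def main(args: Array[String]): Unit = {
    println(typecheckLike(8))        // 4
    println(typecheckLikeHoisted(7)) // 7
  }
}
```

Moving the extractor out of `typed1` and into `TreeInfo` follows the hoisted shape: it is initialized once, and no lazy holder has to be threaded through `typedApply`.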
(cherry picked from commit d1c90ec3dec2bf72546edcda9e4f696882a7ec61) --- .../scala/tools/nsc/ast/TreeInfo.scala | 11 +++++++ .../scala/tools/nsc/typechecker/Typers.scala | 31 +++++++------------ 2 files changed, 22 insertions(+), 20 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala index 32dca2561f51..60558479265a 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala @@ -105,4 +105,15 @@ abstract class TreeInfo extends scala.reflect.internal.TreeInfo { } super.firstConstructor(stats map unwrap) } + + object ArrayInstantiation { + def unapply(tree: Apply) = tree match { + case Apply(Select(New(tpt), name), arg :: Nil) if tpt.tpe != null && tpt.tpe.typeSymbol == definitions.ArrayClass => + tpt.tpe match { + case erasure.GenericArray(level, componentType) => Some(level, componentType, arg) + case _ => None + } + case _ => None + } + } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 3f90ee9afcfd..8d7a95b3da32 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4775,31 +4775,22 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } - // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len) - // convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len) - // where Array HK gets applied (N-1) times - object ArrayInstantiation { - def unapply(tree: Apply) = tree match { - case Apply(Select(New(tpt), name), arg :: Nil) if tpt.tpe != null && tpt.tpe.typeSymbol == ArrayClass => - Some(tpt.tpe) collect { - case erasure.GenericArray(level, componentType) => - val tagType = (1 until level).foldLeft(componentType)((res, _) => arrayType(res)) - - resolveClassTag(tree.pos, tagType) match { - case EmptyTree => MissingClassTagError(tree, tagType) - case tag => atPos(tree.pos)(new ApplyToImplicitArgs(Select(tag, nme.newArray), arg :: Nil)) - } - } - case _ => None - } - } - def typedApply(tree: Apply) = tree match { case Apply(Block(stats, expr), args) => typed1(atPos(tree.pos)(Block(stats, Apply(expr, args) setPos tree.pos.makeTransparent)), mode, pt) case Apply(fun, args) => normalTypedApply(tree, fun, args) match { - case ArrayInstantiation(tree1) => if (tree1.isErrorTyped) tree1 else typed(tree1, mode, pt) + case treeInfo.ArrayInstantiation(level, componentType, arg) => + // convert new Array[T](len) to evidence[ClassTag[T]].newArray(len) + // convert new Array^N[T](len) for N > 1 to evidence[ClassTag[Array[...Array[T]...]]].newArray(len) + // where Array HK gets applied (N-1) times + val tagType = (1 until level).foldLeft(componentType)((res, _) => arrayType(res)) + + val tree1: Tree = resolveClassTag(tree.pos, tagType) match { + case EmptyTree => MissingClassTagError(tree, tagType) + case tag => atPos(tree.pos)(new ApplyToImplicitArgs(Select(tag, nme.newArray), arg :: Nil)) + } + if (tree1.isErrorTyped) tree1 else typed(tree1, mode, pt) case Apply(Select(fun, nme.apply), _) if treeInfo.isSuperConstrCall(fun) => TooManyArgumentListsForConstructor(tree) //scala/bug#5696 case tree1 if mode.inPatternMode && tree1.tpe.paramSectionCount > 0 => // For a case class C with more than two parameter lists, From bcafc8ff4bd2ef1b16a85fb28852ab731ffce8f3 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 1 Feb 2018 13:45:59 +1000 Subject: 
[PATCH 1388/2793] Avoid creation of temporary Lists of imports during Context.lookup This is a major source of allocation pressure during typechecking. After the refactoring, we only allocate a single cursor (which is hopefully amenable to escape analyis), which advances the pair of pointers out the context chain. (cherry picked from commit 7526e4550acb672dd1cdbba603bba2ee03a249e7) --- .../tools/nsc/typechecker/Contexts.scala | 70 +++++++++++++------ 1 file changed, 47 insertions(+), 23 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index a4f191720ad5..7f487776a21c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -238,6 +238,7 @@ trait Contexts { self: Analyzer => def imports: List[ImportInfo] = outer.imports /** Equivalent to `imports.headOption`, but more efficient */ def firstImport: Option[ImportInfo] = outer.firstImport + protected[Contexts] def importOrNull: ImportInfo = null def isRootImport: Boolean = false /** Types for which implicit arguments are currently searched */ @@ -295,6 +296,13 @@ trait Contexts { self: Analyzer => /** ...or an Apply. */ def enclosingApply = nextEnclosing(_.tree.isInstanceOf[Apply]) + @tailrec + final def enclosingImport: Context = this match { + case _: ImportContext => this + case NoContext => this + case _ => outer.enclosingImport + } + def siteString = { def what_s = if (owner.isConstructor) "" else owner.kindString def where_s = if (owner.isClass) "" else "in " + enclClass.owner.decodedName @@ -1102,12 +1110,8 @@ trait Contexts { self: Analyzer => symbolDepth = cx.depth var impSym: Symbol = NoSymbol - var imports = Context.this.imports - def imp1 = imports.head - def imp2 = imports.tail.head - def sameDepth = imp1.depth == imp2.depth - def imp1Explicit = imp1 isExplicitImport name - def imp2Explicit = imp2 isExplicitImport name + val importCursor = new ImportCursor(this, name) + import importCursor.{imp1, imp2} def lookupImport(imp: ImportInfo, requireExplicit: Boolean) = importedAccessibleSymbol(imp, name, requireExplicit, record = true) filter qualifies @@ -1130,10 +1134,10 @@ trait Contexts { self: Analyzer => || (unit.isJava && imp.isExplicitImport(name) && imp.depth == symbolDepth) ) - while (!impSym.exists && imports.nonEmpty && depthOk(imports.head)) { + while (!impSym.exists && importCursor.imp1Exists && depthOk(importCursor.imp1)) { impSym = lookupImport(imp1, requireExplicit = false) if (!impSym.exists) - imports = imports.tail + importCursor.advanceImp1Imp2() } if (defSym.exists && impSym.exists) { @@ -1152,16 +1156,6 @@ trait Contexts { self: Analyzer => if (defSym.exists) finishDefSym(defSym, pre) else if (impSym.exists) { - // We continue walking down the imports as long as the tail is non-empty, which gives us: - // imports == imp1 :: imp2 :: _ - // And at least one of the following is true: - // - imp1 and imp2 are at the same depth - // - imp1 is a wildcard import, so all explicit imports from outer scopes must be checked - def keepLooking = ( - lookupError == null - && imports.tail.nonEmpty - && (sameDepth || !imp1Explicit) - ) // If we find a competitor imp2 which imports the same name, possible outcomes are: // // - same depth, imp1 wild, imp2 explicit: imp2 wins, drop imp1 @@ -1173,19 +1167,19 @@ trait Contexts { self: Analyzer => // The ambiguity check is: if we can verify that both imports refer to the same // symbol (e.g. 
import foo.X followed by import foo._) then we discard imp2 // and proceed. If we cannot, issue an ambiguity error. - while (keepLooking) { + while (lookupError == null && importCursor.keepLooking) { // If not at the same depth, limit the lookup to explicit imports. // This is desirable from a performance standpoint (compare to // filtering after the fact) but also necessary to keep the unused // import check from being misled by symbol lookups which are not // actually used. - val other = lookupImport(imp2, requireExplicit = !sameDepth) - def imp1wins() = { imports = imp1 :: imports.tail.tail } - def imp2wins() = { impSym = other ; imports = imports.tail } + val other = lookupImport(imp2, requireExplicit = !importCursor.sameDepth) + def imp1wins() { importCursor.advanceImp2() } + def imp2wins() { impSym = other; importCursor.advanceImp1Imp2() } if (!other.exists) // imp1 wins; drop imp2 and continue. imp1wins() - else if (sameDepth && !imp1Explicit && imp2Explicit) // imp2 wins; drop imp1 and continue. + else if (importCursor.imp2Wins) // imp2 wins; drop imp1 and continue. imp2wins() else resolveAmbiguousImport(name, imp1, imp2) match { case Some(imp) => if (imp eq imp1) imp1wins() else imp2wins() @@ -1259,6 +1253,7 @@ trait Contexts { self: Analyzer => } override final def imports = impInfo :: super.imports override final def firstImport = Some(impInfo) + override final def importOrNull = impInfo override final def isRootImport = !tree.pos.isDefined override final def toString = s"${super.toString} with ImportContext { $impInfo; outer.owner = ${outer.owner} }" } @@ -1525,6 +1520,35 @@ trait Contexts { self: Analyzer => type ImportType = global.ImportType val ImportType = global.ImportType + + /** Walks a pair of references (`imp1` and `imp2`) up the context chain to ImportContexts */ + private final class ImportCursor(var ctx: Context, name: Name) { + private var imp1Ctx = ctx.enclosingImport + private var imp2Ctx = imp1Ctx.outer.enclosingImport + + def advanceImp1Imp2(): Unit = { + imp1Ctx = imp2Ctx; imp2Ctx = imp1Ctx.outer.enclosingImport + } + def advanceImp2(): Unit = { + imp2Ctx = imp2Ctx.outer.enclosingImport + } + def imp1Exists: Boolean = imp1Ctx.importOrNull != null + def imp1: ImportInfo = imp1Ctx.importOrNull + def imp2: ImportInfo = imp2Ctx.importOrNull + + // We continue walking down the imports as long as the tail is non-empty, which gives us: + // imports == imp1 :: imp2 :: _ + // And at least one of the following is true: + // - imp1 and imp2 are at the same depth + // - imp1 is a wildcard import, so all explicit imports from outer scopes must be checked + def keepLooking: Boolean = imp2Exists && (sameDepth || !imp1Explicit) + def imp2Wins: Boolean = sameDepth && !imp1Explicit && imp2Explicit + def sameDepth: Boolean = imp1.depth == imp2.depth + + private def imp2Exists = imp2Ctx.importOrNull != null + private def imp1Explicit = imp1 isExplicitImport name + private def imp2Explicit = imp2 isExplicitImport name + } } object ContextMode { From 9d2500072ca4df5129211ff543dceedd6eb2af39 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 14 Apr 2018 13:36:44 +0100 Subject: [PATCH 1389/2793] Rule out more implicit based on bounds of type parameters Before: ``` implicitly[Foo[String]] BYVALmode-EXPRmode (site: value in Test) |-- implicitly BYVALmode-EXPRmode-FUNmode-POLYmode-TAPPmode (site: value in Test) | \-> [T](implicit e: T)T |-- Foo[String] TYPEmode (site: value in Test) | |-- String TYPEmode (site: value in Test) | | [adapt] String is now a TypeTree(String) | | \-> 
String | \-> Foo[String] [search #1] start `[T](implicit e: T)T`, searching for adaptation to pt=Foo[String] (silent: value in Test) implicits disabled [search #1] considering Foo.javaEnumFoo solving for (T: ?T) [adapt] [T]=> Foo[T] adapted to [T]=> Foo[T] based on pt Foo[String] [search #1] success inferred value of type Foo[String] is SearchResult(Foo.javaEnumFoo[String], ) |-- [T](implicit e: T)T BYVALmode-EXPRmode (site: value in Test) | \-> Foo[String] [adapt] [T](implicit e: T)T adapted to [T](implicit e: T)T ``` After: ``` implicitly[Foo[String]] BYVALmode-EXPRmode (site: value in Test) |-- implicitly BYVALmode-EXPRmode-FUNmode-POLYmode-TAPPmode (site: value in Test) | \-> [T](implicit e: T)T |-- Foo[String] TYPEmode (site: value in Test) | |-- String TYPEmode (site: value in Test) | | [adapt] String is now a TypeTree(String) | | \-> String | \-> Foo[String] [search #1] start `[T](implicit e: T)T`, searching for adaptation to pt=Foo[String] (silent: value in Test) implicits disabled [search #1] considering Foo.stringFoo [search #1] success inferred value of type Foo[String] is SearchResult(Foo.stringFoo, ) |-- [T](implicit e: T)T BYVALmode-EXPRmode (site: value in Test) | \-> Foo[String] [adapt] [T](implicit e: T)T adapted to [T](implicit e: T)T \-> Foo[String] ``` (cherry picked from commit 68799b9c73de1a5d99437fce54397980a4e8e0fe) --- .../tools/nsc/typechecker/Implicits.scala | 26 ++++++++++++++----- test/files/pos/implicit-implausible.scala | 12 +++++++++ test/files/pos/sip23-singleton-view.scala | 6 +++++ 3 files changed, 38 insertions(+), 6 deletions(-) create mode 100644 test/files/pos/implicit-implausible.scala create mode 100644 test/files/pos/sip23-singleton-view.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index f167a653ffb9..19569a734c01 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -617,17 +617,31 @@ trait Implicits { /** This expresses more cleanly in the negative: there's a linear path * to a final true or false. */ - private def isPlausiblySubType(tp1: Type, tp2: Type) = !isImpossibleSubType(tp1, tp2) - private def isImpossibleSubType(tp1: Type, tp2: Type) = tp1.dealiasWiden match { + private def isPlausiblySubType(tp1: Type, tp2: Type): Boolean = !isImpossibleSubType(tp1, tp2) + private def isImpossibleSubType(tp1: Type, tp2: Type): Boolean = tp1.dealiasWiden match { // We can only rule out a subtype relationship if the left hand // side is a class, else we may not know enough. 
- case tr1 @ TypeRef(_, sym1, _) if sym1.isClass => + case tr1 @ TypeRef(_, sym1, args1) if sym1.isClass => def typeRefHasMember(tp: TypeRef, name: Name) = { tp.baseClasses.exists(_.info.decls.lookupEntry(name) != null) } - tp2.dealiasWiden match { - case TypeRef(_, sym2, _) => ((sym1 eq ByNameParamClass) != (sym2 eq ByNameParamClass)) || (sym2.isClass && !(sym1 isWeakSubClass sym2)) - case RefinedType(parents, decls) => decls.nonEmpty && !typeRefHasMember(tr1, decls.head.name) // opt avoid full call to .member + + def existentialUnderlying(t: Type) = t match { + case et: ExistentialType => et.underlying + case tp => tp + } + val tp2Bounds = existentialUnderlying(tp2.dealiasWiden.bounds.hi) + tp2Bounds match { + case TypeRef(_, sym2, args2) if sym2 ne SingletonClass => + val impossible = if ((sym1 eq sym2) && (args1 ne Nil)) !corresponds3(sym1.typeParams, args1, args2) {(tparam, arg1, arg2) => + if (tparam.isCovariant) isPlausiblySubType(arg1, arg2) else isPlausiblySubType(arg2, arg1) + } else { + ((sym1 eq ByNameParamClass) != (sym2 eq ByNameParamClass)) || (sym2.isClass && !(sym1 isWeakSubClass sym2)) + } + impossible + case RefinedType(parents, decls) => + val impossible = decls.nonEmpty && !typeRefHasMember(tr1, decls.head.name) // opt avoid full call to .member + impossible case _ => false } case _ => false diff --git a/test/files/pos/implicit-implausible.scala b/test/files/pos/implicit-implausible.scala new file mode 100644 index 000000000000..734b5ad18327 --- /dev/null +++ b/test/files/pos/implicit-implausible.scala @@ -0,0 +1,12 @@ +trait Foo[T] +object Foo { + implicit def javaEnumFoo[T <: java.lang.Enum[_]]: Foo[T] = ??? + implicit def stringFoo: Foo[String] = ??? +} + +object Test { + // -Ytyper-debug output shows whether or not `javaEnumFoo` is considered + // By making `isImpossibleSubtype` a little smarter, we can exclude it + // on the grounds that `String` can't be a subtpe of the bounds ot `Enum[_]`. 
+ implicitly[Foo[String]] +} diff --git a/test/files/pos/sip23-singleton-view.scala b/test/files/pos/sip23-singleton-view.scala new file mode 100644 index 000000000000..735173cacb9d --- /dev/null +++ b/test/files/pos/sip23-singleton-view.scala @@ -0,0 +1,6 @@ +import language.implicitConversions + +class Test { + implicit def singletonToString(c: Singleton): String = "" + def foo(a: 1): String = a // implicit was being ruled out because Int(1).widen was not a subclass of Singletom +} From aa72ad50b3d4634d3fa12050748ac8ee97bbd01a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 2 May 2018 11:14:12 +1000 Subject: [PATCH 1390/2793] Optimize isStable (cherry picked from commit c8fd3373c026db1b84460e2f63c9763a0c647841) --- src/reflect/scala/reflect/internal/Definitions.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 20c330a56f2f..300dabe2f0b4 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -758,7 +758,7 @@ trait Definitions extends api.StandardDefinitions { case TypeRef(_, NothingClass | SingletonClass, _) => true case TypeRef(_, sym, _) if sym.isAbstractType => tp.bounds.hi.typeSymbol isSubClass SingletonClass case TypeRef(pre, sym, _) if sym.isModuleClass => isStable(pre) - case TypeRef(_, _, _) if tp ne tp.dealias => isStable(tp.dealias) + case TypeRef(_, _, _) => val dealiased = tp.dealias; (dealiased ne tp) && isStable(dealiased) case TypeVar(origin, _) => isStable(origin) case AnnotatedType(_, atp) => isStable(atp) // Really? case _: SimpleTypeProxy => isStable(tp.underlying) From 18bc693f67266fbb1ff4e126f7a84b79d35f4d1b Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 2 May 2018 11:14:56 +1000 Subject: [PATCH 1391/2793] Optimize isStable by using normalize, which is cached (cherry picked from commit 9e533873f87535fc28ac7d315f04bf37697cc44e) --- src/reflect/scala/reflect/internal/Definitions.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 300dabe2f0b4..b3255bb5e1af 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -758,7 +758,7 @@ trait Definitions extends api.StandardDefinitions { case TypeRef(_, NothingClass | SingletonClass, _) => true case TypeRef(_, sym, _) if sym.isAbstractType => tp.bounds.hi.typeSymbol isSubClass SingletonClass case TypeRef(pre, sym, _) if sym.isModuleClass => isStable(pre) - case TypeRef(_, _, _) => val dealiased = tp.dealias; (dealiased ne tp) && isStable(dealiased) + case TypeRef(_, _, _) => val normalize = tp.normalize; (normalize ne tp) && isStable(normalize) case TypeVar(origin, _) => isStable(origin) case AnnotatedType(_, atp) => isStable(atp) // Really? 
case _: SimpleTypeProxy => isStable(tp.underlying) From 1ed41e6ad48f0affa7850cbf15b1df1356519571 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 2 May 2018 11:23:49 +1000 Subject: [PATCH 1392/2793] Avoid eager error buffer creation (cherry picked from commit 84b52194304097358479b5e5c13cc3293c36ab4f) --- src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index a4f191720ad5..5c3238d78e4d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1300,7 +1300,7 @@ trait Contexts { self: Analyzer => @inline final def withFreshErrorBuffer[T](expr: => T): T = { val previousBuffer = _errorBuffer - _errorBuffer = newBuffer + _errorBuffer = null val res = expr // expr will read _errorBuffer _errorBuffer = previousBuffer res @@ -1332,7 +1332,7 @@ trait Contexts { self: Analyzer => case INFO => reporter.echo(pos, msg) } - final override def hasErrors = super.hasErrors || errorBuffer.nonEmpty + final override def hasErrors = super.hasErrors || (_errorBuffer != null && errorBuffer.nonEmpty) // TODO: everything below should be pushed down to BufferingReporter (related to buffering) // Implicit relies on this most heavily, but there you know reporter.isInstanceOf[BufferingReporter] From 10e3a47c6a2bf293dba75a0f1d5f4e4def393ed7 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 2 May 2018 13:34:57 +1000 Subject: [PATCH 1393/2793] Avoid an array copy in the parser (cherry picked from commit aaf56b4bc26bfd5f4d527689e5b66f31af6e2b59) --- src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 4dbba5a01000..c9fe0c6ab62c 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -212,7 +212,7 @@ trait Scanners extends ScannersCommon { /** Clear buffer and set name and token */ private def finishNamed(idtoken: Token = IDENTIFIER): Unit = { - name = newTermName(cbuf.toString) + name = newTermName(cbuf.toArray) cbuf.clear() token = idtoken if (idtoken == IDENTIFIER) { From 9d4aa224e7ae8e235035d356a7efc5971a32f549 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 2 May 2018 09:23:37 +1000 Subject: [PATCH 1394/2793] Reduce allocation in patmat updateSubstitution (cherry picked from commit 6be8a5474bcc77d50b3688ba75ab4bb0bf90433a) --- .../nsc/transform/patmat/MatchAnalysis.scala | 22 +++++++++++++------ 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index ac3f4ff93c6b..67e1c90ca04c 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -310,19 +310,27 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT def updateSubstitution(subst: Substitution): Unit = { // find part of substitution that replaces bound symbols by new symbols, and reverse that part // so that we don't introduce new aliases for existing symbols, thus keeping the set of bound symbols minimal - val (boundSubst, unboundSubst) = (subst.from zip subst.to) 
partition { + + // HOT Method for allocation, hence the imperative style here + val substSize = subst.from.length + val boundFrom = new mutable.ListBuffer[Tree]() + val boundTo = new mutable.ListBuffer[Symbol] + val unboundFrom = new mutable.ArrayBuffer[Symbol](substSize) + val unboundTo = new mutable.ListBuffer[Tree] + foreach2(subst.from, subst.to) { + case (f, t: Ident) if t.symbol.exists && pointsToBound(f) => + boundFrom += CODE.REF(f) + boundTo += t.symbol case (f, t) => - t.isInstanceOf[Ident] && t.symbol.exists && pointsToBound(f) + unboundFrom += f + unboundTo += normalize(t) } - val (boundFrom, boundTo) = boundSubst.unzip - val (unboundFrom, unboundTo) = unboundSubst.unzip - // reverse substitution that would otherwise replace a variable we already encountered by a new variable // NOTE: this forgets the more precise type we have for these later variables, but that's probably okay - normalize >>= Substitution(boundTo map (_.symbol), boundFrom map (CODE.REF(_))) + normalize >>= Substitution(boundTo.toList, boundFrom.toList) // debug.patmat ("normalize subst: "+ normalize) - val okSubst = Substitution(unboundFrom, unboundTo map (normalize(_))) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway + val okSubst = Substitution(unboundFrom.toList, unboundTo.toList) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway pointsToBound ++= ((okSubst.from, okSubst.to).zipped filter { (f, t) => pointsToBound exists (sym => t.exists(_.symbol == sym)) })._1 // debug.patmat("pointsToBound: "+ pointsToBound) From 9785e1fc0112e6560d1cd23e65fbacbd8d113222 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 2 May 2018 10:04:42 +1000 Subject: [PATCH 1395/2793] Reduce allocation patmat Logic.simplify (cherry picked from commit 3124995fd948e4a4877a2345a530b60b92b94785) --- .../tools/nsc/transform/patmat/Logic.scala | 90 +++++++++++++------ 1 file changed, 63 insertions(+), 27 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index aeaf2bcdb960..d0abf6abe629 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -174,12 +174,37 @@ trait Logic extends Debugging { def simplify(f: Prop): Prop = { // limit size to avoid blow up - def hasImpureAtom(ops: Seq[Prop]): Boolean = ops.size < 10 && - ops.combinations(2).exists { - case Seq(a, Not(b)) if a == b => true - case Seq(Not(a), b) if a == b => true - case _ => false + def hasImpureAtom(ops0: collection.Iterable[Prop]): Boolean = { + val size = ops0.size + size < 10 && { + // HOT method, imperative rewrite of: + // ops.combinations(2).exists { + // case Seq(a, Not(b)) if a == b => true + // case Seq(Not(a), b) if a == b => true + // case _ => false + // } + val ops = new Array[Prop](size) + ops0.copyToArray(ops) + var i = 0 + val len = ops.length + while (i < len - 1) { + var j = i + 1 + while (j < len) { + ops(j) match { + case Not(b) if ops(i) == b => return true + case _ => + ops(i) match { + case Not(a) if a == ops(j) => return true + case _ => + } + } + j += 1 + } + i += 1 + } + false } + } // push negation inside formula def negationNormalFormNot(p: Prop): Prop = p match { @@ -204,39 +229,50 @@ trait Logic extends Debugging { def simplifyProp(p: Prop): Prop = p match { case And(fv) => // recurse for nested And (pulls all Ands up) - val ops = fv.map(simplifyProp) - 
True // ignore `True` - // build up Set in order to remove duplicates - val opsFlattened = ops.flatMap { - case And(fv) => fv - case f => Set(f) - }.toSeq + val opsFlattenedBuilder = collection.immutable.Set.newBuilder[Prop] + for (prop <- fv) { + val simplified = simplifyProp(prop) + if (simplified != True) { // ignore `True` + simplified match { + case And(fv) => fv.foreach(opsFlattenedBuilder += _) + case f => opsFlattenedBuilder += f + } + } + } + val opsFlattened = opsFlattenedBuilder.result() - if (hasImpureAtom(opsFlattened) || opsFlattened.contains(False)) { + if (opsFlattened.contains(False) || hasImpureAtom(opsFlattened)) { False } else { - opsFlattened match { - case Seq() => True - case Seq(f) => f - case ops => And(ops: _*) + opsFlattened.size match { + case 0 => True + case 1 => opsFlattened.head + case _ => new And(opsFlattened) } } case Or(fv) => // recurse for nested Or (pulls all Ors up) - val ops = fv.map(simplifyProp) - False // ignore `False` - - val opsFlattened = ops.flatMap { - case Or(fv) => fv - case f => Set(f) - }.toSeq + // build up Set in order to remove duplicates + val opsFlattenedBuilder = collection.immutable.Set.newBuilder[Prop] + for (prop <- fv) { + val simplified = simplifyProp(prop) + if (simplified != False) { // ignore `False` + simplified match { + case Or(fv) => fv.foreach(opsFlattenedBuilder += _) + case f => opsFlattenedBuilder += f + } + } + } + val opsFlattened = opsFlattenedBuilder.result() - if (hasImpureAtom(opsFlattened) || opsFlattened.contains(True)) { + if (opsFlattened.contains(True) || hasImpureAtom(opsFlattened)) { True } else { - opsFlattened match { - case Seq() => False - case Seq(f) => f - case ops => Or(ops: _*) + opsFlattened.size match { + case 0 => False + case 1 => opsFlattened.head + case _ => new Or(opsFlattened) } } case Not(Not(a)) => From cbf7813630c0c2e5e65e33ab30f4e0e015ce1bb1 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 2 May 2018 11:01:35 +1000 Subject: [PATCH 1396/2793] Optimize patmat substitution (cherry picked from commit d1e489ef16f66670a91139d31a17de9175e5943f) --- .../transform/patmat/PatternMatching.scala | 27 +++++++++++++------ 1 file changed, 19 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index 3e4fe35395ee..bdf3d0f075c7 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -6,6 +6,7 @@ package scala.tools.nsc.transform.patmat +import scala.collection.mutable.ListBuffer import scala.tools.nsc.Global import scala.tools.nsc.ast import scala.language.postfixOps @@ -192,21 +193,22 @@ trait Interface extends ast.TreeDSL { /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// trait TypedSubstitution extends MatchMonadInterface { object Substitution { - def apply(from: Symbol, to: Tree) = new Substitution(List(from), List(to)) + def apply(from: Symbol, to: Tree): Substitution = new Substitution(from :: Nil, to :: Nil) // requires sameLength(from, to) - def apply(from: List[Symbol], to: List[Tree]) = + def apply(from: List[Symbol], to: List[Tree]): Substitution = if (from nonEmpty) new Substitution(from, to) else EmptySubstitution } class Substitution(val from: List[Symbol], val to: List[Tree]) { import global.{Transformer, Ident, NoType, TypeTree, SingleType} + 
private val toIdents = to.forall(_.isInstanceOf[Ident]) + // We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed, // and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees. def apply(tree: Tree): Tree = { // according to -Ystatistics 10% of translateMatch's time is spent in this method... // since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst - val toIdents = to.forall(_.isInstanceOf[Ident]) val containsSym = tree.exists { case i@Ident(_) => from contains i.symbol case tt: TypeTree => tt.tpe.exists { @@ -219,7 +221,6 @@ trait Interface extends ast.TreeDSL { } case _ => false } - val toSyms = to.map(_.symbol) object substIdentsForTrees extends Transformer { private def typedIfOrigTyped(to: Tree, origTp: Type): Tree = if (origTp == null || origTp == NoType) to @@ -249,7 +250,7 @@ trait Interface extends ast.TreeDSL { } } if (containsSym) { - if (to.forall(_.isInstanceOf[Ident])) + if (toIdents) tree.duplicate.substituteSymbols(from, to.map(_.symbol)) // scala/bug#7459 catches `case t => new t.Foo` else substIdentsForTrees.transform(tree) @@ -260,9 +261,19 @@ trait Interface extends ast.TreeDSL { // the substitution that chains `other` before `this` substitution // forall t: Tree. this(other(t)) == (this >> other)(t) - def >>(other: Substitution): Substitution = { - val (fromFiltered, toFiltered) = (from, to).zipped filter { (f, t) => !other.from.contains(f) } - new Substitution(other.from ++ fromFiltered, other.to.map(apply) ++ toFiltered) // a quick benchmarking run indicates the `.map(apply)` is not too costly + def >>(other: Substitution): Substitution = if (other == EmptySubstitution) this else { + // HOT + val newFrom = new ListBuffer[Symbol] + val newTo = new ListBuffer[Tree] + newFrom ++= other.from + for (t <- other.to) newTo += apply(t) + foreach2(from, to) { (f, t) => + if (!other.from.contains(f)) { + newFrom += f + newTo += t + } + } + new Substitution(newFrom.toList, newTo.toList) } override def toString = (from.map(_.name) zip to) mkString("Substitution(", ", ", ")") } From 8106106b26efbc17a66102a7834685f52c420fa3 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 2 May 2018 11:54:55 +1000 Subject: [PATCH 1397/2793] Optimize patmat substitution (cherry picked from commit e57bafd804120b05ac7aff2468ebd27f957bed56) --- .../transform/patmat/MatchTreeMaking.scala | 11 +++++-- .../transform/patmat/PatternMatching.scala | 32 +++++++++++++------ 2 files changed, 30 insertions(+), 13 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 9381c8a375ac..53f27b15e880 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -170,12 +170,17 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { def ref(sym: Symbol) = if (potentiallyStoredBinders(sym)) usedBinders += sym // compute intersection of all symbols in the tree `in` and all potentially stored subpat binders - in.foreach { - case tt: TypeTree => - tt.tpe foreach { // scala/bug#7459 e.g. 
case Prod(t) => new t.u.Foo + val typeTraverser = new TypeTraverser { + def traverse(tp: Type) = { + tp match { case SingleType(_, sym) => ref(sym) case _ => } + mapOver(tp) + } + } + in.foreach { + case tt: TypeTree => typeTraverser.apply(tt.tpe) case t => ref(t.symbol) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index bdf3d0f075c7..6c2b1e5dff1a 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -203,24 +203,38 @@ trait Interface extends ast.TreeDSL { import global.{Transformer, Ident, NoType, TypeTree, SingleType} private val toIdents = to.forall(_.isInstanceOf[Ident]) + private def typedStable(t: Tree) = typer.typed(t.shallowDuplicate, Mode.MonoQualifierModes | Mode.TYPEPATmode) + lazy val toTypes: List[Type] = to map (tree => typedStable(tree).tpe) // We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed, // and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees. def apply(tree: Tree): Tree = { // according to -Ystatistics 10% of translateMatch's time is spent in this method... // since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst - val containsSym = tree.exists { - case i@Ident(_) => from contains i.symbol - case tt: TypeTree => tt.tpe.exists { - case SingleType(_, sym) => - (from contains sym) && { - if (!toIdents) global.devWarning(s"Unexpected substitution of non-Ident into TypeTree `$tt`, subst= $this") - true + + val checkType = new TypeCollector[Boolean](false) { + def traverse(tp: Type) { + if (!result) { + tp match { + case SingleType(_, sym) => + if (from contains sym) { + if (!toIdents) global.devWarning(s"Unexpected substitution of non-Ident into TypeTree, subst= $this") + result = true + } + case _ => } - case _ => false + mapOver(tp) + } } + } + val containsSym = tree.exists { + case i@Ident(_) => from contains i.symbol + case tt: TypeTree => + checkType.result = false + checkType.collect(tt.tpe) case _ => false } + object substIdentsForTrees extends Transformer { private def typedIfOrigTyped(to: Tree, origTp: Type): Tree = if (origTp == null || origTp == NoType) to @@ -228,8 +242,6 @@ trait Interface extends ast.TreeDSL { // (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors) else typer.typed(to) - def typedStable(t: Tree) = typer.typed(t.shallowDuplicate, Mode.MonoQualifierModes | Mode.TYPEPATmode) - lazy val toTypes: List[Type] = to map (tree => typedStable(tree).tpe) override def transform(tree: Tree): Tree = { def subst(from: List[Symbol], to: List[Tree]): Tree = From 439264b928a4442e728cac0fc5cd17511161f323 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 2 May 2018 19:42:50 +1000 Subject: [PATCH 1398/2793] Reduce allocation further in TypedSubstitution.<< (cherry picked from commit 9464399d2ce577b2e2ac429917d0b1f766be5a86) --- .../scala/tools/nsc/transform/patmat/PatternMatching.scala | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index 6c2b1e5dff1a..0460d87702cd 100644 --- 
a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -277,15 +277,13 @@ trait Interface extends ast.TreeDSL { // HOT val newFrom = new ListBuffer[Symbol] val newTo = new ListBuffer[Tree] - newFrom ++= other.from - for (t <- other.to) newTo += apply(t) foreach2(from, to) { (f, t) => if (!other.from.contains(f)) { newFrom += f newTo += t } } - new Substitution(newFrom.toList, newTo.toList) + new Substitution(newFrom.prependToList(other.from), newTo.prependToList(other.to.mapConserve(apply))) } override def toString = (from.map(_.name) zip to) mkString("Substitution(", ", ", ")") } From da315ab118998b764f9ffe7c868dae4bad1262fd Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 7 May 2018 13:30:49 +1000 Subject: [PATCH 1399/2793] Specialize hasImpureAtom for common cases of small collections (cherry picked from commit fbb9d9088c47e8f6b3f9bbb9a3363afa38d8e944) --- .../tools/nsc/transform/patmat/Logic.scala | 49 +++++++++++++------ 1 file changed, 33 insertions(+), 16 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index d0abf6abe629..12129884d98f 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -173,16 +173,40 @@ trait Logic extends Debugging { */ def simplify(f: Prop): Prop = { - // limit size to avoid blow up def hasImpureAtom(ops0: collection.Iterable[Prop]): Boolean = { + // HOT method, imperative rewrite of: + // ops.combinations(2).exists { + // case Seq(a, Not(b)) if a == b => true + // case Seq(Not(a), b) if a == b => true + // case _ => false + // } + + def checkPair(a: Prop, b: Prop): Boolean = { + b match { + case Not(b) if a == b => true + case _ => + a match { + case Not(a) if a == b => true + case _ => false + } + } + } val size = ops0.size - size < 10 && { - // HOT method, imperative rewrite of: - // ops.combinations(2).exists { - // case Seq(a, Not(b)) if a == b => true - // case Seq(Not(a), b) if a == b => true - // case _ => false - // } + if (size > 10) false // limit size to avoid blow up + else if (size < 2) false // no combinations + else if (size == 2) { // Specialized versions for size 2+3 + val it = ops0.iterator + val result = checkPair(it.next(), it.next()) + assert(!it.hasNext) + result + } else if (size == 3) { + val it = ops0.iterator + val a = it.next() + val b = it.next() + val c = it.next() + assert(!it.hasNext) + checkPair(a, b) || checkPair(a, c) || checkPair(b, c) + } else { val ops = new Array[Prop](size) ops0.copyToArray(ops) var i = 0 @@ -190,14 +214,7 @@ trait Logic extends Debugging { while (i < len - 1) { var j = i + 1 while (j < len) { - ops(j) match { - case Not(b) if ops(i) == b => return true - case _ => - ops(i) match { - case Not(a) if a == ops(j) => return true - case _ => - } - } + if (checkPair(ops(i), ops(j))) return true j += 1 } i += 1 From a4353563de01d4091d8fabf17070e8e2c5053e49 Mon Sep 17 00:00:00 2001 From: Cong Zhao Date: Sun, 6 May 2018 10:12:38 +0800 Subject: [PATCH 1400/2793] Enhance performance of unapply method of ClassTag (cherry picked from commit 25a87833345f274ad10de2b20fa0781ec88b0913) --- src/library/scala/reflect/ClassTag.scala | 33 ++----- src/library/scala/reflect/Manifest.scala | 59 +++++++++++- .../scala/reflect/ClassTagBenchmark.scala | 93 +++++++++++++++++++ 3 files changed, 158 insertions(+), 27 deletions(-) create mode 100644 
test/benchmarks/src/main/scala/scala/reflect/ClassTagBenchmark.scala diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala index 30ceadceeb59..4cb44a4f4045 100644 --- a/src/library/scala/reflect/ClassTag.scala +++ b/src/library/scala/reflect/ClassTag.scala @@ -46,19 +46,7 @@ trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serial def wrap: ClassTag[Array[T]] = ClassTag[Array[T]](arrayClass(runtimeClass)) /** Produces a new array with element type `T` and length `len` */ - override def newArray(len: Int): Array[T] = - runtimeClass match { - case java.lang.Byte.TYPE => new Array[Byte](len).asInstanceOf[Array[T]] - case java.lang.Short.TYPE => new Array[Short](len).asInstanceOf[Array[T]] - case java.lang.Character.TYPE => new Array[Char](len).asInstanceOf[Array[T]] - case java.lang.Integer.TYPE => new Array[Int](len).asInstanceOf[Array[T]] - case java.lang.Long.TYPE => new Array[Long](len).asInstanceOf[Array[T]] - case java.lang.Float.TYPE => new Array[Float](len).asInstanceOf[Array[T]] - case java.lang.Double.TYPE => new Array[Double](len).asInstanceOf[Array[T]] - case java.lang.Boolean.TYPE => new Array[Boolean](len).asInstanceOf[Array[T]] - case java.lang.Void.TYPE => new Array[Unit](len).asInstanceOf[Array[T]] - case _ => java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] - } + override def newArray(len: Int): Array[T] /** A ClassTag[T] can serve as an extractor that matches only objects of type T. * @@ -69,18 +57,7 @@ trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serial * is uncheckable, but we have an instance of `ClassTag[T]`. */ def unapply(x: Any): Option[T] = - if (null != x && ( - (runtimeClass.isInstance(x)) - || (x.isInstanceOf[Byte] && runtimeClass.isAssignableFrom(classOf[Byte])) - || (x.isInstanceOf[Short] && runtimeClass.isAssignableFrom(classOf[Short])) - || (x.isInstanceOf[Char] && runtimeClass.isAssignableFrom(classOf[Char])) - || (x.isInstanceOf[Int] && runtimeClass.isAssignableFrom(classOf[Int])) - || (x.isInstanceOf[Long] && runtimeClass.isAssignableFrom(classOf[Long])) - || (x.isInstanceOf[Float] && runtimeClass.isAssignableFrom(classOf[Float])) - || (x.isInstanceOf[Double] && runtimeClass.isAssignableFrom(classOf[Double])) - || (x.isInstanceOf[Boolean] && runtimeClass.isAssignableFrom(classOf[Boolean])) - || (x.isInstanceOf[Unit] && runtimeClass.isAssignableFrom(classOf[Unit]))) - ) Some(x.asInstanceOf[T]) + if (runtimeClass.isInstance(x)) Some(x.asInstanceOf[T]) else None // case class accessories @@ -120,7 +97,11 @@ object ClassTag { val Null : ClassTag[scala.Null] = Manifest.Null @SerialVersionUID(1L) - private class GenericClassTag[T](val runtimeClass: jClass[_]) extends ClassTag[T] + private class GenericClassTag[T](val runtimeClass: jClass[_]) extends ClassTag[T] { + override def newArray(len: Int): Array[T] = { + java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] + } + } def apply[T](runtimeClass1: jClass[_]): ClassTag[T] = runtimeClass1 match { diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala index 8e5ba6376eea..3579f4731026 100644 --- a/src/library/scala/reflect/Manifest.scala +++ b/src/library/scala/reflect/Manifest.scala @@ -93,6 +93,12 @@ object ManifestFactory { override def newArray(len: Int): Array[Byte] = new Array[Byte](len) override def newWrappedArray(len: Int): WrappedArray[Byte] = new WrappedArray.ofByte(new Array[Byte](len)) override def 
newArrayBuilder(): ArrayBuilder[Byte] = new ArrayBuilder.ofByte() + override def unapply(x: Any): Option[Byte] = { + x match { + case d: Byte => Some(d) + case _ => None + } + } private def readResolve(): Any = Manifest.Byte } val Byte: AnyValManifest[Byte] = new ByteManifest @@ -103,6 +109,12 @@ object ManifestFactory { override def newArray(len: Int): Array[Short] = new Array[Short](len) override def newWrappedArray(len: Int): WrappedArray[Short] = new WrappedArray.ofShort(new Array[Short](len)) override def newArrayBuilder(): ArrayBuilder[Short] = new ArrayBuilder.ofShort() + override def unapply(x: Any): Option[Short] = { + x match { + case d: Short => Some(d) + case _ => None + } + } private def readResolve(): Any = Manifest.Short } val Short: AnyValManifest[Short] = new ShortManifest @@ -113,6 +125,12 @@ object ManifestFactory { override def newArray(len: Int): Array[Char] = new Array[Char](len) override def newWrappedArray(len: Int): WrappedArray[Char] = new WrappedArray.ofChar(new Array[Char](len)) override def newArrayBuilder(): ArrayBuilder[Char] = new ArrayBuilder.ofChar() + override def unapply(x: Any): Option[Char] = { + x match { + case d: Char => Some(d) + case _ => None + } + } private def readResolve(): Any = Manifest.Char } val Char: AnyValManifest[Char] = new CharManifest @@ -123,6 +141,12 @@ object ManifestFactory { override def newArray(len: Int): Array[Int] = new Array[Int](len) override def newWrappedArray(len: Int): WrappedArray[Int] = new WrappedArray.ofInt(new Array[Int](len)) override def newArrayBuilder(): ArrayBuilder[Int] = new ArrayBuilder.ofInt() + override def unapply(x: Any): Option[Int] = { + x match { + case d: Int => Some(d) + case _ => None + } + } private def readResolve(): Any = Manifest.Int } val Int: AnyValManifest[Int] = new IntManifest @@ -133,6 +157,12 @@ object ManifestFactory { override def newArray(len: Int): Array[Long] = new Array[Long](len) override def newWrappedArray(len: Int): WrappedArray[Long] = new WrappedArray.ofLong(new Array[Long](len)) override def newArrayBuilder(): ArrayBuilder[Long] = new ArrayBuilder.ofLong() + override def unapply(x: Any): Option[Long] = { + x match { + case d: Long => Some(d) + case _ => None + } + } private def readResolve(): Any = Manifest.Long } val Long: AnyValManifest[Long] = new LongManifest @@ -143,6 +173,12 @@ object ManifestFactory { override def newArray(len: Int): Array[Float] = new Array[Float](len) override def newWrappedArray(len: Int): WrappedArray[Float] = new WrappedArray.ofFloat(new Array[Float](len)) override def newArrayBuilder(): ArrayBuilder[Float] = new ArrayBuilder.ofFloat() + override def unapply(x: Any): Option[Float] = { + x match { + case d: Float => Some(d) + case _ => None + } + } private def readResolve(): Any = Manifest.Float } val Float: AnyValManifest[Float] = new FloatManifest @@ -150,9 +186,18 @@ object ManifestFactory { @SerialVersionUID(1L) private class DoubleManifest extends AnyValManifest[scala.Double]("Double") { def runtimeClass = java.lang.Double.TYPE - override def newArray(len: Int): Array[Double] = new Array[Double](len) + override def newArray(len: Int): Array[Double] = { + new Array[Double](len) + } override def newWrappedArray(len: Int): WrappedArray[Double] = new WrappedArray.ofDouble(new Array[Double](len)) override def newArrayBuilder(): ArrayBuilder[Double] = new ArrayBuilder.ofDouble() + + override def unapply(x: Any): Option[Double] = { + x match { + case d: Double => Some(d) + case _ => None + } + } private def readResolve(): Any = Manifest.Double } 
val Double: AnyValManifest[Double] = new DoubleManifest @@ -163,6 +208,12 @@ object ManifestFactory { override def newArray(len: Int): Array[Boolean] = new Array[Boolean](len) override def newWrappedArray(len: Int): WrappedArray[Boolean] = new WrappedArray.ofBoolean(new Array[Boolean](len)) override def newArrayBuilder(): ArrayBuilder[Boolean] = new ArrayBuilder.ofBoolean() + override def unapply(x: Any): Option[Boolean] = { + x match { + case d: Boolean => Some(d) + case _ => None + } + } private def readResolve(): Any = Manifest.Boolean } val Boolean: AnyValManifest[Boolean] = new BooleanManifest @@ -176,6 +227,12 @@ object ManifestFactory { override protected def arrayClass[T](tp: Class[_]): Class[Array[T]] = if (tp eq runtimeClass) classOf[Array[scala.runtime.BoxedUnit]].asInstanceOf[Class[Array[T]]] else super.arrayClass(tp) + override def unapply(x: Any): Option[Unit] = { + x match { + case d: Unit => Some(d) + case _ => None + } + } private def readResolve(): Any = Manifest.Unit } val Unit: AnyValManifest[Unit] = new UnitManifest diff --git a/test/benchmarks/src/main/scala/scala/reflect/ClassTagBenchmark.scala b/test/benchmarks/src/main/scala/scala/reflect/ClassTagBenchmark.scala new file mode 100644 index 000000000000..0f01aa4a55e8 --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/reflect/ClassTagBenchmark.scala @@ -0,0 +1,93 @@ +package scala.reflect + +import java.util.concurrent.TimeUnit + +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class ClassTagBenchmark { + var unitClassTag: ClassTag[_] = null + var booleanClassTag: ClassTag[_] = null + var byteClassTag: ClassTag[_] = null + var shortClassTag: ClassTag[_] = null + var charClassTag: ClassTag[_] = null + var intClassTag: ClassTag[_] = null + var longClassTag: ClassTag[_] = null + var floatClassTag: ClassTag[_] = null + var doubleClassTag: ClassTag[_] = null + var refClassTag: ClassTag[_] = null + var otherValue: Object = null + var arraySize: Int = 100 + + @Setup def setup(): Unit = { + unitClassTag = classTag[Unit] + booleanClassTag = classTag[Boolean] + byteClassTag = classTag[Byte] + shortClassTag = classTag[Short] + charClassTag = classTag[Char] + intClassTag = classTag[Int] + longClassTag = classTag[Long] + floatClassTag = classTag[Float] + doubleClassTag = classTag[Double] + refClassTag = classTag[ClassTagBenchmark] + otherValue = new Object + } + + @Benchmark def primitivesNegOnRefClassTag(bh: Blackhole): Any = { + bh.consume(refClassTag.unapply(())) + bh.consume(refClassTag.unapply(1: Byte)) + bh.consume(refClassTag.unapply('A')) + bh.consume(refClassTag.unapply(1: Short)) + bh.consume(refClassTag.unapply(1)) + bh.consume(refClassTag.unapply(1L)) + bh.consume(refClassTag.unapply(1f)) + bh.consume(refClassTag.unapply(1d)) + } + + @Benchmark def primitivesPos(bh: Blackhole): Any = { + bh.consume(unitClassTag.unapply(())) + bh.consume(booleanClassTag.unapply(true)) + bh.consume(byteClassTag.unapply(1: Byte)) + bh.consume(charClassTag.unapply('A')) + bh.consume(shortClassTag.unapply(1: Short)) + bh.consume(intClassTag.unapply(1)) + bh.consume(longClassTag.unapply(1L)) + bh.consume(floatClassTag.unapply(1f)) + bh.consume(doubleClassTag.unapply(1d)) + } + + @Benchmark def primitivesNewArray(bh: Blackhole): Any = { + bh.consume(unitClassTag.newArray(arraySize)) + 
bh.consume(booleanClassTag.newArray(arraySize)) + bh.consume(charClassTag.newArray(arraySize)) + bh.consume(shortClassTag.newArray(arraySize)) + bh.consume(intClassTag.newArray(arraySize)) + bh.consume(longClassTag.newArray(arraySize)) + bh.consume(floatClassTag.newArray(arraySize)) + bh.consume(doubleClassTag.newArray(arraySize)) + } + + @Benchmark def refClassTagNewArray(bh: Blackhole): Any = { + bh.consume(refClassTag.newArray(arraySize)) + } + + @Benchmark def doubleClassTagNewArray(bh: Blackhole): Any = { + bh.consume(doubleClassTag.newArray(arraySize)) + } + + @Benchmark def refClassTagUnapplyNeg2(bh: Blackhole): Any = refClassTag.unapply(otherValue) + + @Benchmark def refClassTagUnapplyNeg2Direct(bh: Blackhole): Any = unapplyDirect(refClassTag, otherValue) + + def unapplyDirect(ct: ClassTag[_], x: AnyRef): Option[_] = { + if (null != x && (ct.runtimeClass.isInstance(x))) Some(x) + else None + } +} \ No newline at end of file From 64899084df69c4c057b23a1b5426e0f3ade8d983 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Sun, 6 May 2018 17:28:42 +0100 Subject: [PATCH 1401/2793] Solve Issue 492, "MethodTypes.isTrivial is allocation heavy" This commit addresses Issue 492 at ScalaDev. It aims at reducing the allocation of Cons objects in the execution of the `isTrivial` method. In particular: - We use an array, evaluated on demand, to contain the `_.tpe` of each parameter in `params`. - We also add an array for the ContainsCollector objects, which is also one for each parameter. Before, the method would create one for each type, which meant N^2 creations in the worst case. - We turn the auxiliary methods for `isTrivial` from recursive into imperative loops, using when possible indexes over those arrays. (cherry picked from commit 9875c12a4c05b337b958e1c35590fa111739f5ed) --- .../scala/reflect/internal/Types.scala | 68 ++++++++++++++++--- 1 file changed, 60 insertions(+), 8 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index bec839b85606..ddb890fae665 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -2494,20 +2494,72 @@ trait Types private var trivial: ThreeValue = UNKNOWN override def isTrivial: Boolean = { - if (trivial == UNKNOWN) trivial = fromBoolean(isTrivialResult && areTrivialParams(params)) + if (trivial == UNKNOWN) trivial = fromBoolean(isTrivialResult && areTrivialParams) toBoolean(trivial) } private def isTrivialResult = resultType.isTrivial && (resultType eq resultType.withoutAnnotations) - private def areTrivialParams(ps: List[Symbol]): Boolean = ps match { - case p :: rest => - p.tpe.isTrivial && !typesContain(paramTypes, p) && !(resultType contains p) && - areTrivialParams(rest) - case _ => - true - } + /*- Imperative encoding for: + * `lazy val paramsContainsCollectors = params.map( new ContainsCollector(_) ).toArray` + * `lazy val paramTpes = params.map( _.tpe).toArray` + */ + private[this] var paramsContainsCollectors: Array[ContainsCollector] = null + private[this] var paramsTpes: Array[Type] = null + private[this] def buildParamsContainsCollectors: Unit = + if (paramsContainsCollectors == null) { + val len = params.length + paramsContainsCollectors = new Array[ContainsCollector](len) + paramsTpes = new Array[Type](len) + @tailrec + def buildPCC(syms: List[Symbol], ix: Int): Unit = syms match { + case sym :: tailSyms => + paramsContainsCollectors(ix) = new ContainsCollector(sym) + paramsTpes(ix) = sym.tpe + 
buildPCC(tailSyms, ix+1) + case Nil => + } + buildPCC(params, ix = 0) + } + /* End of paramsContainsCollector */ + + // areTrivialParams = params.forall( + private def areTrivialParams: Boolean = + if (params.isEmpty) true else { + + def typeContains(pcc: ContainsCollector, tp: Type): Boolean = { + pcc.result = false + pcc.collect(tp) + } + + // Imperative rewrite of paramsTpes.exists( typeContains(pcc, _) ) + def anyTypeContains(pcc: ContainsCollector): Boolean = { + var existsContains = false + var tpeIx = 0 + while(tpeIx < paramsTpes.length && !existsContains){ + existsContains = typeContains(pcc, paramsTpes(tpeIx) ) + tpeIx = tpeIx + 1 + } + existsContains + } + + def isTrivialParam(paramIx: Int): Boolean = + paramsTpes(paramIx).isTrivial && { + val pcc = paramsContainsCollectors(paramIx) + !typeContains(pcc, resultType) && !anyTypeContains(pcc) + } + + buildParamsContainsCollectors + // Imperative rewrite of `params.forall( isTrivialParam )` + var paramIdx = 0 + var allIsTrivial = true + while(paramIdx < paramsTpes.length && allIsTrivial){ + allIsTrivial = isTrivialParam(paramIdx) + paramIdx = paramIdx + 1 + } + allIsTrivial + } def isImplicit = (params ne Nil) && params.head.isImplicit def isJava = false // can we do something like for implicits? I.e. do Java methods without parameters need to be recognized? From a17e79a72ca3169aabec275bf84fea7cd88a2f38 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Mon, 7 May 2018 10:32:02 +0100 Subject: [PATCH 1402/2793] Move fields into local variables. The method `areTrivialParams` is assumed to be used at most once per MethodType object, so there is no need to keep the arrays as fields. Instead, we can turn them into local method variables. - Since the main loop goes parameter by parameter, we need not to keep the ContainsCollector objects, only the types. - We unroll the main loop, so that we first check the `.isTrivial` method of each type, and only afterwards check with anyContains. 
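As an illustration of the loop shape described above, here is a minimal sketch (invented names, not the actual `Types.scala` code; the string-based `contains` checks merely stand in for `isTrivial` and the `ContainsCollector` traversal): the cheap check runs first over all parameters while caching their types into a local array, and only if it succeeds does the quadratic pass reuse that array, so no intermediate lists or tuples are allocated:

```
object TwoPassTrivialityCheck {
  final case class Param(name: String, tpe: String)

  def allTrivial(params: List[Param], resultTpe: String): Boolean = {
    val len  = params.length
    val tpes = new Array[String](len)

    // Pass 1: cheap per-element check, caching each param's type on the way.
    var ps = params
    var i  = 0
    var cheapOk = true
    while (cheapOk && i < len) {
      tpes(i) = ps.head.tpe
      cheapOk = !tpes(i).contains("=>")   // stand-in for `isTrivial`
      ps = ps.tail
      i += 1
    }

    // Pass 2 (only reached if pass 1 succeeded): the pairwise check reuses
    // the cached array instead of re-traversing the parameter list.
    def mentioned(name: String): Boolean = {
      if (resultTpe.contains(name)) return true
      var j = 0
      while (j < len) {
        if (tpes(j).contains(name)) return true
        j += 1
      }
      false
    }

    cheapOk && params.forall(p => !mentioned(p.name))
  }

  def main(args: Array[String]): Unit = {
    println(allTrivial(List(Param("x", "Int"), Param("y", "String")), "Boolean")) // true
    println(allTrivial(List(Param("f", "Int => Int")), "Boolean"))                // false
  }
}
```

Checking the cheap condition for every parameter before starting the pairwise pass also means a single non-trivial parameter short-circuits the whole computation.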
(cherry picked from commit 2fcf0ecbdeff6a99a9a873742ae7df237a115beb) --- .../scala/reflect/internal/Types.scala | 65 ++++++++----------- 1 file changed, 27 insertions(+), 38 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index ddb890fae665..c546c9b3df4d 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -2501,32 +2501,25 @@ trait Types private def isTrivialResult = resultType.isTrivial && (resultType eq resultType.withoutAnnotations) - /*- Imperative encoding for: - * `lazy val paramsContainsCollectors = params.map( new ContainsCollector(_) ).toArray` - * `lazy val paramTpes = params.map( _.tpe).toArray` - */ - private[this] var paramsContainsCollectors: Array[ContainsCollector] = null - private[this] var paramsTpes: Array[Type] = null - private[this] def buildParamsContainsCollectors: Unit = - if (paramsContainsCollectors == null) { - val len = params.length - paramsContainsCollectors = new Array[ContainsCollector](len) - paramsTpes = new Array[Type](len) - @tailrec - def buildPCC(syms: List[Symbol], ix: Int): Unit = syms match { - case sym :: tailSyms => - paramsContainsCollectors(ix) = new ContainsCollector(sym) - paramsTpes(ix) = sym.tpe - buildPCC(tailSyms, ix+1) - case Nil => - } - buildPCC(params, ix = 0) - } - /* End of paramsContainsCollector */ - - // areTrivialParams = params.forall( private def areTrivialParams: Boolean = if (params.isEmpty) true else { + val len = params.length + val paramsTpes: Array[Type] = new Array[Type](len) + + // returns the result of ```params.forall(_.tpe.isTrivial))``` + // along the way, it loads each param' tpe into array + def forallIsTrivial: Boolean = { + var res = true + var pps = params + var ix = 0 + while(res && ix < len){ + paramsTpes(ix) = pps.head.tpe + res = paramsTpes(ix).isTrivial + pps = pps.tail + ix = ix + 1 + } + res + } def typeContains(pcc: ContainsCollector, tp: Type): Boolean = { pcc.result = false @@ -2537,28 +2530,24 @@ trait Types def anyTypeContains(pcc: ContainsCollector): Boolean = { var existsContains = false var tpeIx = 0 - while(tpeIx < paramsTpes.length && !existsContains){ + while(tpeIx < len && !existsContains){ existsContains = typeContains(pcc, paramsTpes(tpeIx) ) tpeIx = tpeIx + 1 } existsContains } - def isTrivialParam(paramIx: Int): Boolean = - paramsTpes(paramIx).isTrivial && { - val pcc = paramsContainsCollectors(paramIx) - !typeContains(pcc, resultType) && !anyTypeContains(pcc) + @tailrec + def forallParamsNoTypeContains(params: List[Symbol]): Boolean = + params match { + case Nil => true + case pp :: pps => + val pcc = new ContainsCollector(pp) + !typeContains(pcc, resultType) && ! 
anyTypeContains(pcc) && + forallParamsNoTypeContains(pps) } - buildParamsContainsCollectors - // Imperative rewrite of `params.forall( isTrivialParam )` - var paramIdx = 0 - var allIsTrivial = true - while(paramIdx < paramsTpes.length && allIsTrivial){ - allIsTrivial = isTrivialParam(paramIdx) - paramIdx = paramIdx + 1 - } - allIsTrivial + forallIsTrivial && forallParamsNoTypeContains(params) } def isImplicit = (params ne Nil) && params.head.isImplicit From 93deeccb93c1d703c109977b18045bf299d9cf45 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 21 May 2018 11:13:04 +1000 Subject: [PATCH 1403/2793] Minor style changes and remove dead code (cherry picked from commit 1581c8405e9a78397b5cf4c78567d85e9da60529) --- src/reflect/scala/reflect/internal/Types.scala | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index c546c9b3df4d..f16ff32db7ad 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -2516,7 +2516,7 @@ trait Types paramsTpes(ix) = pps.head.tpe res = paramsTpes(ix).isTrivial pps = pps.tail - ix = ix + 1 + ix += 1 } res } @@ -2532,7 +2532,7 @@ trait Types var tpeIx = 0 while(tpeIx < len && !existsContains){ existsContains = typeContains(pcc, paramsTpes(tpeIx) ) - tpeIx = tpeIx + 1 + tpeIx += 1 } existsContains } @@ -4819,11 +4819,6 @@ trait Types loop(tps, Depth.Zero) } - @tailrec private def typesContain(tps: List[Type], sym: Symbol): Boolean = tps match { - case tp :: rest => (tp contains sym) || typesContain(rest, sym) - case _ => false - } - @tailrec private def areTrivialTypes(tps: List[Type]): Boolean = tps match { case tp :: rest => tp.isTrivial && areTrivialTypes(rest) case _ => true From f070e6ec0d44f958454ab2041723bb0fb9b92e87 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 7 May 2018 19:11:01 +1000 Subject: [PATCH 1404/2793] Avoid needless LUB of the cases after patmat translation I've restricted the change to the non-CPS world, where we can't assume that Any is a top type. (cherry picked from commit c1e81721e3c7038c17207499b79c86a63d1920ba) --- .../scala/tools/nsc/transform/patmat/PatternMatching.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index 3e4fe35395ee..50003ad94b04 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -61,7 +61,10 @@ trait PatternMatching extends Transform // setType origTp intended for CPS -- TODO: is it necessary? val translated = translator(sel.pos).translateMatch(treeCopy.Match(tree, transform(sel), transformTrees(cases).asInstanceOf[List[CaseDef]])) try { - localTyper.typed(translated) setType origTp + // Keep 2.12 behaviour of using wildcard expected type, recomputing the LUB, then throwing it away for the continuations plugins + // but for the rest of us pass in top as the expected type to avoid waste. 
+ val pt = if (origTp <:< definitions.AnyTpe) definitions.AnyTpe else WildcardType + localTyper.typed(translated, definitions.AnyTpe) setType origTp } catch { case x: (Types#TypeError) => // TODO: this should never happen; error should've been reported during type checking From d0265db8d0ae2c7fa26cfa704570a121201f54cc Mon Sep 17 00:00:00 2001 From: Miles Sabin Date: Fri, 4 May 2018 11:01:23 +0100 Subject: [PATCH 1405/2793] Faster implicit search: lazier error messages Don't eagerly compute unseen, yet expensive error messages (because we're in a nested search, or -Xlog-implicits is not enabled). (cherry picked from commit a595114cd8be644cfc5587d7ffa98710befbd8ba) --- .../scala/tools/nsc/typechecker/ContextErrors.scala | 10 ++++++---- .../scala/tools/nsc/typechecker/Implicits.scala | 6 +++--- 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 4b8268102615..7052edf8082a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -105,7 +105,9 @@ trait ContextErrors { def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) } - def typeErrorMsg(found: Type, req: Type) = "type mismatch" + foundReqMsg(found, req) + def typeErrorMsg(context: Context, found: Type, req: Type) = + if (context.openImplicits.nonEmpty && !settings.XlogImplicits.value) "type mismatch" + else "type mismatch" + foundReqMsg(found, req) } def notAnyRefMessage(found: Type): String = { @@ -216,7 +218,7 @@ trait ContextErrors { assert(!foundType.isErroneous, s"AdaptTypeError - foundType is Erroneous: $foundType") assert(!req.isErroneous, s"AdaptTypeError - req is Erroneous: $req") - issueNormalTypeError(callee, withAddendum(callee.pos)(typeErrorMsg(foundType, req))) + issueNormalTypeError(callee, withAddendum(callee.pos)(typeErrorMsg(context, foundType, req))) infer.explainTypes(foundType, req) } @@ -1016,7 +1018,7 @@ trait ContextErrors { } def NoBestExprAlternativeError(tree: Tree, pt: Type, lastTry: Boolean) = { - issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(tree.symbol.tpe, pt))) + issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(context, tree.symbol.tpe, pt))) setErrorOnLastTry(lastTry, tree) } @@ -1284,7 +1286,7 @@ trait ContextErrors { sm"""|Note that implicit conversions are not applicable because they are ambiguous: |${coreMsg}are possible conversion functions from $found to $req""" } - typeErrorMsg(found, req) + ( + typeErrorMsg(context, found, req) + ( if (explanation == "") "" else "\n" + explanation ) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index f167a653ffb9..34b9d467d3d8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -401,7 +401,7 @@ trait Implicits { def pos = if (pos0 != NoPosition) pos0 else tree.pos - def failure(what: Any, reason: String, pos: Position = this.pos): SearchResult = { + @inline final def failure(what: Any, reason: => String, pos: Position = this.pos): SearchResult = { if (settings.XlogImplicits) reporter.echo(pos, what+" is not a valid implicit value for "+pt+" because:\n"+reason) SearchFailure @@ -664,7 +664,7 @@ trait Implicits { val itree1 = if (isBlackbox(info.sym)) suppressMacroExpansion(itree0) else itree0 
typingLog("considering", typeDebug.ptTree(itree1)) - def fail(reason: String): SearchResult = failure(itree0, reason) + @inline def fail(reason: => String): SearchResult = failure(itree0, reason) def fallback = typed1(itree1, EXPRmode, wildPt) try { val itree2 = if (!isView) fallback else pt match { @@ -725,7 +725,7 @@ trait Implicits { info.sym.fullLocationString, itree2.symbol.fullLocationString)) else { val tvars = undetParams map freshVar - def ptInstantiated = pt.instantiateTypeParams(undetParams, tvars) + val ptInstantiated = pt.instantiateTypeParams(undetParams, tvars) if (matchesPt(itree3.tpe, ptInstantiated, undetParams)) { if (tvars.nonEmpty) From 28686fbde476b2321a3a408e81c08fee5376644f Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 9 May 2018 18:27:32 +1000 Subject: [PATCH 1406/2793] Reduce allocations in Context.lookup This avoids allocations in the case where only a single result is found. When more than once is found, we do still allocate a ListBuffer as we build up the list of overloaded alternatives. (cherry picked from commit 4eb551a1c0ed1271d90fe0c384af3b6434b6d32c) --- .../tools/nsc/typechecker/Contexts.scala | 52 ++++++++++++------- .../pos/constructor-pattern-name-class.scala | 10 ++++ 2 files changed, 44 insertions(+), 18 deletions(-) create mode 100644 test/files/pos/constructor-pattern-name-class.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index a4f191720ad5..ef42afced151 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1065,35 +1065,51 @@ trait Contexts { self: Analyzer => found1 } - def lookupInScope(scope: Scope) = - (scope lookupUnshadowedEntries name filter (e => qualifies(e.sym))).toList + def lookupInScope(owner: Symbol, pre: Type, scope: Scope): Symbol = { + var e = scope.lookupEntry(name) + while (e != null && !qualifies(e.sym)) { + e = scope.lookupNextEntry(e) + } + if (e == null) { + NoSymbol + } else { + val e1 = e + val e1Sym = e.sym + var syms: mutable.ListBuffer[Symbol] = null + e = scope.lookupNextEntry(e) + while (e ne null) { + if (e.depth == e1.depth && e.sym != e1Sym && qualifies(e.sym)) { + if (syms eq null) { + syms = new mutable.ListBuffer[Symbol] + syms += e1Sym + } + syms += e.sym + } + e = scope.lookupNextEntry(e) + } + // we have a winner: record the symbol depth + symbolDepth = (cx.depth - cx.scope.nestingLevel) + e1.depth - def newOverloaded(owner: Symbol, pre: Type, entries: List[ScopeEntry]) = - logResult(s"overloaded symbol in $pre")(owner.newOverloaded(pre, entries map (_.sym))) + if (syms eq null) e1Sym + else owner.newOverloaded(pre, syms.toList) + } + } // Constructor lookup should only look in the decls of the enclosing class // not in the self-type, nor in the enclosing context, nor in imports (scala/bug#4460, scala/bug#6745) - if (name == nme.CONSTRUCTOR) return { + if (name == nme.CONSTRUCTOR) { val enclClassSym = cx.enclClass.owner val scope = cx.enclClass.prefix.baseType(enclClassSym).decls - val constructorSym = lookupInScope(scope) match { - case Nil => NoSymbol - case hd :: Nil => hd.sym - case entries => newOverloaded(enclClassSym, cx.enclClass.prefix, entries) - } - finishDefSym(constructorSym, cx.enclClass.prefix) + val constructorSym = lookupInScope(enclClassSym, cx.enclClass.prefix, scope) + return finishDefSym(constructorSym, cx.enclClass.prefix) } // cx.scope eq null arises during FixInvalidSyms in Duplicators while (defSym == NoSymbol 
&& (cx ne NoContext) && (cx.scope ne null)) { pre = cx.enclClass.prefix - defSym = lookupInScope(cx.scope) match { - case Nil => searchPrefix - case entries @ (hd :: tl) => - // we have a winner: record the symbol depth - symbolDepth = (cx.depth - cx.scope.nestingLevel) + hd.depth - if (tl.isEmpty) hd.sym - else newOverloaded(cx.owner, pre, entries) + defSym = lookupInScope(cx.owner, cx.enclClass.prefix, cx.scope) match { + case NoSymbol => searchPrefix + case found => found } if (!defSym.exists) cx = cx.outer // push further outward diff --git a/test/files/pos/constructor-pattern-name-class.scala b/test/files/pos/constructor-pattern-name-class.scala new file mode 100644 index 000000000000..8cc0afe642ce --- /dev/null +++ b/test/files/pos/constructor-pattern-name-class.scala @@ -0,0 +1,10 @@ +case class ClassDef(a: Any) + +trait T { + def ClassDef(a: Any): Any +} +class C extends T { + def ClassDef(a: Any) = a match { + case t @ ClassDef(_) => t // when typing constructor pattern, we skip method symbols + } +} From 293529f9c5d1418627bd440e83f5f6ba1a9e0965 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 9 May 2018 17:38:09 +1000 Subject: [PATCH 1407/2793] Avoid allocation of Typer within typing transformers Rather than allocating a new Typer each time, we can just mutate/restore `Typer.context`. (cherry picked from commit f7f85f2d71928c0fc045d422c8c20e49eb7b8c58) --- .../scala/tools/nsc/transform/TypingTransformers.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala index d5adfe12e983..97e46d5fd8f7 100644 --- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala +++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala @@ -25,10 +25,10 @@ trait TypingTransformers { override final def atOwner[A](owner: Symbol)(trans: => A): A = atOwner(curTree, owner)(trans) def atOwner[A](tree: Tree, owner: Symbol)(trans: => A): A = { - val savedLocalTyper = localTyper - localTyper = localTyper.atOwner(tree, if (owner.isModuleNotMethod) owner.moduleClass else owner) + val savedContext = localTyper.context + localTyper.context = localTyper.context.make(tree, if (owner.isModuleNotMethod) owner.moduleClass else owner) val result = super.atOwner(owner)(trans) - localTyper = savedLocalTyper + localTyper.context = savedContext result } From 437a1e9d0afa642db402fabba41933183a37ae1f Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 9 May 2018 21:12:08 +0800 Subject: [PATCH 1408/2793] Improve efficiency of dead code checks - Move the checkDead module out of Typer - Use a new bit in ContextMode to track when to suppress the warning, rather than maintaining the stack of the symbols of enclosing applications - Only do any of this when under -Ywarn-dead code References scala/scala-dev#501 (cherry picked from commit fc72bbbc7943099ab76a7e59f14ddb55f41abca3) --- .../tools/nsc/typechecker/Contexts.scala | 9 +++- .../nsc/typechecker/TypeDiagnostics.scala | 46 +++++++------------ .../scala/tools/nsc/typechecker/Typers.scala | 32 +++++++------ 3 files changed, 43 insertions(+), 44 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index a4f191720ad5..5b50505e3ba2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -405,6 +405,9 @@ trait Contexts { self: Analyzer => @inline 
final def withinSecondTry[T](op: => T): T = withMode(enabled = SecondTry)(op) @inline final def withinPatAlternative[T](op: => T): T = withMode(enabled = PatternAlternative)(op) + @inline final def withSuppressDeadArgWarning[T](suppress: Boolean)(op: => T): T = + if (suppress) withMode(enabled = SuppressDeadArgWarning)(op) else withMode(disabled = SuppressDeadArgWarning)(op) + /** TypeConstructorAllowed is enabled when we are typing a higher-kinded type. * adapt should then check kind-arity based on the prototypical type's kind * arity. Type arguments should not be inferred. @@ -1581,6 +1584,9 @@ object ContextMode { /** Are unapplied type constructors allowed here? Formerly HKmode. */ final val TypeConstructorAllowed: ContextMode = 1 << 16 + /** Should a dead code warning be issued for a Nothing-typed argument to the current application. */ + final val SuppressDeadArgWarning: ContextMode = 1 << 17 + /** TODO: The "sticky modes" are EXPRmode, PATTERNmode, TYPEmode. * To mimic the sticky mode behavior, when captain stickyfingers * comes around we need to propagate those modes but forget the other @@ -1604,7 +1610,8 @@ object ContextMode { StarPatterns -> "StarPatterns", SuperInit -> "SuperInit", SecondTry -> "SecondTry", - TypeConstructorAllowed -> "TypeConstructorAllowed" + TypeConstructorAllowed -> "TypeConstructorAllowed", + SuppressDeadArgWarning -> "SuppressDeadArgWarning" ) } diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index fc1cf9acc471..f0e49c23ff5e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -466,6 +466,22 @@ trait TypeDiagnostics { } } + object checkDead { + private def treeOK(tree: Tree) = { + val isLabelDef = tree match { case _: LabelDef => true; case _ => false} + tree.tpe != null && tree.tpe.typeSymbol == NothingClass && !isLabelDef + } + + def apply(context: Context, tree: Tree): Tree = { + if (settings.warnDeadCode && context.unit.exists && treeOK(tree) && !context.contextMode.inAny(ContextMode.SuppressDeadArgWarning)) + context.warning(tree.pos, "dead code following this construct") + tree + } + + // The checkDead call from typedArg is more selective. + def inMode(context: Context, mode: Mode, tree: Tree): Tree = if (mode.typingMonoExprByValue) apply(context, tree) else tree + } + trait TyperDiagnostics { self: Typer => @@ -714,36 +730,6 @@ trait TypeDiagnostics { } } - object checkDead { - private val exprStack: mutable.Stack[Symbol] = mutable.Stack(NoSymbol) - // The method being applied to `tree` when `apply` is called. 
- private def expr = exprStack.top - - private def exprOK = - (expr != Object_synchronized) && - !(expr.isLabel && treeInfo.isSynthCaseSymbol(expr)) // it's okay to jump to matchEnd (or another case) with an argument of type nothing - - private def treeOK(tree: Tree) = { - val isLabelDef = tree match { case _: LabelDef => true; case _ => false} - tree.tpe != null && tree.tpe.typeSymbol == NothingClass && !isLabelDef - } - - @inline def updateExpr[A](fn: Tree)(f: => A) = { - if (fn.symbol != null && fn.symbol.isMethod && !fn.symbol.isConstructor) { - exprStack push fn.symbol - try f finally exprStack.pop() - } else f - } - def apply(tree: Tree): Tree = { - if (settings.warnDeadCode && context.unit.exists && treeOK(tree) && exprOK) - context.warning(tree.pos, "dead code following this construct") - tree - } - - // The checkDead call from typedArg is more selective. - def inMode(mode: Mode, tree: Tree): Tree = if (mode.typingMonoExprByValue) apply(tree) else tree - } - private def symWasOverloaded(sym: Symbol) = sym.owner.isClass && sym.owner.info.member(sym.name).isOverloaded private def cyclicAdjective(sym: Symbol) = if (symWasOverloaded(sym)) "overloaded" else "recursive" diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 3f90ee9afcfd..3d4cd6c1ac96 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2086,7 +2086,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } else tpt1.tpe transformedOrTyped(vdef.rhs, EXPRmode | BYVALmode, tpt2) } - treeCopy.ValDef(vdef, typedMods, sym.name, tpt1, checkDead(rhs1)) setType NoType + treeCopy.ValDef(vdef, typedMods, sym.name, tpt1, checkDead(context, rhs1)) setType NoType } /** Enter all aliases of local parameter accessors. @@ -2317,7 +2317,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } if (tpt1.tpe.typeSymbol != NothingClass && !context.returnsSeen && rhs1.tpe.typeSymbol != NothingClass) - rhs1 = checkDead(rhs1) + rhs1 = checkDead(context, rhs1) if (!isPastTyper && meth.owner.isClass && meth.paramss.exists(ps => ps.exists(_.hasDefault) && isRepeatedParamType(ps.last.tpe))) @@ -2557,7 +2557,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // takes untyped sub-trees of a match and type checks them def typedMatch(selector: Tree, cases: List[CaseDef], mode: Mode, pt: Type, tree: Tree = EmptyTree): Match = { - val selector1 = checkDead(typedByValueExpr(selector)) + val selector1 = checkDead(context, typedByValueExpr(selector)) val selectorTp = packCaptured(selector1.tpe.widen).skolemizeExistential(context.owner, selector) val casesTyped = typedCases(cases, selectorTp, pt) @@ -3126,7 +3126,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else newTyper(context.make(stat, exprOwner)) // XXX this creates a spurious dead code warning if an exception is thrown // in a constructor, even if it is the only thing in the constructor. 
- val result = checkDead(localTyper.typedByValueExpr(stat)) + val result = checkDead(context, localTyper.typedByValueExpr(stat)) if (treeInfo.isSelfOrSuperConstrCall(result)) { context.inConstructorSuffix = true @@ -3288,7 +3288,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedArg(arg: Tree, mode: Mode, newmode: Mode, pt: Type): Tree = { val typedMode = mode.onlySticky | newmode val t = withCondConstrTyper(mode.inSccMode)(_.typed(arg, typedMode, pt)) - checkDead.inMode(typedMode, t) + checkDead.inMode(context, typedMode, t) } def typedArgs(args: List[Tree], mode: Mode) = @@ -3657,9 +3657,15 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe)) } - checkDead.updateExpr(fun) { - handleMonomorphicCall - } + if (settings.warnDeadCode) { + val sym = fun.symbol + if (sym != null && sym.isMethod && !sym.isConstructor) { + val suppress = sym == Object_synchronized || (sym.isLabel && treeInfo.isSynthCaseSymbol(sym)) + context.withSuppressDeadArgWarning(suppress) { + handleMonomorphicCall + } + } else handleMonomorphicCall + } else handleMonomorphicCall } else if (needsInstantiation(tparams, formals, args)) { //println("needs inst "+fun+" "+tparams+"/"+(tparams map (_.info))) inferExprInstance(fun, tparams) @@ -4406,7 +4412,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // (phase.erasedTypes && varsym.isValue && !varsym.isMethod)) { if (varsym.isVariable || varsym.isValue && phase.assignsFields) { val rhs1 = typedByValueExpr(rhs, lhs1.tpe) - treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitTpe + treeCopy.Assign(tree, lhs1, checkDead(context, rhs1)) setType UnitTpe } else if(dyna.isDynamicallyUpdatable(lhs1)) { val t = atPos(lhs1.pos.withEnd(rhs.pos.end)) { @@ -4418,7 +4424,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def typedIf(tree: If): If = { - val cond1 = checkDead(typedByValueExpr(tree.cond, BooleanTpe)) + val cond1 = checkDead(context, typedByValueExpr(tree.cond, BooleanTpe)) // One-legged ifs don't need a lot of analysis if (tree.elsep.isEmpty) return treeCopy.If(tree, cond1, typed(tree.thenp, UnitTpe), tree.elsep) setType UnitTpe @@ -4506,7 +4512,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (typed(expr).tpe.typeSymbol != UnitClass) context.warning(tree.pos, "enclosing method " + name + " has result type Unit: return value discarded") } - val res = treeCopy.Return(tree, checkDead(expr1)).setSymbol(enclMethod.owner) + val res = treeCopy.Return(tree, checkDead(context, expr1)).setSymbol(enclMethod.owner) val tp = pluginsTypedReturn(NothingTpe, this, res, restpt.tpe) res.setType(tp) } @@ -5060,7 +5066,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper typedSelect(tree, qualStableOrError, name) } else { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(typedSelectCount) - val qualTyped = checkDead(typedQualifier(qual, mode)) + val qualTyped = checkDead(context, typedQualifier(qual, mode)) val tree1 = typedSelect(tree, qualTyped, name) if (tree.isInstanceOf[PostfixSelect]) @@ -5352,7 +5358,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case MethodValue(expr) => typed1(suppressMacroExpansion(expr), mode, pt) match { case macroDef if treeInfo.isMacroApplication(macroDef) => MacroEtaError(macroDef) - case methodValue 
=> typedEta(checkDead(methodValue).updateAttachment(MethodValueAttachment)) + case methodValue => typedEta(checkDead(context, methodValue).updateAttachment(MethodValueAttachment)) } case Typed(expr, tpt) => val tpt1 = typedType(tpt, mode) // type the ascribed type first From 8cdcc2ed775afc1019845eb11259f45c12a1f138 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 11 May 2018 07:02:34 +0200 Subject: [PATCH 1409/2793] Remove the checkNoEscaping field from Typer (cherry picked from commit 4d28256098169a36765162fbdb06f3b8ab14fd14) --- .../scala/tools/nsc/typechecker/Typers.scala | 164 +++++++++--------- 1 file changed, 83 insertions(+), 81 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 3f90ee9afcfd..ad2b09536afa 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -108,6 +108,84 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper private final val InterpolatorCodeRegex = """\$\{\s*(.*?)\s*\}""".r private final val InterpolatorIdentRegex = """\$[$\w]+""".r // note that \w doesn't include $ + /** Check that type of given tree does not contain local or private + * components. + */ + object checkNoEscaping extends TypeMap { + private var owner: Symbol = _ + private var scope: Scope = _ + private var hiddenSymbols: List[Symbol] = _ + + /** Check that type `tree` does not refer to private + * components unless itself is wrapped in something private + * (`owner` tells where the type occurs). + */ + def privates[T <: Tree](typer: Typer, owner: Symbol, tree: T): T = + check(typer, owner, EmptyScope, WildcardType, tree) + + private def check[T <: Tree](typer: Typer, owner: Symbol, scope: Scope, pt: Type, tree: T): T = { + this.owner = owner + this.scope = scope + hiddenSymbols = List() + import typer.TyperErrorGen._ + val tp1 = apply(tree.tpe) + if (hiddenSymbols.isEmpty) tree setType tp1 + else if (hiddenSymbols exists (_.isErroneous)) HiddenSymbolWithError(tree) + else if (isFullyDefined(pt)) tree setType pt + else if (tp1.typeSymbol.isAnonymousClass) + check(typer, owner, scope, pt, tree setType tp1.typeSymbol.classBound) + else if (owner == NoSymbol) + tree setType packSymbols(hiddenSymbols.reverse, tp1) + else if (!isPastTyper) { // privates + val badSymbol = hiddenSymbols.head + SymbolEscapesScopeError(tree, badSymbol) + } else tree + } + + def addHidden(sym: Symbol) = + if (!(hiddenSymbols contains sym)) hiddenSymbols = sym :: hiddenSymbols + + override def apply(t: Type): Type = { + def checkNoEscape(sym: Symbol): Unit = { + if (sym.isPrivate && !sym.hasFlag(SYNTHETIC_PRIVATE)) { + var o = owner + while (o != NoSymbol && o != sym.owner && o != sym.owner.linkedClassOfClass && + !o.isLocalToBlock && !o.isPrivate && + !o.privateWithin.hasTransOwner(sym.owner)) + o = o.owner + if (o == sym.owner || o == sym.owner.linkedClassOfClass) + addHidden(sym) + } else if (sym.owner.isTerm && !sym.isTypeParameterOrSkolem) { + var e = scope.lookupEntry(sym.name) + var found = false + while (!found && (e ne null) && e.owner == scope) { + if (e.sym == sym) { + found = true + addHidden(sym) + } else { + e = scope.lookupNextEntry(e) + } + } + } + } + mapOver( + t match { + case TypeRef(_, sym, args) => + checkNoEscape(sym) + if (!hiddenSymbols.isEmpty && hiddenSymbols.head == sym && + sym.isAliasType && sameLength(sym.typeParams, args)) { + hiddenSymbols = hiddenSymbols.tail + t.dealias + } else t + case 
SingleType(_, sym) => + checkNoEscape(sym) + t + case _ => + t + }) + } + } + abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with PatternTyper with TyperContextErrors { import context0.unit import typeDebug.ptTree @@ -359,83 +437,6 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper checkParamsConvertible0(tpe0) } - /** Check that type of given tree does not contain local or private - * components. - */ - object checkNoEscaping extends TypeMap { - private var owner: Symbol = _ - private var scope: Scope = _ - private var hiddenSymbols: List[Symbol] = _ - - /** Check that type `tree` does not refer to private - * components unless itself is wrapped in something private - * (`owner` tells where the type occurs). - */ - def privates[T <: Tree](owner: Symbol, tree: T): T = - check(owner, EmptyScope, WildcardType, tree) - - private def check[T <: Tree](owner: Symbol, scope: Scope, pt: Type, tree: T): T = { - this.owner = owner - this.scope = scope - hiddenSymbols = List() - val tp1 = apply(tree.tpe) - if (hiddenSymbols.isEmpty) tree setType tp1 - else if (hiddenSymbols exists (_.isErroneous)) HiddenSymbolWithError(tree) - else if (isFullyDefined(pt)) tree setType pt - else if (tp1.typeSymbol.isAnonymousClass) - check(owner, scope, pt, tree setType tp1.typeSymbol.classBound) - else if (owner == NoSymbol) - tree setType packSymbols(hiddenSymbols.reverse, tp1) - else if (!isPastTyper) { // privates - val badSymbol = hiddenSymbols.head - SymbolEscapesScopeError(tree, badSymbol) - } else tree - } - - def addHidden(sym: Symbol) = - if (!(hiddenSymbols contains sym)) hiddenSymbols = sym :: hiddenSymbols - - override def apply(t: Type): Type = { - def checkNoEscape(sym: Symbol) { - if (sym.isPrivate && !sym.hasFlag(SYNTHETIC_PRIVATE)) { - var o = owner - while (o != NoSymbol && o != sym.owner && o != sym.owner.linkedClassOfClass && - !o.isLocalToBlock && !o.isPrivate && - !o.privateWithin.hasTransOwner(sym.owner)) - o = o.owner - if (o == sym.owner || o == sym.owner.linkedClassOfClass) - addHidden(sym) - } else if (sym.owner.isTerm && !sym.isTypeParameterOrSkolem) { - var e = scope.lookupEntry(sym.name) - var found = false - while (!found && (e ne null) && e.owner == scope) { - if (e.sym == sym) { - found = true - addHidden(sym) - } else { - e = scope.lookupNextEntry(e) - } - } - } - } - mapOver( - t match { - case TypeRef(_, sym, args) => - checkNoEscape(sym) - if (!hiddenSymbols.isEmpty && hiddenSymbols.head == sym && - sym.isAliasType && sameLength(sym.typeParams, args)) { - hiddenSymbols = hiddenSymbols.tail - t.dealias - } else t - case SingleType(_, sym) => - checkNoEscape(sym) - t - case _ => - t - }) - } - } - def reenterValueParams(vparamss: List[List[ValDef]]) { for (vparams <- vparamss) for (vparam <- vparams) @@ -1706,7 +1707,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (treeInfo.hasUntypedPreSuperFields(templ.body)) typedPrimaryConstrBody(templ)(EmptyTree) - supertpts mapConserve (tpt => checkNoEscaping.privates(context.owner, tpt)) + supertpts mapConserve (tpt => checkNoEscaping.privates(this, context.owner, tpt)) } catch { case ex: TypeError if !global.propagateCyclicReferences => @@ -1955,6 +1956,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val self1 = (templ.self: @unchecked) match { case vd @ ValDef(_, _, tpt, EmptyTree) => val tpt1 = checkNoEscaping.privates( + this, clazz.thisSym, treeCopy.TypeTree(tpt).setOriginal(tpt) 
setType vd.symbol.tpe ) @@ -2054,7 +2056,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } else typedModifiers(vdef.mods) sym.annotations.map(_.completeInfo()) - val tpt1 = checkNoEscaping.privates(sym, typedType(vdef.tpt)) + val tpt1 = checkNoEscaping.privates(this, sym, typedType(vdef.tpt)) checkNonCyclic(vdef, tpt1) // allow trait accessors: it's the only vehicle we have to hang on to annotations that must be passed down to @@ -2286,7 +2288,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (isRepeatedParamType(vparam1.symbol.tpe)) StarParamNotLastError(vparam1) - val tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt)) + val tpt1 = checkNoEscaping.privates(this, meth, typedType(ddef.tpt)) checkNonCyclic(ddef, tpt1) ddef.tpt.setType(tpt1.tpe) val typedMods = typedModifiers(ddef.mods) @@ -2365,7 +2367,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper tdef.symbol.deSkolemize.removeAnnotation(definitions.SpecializedClass) } - val rhs1 = checkNoEscaping.privates(tdef.symbol, typedType(tdef.rhs)) + val rhs1 = checkNoEscaping.privates(this, tdef.symbol, typedType(tdef.rhs)) checkNonCyclic(tdef.symbol) if (tdef.symbol.owner.isType) rhs1.tpe match { From fa39e5a4338aa8b7c3cab2e7cacc5d8649e124d6 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 9 May 2018 19:44:40 +1000 Subject: [PATCH 1410/2793] Expunge the transformed field from Typer (cherry picked from commit 708f5990381f149564d96b47abe3a432345f08f2) --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index ad2b09536afa..297b6abc82e1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -38,11 +38,6 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper final def forArgMode(fun: Tree, mode: Mode) = if (treeInfo.isSelfOrSuperConstrCall(fun)) mode | SCCmode else mode - // namer calls typer.computeType(rhs) on DefDef / ValDef when tpt is empty. the result - // is cached here and re-used in typedDefDef / typedValDef - // Also used to cache imports type-checked by namer. 
- val transformed = new mutable.AnyRefMap[Tree, Tree] - final val shortenImports = false // allows override of the behavior of the resetTyper method w.r.t comments @@ -193,7 +188,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val runDefinitions = currentRun.runDefinitions import runDefinitions._ - private val transformed: mutable.Map[Tree, Tree] = unit.transformed + private def transformed: mutable.Map[Tree, Tree] = unit.transformed val infer = new Inferencer { def context = Typer.this.context From 00d72cca978032b4c2df68a8709e34b6202d75eb Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 11 May 2018 07:18:08 +0200 Subject: [PATCH 1411/2793] Remove the dyna field from Typer (cherry picked from commit 1cacc13d0a7d5a65d311a967277a06f886297030) --- .../scala/tools/nsc/typechecker/Typers.scala | 228 +++++++++--------- 1 file changed, 116 insertions(+), 112 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 297b6abc82e1..24a3f06206be 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1148,7 +1148,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def hasPolymorphicApply = applyMeth.alternatives exists (_.tpe.typeParams.nonEmpty) def hasMonomorphicApply = applyMeth.alternatives exists (_.tpe.paramSectionCount > 0) - dyna.acceptsApplyDynamic(tree.tpe) || ( + acceptsApplyDynamic(tree.tpe) || ( if (mode.inTappMode) tree.tpe.typeParams.isEmpty && hasPolymorphicApply else @@ -3611,7 +3611,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper (args exists isNamedArg) || // uses a named argument isNamedApplyBlock(fun)) { // fun was transformed to a named apply block => // integrate this application into the block - if (dyna.isApplyDynamicNamed(fun) && isDynamicRewrite(fun)) dyna.typedNamedApply(tree, fun, args, mode, pt) + if (isApplyDynamicNamed(fun) && isDynamicRewrite(fun)) typedNamedApply(tree, fun, args, mode, pt) else tryNamesDefaults } else { val tparams = context.extractUndetparams() @@ -4128,121 +4128,125 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } - object dyna { - import treeInfo.{isApplyDynamicName, DynamicUpdate, DynamicApplicationNamed} + // + // START: applyDynamic suport + // + import treeInfo.{isApplyDynamicName, DynamicUpdate, DynamicApplicationNamed} - def acceptsApplyDynamic(tp: Type) = tp.typeSymbol isNonBottomSubClass DynamicClass + private def acceptsApplyDynamic(tp: Type) = tp.typeSymbol isNonBottomSubClass DynamicClass - /** Returns `Some(t)` if `name` can be selected dynamically on `qual`, `None` if not. 
- * `t` specifies the type to be passed to the applyDynamic/selectDynamic call (unless it is NoType) - * NOTE: currently either returns None or Some(NoType) (scala-virtualized extends this to Some(t) for selections on staged Structs) - */ - def acceptsApplyDynamicWithType(qual: Tree, name: Name): Option[Type] = - // don't selectDynamic selectDynamic, do select dynamic at unknown type, - // in scala-virtualized, we may return a Some(tp) where tp ne NoType - if (!isApplyDynamicName(name) && acceptsApplyDynamic(qual.tpe.widen)) Some(NoType) - else None - - def isDynamicallyUpdatable(tree: Tree) = tree match { - // if the qualifier is a Dynamic, that's all we need to know - case DynamicUpdate(qual, name) => acceptsApplyDynamic(qual.tpe) - case _ => false - } - - def isApplyDynamicNamed(fun: Tree): Boolean = fun match { - case DynamicApplicationNamed(qual, _) => acceptsApplyDynamic(qual.tpe.widen) - case _ => false - // look deeper? - // val treeInfo.Applied(methPart, _, _) = fun - // println("methPart of "+ fun +" is "+ methPart) - // if (methPart ne fun) isApplyDynamicNamed(methPart) - // else false - } - - def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = { - def argToBinding(arg: Tree): Tree = arg match { - case AssignOrNamedArg(i @ Ident(name), rhs) => - atPos(i.pos.withEnd(rhs.pos.end)) { - gen.mkTuple(List(atPos(i.pos)(CODE.LIT(name.toString)), rhs)) - } - case _ => - gen.mkTuple(List(CODE.LIT(""), arg)) - } + /** Returns `Some(t)` if `name` can be selected dynamically on `qual`, `None` if not. + * `t` specifies the type to be passed to the applyDynamic/selectDynamic call (unless it is NoType) + * NOTE: currently either returns None or Some(NoType) (scala-virtualized extends this to Some(t) for selections on staged Structs) + */ + private def acceptsApplyDynamicWithType(qual: Tree, name: Name): Option[Type] = + // don't selectDynamic selectDynamic, do select dynamic at unknown type, + // in scala-virtualized, we may return a Some(tp) where tp ne NoType + if (!isApplyDynamicName(name) && acceptsApplyDynamic(qual.tpe.widen)) Some(NoType) + else None + + private def isDynamicallyUpdatable(tree: Tree) = tree match { + // if the qualifier is a Dynamic, that's all we need to know + case DynamicUpdate(qual, name) => acceptsApplyDynamic(qual.tpe) + case _ => false + } - val t = treeCopy.Apply(orig, unmarkDynamicRewrite(fun), args map argToBinding) - wrapErrors(t, _.typed(t, mode, pt)) - } + private def isApplyDynamicNamed(fun: Tree): Boolean = fun match { + case DynamicApplicationNamed(qual, _) => acceptsApplyDynamic(qual.tpe.widen) + case _ => false + // look deeper? + // val treeInfo.Applied(methPart, _, _) = fun + // println("methPart of "+ fun +" is "+ methPart) + // if (methPart ne fun) isApplyDynamicNamed(methPart) + // else false + } - /** Translate selection that does not typecheck according to the normal rules into a selectDynamic/applyDynamic. - * - * foo.method("blah") ~~> foo.applyDynamic("method")("blah") - * foo.method(x = "blah") ~~> foo.applyDynamicNamed("method")(("x", "blah")) - * foo.varia = 10 ~~> foo.updateDynamic("varia")(10) - * foo.field ~~> foo.selectDynamic("field") - * foo.arr(10) = 13 ~~> foo.selectDynamic("arr").update(10, 13) - * - * what if we want foo.field == foo.selectDynamic("field") == 1, but `foo.field = 10` == `foo.selectDynamic("field").update(10)` == () - * what would the signature for selectDynamic be? 
(hint: it needs to depend on whether an update call is coming or not) - * - * need to distinguish selectDynamic and applyDynamic somehow: the former must return the selected value, the latter must accept an apply or an update - * - could have only selectDynamic and pass it a boolean whether more is to come, - * so that it can either return the bare value or something that can handle the apply/update - * HOWEVER that makes it hard to return unrelated values for the two cases - * --> selectDynamic's return type is now dependent on the boolean flag whether more is to come - * - simplest solution: have two method calls - * - */ - def mkInvoke(context: Context, tree: Tree, qual: Tree, name: Name): Option[Tree] = { - val cxTree = context.enclosingNonImportContext.tree // scala/bug#8364 - debuglog(s"dyna.mkInvoke($cxTree, $tree, $qual, $name)") - val treeInfo.Applied(treeSelection, _, _) = tree - def isDesugaredApply = { - val protoQual = macroExpandee(qual) orElse qual - treeSelection match { - case Select(`protoQual`, nme.apply) => true - case _ => false + private def typedNamedApply(orig: Tree, fun: Tree, args: List[Tree], mode: Mode, pt: Type): Tree = { + def argToBinding(arg: Tree): Tree = arg match { + case AssignOrNamedArg(i @ Ident(name), rhs) => + atPos(i.pos.withEnd(rhs.pos.end)) { + gen.mkTuple(List(atPos(i.pos)(CODE.LIT(name.toString)), rhs)) } - } - acceptsApplyDynamicWithType(qual, name) map { tp => - // If tp == NoType, pass only explicit type arguments to applyXXX. Not used at all - // here - it is for scala-virtualized, where tp will be passed as an argument (for - // selection on a staged Struct) - def matches(t: Tree) = isDesugaredApply || treeInfo.dissectApplied(t).core == treeSelection - - /* Note that the trees which arrive here are potentially some distance from - * the trees of direct interest. `cxTree` is some enclosing expression which - * may apparently be arbitrarily larger than `tree`; and `tree` itself is - * too small, having at least in some cases lost its explicit type parameters. - * This logic is designed to use `tree` to pinpoint the immediately surrounding - * Apply/TypeApply/Select node, and only then creates the dynamic call. - * See scala/bug#6731 among others. - */ - def findSelection(t: Tree): Option[(TermName, Tree)] = t match { - case Apply(fn, args) if matches(fn) => - val op = if(args.exists(_.isInstanceOf[AssignOrNamedArg])) nme.applyDynamicNamed else nme.applyDynamic - // not supported: foo.bar(a1,..., an: _*) - val fn1 = if(treeInfo.isWildcardStarArgList(args)) DynamicVarArgUnsupported(fn, op) else fn - Some((op, fn1)) - case Assign(lhs, _) if matches(lhs) => Some((nme.updateDynamic, lhs)) - case _ if matches(t) => Some((nme.selectDynamic, t)) - case _ => t.children.flatMap(findSelection).headOption - } - findSelection(cxTree) map { case (opName, treeInfo.Applied(_, targs, _)) => - val fun = gen.mkTypeApply(Select(qual, opName), targs) - if (opName == nme.updateDynamic) suppressMacroExpansion(fun) // scala/bug#7617 - val nameStringLit = atPos(treeSelection.pos.withStart(treeSelection.pos.point).makeTransparent) { - Literal(Constant(name.decode)) - } - markDynamicRewrite(atPos(qual.pos)(Apply(fun, List(nameStringLit)))) - } getOrElse { - // While there may be an error in the found tree itself, it should not be possible to *not find* it at all. 
- devWarning(s"Tree $tree not found in the context $cxTree while trying to do a dynamic application") - setError(tree) + case _ => + gen.mkTuple(List(CODE.LIT(""), arg)) + } + + val t = treeCopy.Apply(orig, unmarkDynamicRewrite(fun), args map argToBinding) + wrapErrors(t, _.typed(t, mode, pt)) + } + + /** Translate selection that does not typecheck according to the normal rules into a selectDynamic/applyDynamic. + * + * foo.method("blah") ~~> foo.applyDynamic("method")("blah") + * foo.method(x = "blah") ~~> foo.applyDynamicNamed("method")(("x", "blah")) + * foo.varia = 10 ~~> foo.updateDynamic("varia")(10) + * foo.field ~~> foo.selectDynamic("field") + * foo.arr(10) = 13 ~~> foo.selectDynamic("arr").update(10, 13) + * + * what if we want foo.field == foo.selectDynamic("field") == 1, but `foo.field = 10` == `foo.selectDynamic("field").update(10)` == () + * what would the signature for selectDynamic be? (hint: it needs to depend on whether an update call is coming or not) + * + * need to distinguish selectDynamic and applyDynamic somehow: the former must return the selected value, the latter must accept an apply or an update + * - could have only selectDynamic and pass it a boolean whether more is to come, + * so that it can either return the bare value or something that can handle the apply/update + * HOWEVER that makes it hard to return unrelated values for the two cases + * --> selectDynamic's return type is now dependent on the boolean flag whether more is to come + * - simplest solution: have two method calls + * + */ + private def mkInvoke(context: Context, tree: Tree, qual: Tree, name: Name): Option[Tree] = { + val cxTree = context.enclosingNonImportContext.tree // scala/bug#8364 + debuglog(s"dyna.mkInvoke($cxTree, $tree, $qual, $name)") + val treeInfo.Applied(treeSelection, _, _) = tree + def isDesugaredApply = { + val protoQual = macroExpandee(qual) orElse qual + treeSelection match { + case Select(`protoQual`, nme.apply) => true + case _ => false + } + } + acceptsApplyDynamicWithType(qual, name) map { tp => + // If tp == NoType, pass only explicit type arguments to applyXXX. Not used at all + // here - it is for scala-virtualized, where tp will be passed as an argument (for + // selection on a staged Struct) + def matches(t: Tree) = isDesugaredApply || treeInfo.dissectApplied(t).core == treeSelection + + /* Note that the trees which arrive here are potentially some distance from + * the trees of direct interest. `cxTree` is some enclosing expression which + * may apparently be arbitrarily larger than `tree`; and `tree` itself is + * too small, having at least in some cases lost its explicit type parameters. + * This logic is designed to use `tree` to pinpoint the immediately surrounding + * Apply/TypeApply/Select node, and only then creates the dynamic call. + * See scala/bug#6731 among others. 
+ */ + def findSelection(t: Tree): Option[(TermName, Tree)] = t match { + case Apply(fn, args) if matches(fn) => + val op = if(args.exists(_.isInstanceOf[AssignOrNamedArg])) nme.applyDynamicNamed else nme.applyDynamic + // not supported: foo.bar(a1,..., an: _*) + val fn1 = if(treeInfo.isWildcardStarArgList(args)) DynamicVarArgUnsupported(fn, op) else fn + Some((op, fn1)) + case Assign(lhs, _) if matches(lhs) => Some((nme.updateDynamic, lhs)) + case _ if matches(t) => Some((nme.selectDynamic, t)) + case _ => t.children.flatMap(findSelection).headOption + } + findSelection(cxTree) map { case (opName, treeInfo.Applied(_, targs, _)) => + val fun = gen.mkTypeApply(Select(qual, opName), targs) + if (opName == nme.updateDynamic) suppressMacroExpansion(fun) // scala/bug#7617 + val nameStringLit = atPos(treeSelection.pos.withStart(treeSelection.pos.point).makeTransparent) { + Literal(Constant(name.decode)) } + markDynamicRewrite(atPos(qual.pos)(Apply(fun, List(nameStringLit)))) + } getOrElse { + // While there may be an error in the found tree itself, it should not be possible to *not find* it at all. + devWarning(s"Tree $tree not found in the context $cxTree while trying to do a dynamic application") + setError(tree) } } - def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = silent(typeTree) orElse (err => DynamicRewriteError(tree, err.head)) } + private def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = silent(typeTree) orElse (err => DynamicRewriteError(tree, err.head)) + // + // END: applyDynamic support + // def typed1(tree: Tree, mode: Mode, pt: Type): Tree = { // Lookup in the given class using the root mirror. @@ -4405,11 +4409,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val rhs1 = typedByValueExpr(rhs, lhs1.tpe) treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitTpe } - else if(dyna.isDynamicallyUpdatable(lhs1)) { + else if(isDynamicallyUpdatable(lhs1)) { val t = atPos(lhs1.pos.withEnd(rhs.pos.end)) { Apply(lhs1, List(rhs)) } - dyna.wrapErrors(t, _.typed1(t, mode, pt)) + wrapErrors(t, _.typed1(t, mode, pt)) } else fail() } @@ -4931,8 +4935,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (tree.isInstanceOf[SelectFromTypeTree]) TypeSelectionFromVolatileTypeError(tree, qual) else UnstableTreeError(qual) else { - def asDynamicCall = dyna.mkInvoke(context, tree, qual, name) map { t => - dyna.wrapErrors(t, (_.typed1(t, mode, pt))) + def asDynamicCall = mkInvoke(context, tree, qual, name) map { t => + wrapErrors(t, (_.typed1(t, mode, pt))) } val sym = tree.symbol orElse member(qual, name) orElse inCompanionForJavaStatic(qual.tpe.prefix, qual.symbol, name) From 762c5acb1738b1f48294b1c34fceaaec76af144c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 9 May 2018 20:00:04 +1000 Subject: [PATCH 1412/2793] Jettison runDefinitions field from Typer (cherry picked from commit 4861fd20fe8cfd7a7958b8a551b266fee79e5fb7) --- .../tools/nsc/typechecker/Adaptations.scala | 6 ++---- .../scala/tools/nsc/typechecker/Typers.scala | 20 +++++++++---------- 2 files changed, 11 insertions(+), 15 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala index b1901c04bbad..c9e828f47b21 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala @@ -24,8 +24,6 @@ trait Adaptations { trait Adaptation { self: Typer => - import runDefinitions._ - def 
checkValidAdaptation(t: Tree, args: List[Tree]): Boolean = { def applyArg = t match { case Apply(_, arg :: Nil) => arg @@ -60,8 +58,8 @@ trait Adaptations { // they are used limits our ability to enforce anything sensible until // an opt-in compiler option is given. oneArgObject && !( - isStringAddition(t.symbol) - || isArrowAssoc(t.symbol) + currentRun.runDefinitions.isStringAddition(t.symbol) + || currentRun.runDefinitions.isArrowAssoc(t.symbol) || t.symbol.name == nme.equals_ || t.symbol.name == nme.EQ || t.symbol.name == nme.NE diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 24a3f06206be..a30f52440a8b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -185,8 +185,6 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper import context0.unit import typeDebug.ptTree import TyperErrorGen._ - val runDefinitions = currentRun.runDefinitions - import runDefinitions._ private def transformed: mutable.Map[Tree, Tree] = unit.transformed @@ -757,7 +755,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def checkExistentialsFeature(pos: Position, tpe: Type, prefix: String) = tpe match { case extp: ExistentialType if !extp.isRepresentableWithWildcards => - checkFeature(pos, ExistentialsFeature, prefix+" "+tpe) + checkFeature(pos, currentRun.runDefinitions.ExistentialsFeature, prefix+" "+tpe) case _ => } @@ -1734,7 +1732,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def validateParentClasses(parents: List[Tree], selfType: Type) { val pending = ListBuffer[AbsTypeError]() def validateDynamicParent(parent: Symbol, parentPos: Position) = - if (parent == DynamicClass) checkFeature(parentPos, DynamicsFeature) + if (parent == DynamicClass) checkFeature(parentPos, currentRun.runDefinitions.DynamicsFeature) def validateParentClass(parent: Tree, superclazz: Symbol) = if (!parent.isErrorTyped) { @@ -2333,7 +2331,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match { case List(param) :: _ if !param.isImplicit => - checkFeature(ddef.pos, ImplicitConversionsFeature, meth.toString) + checkFeature(ddef.pos, currentRun.runDefinitions.ImplicitConversionsFeature, meth.toString) case _ => } } @@ -2371,7 +2369,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } if (tdef.symbol.isDeferred && tdef.symbol.info.isHigherKinded) - checkFeature(tdef.pos, HigherKindsFeature) + checkFeature(tdef.pos, currentRun.runDefinitions.HigherKindsFeature) treeCopy.TypeDef(tdef, typedMods, tdef.name, tparams1, rhs1) setType NoType } @@ -3460,7 +3458,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } handleOverloaded - case _ if isPolymorphicSignature(fun.symbol) => + case _ if currentRun.runDefinitions.isPolymorphicSignature(fun.symbol) => // Mimic's Java's treatment of polymorphic signatures as described in // https://docs.oracle.com/javase/specs/jls/se8/html/jls-15.html#jls-15.12.3 // @@ -4095,7 +4093,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (sameLength(tparams, args)) { val targs = mapList(args)(treeTpe) checkBounds(tree, NoPrefix, NoSymbol, tparams, targs, "") - if (isPredefClassOf(fun.symbol)) + if (fun.symbol.rawname == nme.classOf && 
currentRun.runDefinitions.isPredefClassOf(fun.symbol)) typedClassOf(tree, args.head, noGen = true) else { if (!isPastTyper && fun.symbol == Any_isInstanceOf && targs.nonEmpty) { @@ -5065,10 +5063,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val tree1 = typedSelect(tree, qualTyped, name) if (tree.isInstanceOf[PostfixSelect]) - checkFeature(tree.pos, PostfixOpsFeature, name.decode) + checkFeature(tree.pos, currentRun.runDefinitions.PostfixOpsFeature, name.decode) val sym = tree1.symbol if (sym != null && sym.isOnlyRefinementMember && !sym.isMacro) - checkFeature(tree1.pos, ReflectiveCallsFeature, sym.toString) + checkFeature(tree1.pos, currentRun.runDefinitions.ReflectiveCallsFeature, sym.toString) qualTyped.symbol match { case s: Symbol if s.isRootPackage => treeCopy.Ident(tree1, name) @@ -5127,7 +5125,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper (// this -> Foo.this if (sym.isThisSym) typed1(This(sym.owner) setPos tree.pos, mode, pt) - else if (isPredefClassOf(sym) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty) { + else if (sym.rawname == nme.classOf && currentRun.runDefinitions.isPredefClassOf(sym) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty) { // Inferring classOf type parameter from expected type. Otherwise an // actual call to the stubbed classOf method is generated, returning null. typedClassOf(tree, TypeTree(pt.typeArgs.head).setPos(tree.pos.focus)) From 55a3654cae99f57f09476495bae35cb113a586f3 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 9 May 2018 20:31:50 +1000 Subject: [PATCH 1413/2793] Run another runDefinitions field out of town (cherry picked from commit a431c6cd3697977bd99d45bd60eb255380656a8a) --- src/compiler/scala/tools/nsc/typechecker/Tags.scala | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala index e29451f37963..31171d91586a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala @@ -10,8 +10,6 @@ trait Tags { trait Tag { self: Typer => - private val runDefinitions = currentRun.runDefinitions - private def resolveTag(pos: Position, taggedTp: Type, allowMaterialization: Boolean) = enteringTyper { context.withMacros(enabled = allowMaterialization) { inferImplicitByType(taggedTp, context, pos).tree } } @@ -56,7 +54,7 @@ trait Tags { // if someone requests a type tag, but scala-reflect.jar isn't on the library classpath, then bail if (pre == NoType && ApiUniverseClass == NoSymbol) EmptyTree else { - val tagSym = if (concrete) runDefinitions.TypeTagClass else runDefinitions.WeakTypeTagClass + val tagSym = if (concrete) currentRun.runDefinitions.TypeTagClass else currentRun.runDefinitions.WeakTypeTagClass val tagTp = if (pre == NoType) TypeRef(ApiUniverseClass.toTypeConstructor, tagSym, List(tp)) else singleType(pre, pre member tagSym.name) val taggedTp = appliedType(tagTp, List(tp)) resolveTag(pos, taggedTp, allowMaterialization) From 71fbb6c67eb395c165a78e46d9912853dea2425e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 9 May 2018 20:34:12 +1000 Subject: [PATCH 1414/2793] Begone, toOrigin, approximateAbstracts, and AdjustedTypeArgs! 
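The patch itself does not spell out the motivation, but presumably it is the same as in the preceding commits: these objects were members of `Inferencer`, so every `Inferencer` instance carried its own copies, and hoisting them to the enclosing `Infer` trait leaves a single shared copy (callers now go through `analyzer.approximateAbstracts`, as the `MatchAnalysis` hunk shows). A minimal editor-added sketch of the per-instance behaviour of nested objects, with hypothetical names:

```
object NestedObjectSketch {
  class Outer {
    object Inner                  // a per-instance member, materialized lazily on first access
  }
  object Inner                    // hoisted for contrast: one shared instance, as after this patch

  def main(args: Array[String]): Unit = {
    val a = new Outer
    val b = new Outer
    println(a.Inner eq b.Inner)   // false: every Outer pays for its own copy
    println(a.Inner eq a.Inner)   // true: but it is cached within one instance
  }
}
```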
(cherry picked from commit 37b3892fa06cf30e0335397a2d020ea757309c37) --- .../nsc/transform/patmat/MatchAnalysis.scala | 4 +- .../scala/tools/nsc/typechecker/Infer.scala | 91 ++++++++++--------- 2 files changed, 48 insertions(+), 47 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index ac3f4ff93c6b..70abdd8b54aa 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -118,7 +118,7 @@ trait TreeAndTypeAnalysis extends Debugging { // make sure it's not a primitive, else (5: Byte) match { case 5 => ... } sees no Byte case sym if sym.isSealed => - val tpApprox = typer.infer.approximateAbstracts(tp) + val tpApprox = analyzer.approximateAbstracts(tp) val pre = tpApprox.prefix def filterChildren(children: List[Symbol]): List[Type] = { @@ -130,7 +130,7 @@ trait TreeAndTypeAnalysis extends Debugging { val memberType = nestedMemberType(sym, pre, tpApprox.typeSymbol.owner) val subTp = appliedType(memberType, sym.typeParams.map(_ => WildcardType)) - val subTpApprox = typer.infer.approximateAbstracts(subTp) // TODO: needed? + val subTpApprox = analyzer.approximateAbstracts(subTp) // TODO: needed? // debug.patmat("subtp"+(subTpApprox <:< tpApprox, subTpApprox, tpApprox)) if (subTpApprox <:< tpApprox) Some(checkableType(subTp)) else None diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index e766b1544223..affc06fafa39 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -425,37 +425,6 @@ trait Infer extends Checkable { tvars map (_ => WildcardType) } - /** [Martin] Can someone comment this please? I have no idea what it's for - * and the code is not exactly readable. - */ - object AdjustedTypeArgs { - val Result = mutable.LinkedHashMap - type Result = mutable.LinkedHashMap[Symbol, Option[Type]] - - def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists( - (m collect {case (p, Some(a)) => (p, a)}).unzip )) - - object Undets { - def unapply(m: Result): Some[(List[Symbol], List[Type], List[Symbol])] = Some(toLists{ - val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null) - val (okArgs, okTparams) = ok.unzip - (okArgs, okTparams, nok.keys) - }) - } - - object AllArgsAndUndets { - def unapply(m: Result): Some[(List[Symbol], List[Type], List[Type], List[Symbol])] = Some(toLists{ - val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null) - val (okArgs, okTparams) = ok.unzip - (okArgs, okTparams, m.values.map(_.getOrElse(NothingTpe)), nok.keys) - }) - } - - private def toLists[A1, A2](pxs: (Iterable[A1], Iterable[A2])) = (pxs._1.toList, pxs._2.toList) - private def toLists[A1, A2, A3](pxs: (Iterable[A1], Iterable[A2], Iterable[A3])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList) - private def toLists[A1, A2, A3, A4](pxs: (Iterable[A1], Iterable[A2], Iterable[A3], Iterable[A4])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList, pxs._4.toList) - } - /** Retract arguments that were inferred to Nothing because inference failed. Correct types for repeated params. 
* * We detect Nothing-due-to-failure by only retracting a parameter if either: @@ -1221,20 +1190,6 @@ trait Infer extends Checkable { PatternTypeIncompatibleWithPtError2(pat, pt1, pt) } - object toOrigin extends TypeMap { - def apply(tp: Type): Type = tp match { - case TypeVar(origin, _) => origin - case _ => mapOver(tp) - } - } - - object approximateAbstracts extends TypeMap { - def apply(tp: Type): Type = tp.dealiasWiden match { - case TypeRef(pre, sym, _) if sym.isAbstractType => WildcardType - case _ => mapOver(tp) - } - } - /** Collects type parameters referred to in a type. */ def freeTypeParamsOfTerms(tp: Type): List[Symbol] = { @@ -1450,4 +1405,50 @@ trait Infer extends Checkable { } } } + + object toOrigin extends TypeMap { + def apply(tp: Type): Type = tp match { + case TypeVar(origin, _) => origin + case _ => mapOver(tp) + } + } + + object approximateAbstracts extends TypeMap { + def apply(tp: Type): Type = tp.dealiasWiden match { + case TypeRef(pre, sym, _) if sym.isAbstractType => WildcardType + case _ => mapOver(tp) + } + } + + /** [Martin] Can someone comment this please? I have no idea what it's for + * and the code is not exactly readable. + */ + object AdjustedTypeArgs { + val Result = mutable.LinkedHashMap + type Result = mutable.LinkedHashMap[Symbol, Option[Type]] + + def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists( + (m collect {case (p, Some(a)) => (p, a)}).unzip )) + + object Undets { + def unapply(m: Result): Some[(List[Symbol], List[Type], List[Symbol])] = Some(toLists{ + val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null) + val (okArgs, okTparams) = ok.unzip + (okArgs, okTparams, nok.keys) + }) + } + + object AllArgsAndUndets { + def unapply(m: Result): Some[(List[Symbol], List[Type], List[Type], List[Symbol])] = Some(toLists{ + val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null) + val (okArgs, okTparams) = ok.unzip + (okArgs, okTparams, m.values.map(_.getOrElse(NothingTpe)), nok.keys) + }) + } + + private def toLists[A1, A2](pxs: (Iterable[A1], Iterable[A2])) = (pxs._1.toList, pxs._2.toList) + private def toLists[A1, A2, A3](pxs: (Iterable[A1], Iterable[A2], Iterable[A3])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList) + private def toLists[A1, A2, A3, A4](pxs: (Iterable[A1], Iterable[A2], Iterable[A3], Iterable[A4])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList, pxs._4.toList) + } + } From e09adccbc96ca65ab4292f574b8f984a7cb5da4f Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 9 May 2018 20:45:37 +1000 Subject: [PATCH 1415/2793] Send Typer.checkUnused to the glue factory (cherry picked from commit 597cd9e301193e7bd95970365bd3743cc109ab3c) --- .../tools/nsc/typechecker/Analyzer.scala | 2 +- .../nsc/typechecker/TypeDiagnostics.scala | 394 +++++++++--------- 2 files changed, 198 insertions(+), 198 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index ccd414cc457d..4c399d705298 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -107,7 +107,7 @@ trait Analyzer extends AnyRef if (settings.warnUnusedImport) warnUnusedImports(unit) if (settings.warnUnused.isSetByUser) - typer checkUnused unit + new checkUnused(typer).apply(unit) } finally { unit.toCheck.clear() diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala 
index fc1cf9acc471..a7a1c2c07b20 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -466,108 +466,102 @@ trait TypeDiagnostics { } } - trait TyperDiagnostics { - self: Typer => - - def permanentlyHiddenWarning(pos: Position, hidden: Name, defn: Symbol) = - context.warning(pos, "imported `%s' is permanently hidden by definition of %s".format(hidden, defn.fullLocationString)) - - object checkUnused { - val ignoreNames: Set[TermName] = Set( - "readResolve", "readObject", "writeObject", "writeReplace" - ).map(TermName(_)) - - class UnusedPrivates extends Traverser { - val defnTrees = ListBuffer[MemberDef]() - val targets = mutable.Set[Symbol]() - val setVars = mutable.Set[Symbol]() - val treeTypes = mutable.Set[Type]() - val params = mutable.Set[Symbol]() - val patvars = mutable.Set[Symbol]() - - def defnSymbols = defnTrees.toList map (_.symbol) - def localVars = defnSymbols filter (t => t.isLocalToBlock && t.isVar) - - def qualifiesTerm(sym: Symbol) = ( - (sym.isModule || sym.isMethod || sym.isPrivateLocal || sym.isLocalToBlock) + class checkUnused(typer: Typer) { + val ignoreNames: Set[TermName] = Set( + "readResolve", "readObject", "writeObject", "writeReplace" + ).map(TermName(_)) + + class UnusedPrivates extends Traverser { + val defnTrees = ListBuffer[MemberDef]() + val targets = mutable.Set[Symbol]() + val setVars = mutable.Set[Symbol]() + val treeTypes = mutable.Set[Type]() + val params = mutable.Set[Symbol]() + val patvars = mutable.Set[Symbol]() + + def defnSymbols = defnTrees.toList map (_.symbol) + def localVars = defnSymbols filter (t => t.isLocalToBlock && t.isVar) + + def qualifiesTerm(sym: Symbol) = ( + (sym.isModule || sym.isMethod || sym.isPrivateLocal || sym.isLocalToBlock) && !nme.isLocalName(sym.name) && !sym.isParameter && !sym.isParamAccessor // could improve this, but it's a pain && !sym.isEarlyInitialized // lots of false positives in the way these are encoded && !(sym.isGetter && sym.accessed.isEarlyInitialized) ) - def qualifiesType(sym: Symbol) = !sym.isDefinedInPackage - def qualifies(sym: Symbol) = ( - (sym ne null) + def qualifiesType(sym: Symbol) = !sym.isDefinedInPackage + def qualifies(sym: Symbol) = ( + (sym ne null) && (sym.isTerm && qualifiesTerm(sym) || sym.isType && qualifiesType(sym)) ) - override def traverse(t: Tree): Unit = if (!t.isErrorTyped) { - val sym = t.symbol - t match { - case m: MemberDef if qualifies(sym) => - t match { - case ValDef(mods@_, name@_, tpt@_, rhs@_) if wasPatVarDef(t) => - if (settings.warnUnusedPatVars && !atBounded(t)) patvars += sym - case DefDef(mods@_, name@_, tparams@_, vparamss, tpt@_, rhs@_) if !sym.isAbstract && !sym.isDeprecated && !sym.isMacro => - if (sym.isPrimaryConstructor) - for (cpa <- sym.owner.constrParamAccessors if cpa.isPrivateLocal) params += cpa - else if (sym.isSynthetic && sym.isImplicit) return - else if (!sym.isConstructor && rhs.symbol != Predef_???) 
- for (vs <- vparamss) params ++= vs.map(_.symbol) - defnTrees += m - case _ => - defnTrees += m - } - case CaseDef(pat, guard@_, rhs@_) if settings.warnUnusedPatVars => - pat.foreach { - case b @ Bind(n, _) if !atBounded(b) && n != nme.DEFAULT_CASE => patvars += b.symbol - case _ => - } - case _: RefTree if sym ne null => targets += sym - case Assign(lhs, _) if lhs.symbol != null => setVars += lhs.symbol - case _ => - } + override def traverse(t: Tree): Unit = if (!t.isErrorTyped) { + val sym = t.symbol + t match { + case m: MemberDef if qualifies(sym) => + t match { + case ValDef(mods@_, name@_, tpt@_, rhs@_) if wasPatVarDef(t) => + if (settings.warnUnusedPatVars && !atBounded(t)) patvars += sym + case DefDef(mods@_, name@_, tparams@_, vparamss, tpt@_, rhs@_) if !sym.isAbstract && !sym.isDeprecated && !sym.isMacro => + if (sym.isPrimaryConstructor) + for (cpa <- sym.owner.constrParamAccessors if cpa.isPrivateLocal) params += cpa + else if (sym.isSynthetic && sym.isImplicit) return + else if (!sym.isConstructor && rhs.symbol != Predef_???) + for (vs <- vparamss) params ++= vs.map(_.symbol) + defnTrees += m + case _ => + defnTrees += m + } + case CaseDef(pat, guard@_, rhs@_) if settings.warnUnusedPatVars => + pat.foreach { + case b @ Bind(n, _) if !atBounded(b) && n != nme.DEFAULT_CASE => patvars += b.symbol + case _ => + } + case _: RefTree if sym ne null => targets += sym + case Assign(lhs, _) if lhs.symbol != null => setVars += lhs.symbol + case _ => + } - if (t.tpe ne null) { - for (tp <- t.tpe if !treeTypes(tp)) { - // Include references to private/local aliases (which might otherwise refer to an enclosing class) - val isAlias = { - val td = tp.typeSymbolDirect - td.isAliasType && (td.isLocal || td.isPrivate) - } - // Ignore type references to an enclosing class. A reference to C must be outside C to avoid warning. - if (isAlias || !currentOwner.hasTransOwner(tp.typeSymbol)) tp match { - case NoType | NoPrefix => - case NullaryMethodType(_) => - case MethodType(_, _) => - case SingleType(_, _) => - case ConstantType(Constant(k: Type)) => - log(s"classOf $k referenced from $currentOwner") - treeTypes += k - case _ => - log(s"${if (isAlias) "alias " else ""}$tp referenced from $currentOwner") - treeTypes += tp - } + if (t.tpe ne null) { + for (tp <- t.tpe if !treeTypes(tp)) { + // Include references to private/local aliases (which might otherwise refer to an enclosing class) + val isAlias = { + val td = tp.typeSymbolDirect + td.isAliasType && (td.isLocal || td.isPrivate) } - // e.g. val a = new Foo ; new a.Bar ; don't let a be reported as unused. - for (p <- t.tpe.prefix) whenever(p) { - case SingleType(_, sym) => targets += sym + // Ignore type references to an enclosing class. A reference to C must be outside C to avoid warning. + if (isAlias || !currentOwner.hasTransOwner(tp.typeSymbol)) tp match { + case NoType | NoPrefix => + case NullaryMethodType(_) => + case MethodType(_, _) => + case SingleType(_, _) => + case ConstantType(Constant(k: Type)) => + log(s"classOf $k referenced from $currentOwner") + treeTypes += k + case _ => + log(s"${if (isAlias) "alias " else ""}$tp referenced from $currentOwner") + treeTypes += tp } } - super.traverse(t) + // e.g. val a = new Foo ; new a.Bar ; don't let a be reported as unused. 
+ for (p <- t.tpe.prefix) whenever(p) { + case SingleType(_, sym) => targets += sym + } } - def isUnusedType(m: Symbol): Boolean = ( - m.isType + super.traverse(t) + } + def isUnusedType(m: Symbol): Boolean = ( + m.isType && !m.isTypeParameterOrSkolem // would be nice to improve this && (m.isPrivate || m.isLocalToBlock) && !(treeTypes.exists(_.exists(_.typeSymbolDirect == m))) ) - def isSyntheticWarnable(sym: Symbol) = ( - sym.isDefaultGetter + def isSyntheticWarnable(sym: Symbol) = ( + sym.isDefaultGetter ) - def isUnusedTerm(m: Symbol): Boolean = ( - m.isTerm + def isUnusedTerm(m: Symbol): Boolean = ( + m.isTerm && (!m.isSynthetic || isSyntheticWarnable(m)) && ((m.isPrivate && !(m.isConstructor && m.owner.isAbstract)) || m.isLocalToBlock) && !targets(m) @@ -576,143 +570,149 @@ trait TypeDiagnostics { && !isConstantType(m.info.resultType) // subject to constant inlining && !treeTypes.exists(_ contains m) // e.g. val a = new Foo ; new a.Bar ) - def isUnusedParam(m: Symbol): Boolean = ( - isUnusedTerm(m) + def isUnusedParam(m: Symbol): Boolean = ( + isUnusedTerm(m) && !m.isDeprecated && !m.owner.isDefaultGetter && !(m.isParamAccessor && ( - m.owner.isImplicit || + m.owner.isImplicit || targets.exists(s => s.isParameter && s.name == m.name && s.owner.isConstructor && s.owner.owner == m.owner) // exclude ctor params )) ) - def sympos(s: Symbol): Int = - if (s.pos.isDefined) s.pos.point else if (s.isTerm) s.asTerm.referenced.pos.point else -1 - def treepos(t: Tree): Int = - if (t.pos.isDefined) t.pos.point else sympos(t.symbol) - - def unusedTypes = defnTrees.toList.filter(t => isUnusedType(t.symbol)).sortBy(treepos) - def unusedTerms = { - val all = defnTrees.toList.filter(v => isUnusedTerm(v.symbol)) - - // is this a getter-setter pair? and why is this a difficult question for traits? - def sameReference(g: Symbol, s: Symbol) = - if (g.accessed.exists && s.accessed.exists) g.accessed == s.accessed - else g.owner == s.owner && g.setterName == s.name //sympos(g) == sympos(s) - - // filter out setters if already warning for getter. - val clean = all.filterNot(v => v.symbol.isSetter && all.exists(g => g.symbol.isGetter && sameReference(g.symbol, v.symbol))) - clean.sortBy(treepos) - } - // local vars which are never set, except those already returned in unused - def unsetVars = localVars.filter(v => !setVars(v) && !isUnusedTerm(v)).sortBy(sympos) - def unusedParams = params.toList.filter(isUnusedParam).sortBy(sympos) - def inDefinedAt(p: Symbol) = p.owner.isMethod && p.owner.name == nme.isDefinedAt && p.owner.owner.isAnonymousFunction - def unusedPatVars = patvars.toList.filter(p => isUnusedTerm(p) && !inDefinedAt(p)).sortBy(sympos) + def sympos(s: Symbol): Int = + if (s.pos.isDefined) s.pos.point else if (s.isTerm) s.asTerm.referenced.pos.point else -1 + def treepos(t: Tree): Int = + if (t.pos.isDefined) t.pos.point else sympos(t.symbol) + + def unusedTypes = defnTrees.toList.filter(t => isUnusedType(t.symbol)).sortBy(treepos) + def unusedTerms = { + val all = defnTrees.toList.filter(v => isUnusedTerm(v.symbol)) + + // is this a getter-setter pair? and why is this a difficult question for traits? + def sameReference(g: Symbol, s: Symbol) = + if (g.accessed.exists && s.accessed.exists) g.accessed == s.accessed + else g.owner == s.owner && g.setterName == s.name //sympos(g) == sympos(s) + + // filter out setters if already warning for getter. 
+ val clean = all.filterNot(v => v.symbol.isSetter && all.exists(g => g.symbol.isGetter && sameReference(g.symbol, v.symbol))) + clean.sortBy(treepos) } + // local vars which are never set, except those already returned in unused + def unsetVars = localVars.filter(v => !setVars(v) && !isUnusedTerm(v)).sortBy(sympos) + def unusedParams = params.toList.filter(isUnusedParam).sortBy(sympos) + def inDefinedAt(p: Symbol) = p.owner.isMethod && p.owner.name == nme.isDefinedAt && p.owner.owner.isAnonymousFunction + def unusedPatVars = patvars.toList.filter(p => isUnusedTerm(p) && !inDefinedAt(p)).sortBy(sympos) + } - object skipMacroCall extends UnusedPrivates { - override def qualifiesTerm(sym: Symbol): Boolean = - super.qualifiesTerm(sym) && !sym.isMacro - } - object skipMacroExpansion extends UnusedPrivates { - override def traverse(t: Tree): Unit = - if (!hasMacroExpansionAttachment(t)) super.traverse(t) - } - object checkMacroExpandee extends UnusedPrivates { - override def traverse(t: Tree): Unit = - super.traverse(if (hasMacroExpansionAttachment(t)) macroExpandee(t) else t) - } + object skipMacroCall extends UnusedPrivates { + override def qualifiesTerm(sym: Symbol): Boolean = + super.qualifiesTerm(sym) && !sym.isMacro + } + object skipMacroExpansion extends UnusedPrivates { + override def traverse(t: Tree): Unit = + if (!hasMacroExpansionAttachment(t)) super.traverse(t) + } + object checkMacroExpandee extends UnusedPrivates { + override def traverse(t: Tree): Unit = + super.traverse(if (hasMacroExpansionAttachment(t)) macroExpandee(t) else t) + } - private def warningsEnabled: Boolean = { - val ss = settings - import ss._ - warnUnusedPatVars || warnUnusedPrivates || warnUnusedLocals || warnUnusedParams - } + private def warningsEnabled: Boolean = { + val ss = settings + import ss._ + warnUnusedPatVars || warnUnusedPrivates || warnUnusedLocals || warnUnusedParams + } - def run(unusedPrivates: UnusedPrivates)(body: Tree): Unit = { - unusedPrivates.traverse(body) - - if (settings.warnUnusedLocals || settings.warnUnusedPrivates) { - val valAdvice = "is never updated: consider using immutable val" - for (defn: DefTree <- unusedPrivates.unusedTerms) { - val sym = defn.symbol - val pos = ( - if (defn.pos.isDefined) defn.pos - else if (sym.pos.isDefined) sym.pos - else sym match { - case sym: TermSymbol => sym.referenced.pos - case _ => NoPosition - } + def run(unusedPrivates: UnusedPrivates)(body: Tree): Unit = { + unusedPrivates.traverse(body) + + if (settings.warnUnusedLocals || settings.warnUnusedPrivates) { + val valAdvice = "is never updated: consider using immutable val" + for (defn: DefTree <- unusedPrivates.unusedTerms) { + val sym = defn.symbol + val pos = ( + if (defn.pos.isDefined) defn.pos + else if (sym.pos.isDefined) sym.pos + else sym match { + case sym: TermSymbol => sym.referenced.pos + case _ => NoPosition + } ) - val why = if (sym.isPrivate) "private" else "local" - var cond = "is never used" - val what = ( - if (sym.isDefaultGetter) "default argument" - else if (sym.isConstructor) "constructor" - else if ( - sym.isVar - || sym.isGetter && (sym.accessed.isVar || (sym.owner.isTrait && !sym.hasFlag(STABLE))) - ) s"var ${sym.name.getterName.decoded}" - else if ( - sym.isVal - || sym.isGetter && (sym.accessed.isVal || (sym.owner.isTrait && sym.hasFlag(STABLE))) - || sym.isLazy - ) s"val ${sym.name.decoded}" - else if (sym.isSetter) { cond = valAdvice ; s"var ${sym.name.getterName.decoded}" } - else if (sym.isMethod) s"method ${sym.name.decoded}" - else if (sym.isModule) s"object 
${sym.name.decoded}" - else "term" + val why = if (sym.isPrivate) "private" else "local" + var cond = "is never used" + val what = ( + if (sym.isDefaultGetter) "default argument" + else if (sym.isConstructor) "constructor" + else if ( + sym.isVar + || sym.isGetter && (sym.accessed.isVar || (sym.owner.isTrait && !sym.hasFlag(STABLE))) + ) s"var ${sym.name.getterName.decoded}" + else if ( + sym.isVal + || sym.isGetter && (sym.accessed.isVal || (sym.owner.isTrait && sym.hasFlag(STABLE))) + || sym.isLazy + ) s"val ${sym.name.decoded}" + else if (sym.isSetter) { cond = valAdvice ; s"var ${sym.name.getterName.decoded}" } + else if (sym.isMethod) s"method ${sym.name.decoded}" + else if (sym.isModule) s"object ${sym.name.decoded}" + else "term" ) - context.warning(pos, s"$why $what in ${sym.owner} $cond") - } - for (v <- unusedPrivates.unsetVars) { - context.warning(v.pos, s"local var ${v.name} in ${v.owner} ${valAdvice}") - } - for (t <- unusedPrivates.unusedTypes) { - val sym = t.symbol - val wrn = if (sym.isPrivate) settings.warnUnusedPrivates else settings.warnUnusedLocals - if (wrn) { - val why = if (sym.isPrivate) "private" else "local" - context.warning(t.pos, s"$why ${sym.fullLocationString} is never used") - } - } + typer.context.warning(pos, s"$why $what in ${sym.owner} $cond") } - if (settings.warnUnusedPatVars) { - for (v <- unusedPrivates.unusedPatVars) - context.warning(v.pos, s"pattern var ${v.name} in ${v.owner} is never used; `${v.name}@_' suppresses this warning") + for (v <- unusedPrivates.unsetVars) { + typer.context.warning(v.pos, s"local var ${v.name} in ${v.owner} ${valAdvice}") } - if (settings.warnUnusedParams) { - def isImplementation(m: Symbol): Boolean = { - def classOf(s: Symbol): Symbol = if (s.isClass || s == NoSymbol) s else classOf(s.owner) - val opc = new overridingPairs.Cursor(classOf(m)) - opc.iterator.exists(pair => pair.low == m) + for (t <- unusedPrivates.unusedTypes) { + val sym = t.symbol + val wrn = if (sym.isPrivate) settings.warnUnusedPrivates else settings.warnUnusedLocals + if (wrn) { + val why = if (sym.isPrivate) "private" else "local" + typer.context.warning(t.pos, s"$why ${sym.fullLocationString} is never used") } - def isConvention(p: Symbol): Boolean = { - (p.name.decoded == "args" && p.owner.isMethod && p.owner.name.decoded == "main") || + } + } + if (settings.warnUnusedPatVars) { + for (v <- unusedPrivates.unusedPatVars) + typer.context.warning(v.pos, s"pattern var ${v.name} in ${v.owner} is never used; `${v.name}@_' suppresses this warning") + } + if (settings.warnUnusedParams) { + def isImplementation(m: Symbol): Boolean = { + def classOf(s: Symbol): Symbol = if (s.isClass || s == NoSymbol) s else classOf(s.owner) + val opc = new overridingPairs.Cursor(classOf(m)) + opc.iterator.exists(pair => pair.low == m) + } + def isConvention(p: Symbol): Boolean = { + (p.name.decoded == "args" && p.owner.isMethod && p.owner.name.decoded == "main") || (p.tpe =:= typeOf[scala.Predef.DummyImplicit]) - } - def warningIsOnFor(s: Symbol) = if (s.isImplicit) settings.warnUnusedImplicits else settings.warnUnusedExplicits - def warnable(s: Symbol) = ( - warningIsOnFor(s) + } + def warningIsOnFor(s: Symbol) = if (s.isImplicit) settings.warnUnusedImplicits else settings.warnUnusedExplicits + def warnable(s: Symbol) = ( + warningIsOnFor(s) && !isImplementation(s.owner) && !isConvention(s) ) - for (s <- unusedPrivates.unusedParams if warnable(s)) - context.warning(s.pos, s"parameter $s in ${s.owner} is never used") - } + for (s <- unusedPrivates.unusedParams if 
warnable(s)) + typer.context.warning(s.pos, s"parameter $s in ${s.owner} is never used") } - def apply(unit: CompilationUnit): Unit = if (warningsEnabled && !unit.isJava && !context.reporter.hasErrors) { - val body = unit.body - // TODO the message should distinguish whether the unusage is before or after macro expansion. - settings.warnMacros.value match { - case "none" => run(skipMacroExpansion)(body) - case "before" => run(checkMacroExpandee)(body) - case "after" => run(skipMacroCall)(body) - case "both" => run(checkMacroExpandee)(body) ; run(skipMacroCall)(body) - } + } + def apply(unit: CompilationUnit): Unit = if (warningsEnabled && !unit.isJava && !typer.context.reporter.hasErrors) { + val body = unit.body + // TODO the message should distinguish whether the unusage is before or after macro expansion. + settings.warnMacros.value match { + case "none" => run(skipMacroExpansion)(body) + case "before" => run(checkMacroExpandee)(body) + case "after" => run(skipMacroCall)(body) + case "both" => run(checkMacroExpandee)(body) ; run(skipMacroCall)(body) } } + } + + trait TyperDiagnostics { + self: Typer => + + def permanentlyHiddenWarning(pos: Position, hidden: Name, defn: Symbol) = + context.warning(pos, "imported `%s' is permanently hidden by definition of %s".format(hidden, defn.fullLocationString)) object checkDead { private val exprStack: mutable.Stack[Symbol] = mutable.Stack(NoSymbol) From 21e459a1779bbd9803ce3af272e7527dbab0dbca Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 9 May 2018 21:17:52 +1000 Subject: [PATCH 1416/2793] One context field ought to be enough for Typer, don't you think? (cherry picked from commit 447e9f71c592d82318204119857ad6efde40cdf5) --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index a30f52440a8b..9559736288cc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -182,7 +182,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with PatternTyper with TyperContextErrors { - import context0.unit + private def unit = context.unit import typeDebug.ptTree import TyperErrorGen._ @@ -3915,7 +3915,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper /** Compute an existential type from raw hidden symbols `syms` and type `tp` */ - def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, context0.owner) + def packSymbols(hidden: List[Symbol], tp: Type): Type = global.packSymbols(hidden, tp, context.owner) def isReferencedFrom(ctx: Context, sym: Symbol): Boolean = ( ctx.owner.isTerm && (ctx.scope.exists { dcl => dcl.isInitialized && (dcl.info contains sym) }) || { From 43350253e149f023bd939fe45f22d8f9f69d5c2c Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 11 May 2018 00:02:27 -0700 Subject: [PATCH 1417/2793] CommandLineParser improvements Tokenizing used to snip the line and knit the args, this commit denits the deknitting. Adds a benchmark showing linear behavior and a unit test to show correctness. Departing from the previous notion of correctness, internal quotes are respected. `"abc"xyz` is `abcxyz`. 
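For illustration, the tokenization this commit pins down, written as REPL-style calls with expected results in comments (mirroring the assertions in the new CommandLineParserTest; nothing here beyond what the test checks):

```
import scala.tools.cmd.CommandLineParser.tokenize

tokenize("x y z")         // List("x", "y", "z")
tokenize("""x "y" z""")   // List("x", "y", "z")   quotes are stripped
tokenize("x' y 'z")       // List("x y z")         interior quotes are respected
tokenize(""""abc"xyz""")  // List("abcxyz")
tokenize(""""x""")        // throws CommandLineParser.ParseException (unmatched quote)
```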
(cherry picked from commit 5df37eab5d485b7e95771f657e5e11bcca166e90) --- .../scala/tools/cmd/CommandLineParser.scala | 147 ++++++++++-------- .../tools/cmd/CommandLineParserTest.scala | 45 ++++++ 2 files changed, 129 insertions(+), 63 deletions(-) create mode 100644 test/junit/scala/tools/cmd/CommandLineParserTest.scala diff --git a/src/compiler/scala/tools/cmd/CommandLineParser.scala b/src/compiler/scala/tools/cmd/CommandLineParser.scala index 6abeed31600f..3a5db0042187 100644 --- a/src/compiler/scala/tools/cmd/CommandLineParser.scala +++ b/src/compiler/scala/tools/cmd/CommandLineParser.scala @@ -1,82 +1,103 @@ /* NEST (New Scala Test) - * Copyright 2007-2013 LAMP/EPFL + * Copyright 2007-2018 LAMP/EPFL * @author Paul Phillips */ - -package scala.tools -package cmd +package scala.tools.cmd import scala.annotation.tailrec -/** A simple (overly so) command line parser. - * !!! This needs a thorough test suite to make sure quoting is - * done correctly and portably. +/** A simple enough command line parser. */ object CommandLineParser { - // splits a string into a quoted prefix and the rest of the string, - // taking escaping into account (using \) - // `"abc"def` will match as `DoubleQuoted(abc, def)` - private class QuotedExtractor(quote: Char) { - def unapply(in: String): Option[(String, String)] = { - val del = quote.toString - if (in startsWith del) { - var escaped = false - val (quoted, next) = (in substring 1) span { - case `quote` if !escaped => false - case '\\' if !escaped => escaped = true; true - case _ => escaped = false; true - } - // the only way to get out of the above loop is with an empty next or !escaped - // require(next.isEmpty || !escaped) - if (next startsWith del) Some((quoted, next substring 1)) - else None - } else None + private final val DQ = '"' + private final val SQ = '\'' + + /** Split the line into tokens separated by whitespace or quotes. 
+ * + * @return either an error message or reverse list of tokens + */ + private def tokens(in: String) = { + import Character.isWhitespace + import java.lang.{StringBuilder => Builder} + import collection.mutable.ArrayBuffer + + var accum: List[String] = Nil + var pos = 0 + var start = 0 + val qpos = new ArrayBuffer[Int](16) // positions of paired quotes + + def cur: Int = if (done) -1 else in.charAt(pos) + def bump() = pos += 1 + def done = pos >= in.length + + def skipToQuote(q: Int) = { + var escaped = false + def terminal = in.charAt(pos) match { + case _ if escaped => escaped = false ; false + case '\\' => escaped = true ; false + case `q` => true + case _ => false + } + while (!done && !terminal) pos += 1 + !done } - } - private object DoubleQuoted extends QuotedExtractor('"') - private object SingleQuoted extends QuotedExtractor('\'') - object Word { - private val regex = """(\S+)""".r - def unapply(s: String): Option[(String, String)] = { - regex.findPrefixOf(s) match { - case Some(prefix) => Some(prefix, s.substring(prefix.length)) - case None => None + def skipToDelim(): Boolean = + cur match { + case q @ (DQ | SQ) => { qpos.append(pos); bump(); skipToQuote(q) } && { qpos.append(pos); bump(); skipToDelim() } + case -1 => true + case c if isWhitespace(c) => true + case _ => bump(); skipToDelim() + } + def skipWhitespace() = while (isWhitespace(cur)) pos += 1 + def copyText() = { + val buf = new Builder + var p = start + var i = 0 + while (p < pos) { + if (i >= qpos.size) { + buf.append(in, p, pos) + p = pos + } else if (p == qpos(i)) { + buf.append(in, qpos(i)+1, qpos(i+1)) + p = qpos(i+1)+1 + i += 2 + } else { + buf.append(in, p, qpos(i)) + p = qpos(i) + } } + buf.toString } - } - - // parse `in` for an argument, return it and the remainder of the input (or an error message) - // (argument may be in single/double quotes, taking escaping into account, quotes are stripped) - private def argument(in: String): Either[String, (String, String)] = in match { - case DoubleQuoted(arg, rest) => Right((arg, rest)) - case SingleQuoted(arg, rest) => Right((arg, rest)) - case Word(arg, rest) => Right((arg, rest)) - case _ => Left(s"Illegal argument: $in") - } + def text() = { + val res = + if (qpos.isEmpty) in.substring(start, pos) + else if (qpos(0) == start && qpos(1) == pos) in.substring(start+1, pos-1) + else copyText() + qpos.clear() + res + } + def badquote = Left("Unmatched quote") - // parse a list of whitespace-separated arguments (ignoring whitespace in quoted arguments) - @tailrec private def commandLine(in: String, accum: List[String] = Nil): Either[String, (List[String], String)] = { - val trimmed = in.trim - if (trimmed.isEmpty) Right((accum.reverse, "")) - else argument(trimmed) match { - case Right((arg, next)) => - val leadingWhitespaceLen = next.prefixLength(Character.isWhitespace) - val rest = next.substring(leadingWhitespaceLen) - if (leadingWhitespaceLen == 0 && rest.nonEmpty) - Left("Arguments should be separated by whitespace.") // TODO: can this happen? 
- else - commandLine(rest, arg :: accum) - case Left(msg) => Left(msg) + @tailrec def loop(): Either[String, List[String]] = { + skipWhitespace() + start = pos + if (done) Right(accum) + else if (!skipToDelim()) badquote + else { + accum = text() :: accum + loop() + } } + loop() } class ParseException(msg: String) extends RuntimeException(msg) - def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x)) - def tokenize(line: String, errorFn: String => Unit): List[String] = { - commandLine(line) match { - case Right((args, _)) => args - case Left(msg) => errorFn(msg) ; Nil + def tokenize(line: String, errorFn: String => Unit): List[String] = + tokens(line) match { + case Right(args) => args.reverse + case Left(msg) => errorFn(msg) ; Nil } - } + + def tokenize(line: String): List[String] = tokenize(line, x => throw new ParseException(x)) } diff --git a/test/junit/scala/tools/cmd/CommandLineParserTest.scala b/test/junit/scala/tools/cmd/CommandLineParserTest.scala new file mode 100644 index 000000000000..6a0380265c49 --- /dev/null +++ b/test/junit/scala/tools/cmd/CommandLineParserTest.scala @@ -0,0 +1,45 @@ +package scala.tools.cmd + +import org.junit.Assert._ +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import scala.tools.testing.AssertUtil.assertThrows + +@RunWith(classOf[JUnit4]) +class CommandLineParserTest { + import CommandLineParser.{tokenize, ParseException} + + @Test + def parserTokenizes(): Unit = { + assertEquals(Nil, tokenize("")) + assertEquals(List("x"), tokenize("x")) + assertEquals(List("x"), tokenize(" x ")) + assertEquals(List("x","y"), tokenize("x y")) + assertEquals(List("x","y","z"), tokenize("x y z")) + } + @Test + def parserTrims(): Unit = { + assertEquals(Nil, tokenize(" ")) + assertEquals(List("x"), tokenize(" x ")) + assertEquals(List("x"), tokenize("\nx\n")) + assertEquals(List("x","y","z"), tokenize(" x y z ")) + } + @Test + def parserQuotes(): Unit = { + assertEquals(List("x"), tokenize("'x'")) + assertEquals(List("x"), tokenize(""""x"""")) + assertEquals(List("x","y","z"), tokenize("x 'y' z")) + assertEquals(List("x"," y ","z"), tokenize("x ' y ' z")) + assertEquals(List("x","y","z"), tokenize("""x "y" z""")) + assertEquals(List("x"," y ","z"), tokenize("""x " y " z""")) + // interior quotes + assertEquals(List("x y z"), tokenize("x' y 'z")) // was assertEquals(List("x'","y","'z"), tokenize("x' y 'z")) + assertEquals(List("x\ny\nz"), tokenize("x'\ny\n'z")) + assertEquals(List("x'y'z"), tokenize("""x"'y'"z""")) + assertEquals(List("abcxyz"), tokenize(""""abc"xyz""")) + // missing quotes + assertThrows[ParseException](tokenize(""""x""")) // was assertEquals(List("\"x"), tokenize(""""x""")) + assertThrows[ParseException](tokenize("""x'""")) + } +} From 1d366a815713d6dbd88e00c7647b3ea3cf817587 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 12 May 2018 17:03:25 +0200 Subject: [PATCH 1418/2793] Rework named/defaults bookkeeping as a tree attachment This saves a field in Context. 
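A compressed sketch of the new bookkeeping (compiler-internal names exactly as in the diff below; not a standalone program): the block built for a named/default application now carries a NamedApplyInfo attachment, and consumers recover it by pattern matching rather than by reading a Context field.

```
// producer side: tag the block instead of setting context.namedApplyBlockInfo
block.updateAttachment(NamedApplyInfo(qual, targs, vargss, blockTyper))

// consumer side: recover the info via the new extractor
fun1 match {
  case NamedApplyBlock(NamedApplyInfo(qual, targs, previousArgss, _)) => // use them
  case _                                                              => // not a named-apply block
}
```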
(cherry picked from commit 23cc067e4046baa8f2b2e1e12c27ee7becae217b) --- .../tools/nsc/typechecker/Contexts.scala | 5 ++-- .../tools/nsc/typechecker/NamesDefaults.scala | 25 ++++++++++--------- .../scala/tools/nsc/typechecker/Typers.scala | 16 +++--------- 3 files changed, 20 insertions(+), 26 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index a4f191720ad5..681f4496d576 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -246,8 +246,6 @@ trait Contexts { self: Analyzer => openImplicits.nonEmpty && openImplicits.exists(x => !x.isView) } - /* For a named application block (`Tree`) the corresponding `NamedApplyInfo`. */ - var namedApplyBlockInfo: Option[(Tree, NamedApplyInfo)] = None var prefix: Type = NoPrefix def inSuperInit_=(value: Boolean) = this(SuperInit) = value @@ -1581,6 +1579,9 @@ object ContextMode { /** Are unapplied type constructors allowed here? Formerly HKmode. */ final val TypeConstructorAllowed: ContextMode = 1 << 16 + /** Were default arguments used? */ + final val DiagUsedDefaults: ContextMode = 1 << 18 + /** TODO: The "sticky modes" are EXPRmode, PATTERNmode, TYPEmode. * To mimic the sticky mode behavior, when captain stickyfingers * comes around we need to propagate those modes but forget the other diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index c548a13a0c6e..0dbb0e860b25 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -49,6 +49,13 @@ trait NamesDefaults { self: Analyzer => vargss: List[List[Tree]], blockTyper: Typer ) { } + object NamedApplyBlock { + private[this] val tag = reflect.classTag[NamedApplyInfo] + def unapply(b: Tree): Option[NamedApplyInfo] = b match { + case _: Block => b.attachments.get[NamedApplyInfo](tag) + case _ => None + } + } private def nameOfNamedArg(arg: Tree) = Some(arg) collect { case AssignOrNamedArg(Ident(name), _) => name } def isNamedArg(arg: Tree) = arg match { @@ -191,15 +198,13 @@ trait NamesDefaults { self: Analyzer => val b = Block(List(vd), baseFunTransformed) .setType(baseFunTransformed.tpe).setPos(baseFun.pos.makeTransparent) - context.namedApplyBlockInfo = - Some((b, NamedApplyInfo(Some(newQual), defaultTargs, Nil, blockTyper))) + b.updateAttachment(NamedApplyInfo(Some(newQual), defaultTargs, Nil, blockTyper)) b } def blockWithoutQualifier(defaultQual: Option[Tree]) = { val b = atPos(baseFun.pos)(Block(Nil, baseFun).setType(baseFun.tpe)) - context.namedApplyBlockInfo = - Some((b, NamedApplyInfo(defaultQual, defaultTargs, Nil, blockTyper))) + b.updateAttachment(NamedApplyInfo(defaultQual, defaultTargs, Nil, blockTyper)) b } @@ -326,17 +331,14 @@ trait NamesDefaults { self: Analyzer => } // begin transform - if (isNamedApplyBlock(tree)) { - context.namedApplyBlockInfo.get._1 - } else tree match { + tree match { + case NamedApplyBlock(info) => tree // `fun` is typed. `namelessArgs` might be typed or not, if they are types are kept. 
case Apply(fun, namelessArgs) => val transformedFun = transformNamedApplication(typer, mode, pt)(fun, x => x) if (transformedFun.isErroneous) setError(tree) else { - assert(isNamedApplyBlock(transformedFun), transformedFun) - val NamedApplyInfo(qual, targs, vargss, blockTyper) = - context.namedApplyBlockInfo.get._2 + val NamedApplyBlock(NamedApplyInfo(qual, targs, vargss, blockTyper)) = transformedFun val Block(stats, funOnly) = transformedFun // type the application without names; put the arguments in definition-site order @@ -372,8 +374,7 @@ trait NamesDefaults { self: Analyzer => val res = blockTyper.doTypedApply(tree, expr, refArgs, mode, pt) res.setPos(res.pos.makeTransparent) val block = Block(stats ::: valDefs.flatten, res).setType(res.tpe).setPos(tree.pos.makeTransparent) - context.namedApplyBlockInfo = - Some((block, NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper))) + block.updateAttachment(NamedApplyInfo(qual, targs, vargss :+ refArgs, blockTyper)) block case _ => tree } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 3f90ee9afcfd..1bf0b91fe2b2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -695,12 +695,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val context1 = context.makeSilent(reportAmbiguousErrors, newtree) context1.undetparams = context.undetparams context1.savedTypeBounds = context.savedTypeBounds - context1.namedApplyBlockInfo = context.namedApplyBlockInfo val typer1 = newTyper(context1) val result = op(typer1) context.undetparams = context1.undetparams context.savedTypeBounds = context1.savedTypeBounds - context.namedApplyBlockInfo = context1.namedApplyBlockInfo // If we have a successful result, emit any warnings it created. if (!context1.reporter.hasErrors) @@ -3318,11 +3316,6 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } - /** Is `tree` a block created by a named application? - */ - def isNamedApplyBlock(tree: Tree) = - context.namedApplyBlockInfo exists (_._1 == tree) - def callToCompanionConstr(context: Context, calledFun: Symbol) = { calledFun.isConstructor && { val methCtx = context.enclMethod @@ -3539,7 +3532,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } else if (!allArgsArePositional(argPos) && !sameLength(formals, params)) // !allArgsArePositional indicates that named arguments are used to re-order arguments duplErrorTree(MultipleVarargError(tree)) - else if (allArgsArePositional(argPos) && !isNamedApplyBlock(fun)) { + else if (allArgsArePositional(argPos) && !NamedApplyBlock.unapply(fun).isDefined) { // if there's no re-ordering, and fun is not transformed, no need to transform // more than an optimization, e.g. important in "synchronized { x = update-x }" checkNotMacro() @@ -3578,13 +3571,12 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val fun1 = transformNamedApplication(Typer.this, mode, pt)(fun, x => x) if (fun1.isErroneous) duplErrTree else { - assert(isNamedApplyBlock(fun1), fun1) - val NamedApplyInfo(qual, targs, previousArgss, _) = context.namedApplyBlockInfo.get._2 + val NamedApplyBlock(NamedApplyInfo(qual, targs, previousArgss, _)) = fun1 val blockIsEmpty = fun1 match { case Block(Nil, _) => // if the block does not have any ValDef we can remove it. 
Note that the call to // "transformNamedApplication" is always needed in order to obtain targs/previousArgss - context.namedApplyBlockInfo = None + fun1.attachments.remove[NamedApplyInfo] true case _ => false } @@ -3612,7 +3604,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!sameLength(formals, args) || // wrong nb of arguments (args exists isNamedArg) || // uses a named argument - isNamedApplyBlock(fun)) { // fun was transformed to a named apply block => + NamedApplyBlock.unapply(fun).isDefined) { // fun was transformed to a named apply block => // integrate this application into the block if (dyna.isApplyDynamicNamed(fun) && isDynamicRewrite(fun)) dyna.typedNamedApply(tree, fun, args, mode, pt) else tryNamesDefaults From 5df601ed5791671468521dba39f2689ed5f8556a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 21 May 2018 06:32:22 +1000 Subject: [PATCH 1419/2793] Use faster way to lookup List.apply symbol I accidentally changed this to use Definitions.List_apply in #6618. (cherry picked from commit 1379df4c0888236dbc6d964d8a889b8bd9857511) --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 9559736288cc..0d154f8d6d11 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3647,7 +3647,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * forced during kind-arity checking, so it is guarded by additional * tests to ensure we're sufficiently far along. */ - if (args.isEmpty && canTranslateEmptyListToNil && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == List_apply)) + if (args.isEmpty && canTranslateEmptyListToNil && fun.symbol.isInitialized && ListModule.hasCompleteInfo && (fun.symbol == currentRun.runDefinitions.List_apply)) atPos(tree.pos)(gen.mkNil setType restpe) else constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe)) From 1181fdd24bef27a60753299979397bb5081fc540 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 23 May 2018 10:34:21 +1000 Subject: [PATCH 1420/2793] Avoid double open when overwriting a classfile on non-Windows (cherry picked from commit 1ceafd2275f6fab4016173a4fb93dbce941805e4) --- .../tools/nsc/backend/jvm/ClassfileWriters.scala | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala index 639f79bd5c2c..a477ec70c23d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala @@ -135,6 +135,7 @@ abstract class ClassfileWriters { private sealed class DirClassWriter extends UnderlyingClassfileWriter { val builtPaths = new ConcurrentHashMap[Path, java.lang.Boolean]() val noAttributes = Array.empty[FileAttribute[_]] + private val isWindows = scala.util.Properties.isWin def ensureDirForPath(baseDir: Path, filePath: Path): Unit = { import java.lang.Boolean.TRUE @@ -174,10 +175,12 @@ abstract class ClassfileWriters { val path = getPath(className, paths) val bytes = formatData(rawBytes) ensureDirForPath(paths.outputPath, path) - val os = try FileChannel.open(path, fastOpenOptions) - catch { - case _: FileAlreadyExistsException => 
FileChannel.open(path, fallbackOpenOptions) - } + val os = if (isWindows) { + try FileChannel.open(path, fastOpenOptions) + catch { + case _: FileAlreadyExistsException => FileChannel.open(path, fallbackOpenOptions) + } + } else FileChannel.open(path, fallbackOpenOptions) try { os.write(ByteBuffer.wrap(bytes), 0L) From 606d7d126f51a8b25d3991c7cfc5508ab694b6c4 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 8 Jun 2018 16:37:18 +1000 Subject: [PATCH 1421/2793] Cherry-picked into a different branch From b5a694109e1fc626341e7c0ab03949ebb79bf48b Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Wed, 14 Mar 2018 18:44:27 -0400 Subject: [PATCH 1422/2793] Compare positions in `normalTypedApply`'s error advice by focus. Follow-up to 1df3796485b4c72affa6eb1c185ec94ed1603798. Makes sure we get the extra advice to which we're entitled, even when using rangepos. --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 4 ++-- test/files/neg/t0903.flags | 1 + test/files/neg/t1215.flags | 1 + test/files/neg/t9834.flags | 1 + 4 files changed, 5 insertions(+), 2 deletions(-) create mode 100644 test/files/neg/t0903.flags create mode 100644 test/files/neg/t1215.flags create mode 100644 test/files/neg/t9834.flags diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 3f90ee9afcfd..b34c466f4a92 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4708,7 +4708,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def advice1(convo: Tree, errors: List[AbsTypeError], err: SilentTypeError): List[AbsTypeError] = errors.map { e => - if (e.errPos == tree.pos) { + if (e.errPos samePointAs tree.pos) { val header = f"${e.errMsg}%n Expression does not convert to assignment because:%n " val expansion = f"%n expansion: ${show(convo)}" NormalTypeError(tree, err.errors.flatMap(_.errMsg.lines.toList).mkString(header, f"%n ", expansion)) @@ -4716,7 +4716,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def advice2(errors: List[AbsTypeError]): List[AbsTypeError] = errors.map { e => - if (e.errPos == tree.pos) { + if (e.errPos samePointAs tree.pos) { val msg = f"${e.errMsg}%n Expression does not convert to assignment because receiver is not assignable." 
NormalTypeError(tree, msg) } else e diff --git a/test/files/neg/t0903.flags b/test/files/neg/t0903.flags new file mode 100644 index 000000000000..fcf951d90723 --- /dev/null +++ b/test/files/neg/t0903.flags @@ -0,0 +1 @@ +-Yrangepos \ No newline at end of file diff --git a/test/files/neg/t1215.flags b/test/files/neg/t1215.flags new file mode 100644 index 000000000000..fcf951d90723 --- /dev/null +++ b/test/files/neg/t1215.flags @@ -0,0 +1 @@ +-Yrangepos \ No newline at end of file diff --git a/test/files/neg/t9834.flags b/test/files/neg/t9834.flags new file mode 100644 index 000000000000..fcf951d90723 --- /dev/null +++ b/test/files/neg/t9834.flags @@ -0,0 +1 @@ +-Yrangepos \ No newline at end of file From 036fcbcc79563ea2362b08503dccaaf8d8c9af30 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Fri, 8 Jun 2018 23:07:02 -0700 Subject: [PATCH 1423/2793] Test for 10935 --- test/files/neg/t10935.check | 7 +++++++ test/files/neg/t10935.scala | 13 +++++++++++++ 2 files changed, 20 insertions(+) create mode 100644 test/files/neg/t10935.check create mode 100644 test/files/neg/t10935.scala diff --git a/test/files/neg/t10935.check b/test/files/neg/t10935.check new file mode 100644 index 000000000000..477961ff8056 --- /dev/null +++ b/test/files/neg/t10935.check @@ -0,0 +1,7 @@ +t10935.scala:4: error: value += is not a member of Int + Expression does not convert to assignment because: + value lengt is not a member of String + expansion: a.this.size = a.this.size.+(1.+("foo".)) + size += 1 + "foo".lengt + ^ +one error found diff --git a/test/files/neg/t10935.scala b/test/files/neg/t10935.scala new file mode 100644 index 000000000000..30f8a1aeb537 --- /dev/null +++ b/test/files/neg/t10935.scala @@ -0,0 +1,13 @@ + +object a { + var size = 0 + size += 1 + "foo".lengt +} + +/* + * +test/files/neg/t10935.scala:4: error: value += is not a member of Int + size += 1 + "foo".lengt + ^ +one error found + */ From 24f143869398505a4a20159ef1ee62b3a5af61e1 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Sat, 9 Jun 2018 19:22:04 -0400 Subject: [PATCH 1424/2793] Parenthesize infix type constructors before using them as a prefix. Otherwise, `(A ^ B)#T` prints as `A ^ B#T`, which just ain't right. Fixes scala/bug#10937. --- src/reflect/scala/reflect/internal/Types.scala | 5 ++++- test/files/run/t4700.check | 6 ++++++ test/files/run/t4700.scala | 2 ++ 3 files changed, 12 insertions(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index bec839b85606..4c99c52fbd72 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -932,7 +932,10 @@ trait Types def trimPrefix(str: String) = str stripPrefix objectPrefix stripPrefix packagePrefix /** The string representation of this type used as a prefix */ - def prefixString = trimPrefix(toString) + "#" + def prefixString = { + val pre = trimPrefix(toString) + if (isShowAsInfixType) s"($pre)#" else pre + "#" + } /** Convert toString avoiding infinite recursions by cutting off * after `maxToStringRecursions` recursion levels. Uses `safeToString` diff --git a/test/files/run/t4700.check b/test/files/run/t4700.check index ae854b959db9..2c72a3723f24 100644 --- a/test/files/run/t4700.check +++ b/test/files/run/t4700.check @@ -41,4 +41,10 @@ foo: (Int && String) &: Boolean scala> def foo: Int && (Boolean &: String) = ??? 
foo: Int && (Boolean &: String) +scala> trait ^[A, B] { type T } /* scala/bug#10937 */ +defined trait $up + +scala> def x[A, B] : (A ^ B)#T = ??? +x: [A, B]=> (A ^ B)#T + scala> :quit diff --git a/test/files/run/t4700.scala b/test/files/run/t4700.scala index 7c02676e89f0..b62aab0358fb 100644 --- a/test/files/run/t4700.scala +++ b/test/files/run/t4700.scala @@ -17,6 +17,8 @@ object Test extends ReplTest { |def foo: Int &: Boolean &: String = ??? |def foo: (Int && String) &: Boolean = ??? |def foo: Int && (Boolean &: String) = ??? + |trait ^[A, B] { type T } /* scala/bug#10937 */ + |def x[A, B] : (A ^ B)#T = ??? |""".stripMargin } From f3c70d9376290b332de7b4f8225ce032e3792ebc Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 11 Jun 2018 09:19:52 +1000 Subject: [PATCH 1425/2793] [nomerge] Remove test that doesn't apply on 2.12.x --- test/files/pos/sip23-singleton-view.scala | 6 ------ 1 file changed, 6 deletions(-) delete mode 100644 test/files/pos/sip23-singleton-view.scala diff --git a/test/files/pos/sip23-singleton-view.scala b/test/files/pos/sip23-singleton-view.scala deleted file mode 100644 index 735173cacb9d..000000000000 --- a/test/files/pos/sip23-singleton-view.scala +++ /dev/null @@ -1,6 +0,0 @@ -import language.implicitConversions - -class Test { - implicit def singletonToString(c: Singleton): String = "" - def foo(a: 1): String = a // implicit was being ruled out because Int(1).widen was not a subclass of Singletom -} From 76cbf10f97310582aa300134ddffb8c5b0361fb2 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Mon, 11 Jun 2018 18:09:56 -0400 Subject: [PATCH 1426/2793] fix Windows-only test failure some whitespace in the check file got misplaced when the test was backported to 2.11 --- test/files/run/t9880-9881.check | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/files/run/t9880-9881.check b/test/files/run/t9880-9881.check index d600b9895b29..36513e249a4c 100644 --- a/test/files/run/t9880-9881.check +++ b/test/files/run/t9880-9881.check @@ -13,7 +13,7 @@ import scala.reflect.runtime.{universe=>ru} scala> import ru.TypeTag import ru.TypeTag -scala> +scala> scala> // show the imports @@ -26,7 +26,7 @@ scala> :imports 6) import scala.reflect.runtime.{universe=>ru} (...) 7) import ru.TypeTag (...) -scala> +scala> scala> // should be able to define this class with the imports above From 04cd5c392ccb4543520e39be738857f45c8ddc1a Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 13 Jun 2018 12:51:27 +0200 Subject: [PATCH 1427/2793] Consider delayedInit$body classes as top-level for Java compatibility We do the same for specialized subclasses. There's no real advantage in putting these classes into an INNERCLASS attribute. They have synthetic names anyway and are not intended to be used from Java. --- src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala | 4 +++- test/files/run/t10487.scala | 3 +++ 2 files changed, 6 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t10487.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 2f5f0ad246c6..c526306cecd6 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -33,7 +33,9 @@ abstract class BCodeHelpers extends BCodeIdiomatic { * True for classes generated by the Scala compiler that are considered top-level in terms of * the InnerClass / EnclosingMethod classfile attributes. See comment in BTypes. 
*/ - def considerAsTopLevelImplementationArtifact(classSym: Symbol) = classSym.isSpecialized + def considerAsTopLevelImplementationArtifact(classSym: Symbol) = + classSym.isSpecialized || + classSym.isSynthetic && classSym.name.containsName(nme.delayedInitArg.toTypeName) /** * Cache the value of delambdafy == "inline" for each run. We need to query this value many diff --git a/test/files/run/t10487.scala b/test/files/run/t10487.scala new file mode 100644 index 000000000000..f1b50cde6d27 --- /dev/null +++ b/test/files/run/t10487.scala @@ -0,0 +1,3 @@ +object Test extends App { + assert(Class.forName("Test$delayedInit$body").getEnclosingClass() == null) +} From d736b3bded97af17fa7c477c7316a09c955a1771 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Tue, 22 May 2018 08:51:48 -0400 Subject: [PATCH 1428/2793] Move range position validation behind a flag. The motivation is that the validation step isn't fast, and takes up a good chunk of the "rangepos penalty" time difference. Moreover, Alex Average User can't do much about a fatal rangepos error other than twiddle around their source until it goes away, so it's likely to bother end users less like this. This is a backport to 2.12.x, since position validation is changing for performance, and we evidently want to be cautious about adding new breakages. References scala/scala-dev#472. --- build.sbt | 2 ++ project/ScalaOptionParser.scala | 2 +- src/compiler/scala/tools/nsc/Global.scala | 3 +++ src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala | 3 --- src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 3 ++- src/compiler/scala/tools/nsc/typechecker/Analyzer.scala | 1 - src/reflect/scala/reflect/internal/Positions.scala | 5 ++--- 7 files changed, 10 insertions(+), 9 deletions(-) diff --git a/build.sbt b/build.sbt index 3793e5128284..a44226b9ecee 100644 --- a/build.sbt +++ b/build.sbt @@ -720,6 +720,8 @@ lazy val test = project // test sources are compiled in partest run, not here sources in IntegrationTest := Seq.empty, fork in IntegrationTest := true, + // enable this in 2.13, when tests pass + //scalacOptions in Compile += "-Yvalidate-pos:parser,typer", javaOptions in IntegrationTest ++= List("-Xmx2G", "-Dpartest.exec.in.process=true", "-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), testOptions in IntegrationTest += Tests.Argument("-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), testFrameworks += new TestFramework("scala.tools.partest.sbt.Framework"), diff --git a/project/ScalaOptionParser.scala b/project/ScalaOptionParser.scala index 26d75e51be84..a5cbb35dde40 100644 --- a/project/ScalaOptionParser.scala +++ b/project/ScalaOptionParser.scala @@ -97,7 +97,7 @@ object ScalaOptionParser { "-Ypresentation-log", "-Ypresentation-replay", "-Yrepl-outdir", "-d", "-dependencyfile", "-encoding", "-Xscript") private def pathSettingNames = List("-bootclasspath", "-classpath", "-extdirs", "-javabootclasspath", "-javaextdirs", "-sourcepath", "-toolcp") private val phases = List("all", "parser", "namer", "packageobjects", "typer", "patmat", "superaccessors", "extmethods", "pickler", "refchecks", "uncurry", "tailcalls", "specialize", "explicitouter", "erasure", "posterasure", "fields", "lambdalift", "constructors", "flatten", "mixin", "cleanup", "delambdafy", "icode", "jvm", "terminal") - private val phaseSettings = List("-Xprint-icode", "-Ystop-after", "-Yskip", "-Yshow", "-Ystop-before", "-Ybrowse", "-Ylog", "-Ycheck", "-Xprint") + private val phaseSettings = List("-Xprint-icode", 
"-Ystop-after", "-Yskip", "-Yshow", "-Ystop-before", "-Ybrowse", "-Ylog", "-Ycheck", "-Xprint", "-Yvalidate-pos") private def multiStringSettingNames = List("-Xmacro-settings", "-Xplugin", "-Xplugin-disable", "-Xplugin-require", "-Ywarn-unused") private def intSettingNames = List("-Xmax-classfile-name", "-Xelide-below", "-Ypatmat-exhaust-depth", "-Ypresentation-delay", "-Yrecursion") private def choiceSettingNames = Map[String, List[String]]( diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 3edac10cf026..4f0fa16cf52a 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1471,6 +1471,9 @@ class Global(var currentSettings: Settings, reporter0: Reporter) if (settings.browse containsPhase globalPhase) treeBrowser browse (phase.name, units) + if ((settings.Yvalidatepos containsPhase globalPhase) && !reporter.hasErrors) + currentRun.units.foreach(unit => validatePositions(unit.body)) + // move the pointer globalPhase = globalPhase.next diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala index e0667b5a3ebb..b4b6f25dc999 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala @@ -98,9 +98,6 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse if (unit.body == EmptyTree) unit.body = initialUnitBody(unit) - if (settings.Yrangepos && !reporter.hasErrors) - validatePositions(unit.body) - if (settings.Ymemberpos.isSetByUser) new MemberPosReporter(unit) show (style = settings.Ymemberpos.value) } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 2e9477cf38c6..a0fbedc03924 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -220,7 +220,8 @@ trait ScalaSettings extends AbsScalaSettings val stopAfter = PhasesSetting ("-Ystop-after", "Stop after") withAbbreviation ("-stop") // backward compat val stopBefore = PhasesSetting ("-Ystop-before", "Stop before") val Yrangepos = BooleanSetting ("-Yrangepos", "Use range positions for syntax trees.") - val Ymemberpos = StringSetting ("-Yshow-member-pos", "output style", "Show start and end positions of members", "") withPostSetHook (_ => Yrangepos.value = true) + val Yvalidatepos = PhasesSetting ("-Yvalidate-pos", s"Validate positions after the given phases (implies ${Yrangepos.name})") withPostSetHook (_ => Yrangepos.value = true) + val Ymemberpos = StringSetting ("-Yshow-member-pos", "output style", s"Show start and end positions of members (implies ${Yrangepos.name})", "") withPostSetHook (_ => Yrangepos.value = true) val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.") val Ymacroexpand = ChoiceSetting ("-Ymacro-expand", "policy", "Control expansion of macros, useful for scaladoc and presentation compiler.", List(MacroExpand.Normal, MacroExpand.None, MacroExpand.Discard), MacroExpand.Normal) val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. 
Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.") withDeprecationMessage(s"Use ${Ymacroexpand.name}:${MacroExpand.None}") withPostSetHook(_ => Ymacroexpand.value = MacroExpand.None) diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index ccd414cc457d..5fc17c191477 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -102,7 +102,6 @@ trait Analyzer extends AnyRef try { val typer = newTyper(rootContext(unit)) unit.body = typer.typed(unit.body) - if (global.settings.Yrangepos && !global.reporter.hasErrors) global.validatePositions(unit.body) for (workItem <- unit.toCheck) workItem() if (settings.warnUnusedImport) warnUnusedImports(unit) diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala index 1a1aa2e72160..b56762c42b37 100644 --- a/src/reflect/scala/reflect/internal/Positions.scala +++ b/src/reflect/scala/reflect/internal/Positions.scala @@ -105,7 +105,7 @@ trait Positions extends api.Positions { self: SymbolTable => def validate(tree: Tree, encltree: Tree): Unit = { if (!tree.isEmpty && tree.canHaveAttrs) { - if (settings.Yposdebug && (settings.verbose || settings.Yrangepos)) + if (settings.Yposdebug && settings.verbose) inform("[%10s] %s".format("validate", treeStatus(tree, encltree))) if (!tree.pos.isDefined) @@ -143,8 +143,7 @@ trait Positions extends api.Positions { self: SymbolTable => } } - if (!isPastTyper) - validate(tree, tree) + validate(tree, tree) } def solidDescendants(tree: Tree): List[Tree] = From 5af85b5ce690d62bab90f8738c3fe1d0d2e0e41f Mon Sep 17 00:00:00 2001 From: Danila Matveev Date: Tue, 27 Feb 2018 11:28:10 +0300 Subject: [PATCH 1429/2793] [nomerge] Several entries with the same key in mutable.HashMap - Add HashMap test to check scala/collection-strawman#382 / scala/bug#10703 - Clean code. - Repeat search of key after evaluating default value. - Fix code style. Cherry pick of 0c30c04a6920f7f7daa08df8ee22349f12686906 --- src/library/scala/collection/mutable/HashMap.scala | 13 ++++++++++--- .../scala/collection/mutable/HashMapTest.scala | 13 +++++++++++++ 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala index 2391080658e3..c32e9d2f7d35 100644 --- a/src/library/scala/collection/mutable/HashMap.scala +++ b/src/library/scala/collection/mutable/HashMap.scala @@ -75,18 +75,25 @@ extends AbstractMap[A, B] override def getOrElseUpdate(key: A, defaultValue: => B): B = { val hash = elemHashCode(key) val i = index(hash) - val entry = findEntry(key, i) - if (entry != null) entry.value + val firstEntry = findEntry(key, i) + if (firstEntry != null) firstEntry.value else { val table0 = table val default = defaultValue // Avoid recomputing index if the `defaultValue()` hasn't triggered // a table resize. 
val newEntryIndex = if (table0 eq table) i else index(hash) - addEntry(createNewEntry(key, default), newEntryIndex) + val e = createNewEntry(key, default) + // Repeat search + // because evaluation of `default` can bring entry with `key` + val secondEntry = findEntry(key, newEntryIndex) + if (secondEntry == null) addEntry0(e, newEntryIndex) + else secondEntry.value = default + default } } + /* inlined HashTable.findEntry0 to preserve its visibility */ private[this] def findEntry(key: A, h: Int): Entry = { var e = table(h).asInstanceOf[Entry] diff --git a/test/junit/scala/collection/mutable/HashMapTest.scala b/test/junit/scala/collection/mutable/HashMapTest.scala index cc1979a9200c..426cc8de55bb 100644 --- a/test/junit/scala/collection/mutable/HashMapTest.scala +++ b/test/junit/scala/collection/mutable/HashMapTest.scala @@ -35,4 +35,17 @@ class HashMapTest { hm.put(0, 0) hm.getOrElseUpdate(0, throw new AssertionError()) } + + @Test + def getOrElseUpdate_keyIdempotence_t10703(): Unit = { + val map = mutable.HashMap[String, String]() + + val key = "key" + map.getOrElseUpdate(key, { + map.getOrElseUpdate(key, "value1") + "value2" + }) + + assertEquals(List((key, "value2")), map.toList) + } } From dc0b8b201553069ec9d5c2f3a1cb061f7e8402b4 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Sat, 23 Jun 2018 16:30:31 -0400 Subject: [PATCH 1430/2793] Minor optimizations in implicit search. - cache `settings.isScala213` - use `mutable.Set#add` rather than `contains` followed by `+=` - match on `_ :: _` rather than `nonEmpty` followed by `head` - use `mutable.Map#update` rather than `+=` (avoiding a tuple alloc) --- .../tools/nsc/typechecker/Implicits.scala | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index f167a653ffb9..4db10827f248 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1071,17 +1071,20 @@ trait Implicits { * bound, the implicits infos which are members of these companion objects. */ private def companionImplicitMap(tp: Type): InfoMap = { + val isScala213 = settings.isScala213 /* Populate implicit info map by traversing all parts of type `tp`. * Parameters as for `getParts`. 
*/ - def getClassParts(tp: Type)(implicit infoMap: InfoMap, seen: mutable.Set[Type], pending: Set[Symbol]) = tp match { + def getClassParts(tp: Type)(implicit infoMap: InfoMap, seen: mutable.HashSet[Type], pending: Set[Symbol]) = tp match { case TypeRef(pre, sym, args) => infoMap get sym match { case Some(infos1) => - if (infos1.nonEmpty && !(pre =:= infos1.head.pre.prefix)) { - log(s"Ignoring implicit members of $pre#$sym as it is also visible via another prefix: ${infos1.head.pre.prefix}") - infoMap(sym) = List() // ambiguous prefix - ignore implicit members + infos1 match { + case head :: _ if !(pre =:= head.pre.prefix) => + log(s"Ignoring implicit members of $pre#$sym as it is also visible via another prefix: ${infos1.head.pre.prefix}") + infoMap(sym) = List() // ambiguous prefix - ignore implicit members + case _ => } case None => if (pre.isStable && !pre.typeSymbol.isExistentiallyBound) { @@ -1090,7 +1093,7 @@ trait Implicits { else singleType(pre, companionSymbolOf(sym, context)) val infos = pre1.implicitMembers.iterator.map(mem => new ImplicitInfo(mem.name, pre1, mem)).toList if (infos.nonEmpty) - infoMap += (sym -> infos) + infoMap(sym) = infos } val bts = tp.baseTypeSeq var i = 1 @@ -1110,14 +1113,11 @@ trait Implicits { * @param pending The set of static symbols for which we are currently trying to collect their parts * in order to cache them in infoMapCache */ - def getParts(tp: Type)(implicit infoMap: InfoMap, seen: mutable.Set[Type], pending: Set[Symbol]) { - if (seen(tp)) - return - seen += tp - tp match { + def getParts(tp: Type)(implicit infoMap: InfoMap, seen: mutable.HashSet[Type], pending: Set[Symbol]) { + if (seen add tp) tp match { case TypeRef(pre, sym, args) => if (sym.isClass && !sym.isRoot && - (settings.isScala213 || !sym.isAnonOrRefinementClass)) { + (isScala213 || !sym.isAnonOrRefinementClass)) { if (sym.isStatic && !(pending contains sym)) infoMap ++= { infoMapCache get sym match { @@ -1140,7 +1140,7 @@ trait Implicits { // - if `T` is an abstract type, the parts of its upper bound; getParts(tp.bounds.hi) - if(settings.isScala213) { + if (isScala213) { // - if `T` is a parameterized type `S[T1,…,Tn]`, the union of the parts of `S` and `T1,…,Tn` args foreach getParts From 8128fa790bd01833481999d077ccec99886bf0a5 Mon Sep 17 00:00:00 2001 From: NthPortal Date: Sat, 23 Jun 2018 17:22:43 -0400 Subject: [PATCH 1431/2793] bug#10747 Document thread-safety of Enumeration --- src/library/scala/Enumeration.scala | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala index 716d26164ea7..ab3fa18064c3 100644 --- a/src/library/scala/Enumeration.scala +++ b/src/library/scala/Enumeration.scala @@ -25,6 +25,11 @@ import scala.util.matching.Regex * `Value` type member of the enumeration (`Value` selected on the stable * identifier path of the enumeration instance). * + * Values SHOULD NOT be added to an enumeration after its construction; + * doing so makes the enumeration thread-unsafe. If values are added to an + * enumeration from multiple threads (in a non-synchronized fashion) after + * construction, the behavior of the enumeration is undefined. 
+ * * @example {{{ * // Define a new enumeration with a type alias and work with the full set of enumerated values * object WeekDay extends Enumeration { From 3480f497b752f8416533e2d2b55b242ff3d51e9e Mon Sep 17 00:00:00 2001 From: exoego Date: Wed, 2 May 2018 23:06:22 +0900 Subject: [PATCH 1432/2793] Add ScalaCompilerOptionsExporter --- build.sbt | 16 +++ .../tools/nsc/settings/MutableSettings.scala | 8 +- .../nsc/ScalaCompilerOptionsExporter.scala | 126 ++++++++++++++++++ 3 files changed, 146 insertions(+), 4 deletions(-) create mode 100644 src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala diff --git a/build.sbt b/build.sbt index 3793e5128284..87c6e43e2aec 100644 --- a/build.sbt +++ b/build.sbt @@ -422,6 +422,22 @@ lazy val reflect = configureAsSubproject(project) ) .dependsOn(library) +lazy val exporter = configureAsSubproject(project) + .dependsOn(compiler, reflect, library) + .settings(clearSourceAndResourceDirectories) + .settings(commonSettings) + .settings(disableDocs) + .settings(disablePublishing) + .settings( + libraryDependencies ++= Seq( + "com.fasterxml.jackson.core" % "jackson-core" % "2.9.5", + "com.fasterxml.jackson.core" % "jackson-annotations" % "2.9.5", + "com.fasterxml.jackson.core" % "jackson-databind" % "2.9.5", + "com.fasterxml.jackson.dataformat" % "jackson-dataformat-yaml" % "2.9.5", + "com.fasterxml.jackson.module" %% "jackson-module-scala" % "2.9.5" + ) + ) + lazy val compiler = configureAsSubproject(project) .settings(generatePropertiesFileSettings) .settings(generateBuildCharacterFileSettings) diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index 85f65f6c6911..bddef769be99 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -530,7 +530,7 @@ class MutableSettings(val errorFn: String => Unit) name: String, val arg: String, descr: String, - initial: ScalaVersion, + val initial: ScalaVersion, default: Option[ScalaVersion]) extends Setting(name, descr) { type T = ScalaVersion @@ -631,7 +631,7 @@ class MutableSettings(val errorFn: String => Unit) */ class MultiChoiceSetting[E <: MultiChoiceEnumeration] private[nsc]( name: String, - helpArg: String, + val helpArg: String, descr: String, val domain: E, val default: Option[List[String]] @@ -838,7 +838,7 @@ class MutableSettings(val errorFn: String => Unit) */ class ChoiceSetting private[nsc]( name: String, - helpArg: String, + val helpArg: String, descr: String, override val choices: List[String], val default: String, @@ -893,7 +893,7 @@ class MutableSettings(val errorFn: String => Unit) class PhasesSetting private[nsc]( name: String, descr: String, - default: String + val default: String ) extends Setting(name, mkPhasesHelp(descr, default)) with Clearable { private[nsc] def this(name: String, descr: String) = this(name, descr, "") diff --git a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala new file mode 100644 index 000000000000..e506584da8ad --- /dev/null +++ b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala @@ -0,0 +1,126 @@ +package scala.tools.nsc + +import scala.reflect.runtime.universe._ +import collection.JavaConverters._ +import com.fasterxml.jackson.annotation._ +import com.fasterxml.jackson.core.util.DefaultPrettyPrinter +import com.fasterxml.jackson.databind.ObjectMapper +import 
com.fasterxml.jackson.dataformat.yaml.YAMLFactory +import com.fasterxml.jackson.module.scala.DefaultScalaModule + +object ScalaCompilerOptionsExporter { + + case class Category(name: String, load: Int) extends Ordered[Category] { + def compare(that: Category): Int = (this.load) compare (that.load) + } + val StandardSettings = Category("Standard Settings", 0) + val JVMSettings = Category("JVM Settings", 1) + val PluginSettings = Category("Plugin Settings", 2) + val AdvancedSettings = Category("Advanced Settings", 3) + val PrivateSettings = Category("Private Settings", 4) + val WarningSettings = Category("Warning Settings", 5) + + trait JacksonWorkaround { + val category: String + } + @JsonIgnoreProperties(Array("_category")) + @JsonPropertyOrder(Array("category", "description", "options")) + case class Section(_category: Category, description: Option[String], options: List[ScalacOption]) extends JacksonWorkaround{ + val category: String = _category.name + } + case class ScalacOption( + option: String, + schema: Schema, + description: String, + abbreviation: Option[String] = None, + deprecated: Option[String] = None, + note: Option[String] = None + ) + case class Schema( + @JsonProperty("type") _type: String, + arg: Option[String] = None, + multiple: Option[Boolean] = None, + default: Option[Any] = None, + choices: Seq[Choice] = Seq.empty, + min: Option[Any] = None, + max: Option[Any] = None + ) + case class Choice(choice: String, description: Option[String] = None, deprecated: Option[String] = None) + + def main(args: Array[String]): Unit = { + val runtimeMirror = scala.reflect.runtime.currentMirror + + val settings = new scala.tools.nsc.Settings(s => ()) + val instanceMirror = runtimeMirror.reflect(settings) + val sortedInOrderOfAppearance = runtimeMirror.classSymbol(settings.getClass).toType.members.sorted + val accessors = sortedInOrderOfAppearance.collect { + case m: MethodSymbol if m.isGetter && m.isPublic => m + } + + def mergeChoice(labels: Seq[String], descriptions: Seq[String]): Seq[Choice] = { + for { + (choice, d) <- (labels zipAll (descriptions, "", "")) + } yield { + Choice( + choice, + description = Option(d).filter(_.nonEmpty), + deprecated = Some("EXPLAIN_ALTERNATIVE").filter(_ => d.toLowerCase.contains("deprecated")) + ) + } + } + + val extractedSettings : List[ScalacOption] = accessors.map(acc => instanceMirror.reflectMethod(acc).apply()).collect { + case s: settings.Setting => + val schema = s match { + case b: settings.BooleanSetting => new Schema(_type="Boolean") + case i: settings.IntSetting => new Schema(_type="Int", default = Some(i.default), min = i.range.map(_._1), max = i.range.map(_._2)) + case c: settings.ChoiceSetting => + val choices = mergeChoice(c.choices, c.choicesHelp) + new Schema(_type="Choice", arg = Some(c.helpArg), default = Option(c.default), choices = choices) + case mc: settings.MultiChoiceSetting[_] => + val choices = mergeChoice(mc.choices, mc.descriptions) + new Schema(_type="Choice", multiple = Some(true), arg = Some(mc.helpArg), choices = choices) + case ps: settings.PhasesSetting => new Schema(_type="Phases", default = Option(ps.default)) + case px: settings.PrefixSetting => new Schema(_type="Prefix") + case sv: settings.ScalaVersionSetting => new Schema(_type="ScalaVerion", arg = Some(sv.arg), default = Some(sv.initial.unparse)) + case pathStr: settings.PathSetting => new Schema(_type="Path", arg = Some(pathStr.arg), default = Some(pathStr.default)) + case str: settings.StringSetting => new Schema(_type="String", arg = Some(str.arg), default 
= Some(str.default)) + case ms: settings.MultiStringSetting => new Schema(_type="String", multiple = Some(true), arg = Some(ms.arg)) + } + new ScalacOption(option = s.name, schema = schema, description = s.helpDescription, + deprecated = Some("EXPLAIN_ALTERNATIVE").filter(_ => s.helpDescription.toLowerCase.contains("deprecated"))) + } + + + val categoriezed = extractedSettings.groupBy { option => + val name = option.option + if (name.startsWith("-Xfatal-warnings") || name.startsWith("-Ywarn")) { + WarningSettings + } else if (name.startsWith("-X")) { + AdvancedSettings + } else if (name.startsWith("-Y") || name.startsWith("-opt")) { + PrivateSettings + } else if (name.startsWith("-P")) { + PluginSettings + } else if (name.startsWith("-J") || name.startsWith("-D") || name.startsWith("-nobootcp")) { + JVMSettings + } else { + StandardSettings + } + } + + val source = categoriezed.toSeq.sortBy(_._1).map { case (key, options) => + new Section(key, Some("ADD_NICE_DESCRIPTION_HERE"),options = options) + } + + val mapper = new ObjectMapper(new YAMLFactory()) + mapper + .registerModule(DefaultScalaModule) + .setSerializationInclusion(JsonInclude.Include.NON_EMPTY) + + val yaml = mapper + .writer(new DefaultPrettyPrinter()) + .writeValueAsString(source) + println(yaml) + } +} From 2c9b07073a54d1a6d17041199e5a32e3181b9677 Mon Sep 17 00:00:00 2001 From: exoego Date: Thu, 3 May 2018 06:43:55 +0900 Subject: [PATCH 1433/2793] Add abbreviations. --- .../tools/nsc/ScalaCompilerOptionsExporter.scala | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala index e506584da8ad..f4c8f24fa9d6 100644 --- a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala +++ b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala @@ -32,7 +32,7 @@ object ScalaCompilerOptionsExporter { option: String, schema: Schema, description: String, - abbreviation: Option[String] = None, + abbreviations: Seq[String] = Seq.empty, deprecated: Option[String] = None, note: Option[String] = None ) @@ -87,8 +87,14 @@ object ScalaCompilerOptionsExporter { case str: settings.StringSetting => new Schema(_type="String", arg = Some(str.arg), default = Some(str.default)) case ms: settings.MultiStringSetting => new Schema(_type="String", multiple = Some(true), arg = Some(ms.arg)) } - new ScalacOption(option = s.name, schema = schema, description = s.helpDescription, - deprecated = Some("EXPLAIN_ALTERNATIVE").filter(_ => s.helpDescription.toLowerCase.contains("deprecated"))) + + new ScalacOption( + option = s.name, + schema = schema, + description = s.helpDescription, + abbreviations = s.abbreviations, + deprecated = Some("EXPLAIN_ALTERNATIVE").filter(_ => s.helpDescription.toLowerCase.contains("deprecated")) + ) } From 1ec656826edae95207891cf7abfbe72e442c09a0 Mon Sep 17 00:00:00 2001 From: exoego Date: Thu, 3 May 2018 07:01:44 +0900 Subject: [PATCH 1434/2793] Move -optimise to standard settings. 
--- src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala index f4c8f24fa9d6..6661475a41fe 100644 --- a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala +++ b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala @@ -104,7 +104,7 @@ object ScalaCompilerOptionsExporter { WarningSettings } else if (name.startsWith("-X")) { AdvancedSettings - } else if (name.startsWith("-Y") || name.startsWith("-opt")) { + } else if (name.startsWith("-Y") || name.startsWith("-opt") && name != "-optimise") { PrivateSettings } else if (name.startsWith("-P")) { PluginSettings From 4d1fc76a43a99f988aa0f1fb1bcc652247385a0c Mon Sep 17 00:00:00 2001 From: exoego Date: Thu, 3 May 2018 07:18:07 +0900 Subject: [PATCH 1435/2793] Move -Xlint to warning settings. --- src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala index 6661475a41fe..7f8b33abc138 100644 --- a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala +++ b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala @@ -100,7 +100,7 @@ object ScalaCompilerOptionsExporter { val categoriezed = extractedSettings.groupBy { option => val name = option.option - if (name.startsWith("-Xfatal-warnings") || name.startsWith("-Ywarn")) { + if (name.startsWith("-Xfatal-warnings") || name == "-Xlint" || name.startsWith("-Ywarn")) { WarningSettings } else if (name.startsWith("-X")) { AdvancedSettings From c9c98a0c5bf5711db5b89394b60825d7620c1c7a Mon Sep 17 00:00:00 2001 From: exoego Date: Thu, 3 May 2018 08:16:49 +0900 Subject: [PATCH 1436/2793] Disable splitting line --- .../scala/tools/nsc/ScalaCompilerOptionsExporter.scala | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala index 7f8b33abc138..f0f4745ef78a 100644 --- a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala +++ b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala @@ -5,7 +5,7 @@ import collection.JavaConverters._ import com.fasterxml.jackson.annotation._ import com.fasterxml.jackson.core.util.DefaultPrettyPrinter import com.fasterxml.jackson.databind.ObjectMapper -import com.fasterxml.jackson.dataformat.yaml.YAMLFactory +import com.fasterxml.jackson.dataformat.yaml.{YAMLFactory, YAMLGenerator} import com.fasterxml.jackson.module.scala.DefaultScalaModule object ScalaCompilerOptionsExporter { @@ -119,8 +119,9 @@ object ScalaCompilerOptionsExporter { new Section(key, Some("ADD_NICE_DESCRIPTION_HERE"),options = options) } - val mapper = new ObjectMapper(new YAMLFactory()) - mapper + val yamlFactory = new YAMLFactory() + .disable(YAMLGenerator.Feature.SPLIT_LINES) + val mapper = new ObjectMapper(yamlFactory) .registerModule(DefaultScalaModule) .setSerializationInclusion(JsonInclude.Include.NON_EMPTY) From a11bcbd2006b61cae96de20cc300e5a5662b4bde Mon Sep 17 00:00:00 2001 From: exoego Date: Thu, 3 May 2018 08:34:58 +0900 Subject: [PATCH 1437/2793] Rewrite `_' to `_` so that it can be highlighted in markdown --- .../scala/tools/nsc/ScalaCompilerOptionsExporter.scala | 10 ++++++++-- 1 file changed, 
8 insertions(+), 2 deletions(-) diff --git a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala index f0f4745ef78a..ea413397290b 100644 --- a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala +++ b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala @@ -47,6 +47,12 @@ object ScalaCompilerOptionsExporter { ) case class Choice(choice: String, description: Option[String] = None, deprecated: Option[String] = None) + private val quoted = """`([^`']+)'""".r + + def markdownifyBackquote(string: String) : String = { + quoted.replaceAllIn(string, "`$1`") + } + def main(args: Array[String]): Unit = { val runtimeMirror = scala.reflect.runtime.currentMirror @@ -63,7 +69,7 @@ object ScalaCompilerOptionsExporter { } yield { Choice( choice, - description = Option(d).filter(_.nonEmpty), + description = Option(d).map(markdownifyBackquote).filter(_.nonEmpty), deprecated = Some("EXPLAIN_ALTERNATIVE").filter(_ => d.toLowerCase.contains("deprecated")) ) } @@ -91,7 +97,7 @@ object ScalaCompilerOptionsExporter { new ScalacOption( option = s.name, schema = schema, - description = s.helpDescription, + description = markdownifyBackquote(s.helpDescription), abbreviations = s.abbreviations, deprecated = Some("EXPLAIN_ALTERNATIVE").filter(_ => s.helpDescription.toLowerCase.contains("deprecated")) ) From 319d55674b414bb91f3fa32006d51ec42d252184 Mon Sep 17 00:00:00 2001 From: exoego Date: Thu, 3 May 2018 08:45:55 +0900 Subject: [PATCH 1438/2793] Rewrite <_> to _. Without <>, it easier to be handled in HTML document --- .../nsc/ScalaCompilerOptionsExporter.scala | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala index ea413397290b..c7939314cdfd 100644 --- a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala +++ b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala @@ -53,6 +53,11 @@ object ScalaCompilerOptionsExporter { quoted.replaceAllIn(string, "`$1`") } + private val htmlTag = """<([^>]+)>""".r + def dehtmlfy(string: String) : String = { + htmlTag.replaceAllIn(string, "$1") + } + def main(args: Array[String]): Unit = { val runtimeMirror = scala.reflect.runtime.currentMirror @@ -69,7 +74,7 @@ object ScalaCompilerOptionsExporter { } yield { Choice( choice, - description = Option(d).map(markdownifyBackquote).filter(_.nonEmpty), + description = Option(d).map(markdownifyBackquote).map(dehtmlfy).filter(_.nonEmpty), deprecated = Some("EXPLAIN_ALTERNATIVE").filter(_ => d.toLowerCase.contains("deprecated")) ) } @@ -82,22 +87,22 @@ object ScalaCompilerOptionsExporter { case i: settings.IntSetting => new Schema(_type="Int", default = Some(i.default), min = i.range.map(_._1), max = i.range.map(_._2)) case c: settings.ChoiceSetting => val choices = mergeChoice(c.choices, c.choicesHelp) - new Schema(_type="Choice", arg = Some(c.helpArg), default = Option(c.default), choices = choices) + new Schema(_type="Choice", arg = Some(c.helpArg).map(dehtmlfy), default = Option(c.default), choices = choices) case mc: settings.MultiChoiceSetting[_] => val choices = mergeChoice(mc.choices, mc.descriptions) - new Schema(_type="Choice", multiple = Some(true), arg = Some(mc.helpArg), choices = choices) + new Schema(_type="Choice", multiple = Some(true), arg = Some(mc.helpArg).map(dehtmlfy), choices = choices) case ps: settings.PhasesSetting => new 
Schema(_type="Phases", default = Option(ps.default)) case px: settings.PrefixSetting => new Schema(_type="Prefix") - case sv: settings.ScalaVersionSetting => new Schema(_type="ScalaVerion", arg = Some(sv.arg), default = Some(sv.initial.unparse)) + case sv: settings.ScalaVersionSetting => new Schema(_type="ScalaVerion", arg = Some(sv.arg).map(dehtmlfy), default = Some(sv.initial.unparse)) case pathStr: settings.PathSetting => new Schema(_type="Path", arg = Some(pathStr.arg), default = Some(pathStr.default)) - case str: settings.StringSetting => new Schema(_type="String", arg = Some(str.arg), default = Some(str.default)) - case ms: settings.MultiStringSetting => new Schema(_type="String", multiple = Some(true), arg = Some(ms.arg)) + case str: settings.StringSetting => new Schema(_type="String", arg = Some(str.arg).map(dehtmlfy), default = Some(str.default)) + case ms: settings.MultiStringSetting => new Schema(_type="String", multiple = Some(true), arg = Some(ms.arg).map(dehtmlfy)) } new ScalacOption( option = s.name, schema = schema, - description = markdownifyBackquote(s.helpDescription), + description = dehtmlfy(markdownifyBackquote(s.helpDescription)), abbreviations = s.abbreviations, deprecated = Some("EXPLAIN_ALTERNATIVE").filter(_ => s.helpDescription.toLowerCase.contains("deprecated")) ) From ac97f0120b3f9acd1ef40d2409c43f1bfd63592c Mon Sep 17 00:00:00 2001 From: exoego Date: Thu, 3 May 2018 08:54:15 +0900 Subject: [PATCH 1439/2793] Refactor. --- .../nsc/ScalaCompilerOptionsExporter.scala | 32 ++++++++++++------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala index c7939314cdfd..24ca966ca5aa 100644 --- a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala +++ b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala @@ -83,23 +83,31 @@ object ScalaCompilerOptionsExporter { val extractedSettings : List[ScalacOption] = accessors.map(acc => instanceMirror.reflectMethod(acc).apply()).collect { case s: settings.Setting => val schema = s match { - case b: settings.BooleanSetting => new Schema(_type="Boolean") - case i: settings.IntSetting => new Schema(_type="Int", default = Some(i.default), min = i.range.map(_._1), max = i.range.map(_._2)) + case b: settings.BooleanSetting => + Schema(_type = "Boolean") + case i: settings.IntSetting => + Schema(_type="Int", default = Some(i.default), min = i.range.map(_._1), max = i.range.map(_._2)) case c: settings.ChoiceSetting => val choices = mergeChoice(c.choices, c.choicesHelp) - new Schema(_type="Choice", arg = Some(c.helpArg).map(dehtmlfy), default = Option(c.default), choices = choices) + Schema(_type="Choice", arg = Some(c.helpArg).map(dehtmlfy), default = Option(c.default), choices = choices) case mc: settings.MultiChoiceSetting[_] => val choices = mergeChoice(mc.choices, mc.descriptions) - new Schema(_type="Choice", multiple = Some(true), arg = Some(mc.helpArg).map(dehtmlfy), choices = choices) - case ps: settings.PhasesSetting => new Schema(_type="Phases", default = Option(ps.default)) - case px: settings.PrefixSetting => new Schema(_type="Prefix") - case sv: settings.ScalaVersionSetting => new Schema(_type="ScalaVerion", arg = Some(sv.arg).map(dehtmlfy), default = Some(sv.initial.unparse)) - case pathStr: settings.PathSetting => new Schema(_type="Path", arg = Some(pathStr.arg), default = Some(pathStr.default)) - case str: settings.StringSetting => new Schema(_type="String", arg = 
Some(str.arg).map(dehtmlfy), default = Some(str.default)) - case ms: settings.MultiStringSetting => new Schema(_type="String", multiple = Some(true), arg = Some(ms.arg).map(dehtmlfy)) + Schema(_type="Choice", multiple = Some(true), arg = Some(mc.helpArg).map(dehtmlfy), choices = choices) + case ps: settings.PhasesSetting => + Schema(_type="Phases", default = Option(ps.default)) + case px: settings.PrefixSetting => + Schema(_type="Prefix") + case sv: settings.ScalaVersionSetting => + Schema(_type="ScalaVersion", arg = Some(sv.arg).map(dehtmlfy), default = Some(sv.initial.unparse)) + case pathStr: settings.PathSetting => + Schema(_type="Path", arg = Some(pathStr.arg), default = Some(pathStr.default)) + case str: settings.StringSetting => + Schema(_type="String", arg = Some(str.arg).map(dehtmlfy), default = Some(str.default)) + case ms: settings.MultiStringSetting => + Schema(_type="String", multiple = Some(true), arg = Some(ms.arg).map(dehtmlfy)) } - new ScalacOption( + ScalacOption( option = s.name, schema = schema, description = dehtmlfy(markdownifyBackquote(s.helpDescription)), @@ -127,7 +135,7 @@ object ScalaCompilerOptionsExporter { } val source = categoriezed.toSeq.sortBy(_._1).map { case (key, options) => - new Section(key, Some("ADD_NICE_DESCRIPTION_HERE"),options = options) + Section(key, Some("ADD_NICE_DESCRIPTION_HERE"),options = options) } val yamlFactory = new YAMLFactory() From 49315616005f6bfc3abefb0bfd3b0c542a19a53b Mon Sep 17 00:00:00 2001 From: exoego Date: Thu, 3 May 2018 19:33:58 +0900 Subject: [PATCH 1440/2793] Separate IDE specific settings that most developer do not need to care. --- .../scala/tools/nsc/ScalaCompilerOptionsExporter.scala | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala index 24ca966ca5aa..e6199456d065 100644 --- a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala +++ b/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala @@ -19,6 +19,7 @@ object ScalaCompilerOptionsExporter { val AdvancedSettings = Category("Advanced Settings", 3) val PrivateSettings = Category("Private Settings", 4) val WarningSettings = Category("Warning Settings", 5) + val IDESpecificSettings = Category("IDE Specific Settings", 6) trait JacksonWorkaround { val category: String @@ -121,6 +122,8 @@ object ScalaCompilerOptionsExporter { val name = option.option if (name.startsWith("-Xfatal-warnings") || name == "-Xlint" || name.startsWith("-Ywarn")) { WarningSettings + } else if (name.startsWith("-Ypresentation")) { + IDESpecificSettings } else if (name.startsWith("-X")) { AdvancedSettings } else if (name.startsWith("-Y") || name.startsWith("-opt") && name != "-optimise") { From 3e00af4525fc1e1ddf8747b792fe99950a72bde3 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Fri, 22 Jun 2018 20:18:15 -0400 Subject: [PATCH 1441/2793] Support @Repeatable Java annotations. Currently, duplicate classfile annotations cause a runtime crash when the JVM sees them (due to a call to `getAnnotations` or the like). Do instead exactly what Java does (since JEP-120): if the annotation type is (meta-)annotated with `@Repeatable`, wrap the annotations in an array and annotate the original element with a new annotation of the type given by `Repeatable#value`. It is now an error to have multiple annotations on the same tree with the same `typeSymbol` if the symbol is a classfile annotation. Fixes scala/bug#9529. 
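
For illustration, a minimal sketch of the new behaviour, mirroring the `t9529`
test files added below (`anns.Ann_0` is the Java-defined annotation from that
test, meta-annotated with `@Repeatable(Ann_0.Container.class)`; the container
line in the comment is schematic, not literal bytecode output):

```
import anns.Ann_0

// Two occurrences of the same classfile annotation on one element...
@Ann_0(name = "D", value = "dee") @Ann_0(name = "D", value = "dye")
class D

// ...are now wrapped into the containing annotation, so the classfile carries
// the equivalent of
//   @Ann_0.Container({@Ann_0(name = "D", value = "dee"), @Ann_0(name = "D", value = "dye")})
// and classOf[D].getAnnotations returns the container instead of crashing.
```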
--- .../tools/nsc/typechecker/RefChecks.scala | 61 ++++++++++++++----- .../mima-filters/2.12.0.forwards.excludes | 1 + .../reflect/internal/AnnotationInfos.scala | 3 + .../scala/reflect/internal/Definitions.scala | 3 +- .../scala/reflect/runtime/JavaMirrors.scala | 2 + .../reflect/runtime/JavaUniverseForce.scala | 1 + test/files/neg/t9529.check | 4 ++ test/files/neg/t9529.scala | 7 +++ test/files/pos/annotations.scala | 6 +- test/files/pos/attributes.scala | 2 - test/files/run/reify_ann1b.check | 31 ++++++---- test/files/run/reify_ann1b.scala | 11 ++-- test/files/run/t9529-types.check | 15 +++++ test/files/run/t9529-types/Test_1.scala | 29 +++++++++ test/files/run/t9529-types/TypeAnn_0.java | 16 +++++ test/files/run/t9529.check | 16 +++++ test/files/run/t9529/Ann_0.java | 15 +++++ test/files/run/t9529/Test_1.scala | 59 ++++++++++++++++++ 18 files changed, 243 insertions(+), 39 deletions(-) create mode 100644 test/files/neg/t9529.check create mode 100644 test/files/neg/t9529.scala create mode 100644 test/files/run/t9529-types.check create mode 100644 test/files/run/t9529-types/Test_1.scala create mode 100644 test/files/run/t9529-types/TypeAnn_0.java create mode 100644 test/files/run/t9529.check create mode 100644 test/files/run/t9529/Ann_0.java create mode 100644 test/files/run/t9529/Test_1.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index a40bf3be9c92..d4360308fa34 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1405,18 +1405,46 @@ abstract class RefChecks extends Transform { } } - private def checkAnnotations(tpes: List[Type], tree: Tree) = tpes foreach { tp => - checkTypeRef(tp, tree, skipBounds = false) - checkTypeRefBounds(tp, tree) - } - private def doTypeTraversal(tree: Tree)(f: Type => Unit) = if (!inPattern) tree.tpe foreach f - private def applyRefchecksToAnnotations(tree: Tree): Unit = { - def applyChecks(annots: List[AnnotationInfo]) = { - checkAnnotations(annots map (_.atp), tree) - transformTrees(annots flatMap (_.args)) + def applyChecks(annots: List[AnnotationInfo]): List[AnnotationInfo] = { + annots.foreach { ann => + checkTypeRef(ann.tpe, tree, skipBounds = false) + checkTypeRefBounds(ann.tpe, tree) + } + + annots + .map(_.transformArgs(transformTrees)) + .groupBy(_.symbol) + .flatMap((groupRepeatableAnnotations _).tupled) + .toList } + // assumes non-empty `anns` + def groupRepeatableAnnotations(sym: Symbol, anns: List[AnnotationInfo]): List[AnnotationInfo] = + if (!(sym isSubClass ClassfileAnnotationClass)) anns else anns match { + case single :: Nil => anns + case multiple => + sym.getAnnotation(AnnotationRepeatableAttr) match { + case Some(repeatable) => + repeatable.assocs.collectFirst { + case (nme.value, LiteralAnnotArg(Constant(c: Type))) => c + } match { + case Some(container) => + val assocs = List( + nme.value -> ArrayAnnotArg(multiple.map(NestedAnnotArg(_)).toArray) + ) + AnnotationInfo(container, args = Nil, assocs = assocs) :: Nil + case None => + devWarning(s"@Repeatable $sym had no containing class") + multiple + } + + case None => + reporter.error(tree.pos, s"$sym may not appear multiple times on ${tree.symbol}") + multiple + } + } + def checkIsElisible(sym: Symbol) = if (sym ne null) sym.elisionLevel.foreach { level => if (!sym.isMethod || sym.isAccessor || sym.isLazy || sym.isDeferred) reporter.error(sym.pos, s"${sym.name}: Only methods can be marked @elidable.") @@ -1426,7 
+1454,7 @@ abstract class RefChecks extends Transform { tree match { case m: MemberDef => val sym = m.symbol - applyChecks(sym.annotations) + sym.setAnnotations(applyChecks(sym.annotations)) def messageWarning(name: String)(warn: String) = reporter.warning(tree.pos, f"Invalid $name message for ${sym}%s${sym.locationString}%s:%n$warn") @@ -1444,11 +1472,12 @@ abstract class RefChecks extends Transform { } } - doTypeTraversal(tree) { - case tp @ AnnotatedType(annots, _) => - applyChecks(annots) - case tp => - } + if (!inPattern) + tree.setType(tree.tpe map { + case AnnotatedType(anns, ul) => + AnnotatedType(applyChecks(anns), ul) + case tp => tp + }) case _ => } } @@ -1713,7 +1742,7 @@ abstract class RefChecks extends Transform { var skipBounds = false // check all bounds, except those that are existential type parameters // or those within typed annotated with @uncheckedBounds - doTypeTraversal(tree) { + if (!inPattern) tree.tpe foreach { case tp @ ExistentialType(tparams, tpe) => existentialParams ++= tparams case ann: AnnotatedType if ann.hasAnnotation(UncheckedBoundsClass) => diff --git a/src/reflect/mima-filters/2.12.0.forwards.excludes b/src/reflect/mima-filters/2.12.0.forwards.excludes index fcac3f3749b7..d39f24039a81 100644 --- a/src/reflect/mima-filters/2.12.0.forwards.excludes +++ b/src/reflect/mima-filters/2.12.0.forwards.excludes @@ -9,6 +9,7 @@ ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.PlainNioFile") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.SynchronizedOps.newMappedBaseTypeSeq") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.JavaUniverse.newMappedBaseTypeSeq") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.JavaUniverse.statistics") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.JavaMirrors#JavaMirror#JavaAnnotationProxy.transformArgs") ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.FileZipArchive$LazyEntry") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.closeZipFile") diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala index 54764268c7e2..411d6e01382f 100644 --- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala +++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala @@ -338,6 +338,9 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => def argAtIndex(index: Int): Option[Tree] = if (index < args.size) Some(args(index)) else None + def transformArgs(f: List[Tree] => List[Tree]): AnnotationInfo = + new CompleteAnnotationInfo(atp, f(args), assocs) + override def hashCode = atp.## + args.## + assocs.## override def equals(other: Any) = other match { case x: AnnotationInfo => (atp == x.atp) && (args == x.args) && (assocs == x.assocs) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 20c330a56f2f..cdb2ab744936 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1118,9 +1118,10 @@ trait Definitions extends api.StandardDefinitions { lazy val ClassfileAnnotationClass = requiredClass[scala.annotation.ClassfileAnnotation] lazy val StaticAnnotationClass = requiredClass[scala.annotation.StaticAnnotation] - // Java retention annotations + // Java annotation annotations lazy val AnnotationRetentionAttr = requiredClass[java.lang.annotation.Retention] lazy 
val AnnotationRetentionPolicyAttr = requiredClass[java.lang.annotation.RetentionPolicy] + lazy val AnnotationRepeatableAttr = requiredClass[java.lang.annotation.Repeatable] // Annotations lazy val BridgeClass = requiredClass[scala.annotation.bridge] diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 7d0ef7ba3c31..436d652a62c6 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -176,6 +176,8 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive TermName(m.getName) -> toAnnotArg(m.getReturnType -> m.invoke(jann)) ) ) + + override def transformArgs(f: List[Tree] => List[Tree]) = this } def reflect[T: ClassTag](obj: T): InstanceMirror = new JavaInstanceMirror(obj) diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index 2926bd4d6946..0d72cbd6f607 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -384,6 +384,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.StaticAnnotationClass definitions.AnnotationRetentionAttr definitions.AnnotationRetentionPolicyAttr + definitions.AnnotationRepeatableAttr definitions.BridgeClass definitions.ElidableMethodClass definitions.ImplicitNotFoundClass diff --git a/test/files/neg/t9529.check b/test/files/neg/t9529.check new file mode 100644 index 000000000000..c1d30b7a1e70 --- /dev/null +++ b/test/files/neg/t9529.check @@ -0,0 +1,4 @@ +t9529.scala:7: error: Java annotation Resource may not appear multiple times on class TooMany +class TooMany + ^ +one error found diff --git a/test/files/neg/t9529.scala b/test/files/neg/t9529.scala new file mode 100644 index 000000000000..0be2254ae805 --- /dev/null +++ b/test/files/neg/t9529.scala @@ -0,0 +1,7 @@ +@deprecated("foo", "") +@deprecated("bar", "") +class `scala ftw` + +@javax.annotation.Resource(name = "baz") +@javax.annotation.Resource(name = "quux") +class TooMany \ No newline at end of file diff --git a/test/files/pos/annotations.scala b/test/files/pos/annotations.scala index 4832ce4ecdf7..8ab994dacef3 100644 --- a/test/files/pos/annotations.scala +++ b/test/files/pos/annotations.scala @@ -103,8 +103,10 @@ object Test3 { class Test4 { @Ann3(arr = Array("dlkfj", "DSF")) @Ann4(i = 2908) - @Ann4(i = Test3.i) @Ann5(value = classOf[Int]) - @Ann5(Test3.cls) def foo {} + + @Ann4(i = Test3.i) + @Ann5(Test3.cls) + def bar {} } diff --git a/test/files/pos/attributes.scala b/test/files/pos/attributes.scala index 60e00bff7d7c..c48c26d7a0de 100644 --- a/test/files/pos/attributes.scala +++ b/test/files/pos/attributes.scala @@ -19,8 +19,6 @@ object O5 { final val n = 2; @SerialVersionUID(0) class C1; @SerialVersionUID(n) class C2; - @SerialVersionUID(0) @SerialVersionUID(n) class C3; - @SerialVersionUID(0) @SerialVersionUID(n) class C4; } abstract class A1 { diff --git a/test/files/run/reify_ann1b.check b/test/files/run/reify_ann1b.check index a046dafeab0d..92db7046146e 100644 --- a/test/files/run/reify_ann1b.check +++ b/test/files/run/reify_ann1b.check @@ -1,33 +1,38 @@ reify_ann1b.scala:6: warning: Implementation restriction: subclassing Classfile does not make your annotation visible at runtime. If that is what you want, you must write the annotation class in Java. 
-class ann(bar: String) extends annotation.ClassfileAnnotation +class ann0(bar: String) extends annotation.ClassfileAnnotation + ^ +reify_ann1b.scala:7: warning: Implementation restriction: subclassing Classfile does not +make your annotation visible at runtime. If that is what +you want, you must write the annotation class in Java. +class ann1(bar: String) extends annotation.ClassfileAnnotation ^ { - @new ann(bar = "1a") @new ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T] extends AnyRef { - @new ann(bar = "3a") @new ann(bar = "3b") private[this] val x: T @ann(bar = "4a") @ann(bar = "4b") = _; - def (@new ann(bar = "3a") @new ann(bar = "3b") x: T @ann(bar = "4a") @ann(bar = "4b")) = { + @new ann0(bar = "1a") @new ann1(bar = "1b") class C[@new ann0(bar = "2a") @new ann1(bar = "2b") T] extends AnyRef { + @new ann0(bar = "3a") @new ann1(bar = "3b") private[this] val x: T @ann0(bar = "4a") @ann1(bar = "4b") = _; + def (@new ann0(bar = "3a") @new ann1(bar = "3b") x: T @ann0(bar = "4a") @ann1(bar = "4b")) = { super.(); () }; - @new ann(bar = "5a") @new ann(bar = "5b") def f(x: Int @ann(bar = "6a") @ann(bar = "6b")) = { - @new ann(bar = "7a") @new ann(bar = "7b") val r = x.$plus(3): @ann(bar = "8a"): @ann(bar = "8b"); - val s = (4: Int @ann(bar = "9a") @ann(bar = "9b")); + @new ann0(bar = "5a") @new ann1(bar = "5b") def f(x: Int @ann0(bar = "6a") @ann1(bar = "6b")) = { + @new ann0(bar = "7a") @new ann1(bar = "7b") val r = x.$plus(3): @ann0(bar = "8a"): @ann1(bar = "8b"); + val s = (4: Int @ann0(bar = "9a") @ann1(bar = "9b")); r.$plus(s) } }; () } { - @ann(bar = "1a") @ann(bar = "1b") class C[@ann(bar = "2a") @ann(bar = "2b") T] extends AnyRef { - @ann(bar = "3a") @ann(bar = "3b") private[this] val x: T @ann(bar = "4b") @ann(bar = "4a") = _; - def (@ann(bar = "3a") @ann(bar = "3b") x: T @ann(bar = "4b") @ann(bar = "4a")): C[T] = { + @ann0(bar = "1a") @ann1(bar = "1b") class C[@ann0(bar = "2a") @ann1(bar = "2b") T] extends AnyRef { + @ann0(bar = "3a") @ann1(bar = "3b") private[this] val x: T @ann1(bar = "4b") @ann0(bar = "4a") = _; + def (@ann0(bar = "3a") @ann1(bar = "3b") x: T @ann1(bar = "4b") @ann0(bar = "4a")): C[T] = { C.super.(); () }; - @ann(bar = "5a") @ann(bar = "5b") def f(x: Int @ann(bar = "6b") @ann(bar = "6a")): Int = { - @ann(bar = "7a") @ann(bar = "7b") val r: Int @ann(bar = "8b") @ann(bar = "8a") = ((x.+(3): Int @ann(bar = "8a")): Int @ann(bar = "8b") @ann(bar = "8a")); - val s: Int @ann(bar = "9b") @ann(bar = "9a") = (4: Int @ann(bar = "9b") @ann(bar = "9a")); + @ann0(bar = "5a") @ann1(bar = "5b") def f(x: Int @ann1(bar = "6b") @ann0(bar = "6a")): Int = { + @ann0(bar = "7a") @ann1(bar = "7b") val r: Int @ann1(bar = "8b") @ann0(bar = "8a") = ((x.+(3): Int @ann0(bar = "8a")): Int @ann1(bar = "8b") @ann0(bar = "8a")); + val s: Int @ann1(bar = "9b") @ann0(bar = "9a") = (4: Int @ann1(bar = "9b") @ann0(bar = "9a")); r.+(s) } }; diff --git a/test/files/run/reify_ann1b.scala b/test/files/run/reify_ann1b.scala index 30bafadf75a3..f1710d6fc4c5 100644 --- a/test/files/run/reify_ann1b.scala +++ b/test/files/run/reify_ann1b.scala @@ -3,15 +3,16 @@ import scala.reflect.runtime.{universe => ru} import scala.reflect.runtime.{currentMirror => cm} import scala.tools.reflect.ToolBox -class ann(bar: String) extends annotation.ClassfileAnnotation +class ann0(bar: String) extends annotation.ClassfileAnnotation +class ann1(bar: String) extends annotation.ClassfileAnnotation object Test extends App { // test 1: reify val tree = reify{ - @ann(bar="1a") @ann(bar="1b") class 
C[@ann(bar="2a") @ann(bar="2b") T](@ann(bar="3a") @ann(bar="3b") x: T @ann(bar="4a") @ann(bar="4b")) { - @ann(bar="5a") @ann(bar="5b") def f(x: Int @ann(bar="6a") @ann(bar="6b")) = { - @ann(bar="7a") @ann(bar="7b") val r = (x + 3): @ann(bar="8a") @ann(bar="8b") - val s = 4: Int @ann(bar="9a") @ann(bar="9b") + @ann0(bar="1a") @ann1(bar="1b") class C[@ann0(bar="2a") @ann1(bar="2b") T](@ann0(bar="3a") @ann1(bar="3b") x: T @ann0(bar="4a") @ann1(bar="4b")) { + @ann0(bar="5a") @ann1(bar="5b") def f(x: Int @ann0(bar="6a") @ann1(bar="6b")) = { + @ann0(bar="7a") @ann1(bar="7b") val r = (x + 3): @ann0(bar="8a") @ann1(bar="8b") + val s = 4: Int @ann0(bar="9a") @ann1(bar="9b") r + s } } diff --git a/test/files/run/t9529-types.check b/test/files/run/t9529-types.check new file mode 100644 index 000000000000..44fa1f050ae3 --- /dev/null +++ b/test/files/run/t9529-types.check @@ -0,0 +1,15 @@ +[[syntax trees at end of pickler]] // newSource1.scala +package { + import anns._; + abstract trait Foo extends AnyRef with Int @anns.TypeAnn_0(value = "b") @anns.TypeAnn_0(value = "a") => String @anns.TypeAnn_0(value = "y") @anns.TypeAnn_0(value = "x") { + type Meh = Any @anns.TypeAnn_0(value = "q") @anns.TypeAnn_0(value = "p") + } +} + +[[syntax trees at end of refchecks]] // newSource1.scala +package { + abstract trait Foo extends AnyRef with Int @anns.TypeAnn_0.Anns(value = [anns.TypeAnn_0(value = "b"), anns.TypeAnn_0(value = "a")]) => String @anns.TypeAnn_0.Anns(value = [anns.TypeAnn_0(value = "y"), anns.TypeAnn_0(value = "x")]) { + type Meh = Any @anns.TypeAnn_0.Anns(value = [anns.TypeAnn_0(value = "q"), anns.TypeAnn_0(value = "p")]) + } +} + diff --git a/test/files/run/t9529-types/Test_1.scala b/test/files/run/t9529-types/Test_1.scala new file mode 100644 index 000000000000..956c6b960196 --- /dev/null +++ b/test/files/run/t9529-types/Test_1.scala @@ -0,0 +1,29 @@ +/* evidently annotations on types don't make it into bytecode yet, even though + * such a thing is allowed in Java 8 and onwards. Here's a test that it'll work + * with repeatable annotations anyways. + * + * nb. 
currently multiple annotations on type trees get reversed by typer + */ + +import scala.tools.partest._ + +import anns._ + +@TypeAnn_0("") +object Test extends DirectTest { + + override def extraSettings: String = + s"-usejavacp -cp ${testOutput.path} -Xprint:pic,ref -Ystop-after:ref -d ${testOutput.path}" + + override def code = + """import anns._ + |trait Foo extends ( + | (Int @TypeAnn_0("a") @TypeAnn_0("b")) + | => (String @TypeAnn_0("x") @TypeAnn_0("y")) + |) { + | type Meh = Any@TypeAnn_0("p")@TypeAnn_0("q") + |} + """.stripMargin + + override def show() = compile() +} diff --git a/test/files/run/t9529-types/TypeAnn_0.java b/test/files/run/t9529-types/TypeAnn_0.java new file mode 100644 index 000000000000..dbede53ba5c8 --- /dev/null +++ b/test/files/run/t9529-types/TypeAnn_0.java @@ -0,0 +1,16 @@ +package anns; + +import java.lang.annotation.*; + +@Repeatable(TypeAnn_0.Anns.class) +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.TYPE_USE) +public @interface TypeAnn_0 { + String value(); + + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.TYPE_USE) + public static @interface Anns { + TypeAnn_0[] value(); + } +} \ No newline at end of file diff --git a/test/files/run/t9529.check b/test/files/run/t9529.check new file mode 100644 index 000000000000..7f567720ba6d --- /dev/null +++ b/test/files/run/t9529.check @@ -0,0 +1,16 @@ +A: List() +B: List(@javax.annotation.Resource(shareable=true, lookup=, name=B, description=, authenticationType=CONTAINER, type=class java.lang.Object, mappedName=)) +C: List(@anns.Ann_0(name=C, value=see)) +D: List(@anns.Ann_0$Container(value=[@anns.Ann_0(name=D, value=dee), @anns.Ann_0(name=D, value=dye)])) + +x: List(@anns.Ann_0(name=x, value=eks)) +y: List(@anns.Ann_0$Container(value=[@anns.Ann_0(name=y, value=why), @anns.Ann_0(name=y, value=wye)])) + +t: List(@anns.Ann_0(name=t, value=tee)) +u: List(@anns.Ann_0$Container(value=[@anns.Ann_0(name=u, value=you), @anns.Ann_0(name=u, value=yew)])) + +1: List(@anns.Ann_0(name=1, value=one)) +2: List(@anns.Ann_0$Container(value=[@anns.Ann_0(name=2, value=two), @anns.Ann_0(name=2, value=tew)])) + +List(@anns.Ann_0$Container(value=[@anns.Ann_0(name=, value=constructor), @anns.Ann_0(name=, value=initializer)])) + diff --git a/test/files/run/t9529/Ann_0.java b/test/files/run/t9529/Ann_0.java new file mode 100644 index 000000000000..bc5e9b0dea9e --- /dev/null +++ b/test/files/run/t9529/Ann_0.java @@ -0,0 +1,15 @@ +package anns; + +import java.lang.annotation.*; + +@Retention(RetentionPolicy.RUNTIME) +@Repeatable(Ann_0.Container.class) +public @interface Ann_0 { + String name(); + String value(); + + @Retention(RetentionPolicy.RUNTIME) + public static @interface Container { + public Ann_0[] value() default {}; + } +} \ No newline at end of file diff --git a/test/files/run/t9529/Test_1.scala b/test/files/run/t9529/Test_1.scala new file mode 100644 index 000000000000..d4efcddeb079 --- /dev/null +++ b/test/files/run/t9529/Test_1.scala @@ -0,0 +1,59 @@ +import java.lang.reflect._ +import anns._ + +class A +@javax.annotation.Resource(name = "B") class B +@Ann_0(name = "C", value = "see") class C +@Ann_0(name = "D", value = "dee") @Ann_0(name = "D", value = "dye") class D + +class Test @Ann_0(name = "", value = "constructor") @Ann_0(name = "", value = "initializer") () { + @Ann_0(name = "x", value = "eks") val x = 1 + @Ann_0(name = "y", value = "why") @Ann_0(name = "y", value = "wye") val y = 2 + + @Ann_0(name = "t", value = "tee") def t = 1 + @Ann_0(name = "u", value = "you") @Ann_0(name = "u", value = "yew") 
def u = 2 + + def meh( + @Ann_0(name = "1", value = "one") `1`: Int, + @Ann_0(name = "2", value = "two") @Ann_0(name = "2", value = "tew") `2`: Int, + ) = () + + // todo: annotations on types + // todo? annotaitons on packages +} + +object Test extends App { + val cls_test = classOf[Test] + + prints { + List(classOf[A], classOf[B], classOf[C], classOf[D]) + .map(cls => s"${cls.getName}: ${anns(cls)}") + } + + prints { + List("x", "y") + .map(cls_test.getDeclaredField) + .map(f => s"${f.getName}: ${anns(f)}") + } + + prints { + List("t", "u") + .map(cls_test.getDeclaredMethod(_)) + .map(m => s"${m.getName}: ${anns(m)}") + } + + prints { + cls_test + .getDeclaredMethod("meh", classOf[Int], classOf[Int]) + .getParameters.toList + .map(p => s"${p.getName}: ${anns(p)}") + } + + println { + anns(cls_test.getConstructor()).map(_.toString) + } ; println() + + def anns(ae: AnnotatedElement) = + ae.getAnnotations.toList.filterNot(_.isInstanceOf[reflect.ScalaSignature]) + def prints(l: List[String]) = { println(l mkString "\n") ; println() } +} \ No newline at end of file From 6dda47dcec419e7cf8156dbec665e3b2847be174 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Thu, 28 Jun 2018 06:06:11 -0700 Subject: [PATCH 1442/2793] [nomerge] Backport REPL big print fix --- src/repl/scala/tools/nsc/interpreter/IMain.scala | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index 060a6044defe..fbc6e137d0c1 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -925,18 +925,30 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends | %s | lazy val %s: _root_.java.lang.String = %s { | %s - | ("" """.stripMargin.format( lineRep.evalName, evalResult, lineRep.printName, executionWrapper, fullAccessPath ) val postamble = """ - | ) | } |} """.stripMargin val generate = (m: MemberHandler) => m resultExtractionCode Request.this + + override def apply(contributors: List[MemberHandler]): String = stringFromWriter { code => + code println preamble + if (contributors.lengthCompare(1) > 0) { + code.println("val sb = new _root_.scala.StringBuilder") + contributors foreach (x => code.println(s"""sb.append("" ${generate(x)})""")) + code.println("sb.toString") + } else { + code.print(""""" """) // start with empty string + contributors foreach (x => code.print(generate(x))) + code.println() + } + code println postamble + } } /** Compile the object file. Returns whether the compilation succeeded. 
From 37d0ca05b9cbe89d9f51071c635fb5f7d0d8a0b2 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 27 Jun 2018 15:08:20 +1000 Subject: [PATCH 1443/2793] Unit test for range position validation --- .../reflect/internal/PositionsTest.scala | 57 +++++++++++++++++++ .../symtab/SymbolTableForUnitTesting.scala | 4 +- 2 files changed, 59 insertions(+), 2 deletions(-) create mode 100644 test/junit/scala/reflect/internal/PositionsTest.scala diff --git a/test/junit/scala/reflect/internal/PositionsTest.scala b/test/junit/scala/reflect/internal/PositionsTest.scala new file mode 100644 index 000000000000..348c63fecd2f --- /dev/null +++ b/test/junit/scala/reflect/internal/PositionsTest.scala @@ -0,0 +1,57 @@ +package scala.reflect.internal + +import org.junit.Test + +import scala.reflect.internal.util.NoSourceFile +import scala.tools.nsc.reporters.StoreReporter +import scala.tools.nsc.symtab.SymbolTableForUnitTesting +import scala.tools.testing.AssertUtil + +class PositionsTest { + + private object symbolTable extends SymbolTableForUnitTesting { + override def useOffsetPositions: Boolean = false + override val reporter = new StoreReporter + } + + @Test def positionValidation(): Unit = { + import symbolTable._ + def checkInvalid(tree: Tree): Unit = { + reporter.reset() + AssertUtil.assertThrows[ValidateException](validatePositions(tree)) + } + + def checkValid(tree: Tree): Unit = { + reporter.reset() + validatePositions(tree) + assert(!reporter.hasErrors) + } + def rangePos(start: Int, end: Int): util.Position = util.Position.range(NoSourceFile, start, start, end) + def offsetPos(point: Int): util.Position = util.Position.offset(NoSourceFile, point) + def tree: Tree = Ident(TermName("x")) + def rangePositioned(start: Int, end: Int): Tree = { + Ident(TermName("x")).setPos(rangePos(start, end)) + } + // overlapping ranges + checkInvalid(Block(rangePositioned(0, 2), rangePositioned(1, 2), EmptyTree).setPos(rangePos(0, 2))) + checkInvalid(Block(rangePositioned(1, 2), rangePositioned(0, 2), EmptyTree).setPos(rangePos(0, 2))) + + // transparent position not deemed to overlap itself + checkValid(Block(rangePositioned(0, 2), tree.setPos(rangePos(1, 2).makeTransparent), EmptyTree).setPos(rangePos(0, 2))) + + // children of transparent position overlapping with sibling of transparent position. 
+ checkInvalid(Block(rangePositioned(0, 2), Block(Nil, rangePositioned(1, 2)).setPos(rangePos(1, 2).makeTransparent), EmptyTree).setPos(rangePos(0, 2))) + + // adjacent ranges are allowed to touch + checkValid(Block(rangePositioned(0, 1), rangePositioned(1, 2), EmptyTree).setPos(rangePos(0, 2))) + + // offset position between overlapping ranges + checkInvalid(Block(rangePositioned(0, 2), tree.setPos(offsetPos(0)), rangePositioned(1, 2), EmptyTree).setPos(rangePos(0, 2))) + + // child range position larger than parent + checkInvalid(Block(Nil, rangePositioned(0, 3)).setPos(rangePos(0, 2))) + + // child offset position outside of parent + checkInvalid(Block(Nil, tree.setPos(offsetPos(3)).setPos(rangePos(0, 2)))) + } +} diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala index 7e2028eefb39..cbd5634f292f 100644 --- a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala +++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala @@ -2,7 +2,7 @@ package scala.tools.nsc package symtab import scala.reflect.ClassTag -import scala.reflect.internal.{NoPhase, Phase, SomePhase} +import scala.reflect.internal.{NoPhase, Phase, Reporter, SomePhase} import scala.reflect.internal.util.Statistics import scala.tools.util.PathResolver import util.ClassPath @@ -81,7 +81,7 @@ class SymbolTableForUnitTesting extends SymbolTable { def erasurePhase: scala.reflect.internal.Phase = SomePhase // Members declared in scala.reflect.internal.Reporting - def reporter = new scala.reflect.internal.ReporterImpl { + def reporter: Reporter = new scala.reflect.internal.ReporterImpl { protected def info0(pos: Position, msg: String, severity: Severity, force: Boolean): Unit = println(msg) } From 49bb79f3f2e03a7ad2abf41378f714ada11cc423 Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Tue, 19 Jun 2018 08:20:18 +0100 Subject: [PATCH 1444/2793] Optimise rangepos checking use traverser rather than external iteration reuse temporary results where possible --- .../tools/nsc/interactive/ContextTrees.scala | 12 +- .../collection/mutable/ArrayBuilder.scala | 6 +- .../scala/reflect/internal/Positions.scala | 274 ++++++++++-------- .../scala/reflect/internal/Trees.scala | 17 +- .../reflect/runtime/JavaUniverseForce.scala | 1 - 5 files changed, 183 insertions(+), 127 deletions(-) diff --git a/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala index 975761bb8772..2d513f7e429c 100644 --- a/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala +++ b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala @@ -108,10 +108,14 @@ trait ContextTrees { self: Global => */ def addContext(contexts: Contexts, context: Context): Unit = { val cpos = context.tree.pos - if (cpos.isTransparent) - for (t <- context.tree.children flatMap solidDescendants) - addContext(contexts, context, t.pos) - else + if (cpos.isTransparent) { + val traverser = new ChildSolidDescendantsCollector() { + override def traverseSolidChild(t: Tree): Unit = { + addContext(contexts, context, t.pos) + } + } + traverser.apply(context.tree) + } else addContext(contexts, context, cpos) } diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala index d023110c1b42..10c1c94f7054 100644 --- a/src/library/scala/collection/mutable/ArrayBuilder.scala +++ b/src/library/scala/collection/mutable/ArrayBuilder.scala @@ -60,9 
+60,9 @@ object ArrayBuilder { private var size: Int = 0 private def mkArray(size: Int): Array[T] = { - val newelems = new Array[T](size) - if (this.size > 0) Array.copy(elems, 0, newelems, 0, this.size) - newelems + if (capacity == size && capacity > 0) elems + else if (elems eq null) new Array[T](size) + else java.util.Arrays.copyOf[T](elems, size) } private def resize(size: Int) { diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala index b56762c42b37..f7c488c7d365 100644 --- a/src/reflect/scala/reflect/internal/Positions.scala +++ b/src/reflect/scala/reflect/internal/Positions.scala @@ -3,7 +3,6 @@ package reflect package internal import util._ -import scala.collection.mutable.ListBuffer /** Handling range positions * atPos, the main method in this trait, will add positions to a tree, @@ -37,9 +36,22 @@ trait Positions extends api.Positions { self: SymbolTable => def wrappingPos(default: Position, trees: List[Tree]): Position = wrappingPos(default, trees, focus = true) def wrappingPos(default: Position, trees: List[Tree], focus: Boolean): Position = { if (useOffsetPositions) default else { - val ranged = trees filter (_.pos.isRange) - if (ranged.isEmpty) if (focus) default.focus else default - else Position.range(default.source, (ranged map (_.pos.start)).min, default.point, (ranged map (_.pos.end)).max) + var rest = trees + var min = Int.MaxValue + var max = Int.MinValue + while (rest ne Nil) { + val head = rest.head + rest = rest.tail + val pos = head.pos + if (pos.isRange) { + min = Math.min(min, pos.start) + max = Math.max(max, pos.end) + } + } + if (min > max) + //there are no ranges + if (focus) default.focus else default + else Position.range(default.source, min, default.point, max) } } @@ -66,12 +78,13 @@ trait Positions extends api.Positions { self: SymbolTable => def isOverlapping(pos: Position) = pos.isRange && (others exists (pos overlaps _.pos)) - if (isOverlapping(tree.pos)) { + val treePos = tree.pos + if (isOverlapping(treePos)) { val children = tree.children children foreach (ensureNonOverlapping(_, others, focus)) - if (tree.pos.isOpaqueRange) { - val wpos = wrappingPos(tree.pos, children, focus) - tree setPos (if (isOverlapping(wpos)) tree.pos.makeTransparent else wpos) + if (treePos.isOpaqueRange) { + val wpos = wrappingPos(treePos, children, focus) + tree setPos (if (isOverlapping(wpos)) treePos.makeTransparent else wpos) } } } @@ -80,124 +93,163 @@ trait Positions extends api.Positions { self: SymbolTable => if (useOffsetPositions) Position.offset(source, point) else Position.range(source, start, point, end) - def validatePositions(tree: Tree) { - if (useOffsetPositions) return - - def reportTree(prefix : String, tree : Tree) { - val source = if (tree.pos.isDefined) tree.pos.source else "" - inform("== "+prefix+" tree ["+tree.id+"] of type "+tree.productPrefix+" at "+tree.pos.show+source) - inform("") - inform(treeStatus(tree)) - inform("") - } - def positionError(msg: String)(body : => Unit) { - inform("======= Position error\n" + msg) - body - inform("\nWhile validating #" + tree.id) - inform(treeStatus(tree)) - inform("\nChildren:") - tree.children foreach (t => inform(" " + treeStatus(t, tree))) - inform("=======") - throw new ValidateException(msg) - } + abstract class ChildSolidDescendantsCollector extends Traverser { + // don't traverse annotations + override def traverseModifiers(mods: Modifiers): Unit = () - def validate(tree: Tree, encltree: Tree): Unit = { + override def traverse(tree: 
Tree): Unit = + if (tree ne EmptyTree) { + if (tree.pos.isTransparent) super.traverse(tree) + else { + traverseSolidChild(tree) + } + } + def traverseSolidChild(t: Tree): Unit + def apply(t: Tree): Unit = super.traverse(t) + } - if (!tree.isEmpty && tree.canHaveAttrs) { - if (settings.Yposdebug && settings.verbose) - inform("[%10s] %s".format("validate", treeStatus(tree, encltree))) + private[this] def reportTree(prefix: String, tree: Tree) { + val source = if (tree.pos.isDefined) tree.pos.source else "" + inform("== " + prefix + " tree [" + tree.id + "] of type " + tree.productPrefix + " at " + tree.pos.show + source) + inform("") + inform(treeStatus(tree)) + inform("") + } - if (!tree.pos.isDefined) - positionError("Unpositioned tree #"+tree.id) { - inform("%15s %s".format("unpositioned", treeStatus(tree, encltree))) - inform("%15s %s".format("enclosing", treeStatus(encltree))) - encltree.children foreach (t => inform("%15s %s".format("sibling", treeStatus(t, encltree)))) - } - if (tree.pos.isRange) { - if (!encltree.pos.isRange) - positionError("Synthetic tree ["+encltree.id+"] contains nonsynthetic tree ["+tree.id+"]") { - reportTree("Enclosing", encltree) - reportTree("Enclosed", tree) - } - if (!(encltree.pos includes tree.pos)) - positionError("Enclosing tree ["+encltree.id+"] does not include tree ["+tree.id+"]") { - reportTree("Enclosing", encltree) - reportTree("Enclosed", tree) - } + private[this] def positionError(topTree: Tree, msg: String)(body: => Unit) { + inform("======= Position error\n" + msg) + body + inform("\nWhile validating #" + topTree.id) + inform(treeStatus(topTree)) + inform("\nChildren:") + topTree.children foreach (t => inform(" " + treeStatus(t, topTree))) + inform("=======") + throw new ValidateException(msg) + } - findOverlapping(tree.children flatMap solidDescendants) match { - case List() => ; - case xs => { - positionError("Overlapping trees "+xs.map { case (x, y) => (x.id, y.id) }.mkString("", ", ", "")) { - reportTree("Ancestor", tree) - for((x, y) <- xs) { - reportTree("First overlapping", x) - reportTree("Second overlapping", y) - } - } - } - } - } - for (ct <- tree.children flatMap solidDescendants) validate(ct, tree) + private val posStartOrdering: Ordering[Tree] = new Ordering[Tree] { + override def compare(x: Tree, y: Tree): Int = { + @inline def posOf(t: Tree): Int = { + val pos = t.pos + if (pos eq NoPosition) Int.MinValue else pos.start } + Integer.compare(posOf(x), posOf(y)) } - - validate(tree, tree) } + def validatePositions(tree: Tree): Unit = if (!useOffsetPositions) { + object worker { + val trace = settings.Yposdebug && settings.verbose + val topTree = tree + + object solidChildrenCollector extends ChildSolidDescendantsCollector { + private[this] var size = 0 + private[this] var childSolidDescendants = new Array[Tree](32) + private[this] val spares = new java.util.ArrayList[Array[Tree]] + + def borrowArray: Array[Tree] = { + val borrowed = childSolidDescendants + childSolidDescendants = if (spares.isEmpty) new Array[Tree](32) else spares.remove(spares.size - 1) + clear() + borrowed + } + def spareArray(array: Array[Tree]): Unit = { + spares.add(array) + } - def solidDescendants(tree: Tree): List[Tree] = - if (tree.pos.isTransparent) tree.children flatMap solidDescendants - else List(tree) + def child(i:Int) = childSolidDescendants(i) + def collectedSize = size + def sortedArray: Array[Tree] = { + if (size > 1) + java.util.Arrays.sort(childSolidDescendants, 0, size, posStartOrdering) + childSolidDescendants + } - /** A free range from 
`lo` to `hi` */ - private def free(lo: Int, hi: Int): Range = - Range(Position.range(null, lo, lo, hi), EmptyTree) + //we dont care about zeroing the array + def clear() {size = 0} - /** The maximal free range */ - private lazy val maxFree: Range = free(0, Int.MaxValue) + def traverseSolidChild(t: Tree): Unit = { + if (size == childSolidDescendants.length) { + spareArray(childSolidDescendants) + childSolidDescendants = java.util.Arrays.copyOf(childSolidDescendants, size << 1) + } + childSolidDescendants(size) = t + size += 1 + } + } - /** A singleton list of a non-empty range from `lo` to `hi`, or else the empty List */ - private def maybeFree(lo: Int, hi: Int) = - if (lo < hi) List(free(lo, hi)) - else List() + def loop(tree: Tree, encltree: Tree) { + if (!tree.isEmpty && tree.canHaveAttrs) { + val treePos = tree.pos + if (trace) + inform("[%10s] %s".format("validate", treeStatus(tree, encltree))) + + if (!treePos.isDefined) + positionError(topTree, "Unpositioned tree #" + tree.id) { + inform("%15s %s".format("unpositioned", treeStatus(tree, encltree))) + inform("%15s %s".format("enclosing", treeStatus(encltree))) + encltree.children foreach (t => inform("%15s %s".format("sibling", treeStatus(t, encltree)))) + } - /** Insert `pos` into ranges `rs` if possible; - * otherwise add conflicting trees to `conflicting`. - */ - private def insert(rs: List[Range], t: Tree, conflicting: ListBuffer[Tree]): List[Range] = rs match { - case List() => - assert(conflicting.nonEmpty) - rs - case r :: rs1 => - assert(!t.pos.isTransparent) - if (r.isFree && (r.pos includes t.pos)) { -// inform("subdividing "+r+"/"+t.pos) - maybeFree(t.pos.end, r.pos.end) ::: List(Range(t.pos, t)) ::: maybeFree(r.pos.start, t.pos.start) ::: rs1 - } else { - if (!r.isFree && (r.pos overlaps t.pos)) conflicting += r.tree - r :: insert(rs1, t, conflicting) - } - } + solidChildrenCollector(tree) + val numChildren = solidChildrenCollector.collectedSize - /** Replace elem `t` of `ts` by `replacement` list. */ - private def replace(ts: List[Tree], t: Tree, replacement: List[Tree]): List[Tree] = - if (ts.head == t) replacement ::: ts.tail - else ts.head :: replace(ts.tail, t, replacement) + if (treePos.isRange) { + val enclPos = encltree.pos + if (!enclPos.isRange) + positionError(topTree, "Synthetic tree [" + encltree.id + "] contains nonsynthetic tree [" + tree.id + "]") { + reportTree("Enclosing", encltree) + reportTree("Enclosed", tree) + } + if (!(enclPos includes treePos)) + positionError(topTree, "Enclosing tree [" + encltree.id + "] does not include tree [" + tree.id + "]") { + reportTree("Enclosing", encltree) + reportTree("Enclosed", tree) + } - /** Does given list of trees have mutually non-overlapping positions? 
- * pre: None of the trees is transparent - */ - def findOverlapping(cts: List[Tree]): List[(Tree, Tree)] = { - var ranges = List(maxFree) - for (ct <- cts) { - if (ct.pos.isOpaqueRange) { - val conflicting = new ListBuffer[Tree] - ranges = insert(ranges, ct, conflicting) - if (conflicting.nonEmpty) return conflicting.toList map (t => (t, ct)) + if (numChildren > 1) { + val childSolidDescendants = solidChildrenCollector.sortedArray + var t1 = childSolidDescendants(0) + var t1Pos = t1.pos + var i = 1 + while (i < numChildren) { + val t2 = childSolidDescendants(i) + val t2Pos = t2.pos + if (t1Pos.overlaps(t2Pos)) { + positionError(topTree, "Overlapping trees") { + reportTree("Ancestor", tree) + reportTree("First overlapping", t1) + reportTree("Second overlapping", t2) + } + } + //why only for range + if (t2Pos.isRange) { + t1 = t2 + t1Pos = t2Pos + } + i += 1 + } + } + } + if (numChildren > 0) { + if (numChildren == 1) { + val first = solidChildrenCollector.child(0) + solidChildrenCollector.clear() + loop(first, tree) + } else { + val snap = solidChildrenCollector.borrowArray + var i = 0 + while (i < numChildren) { + loop(snap(i), tree) + i += 1 + } + solidChildrenCollector.spareArray(snap) + } + } + } } } - List() + worker.loop(tree, tree) } /** Set position of all children of a node @@ -265,10 +317,6 @@ trait Positions extends api.Positions { self: SymbolTable => } } - case class Range(pos: Position, tree: Tree) { - def isFree = tree == EmptyTree - } - class TypedLocator(pos: Position) extends Locator(pos) { override protected def isEligible(t: Tree) = super.isEligible(t) && t.tpe != null } diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 7b78fca09b58..f94e16a0afb7 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -9,6 +9,7 @@ package internal import Flags._ import scala.collection.mutable +import scala.collection.mutable.ListBuffer import scala.reflect.macros.Attachments import util.{Statistics, StatisticsStatics} @@ -150,13 +151,17 @@ trait Trees extends api.Trees { }) override def children: List[Tree] = { - def subtrees(x: Any): List[Tree] = x match { - case EmptyTree => Nil - case t: Tree => List(t) - case xs: List[_] => xs flatMap subtrees - case _ => Nil + var builder: ListBuffer[Tree] = null + def subtrees(x: Any): Unit = x match { + case EmptyTree => + case t: Tree => + if (builder eq null) builder = new ListBuffer[Tree] + builder += t + case xs: List[_] => xs foreach subtrees + case _ => } - productIterator.toList flatMap subtrees + productIterator foreach subtrees + if (builder eq null) Nil else builder.result() } def freeTerms: List[FreeTermSymbol] = freeSyms[FreeTermSymbol](_.isFreeTerm, _.termSymbol) diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index 0d72cbd6f607..c2751fea80ab 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -53,7 +53,6 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.KnownDirectSubclassesCalled this.noPrint this.typeDebug - this.Range // inaccessible: this.posAssigner this.ConsoleWriter this.RefTree From d9cedb599c42877ef8c39bd3f0a1cf7633218559 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 3 Jul 2018 06:16:10 -0700 Subject: [PATCH 1445/2793] [nomerge] Backport test --- test/files/run/t10956.check | 39 
+++++++++++++++++++++++++++++++++++++ test/files/run/t10956.scala | 17 ++++++++++++++++ 2 files changed, 56 insertions(+) create mode 100644 test/files/run/t10956.check create mode 100644 test/files/run/t10956.scala diff --git a/test/files/run/t10956.check b/test/files/run/t10956.check new file mode 100644 index 000000000000..e52daede735f --- /dev/null +++ b/test/files/run/t10956.check @@ -0,0 +1,39 @@ + +scala> :paste < EOF +// Entering paste mode (EOF to finish) + +import java.awt.AWTError,java.awt.Dialog,java.awt.KeyEventDispatcher,java.awt.Robot,java.awt.AWTEvent,java.awt.Dimension,java.awt.KeyEventPostProcessor,java.awt.ScrollPane,java.awt.AWTEventMulticaster,java.awt.DisplayMode,java.awt.KeyboardFocusManager,java.awt.ScrollPaneAdjustable,java.awt.AWTException,java.awt.Event,java.awt.Label,java.awt.Scrollbar,java.awt.AWTKeyStroke,java.awt.EventQueue,java.awt.LayoutManager,java.awt.SecondaryLoop,java.awt.AWTPermission,java.awt.FileDialog,java.awt.LayoutManager2,java.awt.Shape,java.awt.ActiveEvent,java.awt.FlowLayout,java.awt.LinearGradientPaint,java.awt.SplashScreen,java.awt.Adjustable,java.awt.FocusTraversalPolicy,java.awt.List,java.awt.Stroke,java.awt.AlphaComposite,java.awt.Font,java.awt.MediaTracker,java.awt.SystemColor,java.awt.BasicStroke,java.awt.FontFormatException,java.awt.Menu,java.awt.SystemTray,java.awt.BorderLayout,java.awt.FontMetrics,java.awt.MenuBar,java.awt.TextArea,java.awt.BufferCapabilities,java.awt.Frame,java.awt.MenuComponent,java.awt.TextComponent,java.awt.Button,java.awt.GradientPaint,java.awt.MenuContainer,java.awt.TextField,java.awt.Canvas,java.awt.Graphics,java.awt.MenuItem,java.awt.TexturePaint,java.awt.CardLayout,java.awt.Graphics2D,java.awt.MenuShortcut,java.awt.Toolkit,java.awt.Checkbox,java.awt.GraphicsConfigTemplate,java.awt.MouseInfo,java.awt.Transparency,java.awt.CheckboxGroup,java.awt.GraphicsConfiguration,java.awt.MultipleGradientPaint,java.awt.TrayIcon,java.awt.CheckboxMenuItem,java.awt.GraphicsDevice,java.awt.PageAttributes,java.awt.Window,java.awt.Choice,java.awt.GraphicsEnvironment,java.awt.Paint,java.awt.color,java.awt.Color,java.awt.GridBagConstraints,java.awt.PaintContext,java.awt.datatransfer,java.awt.Component,java.awt.GridBagLayout,java.awt.Panel,java.awt.dnd,java.awt.ComponentOrientation,java.awt.GridBagLayoutInfo,java.awt.Point,java.awt.event,java.awt.Composite,java.awt.GridLayout,java.awt.PointerInfo,java.awt.font,java.awt.CompositeContext,java.awt.HeadlessException,java.awt.Polygon,java.awt.geom,java.awt.Container,java.awt.IllegalComponentStateException,java.awt.PopupMenu,java.awt.im,java.awt.ContainerOrderFocusTraversalPolicy,java.awt.Image,java.awt.PrintGraphics,java.awt.image,java.awt.Cursor,java.awt.ImageCapabilities,java.awt.PrintJob,java.awt.peer,java.awt.DefaultFocusTraversalPolicy,java.awt.Insets,java.awt.RadialGradientPaint,java.awt.print,java.awt.DefaultKeyboardFocusManager,java.awt.ItemSelectable,java.awt.Rectangle,java.awt.Desktop,java.awt.JobAttributes,java.awt.RenderingHints +EOF + +// Exiting paste mode, now interpreting. 
+ +import java.awt.AWTError +import java.awt.Dialog +import java.awt.KeyEventDispatcher +import java.awt.Robot +import java.awt.AWTEvent +import java.awt.Dimension +import java.awt.KeyEventPostProcessor +import java.awt.ScrollPane +import java.awt.AWTEventMulticaster +import java.awt.DisplayMode +import java.awt.KeyboardFocusManager +import java.awt.ScrollPaneAdjustable +import java.awt.AWTException +import java.awt.Event +import java.awt.Label +import java.awt.Scrollbar +import java.awt.AWTKeyStroke +import java.awt.EventQueue +import java.awt.LayoutManager +import java.awt.SecondaryLoop +import java.awt.AWTPermission +import java.awt.FileDialog +import java.awt.LayoutManager2 +import java.awt.Shape +import java.awt.ActiveEvent +import java.awt.FlowLayout +import java.awt.LinearGradientPaint +import java.awt.Splash... + +scala> :quit diff --git a/test/files/run/t10956.scala b/test/files/run/t10956.scala new file mode 100644 index 000000000000..88377dde4ee6 --- /dev/null +++ b/test/files/run/t10956.scala @@ -0,0 +1,17 @@ + +import scala.tools.partest.ReplTest +import scala.tools.nsc.Settings + + +/* + * Huge import clause resulted in long "result string" construction. + * That would blow the stack on typecheck or refchecks. + */ +object Test extends ReplTest { + def code = + """ +:paste < EOF +import java.awt.AWTError,java.awt.Dialog,java.awt.KeyEventDispatcher,java.awt.Robot,java.awt.AWTEvent,java.awt.Dimension,java.awt.KeyEventPostProcessor,java.awt.ScrollPane,java.awt.AWTEventMulticaster,java.awt.DisplayMode,java.awt.KeyboardFocusManager,java.awt.ScrollPaneAdjustable,java.awt.AWTException,java.awt.Event,java.awt.Label,java.awt.Scrollbar,java.awt.AWTKeyStroke,java.awt.EventQueue,java.awt.LayoutManager,java.awt.SecondaryLoop,java.awt.AWTPermission,java.awt.FileDialog,java.awt.LayoutManager2,java.awt.Shape,java.awt.ActiveEvent,java.awt.FlowLayout,java.awt.LinearGradientPaint,java.awt.SplashScreen,java.awt.Adjustable,java.awt.FocusTraversalPolicy,java.awt.List,java.awt.Stroke,java.awt.AlphaComposite,java.awt.Font,java.awt.MediaTracker,java.awt.SystemColor,java.awt.BasicStroke,java.awt.FontFormatException,java.awt.Menu,java.awt.SystemTray,java.awt.BorderLayout,java.awt.FontMetrics,java.awt.MenuBar,java.awt.TextArea,java.awt.BufferCapabilities,java.awt.Frame,java.awt.MenuComponent,java.awt.TextComponent,java.awt.Button,java.awt.GradientPaint,java.awt.MenuContainer,java.awt.TextField,java.awt.Canvas,java.awt.Graphics,java.awt.MenuItem,java.awt.TexturePaint,java.awt.CardLayout,java.awt.Graphics2D,java.awt.MenuShortcut,java.awt.Toolkit,java.awt.Checkbox,java.awt.GraphicsConfigTemplate,java.awt.MouseInfo,java.awt.Transparency,java.awt.CheckboxGroup,java.awt.GraphicsConfiguration,java.awt.MultipleGradientPaint,java.awt.TrayIcon,java.awt.CheckboxMenuItem,java.awt.GraphicsDevice,java.awt.PageAttributes,java.awt.Window,java.awt.Choice,java.awt.GraphicsEnvironment,java.awt.Paint,java.awt.color,java.awt.Color,java.awt.GridBagConstraints,java.awt.PaintContext,java.awt.datatransfer,java.awt.Component,java.awt.GridBagLayout,java.awt.Panel,java.awt.dnd,java.awt.ComponentOrientation,java.awt.GridBagLayoutInfo,java.awt.Point,java.awt.event,java.awt.Composite,java.awt.GridLayout,java.awt.PointerInfo,java.awt.font,java.awt.CompositeContext,java.awt.HeadlessException,java.awt.Polygon,java.awt.geom,java.awt.Container,java.awt.IllegalComponentStateException,java.awt.PopupMenu,java.awt.im,java.awt.ContainerOrderFocusTraversalPolicy,java.awt.Image,java.awt.PrintGraphics,java.awt.image,java.awt.Cursor,java.awt.ImageC
apabilities,java.awt.PrintJob,java.awt.peer,java.awt.DefaultFocusTraversalPolicy,java.awt.Insets,java.awt.RadialGradientPaint,java.awt.print,java.awt.DefaultKeyboardFocusManager,java.awt.ItemSelectable,java.awt.Rectangle,java.awt.Desktop,java.awt.JobAttributes,java.awt.RenderingHints +EOF + """ +} From 393a2661d6648f62711b0ffa883f7c0c214f3bc6 Mon Sep 17 00:00:00 2001 From: naldo Date: Tue, 3 Jul 2018 17:17:54 +0200 Subject: [PATCH 1446/2793] Fix error in HashMap.HashMapCollision1.merge0 --- src/library/scala/collection/immutable/HashMap.scala | 2 +- .../scala/collection/immutable/HashMapTest.scala | 12 +++++++++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index c3217385d067..79c4ac2d14b8 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -297,7 +297,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = { // this can be made more efficient by passing the entire ListMap at once var m = that - for (p <- kvs) m = m.updated0(p._1, this.hash, level, p._2, p, merger) + for (p <- kvs) m = m.updated0(p._1, this.hash, level, p._2, p, merger.invert) m } } diff --git a/test/junit/scala/collection/immutable/HashMapTest.scala b/test/junit/scala/collection/immutable/HashMapTest.scala index a970786455e4..8b036f26ac4c 100644 --- a/test/junit/scala/collection/immutable/HashMapTest.scala +++ b/test/junit/scala/collection/immutable/HashMapTest.scala @@ -45,4 +45,14 @@ class HashMapTest { } assertEquals(expected, mergedWithMergeFunction) } -} \ No newline at end of file + + @Test + def canMergeHashMapCollision1WithCorrectMerege() { + case class A(k: Int) { override def hashCode = 0 } + val m1 = HashMap(A(0) -> 2, A(1) -> 2) + val m2 = HashMap(A(0) -> 1, A(1) -> 1) + val merged = m1.merged(m2) { case ((k, l), (_, r)) => k -> (l - r) } + val expected = HashMap(A(0) -> 1, A(1) -> 1) + assertEquals(merged, expected) + } +} From f3cad7e7211ee425d5be4e14b6465219d6318ff7 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 9 Jul 2018 19:54:27 +1000 Subject: [PATCH 1447/2793] Fix regression with curried implicit defs conforming to function types Fixes scala/bug#10858 --- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 7 +++++-- test/files/pos/t10858.scala | 6 ++++++ 2 files changed, 11 insertions(+), 2 deletions(-) create mode 100644 test/files/pos/t10858.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 4db10827f248..4c66b77a54dd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -386,10 +386,12 @@ trait Implicits { /** The type parameters to instantiate */ val undetParams = if (isView) Nil else context.outer.undetparams val wildPt = approximate(pt) - private val ptFunctionArity: Int = { - val dealiased = pt.dealiasWiden + private[this] def functionArityOf(tp: Type): Int = { + val dealiased = tp.dealiasWiden if (isFunctionTypeDirect(dealiased)) dealiased.typeArgs.length - 1 else -1 } + private val cachedPtFunctionArity: Int = functionArityOf(pt) + final def functionArity(tp: Type): Int = if (tp eq pt) cachedPtFunctionArity else functionArityOf(tp) private val stableRunDefsForImport = 
currentRun.runDefinitions import stableRunDefsForImport._ @@ -579,6 +581,7 @@ trait Implicits { if (sym.isAliasType) loop(tp, pt.dealias) else if (sym.isAbstractType) loop(tp, pt.bounds.lo) else { + val ptFunctionArity = functionArity(pt) ptFunctionArity > 0 && hasLength(params, ptFunctionArity) && { var ps = params var as = args diff --git a/test/files/pos/t10858.scala b/test/files/pos/t10858.scala new file mode 100644 index 000000000000..8d439f0d0c98 --- /dev/null +++ b/test/files/pos/t10858.scala @@ -0,0 +1,6 @@ +import language.implicitConversions + +object Test { + implicit def foo(a: Int)(b: Int, c: Int): String = "" + a + b; + implicitly[Int => (Int, Int) => String].apply(1).apply(2, 3) +} From fa7ba070050fdbe9dec4f091fa9089a9c6973650 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 12 Jul 2018 16:17:25 +0200 Subject: [PATCH 1448/2793] Non-sensical comparison check checks erased type Also, improve handling of refined types in isEffectivelyFinal: it suffices for any parent type to be effectively final, for their intersection to be effectively final (if not by attempting to subclass that final parent, how could you create another subclass?) (PS: How many types can a non-sensical comparison check check?) --- .../tools/nsc/typechecker/RefChecks.scala | 45 +++++++++++-------- .../scala/reflect/internal/Symbols.scala | 6 ++- test/files/neg/nonsense_eq_refine.check | 9 ++++ test/files/neg/nonsense_eq_refine.flags | 1 + test/files/neg/nonsense_eq_refine.scala | 10 +++++ 5 files changed, 52 insertions(+), 19 deletions(-) create mode 100644 test/files/neg/nonsense_eq_refine.check create mode 100644 test/files/neg/nonsense_eq_refine.flags create mode 100644 test/files/neg/nonsense_eq_refine.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index d4360308fa34..dd4699cef988 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -971,7 +971,12 @@ abstract class RefChecks extends Transform { case Object_eq | Object_ne | Object_== | Object_!= | Any_== | Any_!= => true case _ => false } - /** Check the sensibility of using the given `equals` to compare `qual` and `other`. */ + + /** + * Check the sensibility of using the given `equals` to compare `qual` and `other`. + * + * NOTE: I'm really not convinced by the logic here. I also think this would work better after erasure. 
+ */ private def checkSensibleEquals(pos: Position, qual: Tree, name: Name, sym: Symbol, other: Tree) = { def isReferenceOp = sym == Object_eq || sym == Object_ne def isNew(tree: Tree) = tree match { @@ -991,8 +996,12 @@ abstract class RefChecks extends Transform { // @MAT normalize for consistency in error message, otherwise only part is normalized due to use of `typeSymbol` def typesString = normalizeAll(qual.tpe.widen)+" and "+normalizeAll(other.tpe.widen) + // TODO: this should probably be used in more type comparisons in checkSensibleEquals + def erasedClass(tp: Type) = erasure.javaErasure(tp).typeSymbol + /* Symbols which limit the warnings we can issue since they may be value types */ - val isMaybeValue = Set[Symbol](AnyClass, AnyRefClass, AnyValClass, ObjectClass, ComparableClass, JavaSerializableClass) + val couldBeAnything = Set[Symbol](ObjectClass, ComparableClass, JavaSerializableClass) + def isMaybeValue(sym: Symbol): Boolean = couldBeAnything(erasedClass(sym.tpe)) // Whether def equals(other: Any) has known behavior: it is the default // inherited from java.lang.Object, or it is a synthetically generated @@ -1084,12 +1093,7 @@ abstract class RefChecks extends Transform { nonSensiblyNew() else if (isNew(other) && (receiver.isEffectivelyFinal || isReferenceOp)) // object X ; X == new Y nonSensiblyNew() - else if (!(receiver.isRefinementClass || actual.isRefinementClass) && - // Rule out receiver of refinement class because checking receiver.isEffectivelyFinal does not work for them. - // (the owner of the refinement depends on where the refinement was inferred, which has no bearing on the finality of the intersected classes) - // TODO: should we try to decide finality for refinements? - // TODO: Also, is subclassing really the right relationship to detect non-sensible equals between "effectively final" types?? - receiver.isEffectivelyFinal && !(receiver isSubClass actual)) { // object X, Y; X == Y + else if (actual.isEffectivelyFinal && receiver.isEffectivelyFinal && !haveSubclassRelationship) { // object X, Y; X == Y if (isEitherNullable) nonSensible("non-null ", false) else @@ -1104,12 +1108,20 @@ abstract class RefChecks extends Transform { unrelatedTypes() // possibleNumericCount is insufficient or this will warn on e.g. Boolean == j.l.Boolean else if (isWarnable && nullCount == 0 && !(isSpecial(receiver) && isSpecial(actual))) { - // better to have lubbed and lost + // Warn if types are unrelated, without interesting lub. (Don't bother if we don't know anything about the values we're comparing.) def warnIfLubless(): Unit = { - val common = global.lub(List(actual.tpe, receiver.tpe)) - if (ObjectTpe <:< common && !(ObjectTpe <:< actual.tpe) && !(ObjectTpe <:< receiver.tpe)) - unrelatedTypes() + if (isMaybeValue(actual) || isMaybeValue(receiver) || haveSubclassRelationship) {} // ignore trivial or related types + else { + // better to have lubbed and lost + // We erase the lub because the erased type is closer to what happens at run time. + // Also, the lub of `S` and `String` is, weirdly, the refined type `Serializable{}` (for `class S extends Serializable`), + // which means we can't just take its type symbol and look it up in our isMaybeValue Set. Erasure restores sanity. 
+ val commonRuntimeClass = erasedClass(global.lub(List(actual.tpe, receiver.tpe))) + if (commonRuntimeClass == ObjectClass) + unrelatedTypes() + } } + // warn if actual has a case parent that is not same as receiver's; // if actual is not a case, then warn if no common supertype, as below if (isCaseEquals) { @@ -1122,14 +1134,11 @@ abstract class RefChecks extends Transform { //else // if a class, it must be super to thisCase (and receiver) since not <: thisCase if (!actual.isTrait && !(receiver isSubClass actual)) nonSensiblyNeq() - else if (!haveSubclassRelationship) warnIfLubless() + else warnIfLubless() case _ => } } - // warn only if they have no common supertype below Object - else if (!haveSubclassRelationship) { - warnIfLubless() - } + else warnIfLubless() } } /** Sensibility check examines flavors of equals. */ @@ -1558,7 +1567,7 @@ abstract class RefChecks extends Transform { // analyses in the pattern matcher if (!inPattern) { checkImplicitViewOptionApply(tree.pos, fn, args) - checkSensible(tree.pos, fn, args) + checkSensible(tree.pos, fn, args) // TODO: this should move to preEraseApply, as reasoning about runtime semantics makes more sense in the JVM type system } currentApplication = tree tree diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index c5cee9c72398..a516f49e605d 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1015,7 +1015,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => (this hasFlag FINAL | PACKAGE) || isModuleOrModuleClass && (isTopLevel || !settings.overrideObjects) || isTerm && (isPrivate || isLocalToBlock || (hasAllFlags(notPRIVATE | METHOD) && !hasFlag(DEFERRED))) - || isClass && originalOwner.isTerm && children.isEmpty // we track known subclasses of term-owned classes, use that infer finality + || isClass && !isRefinementClass && originalOwner.isTerm && children.isEmpty // we track known subclasses of term-owned classes, use that infer finality + // don't look at owner for refinement classes (it's basically arbitrary) -- instead, + // it suffices for one parent of an intersection to be final, for the resulting type to be final + // any subclass of the refinement would have to be a subclass of that final parent, which is not allowed + || isRefinementClass && info.parents.exists { _.typeSymbol.isEffectivelyFinal } ) /** Is this symbol effectively final or a concrete term member of sealed class whose children do not override it */ final def isEffectivelyFinalOrNotOverridden: Boolean = isEffectivelyFinal || (isTerm && !isDeferred && isNotOverridden) diff --git a/test/files/neg/nonsense_eq_refine.check b/test/files/neg/nonsense_eq_refine.check new file mode 100644 index 000000000000..41c469e5ee48 --- /dev/null +++ b/test/files/neg/nonsense_eq_refine.check @@ -0,0 +1,9 @@ +nonsense_eq_refine.scala:6: warning: E and String are unrelated: they will most likely never compare equal + if (e == "") ??? // warn about comparing unrelated types + ^ +nonsense_eq_refine.scala:9: warning: SE and String are unrelated: they will most likely never compare equal + if (se == "") ??? // types are still unrelated + ^ +error: No warnings can be incurred under -Xfatal-warnings. 
+two warnings found +one error found diff --git a/test/files/neg/nonsense_eq_refine.flags b/test/files/neg/nonsense_eq_refine.flags new file mode 100644 index 000000000000..65faf53579c2 --- /dev/null +++ b/test/files/neg/nonsense_eq_refine.flags @@ -0,0 +1 @@ +-Xfatal-warnings -deprecation \ No newline at end of file diff --git a/test/files/neg/nonsense_eq_refine.scala b/test/files/neg/nonsense_eq_refine.scala new file mode 100644 index 000000000000..d74c2bbbe15c --- /dev/null +++ b/test/files/neg/nonsense_eq_refine.scala @@ -0,0 +1,10 @@ +class E +class SE extends Serializable + +object Test { + val e = new E + if (e == "") ??? // warn about comparing unrelated types + + val se = new SE + if (se == "") ??? // types are still unrelated +} From 1fd64fa0a40093f28a798a202c0e62bb4927637b Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 13 Jul 2018 14:42:35 +0200 Subject: [PATCH 1449/2793] Respect BaseTypeSeq invar (3) symbols are distinct In a BTS, a singleton type is redundant with its underlying type. In other words, what could we learn about the superclasses of a singleton type that is not captured entirely by its super type? This duplicate type symbol leads to confusion during asSeenFrom when looking up the base class that defines the T type param (Since `typeOf[this.type].typeSymbol == typeOf[Poly[_]].typeSymbol`, we return `this.type` for the base type at Poly, which does not have the expected type params.) The interesting part is that you have to embed the singleton in a compound type to trigger the bug because SubType (a supertype of SingletonType) delegates `baseType` to its underlying (super) type! TODO: a similar deviation still exists in AbstractTypeRef. I hesitate to bring that one in line with the spec, because I could imagine it actually being used. --- .../scala/reflect/internal/Types.scala | 38 +++++++------------ test/files/pos/t11020.scala | 7 ++++ 2 files changed, 21 insertions(+), 24 deletions(-) create mode 100644 test/files/pos/t11020.scala diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 4c99c52fbd72..701ae8ac0868 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -881,24 +881,15 @@ trait Types /** Same as matches, except that non-method types are always assumed to match. */ def looselyMatches(that: Type): Boolean = matchesType(this, that, alwaysMatchSimple = true) - /** The shortest sorted upwards closed array of types that contains - * this type as first element. - * - * A list or array of types ts is upwards closed if - * - * for all t in ts: - * for all typerefs p.s[args] such that t <: p.s[args] - * there exists a typeref p'.s[args'] in ts such that - * t <: p'.s['args] <: p.s[args], - * - * and - * - * for all singleton types p.s such that t <: p.s - * there exists a singleton type p'.s in ts such that - * t <: p'.s <: p.s - * - * Sorting is with respect to Symbol.isLess() on type symbols. - */ + /** The base type sequence of T is the smallest set of (potentially existentially quantified) + * class types Ti, so that for each supertype T' (T <:< T'), + * there is a Ti so that T <:< Ti <:< T'. + * + * This is also known as the upward closed set of the partially ordered set of + * class types under Symbol#isLess (a refinement of Symbol#isSubclass). + * + * See "Base Types and Member Definitions" in spec/03-types.md. 
+ */ def baseTypeSeq: BaseTypeSeq = baseTypeSingletonSeq(this) /** The maximum depth (@see typeDepth) @@ -1090,7 +1081,8 @@ trait Types override def baseTypeSeq: BaseTypeSeq = supertype.baseTypeSeq override def baseTypeSeqDepth: Depth = supertype.baseTypeSeqDepth override def baseClasses: List[Symbol] = supertype.baseClasses - override def boundSyms: Set[Symbol] = emptySymbolSet} + override def boundSyms: Set[Symbol] = emptySymbolSet + } /** A base class for types that represent a single value * (single-types and this-types). @@ -1098,11 +1090,8 @@ trait Types abstract class SingletonType extends SubType with SimpleTypeProxy with SingletonTypeApi { def supertype = underlying override def isTrivial = false - override def widen: Type = underlying.widen - override def baseTypeSeq: BaseTypeSeq = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(singletonBaseTypeSeqCount) - underlying.baseTypeSeq prepend this - } +// Spec: "The base types of a singleton type `$p$.type` are the base types of the type of $p$." +// override def baseTypeSeq: BaseTypeSeq = underlying.baseTypeSeq override def isHigherKinded = false // singleton type classifies objects, thus must be kind * override def safeToString: String = { // Avoiding printing Predef.type and scala.package.type as "type", @@ -2091,6 +2080,7 @@ trait Types override def decls = relativeInfo.decls override def bounds = relativeInfo.bounds + // TODO: this deviates from the spec "The base types of an abstract type are the base types of its upper bound." override protected[Types] def baseTypeSeqImpl: BaseTypeSeq = bounds.hi.baseTypeSeq prepend this override protected[Types] def parentsImpl: List[Type] = relativeInfo.parents diff --git a/test/files/pos/t11020.scala b/test/files/pos/t11020.scala new file mode 100644 index 000000000000..bb04cce3b496 --- /dev/null +++ b/test/files/pos/t11020.scala @@ -0,0 +1,7 @@ +// asSeenFrom crash related to BaseTypeSeq bug for singleton types +trait Poly[T] { type TT = T + def foo: (this.type with Any)#TT +} + +// equivalent: +// class C { def meh[T](x: Poly[T]): (x.type with Any)#TT = ??? } From e37f3f3afb675c74a4705130e09095c0fdcdc6bc Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Tue, 17 Jul 2018 14:19:32 -0700 Subject: [PATCH 1450/2793] Avoid truncation of REPL output Line endings on windows changes the output length. --- test/files/run/t10956.check | 95 ++++++++++++++++++++++++++++++++++++- test/files/run/t10956.scala | 1 + 2 files changed, 95 insertions(+), 1 deletion(-) diff --git a/test/files/run/t10956.check b/test/files/run/t10956.check index e52daede735f..1bec9ae8f2ae 100644 --- a/test/files/run/t10956.check +++ b/test/files/run/t10956.check @@ -1,4 +1,7 @@ +scala> $intp.isettings.maxPrintString = 0 +$intp.isettings.maxPrintString: Int = 0 + scala> :paste < EOF // Entering paste mode (EOF to finish) @@ -34,6 +37,96 @@ import java.awt.Shape import java.awt.ActiveEvent import java.awt.FlowLayout import java.awt.LinearGradientPaint -import java.awt.Splash... 
+import java.awt.SplashScreen +import java.awt.Adjustable +import java.awt.FocusTraversalPolicy +import java.awt.List +import java.awt.Stroke +import java.awt.AlphaComposite +import java.awt.Font +import java.awt.MediaTracker +import java.awt.SystemColor +import java.awt.BasicStroke +import java.awt.FontFormatException +import java.awt.Menu +import java.awt.SystemTray +import java.awt.BorderLayout +import java.awt.FontMetrics +import java.awt.MenuBar +import java.awt.TextArea +import java.awt.BufferCapabilities +import java.awt.Frame +import java.awt.MenuComponent +import java.awt.TextComponent +import java.awt.Button +import java.awt.GradientPaint +import java.awt.MenuContainer +import java.awt.TextField +import java.awt.Canvas +import java.awt.Graphics +import java.awt.MenuItem +import java.awt.TexturePaint +import java.awt.CardLayout +import java.awt.Graphics2D +import java.awt.MenuShortcut +import java.awt.Toolkit +import java.awt.Checkbox +import java.awt.GraphicsConfigTemplate +import java.awt.MouseInfo +import java.awt.Transparency +import java.awt.CheckboxGroup +import java.awt.GraphicsConfiguration +import java.awt.MultipleGradientPaint +import java.awt.TrayIcon +import java.awt.CheckboxMenuItem +import java.awt.GraphicsDevice +import java.awt.PageAttributes +import java.awt.Window +import java.awt.Choice +import java.awt.GraphicsEnvironment +import java.awt.Paint +import java.awt.color +import java.awt.Color +import java.awt.GridBagConstraints +import java.awt.PaintContext +import java.awt.datatransfer +import java.awt.Component +import java.awt.GridBagLayout +import java.awt.Panel +import java.awt.dnd +import java.awt.ComponentOrientation +import java.awt.GridBagLayoutInfo +import java.awt.Point +import java.awt.event +import java.awt.Composite +import java.awt.GridLayout +import java.awt.PointerInfo +import java.awt.font +import java.awt.CompositeContext +import java.awt.HeadlessException +import java.awt.Polygon +import java.awt.geom +import java.awt.Container +import java.awt.IllegalComponentStateException +import java.awt.PopupMenu +import java.awt.im +import java.awt.ContainerOrderFocusTraversalPolicy +import java.awt.Image +import java.awt.PrintGraphics +import java.awt.image +import java.awt.Cursor +import java.awt.ImageCapabilities +import java.awt.PrintJob +import java.awt.peer +import java.awt.DefaultFocusTraversalPolicy +import java.awt.Insets +import java.awt.RadialGradientPaint +import java.awt.print +import java.awt.DefaultKeyboardFocusManager +import java.awt.ItemSelectable +import java.awt.Rectangle +import java.awt.Desktop +import java.awt.JobAttributes +import java.awt.RenderingHints scala> :quit diff --git a/test/files/run/t10956.scala b/test/files/run/t10956.scala index 88377dde4ee6..94d95de6546c 100644 --- a/test/files/run/t10956.scala +++ b/test/files/run/t10956.scala @@ -10,6 +10,7 @@ import scala.tools.nsc.Settings object Test extends ReplTest { def code = """ +$intp.isettings.maxPrintString = 0 :paste < EOF import 
java.awt.AWTError,java.awt.Dialog,java.awt.KeyEventDispatcher,java.awt.Robot,java.awt.AWTEvent,java.awt.Dimension,java.awt.KeyEventPostProcessor,java.awt.ScrollPane,java.awt.AWTEventMulticaster,java.awt.DisplayMode,java.awt.KeyboardFocusManager,java.awt.ScrollPaneAdjustable,java.awt.AWTException,java.awt.Event,java.awt.Label,java.awt.Scrollbar,java.awt.AWTKeyStroke,java.awt.EventQueue,java.awt.LayoutManager,java.awt.SecondaryLoop,java.awt.AWTPermission,java.awt.FileDialog,java.awt.LayoutManager2,java.awt.Shape,java.awt.ActiveEvent,java.awt.FlowLayout,java.awt.LinearGradientPaint,java.awt.SplashScreen,java.awt.Adjustable,java.awt.FocusTraversalPolicy,java.awt.List,java.awt.Stroke,java.awt.AlphaComposite,java.awt.Font,java.awt.MediaTracker,java.awt.SystemColor,java.awt.BasicStroke,java.awt.FontFormatException,java.awt.Menu,java.awt.SystemTray,java.awt.BorderLayout,java.awt.FontMetrics,java.awt.MenuBar,java.awt.TextArea,java.awt.BufferCapabilities,java.awt.Frame,java.awt.MenuComponent,java.awt.TextComponent,java.awt.Button,java.awt.GradientPaint,java.awt.MenuContainer,java.awt.TextField,java.awt.Canvas,java.awt.Graphics,java.awt.MenuItem,java.awt.TexturePaint,java.awt.CardLayout,java.awt.Graphics2D,java.awt.MenuShortcut,java.awt.Toolkit,java.awt.Checkbox,java.awt.GraphicsConfigTemplate,java.awt.MouseInfo,java.awt.Transparency,java.awt.CheckboxGroup,java.awt.GraphicsConfiguration,java.awt.MultipleGradientPaint,java.awt.TrayIcon,java.awt.CheckboxMenuItem,java.awt.GraphicsDevice,java.awt.PageAttributes,java.awt.Window,java.awt.Choice,java.awt.GraphicsEnvironment,java.awt.Paint,java.awt.color,java.awt.Color,java.awt.GridBagConstraints,java.awt.PaintContext,java.awt.datatransfer,java.awt.Component,java.awt.GridBagLayout,java.awt.Panel,java.awt.dnd,java.awt.ComponentOrientation,java.awt.GridBagLayoutInfo,java.awt.Point,java.awt.event,java.awt.Composite,java.awt.GridLayout,java.awt.PointerInfo,java.awt.font,java.awt.CompositeContext,java.awt.HeadlessException,java.awt.Polygon,java.awt.geom,java.awt.Container,java.awt.IllegalComponentStateException,java.awt.PopupMenu,java.awt.im,java.awt.ContainerOrderFocusTraversalPolicy,java.awt.Image,java.awt.PrintGraphics,java.awt.image,java.awt.Cursor,java.awt.ImageCapabilities,java.awt.PrintJob,java.awt.peer,java.awt.DefaultFocusTraversalPolicy,java.awt.Insets,java.awt.RadialGradientPaint,java.awt.print,java.awt.DefaultKeyboardFocusManager,java.awt.ItemSelectable,java.awt.Rectangle,java.awt.Desktop,java.awt.JobAttributes,java.awt.RenderingHints EOF From f8628f38e8d31ddf3abb54920e9a911b4d70fc5d Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Mon, 19 Mar 2018 10:19:54 -0400 Subject: [PATCH 1451/2793] Add synthetic value class companion near its class. Redux of 9e1de6ee81e9eaf9d8ac59446bc97c79b5ff0cb6. Make sure that the class in question actually exists in the tree we're about to put the synthetic companion in. Otherwise extmethods might not see the extendable methods until after it's too late to add it to the companion stats. Fixes scala/bug#10783. 
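
A minimal illustration of why the synthetic companion matters (a hedged sketch mirroring the shape of the t10783 test below, not the compiler change itself; `Meter` and `UseMeter` are invented names): the body of a value class method is moved by the `extmethods` phase into a `...$extension` method on the companion object, so that companion has to be present in the same tree when the class has not declared one explicitly.

```
// Hedged sketch: a value class with no explicit companion. Its public method
// still ends up as a static-style `plus$extension` method on a companion
// object that the compiler must synthesize next to the class definition.
class Meter(val value: Int) extends AnyVal {
  def plus(other: Meter): Meter = new Meter(value + other.value)
}

object UseMeter {
  def main(args: Array[String]): Unit = {
    // This call can run without allocating a Meter box; the body of `plus`
    // lives in the synthetic companion's `plus$extension` method.
    val five = new Meter(2).plus(new Meter(3))
    assert(five.value == 5)
  }
}
```
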
--- .../scala/tools/nsc/typechecker/Typers.scala | 15 ++++----- test/files/run/t10783.scala | 31 +++++++++++++++++++ 2 files changed, 37 insertions(+), 9 deletions(-) create mode 100644 test/files/run/t10783.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index b34c466f4a92..599c003d9da5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3205,17 +3205,14 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // the corresponding synthetics to the package class, only to the package object class. // scala/bug#6734 Locality test below is meaningless if we're not even in the correct tree. // For modules that are synthetic case companions, check that case class is defined here. + // scala/bug#10783 ditto for synthetic companions of derived value classes. def shouldAdd(sym: Symbol): Boolean = { + def classDefinedHere(s: Symbol): Boolean = stats exists { + case t: ClassDef => t.symbol eq s + case _ => false + } def shouldAddAsModule: Boolean = - sym.moduleClass.attachments.get[ClassForCaseCompanionAttachment] match { - case Some(att) => - val cdef = att.caseClass - stats.exists { - case t @ ClassDef(_, _, _, _) => t.symbol == cdef.symbol // cdef ne t - case _ => false - } - case _ => true - } + classDefinedHere(companionSymbolOf(sym, context)) (!sym.isModule || shouldAddAsModule) && (inBlock || !context.isInPackageObject(sym, context.owner)) } diff --git a/test/files/run/t10783.scala b/test/files/run/t10783.scala new file mode 100644 index 000000000000..160cbb6867bf --- /dev/null +++ b/test/files/run/t10783.scala @@ -0,0 +1,31 @@ +package com.example { + object X { + def bar: Int = (new Value(42)).foo + def baz: Int = (new Walue(42)).foo + def bip: Int = (new Xalue(42)).foo + } +} + +package com.example { + class Value(val value: Int) extends AnyVal { + def foo: Int = value + 1 + } + object Walue + class Walue(val value: Int) extends AnyVal { + def foo: Int = value + 1 + } + class Xalue(val value: Int) extends AnyVal { + def foo: Int = value + 1 + } + object Xalue +} + +object Test { + import com.example._ + + def main(args: Array[String]): Unit = { + assert(X.bar == 43) + assert(X.baz == 43) + assert(X.bip == 43) + } +} \ No newline at end of file From 41479e0695d80bf4ec28da9e2a5118ee650a1ffa Mon Sep 17 00:00:00 2001 From: exoego Date: Tue, 7 Aug 2018 10:35:57 +0900 Subject: [PATCH 1452/2793] More descriptive name --- build.sbt | 8 +++++--- .../scala/tools/nsc/ScalaCompilerOptionsExporter.scala | 7 ++++--- 2 files changed, 9 insertions(+), 6 deletions(-) rename src/{exporter => compilerOptionsExporter}/scala/tools/nsc/ScalaCompilerOptionsExporter.scala (98%) diff --git a/build.sbt b/build.sbt index 87c6e43e2aec..9c4d06d64f04 100644 --- a/build.sbt +++ b/build.sbt @@ -422,7 +422,7 @@ lazy val reflect = configureAsSubproject(project) ) .dependsOn(library) -lazy val exporter = configureAsSubproject(project) +lazy val compilerOptionsExporter = Project("compilerOptionsExporter", file(".") / "src" / "compilerOptionsExporter") .dependsOn(compiler, reflect, library) .settings(clearSourceAndResourceDirectories) .settings(commonSettings) @@ -953,7 +953,7 @@ lazy val root: Project = (project in file(".")) .withRecompileOnMacroDef(false) // // macros in library+reflect are hard-wired to implementations with `FastTrack`. 
} ) - .aggregate(library, reflect, compiler, interactive, repl, replJline, replJlineEmbedded, + .aggregate(library, reflect, compiler, compilerOptionsExporter, interactive, repl, replJline, replJlineEmbedded, scaladoc, scalap, partestExtras, junit, libraryAll, scalaDist).settings( sources in Compile := Seq.empty, onLoadMessage := """|*** Welcome to the sbt build definition for Scala! *** @@ -1149,7 +1149,9 @@ intellij := { moduleDeps(scalacheck, config = Test).value, moduleDeps(scaladoc).value, moduleDeps(scalap).value, - moduleDeps(testP).value) + moduleDeps(testP).value, + moduleDeps(compilerOptionsExporter).value + ) } def moduleDep(name: String, jars: Seq[File]) = { diff --git a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala b/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala similarity index 98% rename from src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala rename to src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala index e6199456d065..44694abd320f 100644 --- a/src/exporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala +++ b/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala @@ -60,6 +60,8 @@ object ScalaCompilerOptionsExporter { } def main(args: Array[String]): Unit = { + val writer = new java.io.StringWriter(2000) + val runtimeMirror = scala.reflect.runtime.currentMirror val settings = new scala.tools.nsc.Settings(s => ()) @@ -147,9 +149,8 @@ object ScalaCompilerOptionsExporter { .registerModule(DefaultScalaModule) .setSerializationInclusion(JsonInclude.Include.NON_EMPTY) - val yaml = mapper + mapper .writer(new DefaultPrettyPrinter()) - .writeValueAsString(source) - println(yaml) + .writeValue(writer, source) } } From 26224cdfa6c8bf0ab338f457d6dc8b015a28b976 Mon Sep 17 00:00:00 2001 From: exoego Date: Tue, 7 Aug 2018 10:41:38 +0900 Subject: [PATCH 1453/2793] Add todo. --- .../scala/tools/nsc/ScalaCompilerOptionsExporter.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala b/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala index 44694abd320f..54504f9d99f4 100644 --- a/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala +++ b/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala @@ -152,5 +152,7 @@ object ScalaCompilerOptionsExporter { mapper .writer(new DefaultPrettyPrinter()) .writeValue(writer, source) + // TODO: println can be deleted if write can write to file + println(writer.toString) } } From c02308d10f596784f4b5e4bf2567bad0e95c3264 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 8 Aug 2018 16:35:45 +1000 Subject: [PATCH 1454/2793] Add a test to show a problem in switch pattern translation The performance optimization in #6607 assumed that a translated match would always be a `Match` node itself, but it can also be a `{ synthetic val x1 = ...; x1 match { .. } }` block. 
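
A sketch of the two translation shapes described above, using a simplified stand-in model rather than the compiler's actual `Tree` types (`TranslatedMatchShapes`, `restoreType` and the string-based nodes are invented for the demo): the typed result of pattern-matching translation is either a bare `Match` or a `Block` whose final expression is the `Match`, and in the latter case the original expected type has to be written back onto both the block and the inner match, as the follow-up commit does.

```
object TranslatedMatchShapes {
  sealed trait Tree { var tpe: String = "<untyped>" }
  case class Match(scrutinee: String, cases: List[String]) extends Tree
  case class Block(stats: List[String], expr: Tree) extends Tree

  // Mirrors the shape-sensitive part of the fix: when the translation is a
  // Block wrapping a Match, restore the original type on both nodes.
  def restoreType(translated: Tree, origTp: String): Tree = translated match {
    case b @ Block(_, m: Match) => b.tpe = origTp; m.tpe = origTp; b
    case other                  => other.tpe = origTp; other
  }

  def main(args: Array[String]): Unit = {
    // the switch case from the report: { val x1 = ...; x1 match { ... } }
    val switchShape = Block(List("case val x1: Int = i"),
                            Match("x1", List("case 0 => a", "case 1 => b")))
    restoreType(switchShape, "A")
    assert(switchShape.tpe == "A" && switchShape.expr.tpe == "A")
  }
}
```
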
--- test/files/run/patmat-origtp-switch.check | 18 ++++++++++++++++++ test/files/run/patmat-origtp-switch.scala | 21 +++++++++++++++++++++ 2 files changed, 39 insertions(+) create mode 100644 test/files/run/patmat-origtp-switch.check create mode 100644 test/files/run/patmat-origtp-switch.scala diff --git a/test/files/run/patmat-origtp-switch.check b/test/files/run/patmat-origtp-switch.check new file mode 100644 index 000000000000..a5bb136c6604 --- /dev/null +++ b/test/files/run/patmat-origtp-switch.check @@ -0,0 +1,18 @@ +[[syntax trees at end of patmat]] // newSource1.scala +package {.type} { + class C extends scala.AnyRef { + def (): C = { + C.super{C.super.type}.{()Object}(){Object}; + (){Unit} + }{Unit}; + def foo[A](a: A, b: A with C, i: Int): A = { + case val x1: Int = i{Int}; + x1{Int} match { + case 0{Int(0)} => a{A} + case 1{Int(1)} => b{A with C} + case _{Int} => throw new MatchError{MatchError}{(obj: Any)MatchError}(x1{Int}){MatchError}{Nothing} + }{Any} + }{A} + } +} + diff --git a/test/files/run/patmat-origtp-switch.scala b/test/files/run/patmat-origtp-switch.scala new file mode 100644 index 000000000000..bf7eb62a9123 --- /dev/null +++ b/test/files/run/patmat-origtp-switch.scala @@ -0,0 +1,21 @@ +import scala.tools.partest._ +import java.io.{Console => _, _} + +object Test extends DirectTest { + + override def extraSettings: String = "-usejavacp -Xprint:patmat -Xprint-types -d " + testOutput.path + + override def code = """class C { + def foo[A](a: A, b: A with C, i: Int) = i match { + case 0 => a + case 1 => b + } + } + """ + + override def show(): Unit = { + Console.withErr(System.out) { + compile() + } + } +} From a8de631d7d80671ca276aa3019bcdcc28519fc6c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 8 Aug 2018 16:36:14 +1000 Subject: [PATCH 1455/2793] Fix regression in switch pattern translation The performance optimization in #6607 assumed that a translated match would always be a `Match` node itself, but it can also be a `{ synthetic val x1 = ...; x1 match { .. } }` block. --- .../tools/nsc/transform/patmat/PatternMatching.scala | 8 +++++++- test/files/run/patmat-origtp-switch.check | 2 +- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index 50003ad94b04..a5459beddee2 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -64,7 +64,13 @@ trait PatternMatching extends Transform // Keep 2.12 behaviour of using wildcard expected type, recomputing the LUB, then throwing it away for the continuations plugins // but for the rest of us pass in top as the expected type to avoid waste. 
val pt = if (origTp <:< definitions.AnyTpe) definitions.AnyTpe else WildcardType - localTyper.typed(translated, definitions.AnyTpe) setType origTp + localTyper.typed(translated, pt) match { + case b @ Block(stats, m: Match) => + b.setType(origTp) + m.setType(origTp) + b + case tree => tree setType origTp + } } catch { case x: (Types#TypeError) => // TODO: this should never happen; error should've been reported during type checking diff --git a/test/files/run/patmat-origtp-switch.check b/test/files/run/patmat-origtp-switch.check index a5bb136c6604..84a92e1c6ab3 100644 --- a/test/files/run/patmat-origtp-switch.check +++ b/test/files/run/patmat-origtp-switch.check @@ -11,7 +11,7 @@ package {.type} { case 0{Int(0)} => a{A} case 1{Int(1)} => b{A with C} case _{Int} => throw new MatchError{MatchError}{(obj: Any)MatchError}(x1{Int}){MatchError}{Nothing} - }{Any} + }{A} }{A} } } From e3fc8043a20c33b98721a724ab1c4705d7448449 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Mon, 13 Aug 2018 14:20:23 -0400 Subject: [PATCH 1456/2793] update intellij files to reckon with new compilerOptionsExporter project otherwise, one sees: ``` > intellij Update library classpaths in the current src/intellij/scala.ipr (y/N)? y [info] Updating library classpaths in src/intellij/scala.ipr. [trace] Stack trace suppressed: run last root/*:intellij for the full output. [error] (root/*:intellij) Replacing library classpath for compilerOptionsExporter-deps failed, no existing library found. [error] Total time: 3 s, completed Aug 13, 2018 3:13:16 PM ``` (Bump scala-asm version, too, since that was apparently out of date) --- .../nsc/ScalaCompilerOptionsExporter.scala | 4 +- .../compilerOptionsExporter.iml.SAMPLE | 21 ++++++++++ src/intellij/scala.ipr.SAMPLE | 41 ++++++++++++++----- 3 files changed, 53 insertions(+), 13 deletions(-) create mode 100644 src/intellij/compilerOptionsExporter.iml.SAMPLE diff --git a/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala b/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala index 54504f9d99f4..45221343c8df 100644 --- a/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala +++ b/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala @@ -1,13 +1,13 @@ package scala.tools.nsc -import scala.reflect.runtime.universe._ -import collection.JavaConverters._ import com.fasterxml.jackson.annotation._ import com.fasterxml.jackson.core.util.DefaultPrettyPrinter import com.fasterxml.jackson.databind.ObjectMapper import com.fasterxml.jackson.dataformat.yaml.{YAMLFactory, YAMLGenerator} import com.fasterxml.jackson.module.scala.DefaultScalaModule +import scala.reflect.runtime.universe._ + object ScalaCompilerOptionsExporter { case class Category(name: String, load: Int) extends Ordered[Category] { diff --git a/src/intellij/compilerOptionsExporter.iml.SAMPLE b/src/intellij/compilerOptionsExporter.iml.SAMPLE new file mode 100644 index 000000000000..c1a1ee49e720 --- /dev/null +++ b/src/intellij/compilerOptionsExporter.iml.SAMPLE @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index 16cddfa1d431..632fc64940cf 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -166,6 +166,7 @@ + @@ -200,18 +201,36 @@ - + + + + + + + + + + + + + + + + + + + - + @@ -221,7 +240,7 @@ - + @@ -248,7 +267,7 @@ - + @@ -260,7 +279,7 @@ - + @@ -269,7 +288,7 @@ - + @@ -279,7 +298,7 
@@ - + @@ -388,7 +407,7 @@ - + @@ -398,7 +417,7 @@ - + @@ -408,7 +427,7 @@ - + @@ -433,7 +452,7 @@ - + From 7d17726ffa84086700ac0f704984ca9fc7c7d145 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Tue, 14 Aug 2018 13:06:03 -0400 Subject: [PATCH 1457/2793] prepare to remove UninitializedError It doesn't have any Scala-specific semantics and is unlikely to be worth more than a custom exception type to anyone currently using it. --- src/library/scala/UninitializedError.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/library/scala/UninitializedError.scala b/src/library/scala/UninitializedError.scala index 0641a6638880..bb0d5a863c34 100644 --- a/src/library/scala/UninitializedError.scala +++ b/src/library/scala/UninitializedError.scala @@ -15,4 +15,6 @@ package scala * @author Martin Odersky * @since 2.5 */ +// TODO: remove in 2.14 +@deprecated("will be removed in a future release", since = "2.12.7") final class UninitializedError extends RuntimeException("uninitialized value") From ab99db089bd8b73b03a7ebaafb6eeeffdc03f8f4 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 1 Aug 2018 00:02:59 +0100 Subject: [PATCH 1458/2793] Rewrite List(a, b) to `new ::(a, new ::(b, Nil)` Conservatively limits the extra stack frames consumed by the generated program to 8. Author: Jason Zaugg Date: Wed Aug 1 00:02:59 2018 +0100 --- .../scala/tools/nsc/transform/CleanUp.scala | 18 ++++++++++++++++++ .../scala/reflect/internal/Definitions.scala | 1 + 2 files changed, 19 insertions(+) diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 0876fde23395..1dc4479809b2 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -31,6 +31,13 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { private val newStaticMembers = mutable.Buffer.empty[Tree] private val newStaticInits = mutable.Buffer.empty[Tree] private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol] + private var transformListApplyLimit = 16 + private def reducingTransformListApply[A](depth: Int)(body: => A): A = { + val saved = transformListApplyLimit + transformListApplyLimit -= depth + try body + finally transformListApplyLimit = saved + } private def clearStatics() { newStaticMembers.clear() newStaticInits.clear() @@ -472,6 +479,17 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && appMeth.symbol == ArrayModule_apply(elemtpt.tpe) => super.transform(treeCopy.ArrayValue(rest, rest.elemtpt, elem0 :: rest.elems)) + // List(a, b, c) ~> new ::(a, new ::(b, new ::(c, Nil))) + case Apply(appMeth, List(Apply(wrapArrayMeth, List(StripCast(rest @ ArrayValue(elemtpt, _)))))) + if wrapArrayMeth.symbol == currentRun.runDefinitions.Predef_wrapRefArray && appMeth.symbol == List_apply && rest.elems.length < transformListApplyLimit => + val consed = rest.elems.reverse.foldLeft(gen.mkAttributedRef(NilModule): Tree)( + (acc, elem) => New(ConsClass, elem, acc) + ) + // Limiting extra stack frames consumed by generated code + reducingTransformListApply(rest.elems.length) { + super.transform(localTyper.typedPos(tree.pos)(consed)) + } + case _ => super.transform(tree) } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index cdb2ab744936..69370475a176 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ 
b/src/reflect/scala/reflect/internal/Definitions.scala @@ -435,6 +435,7 @@ trait Definitions extends api.StandardDefinitions { lazy val IteratorClass = requiredClass[scala.collection.Iterator[_]] lazy val IterableClass = requiredClass[scala.collection.Iterable[_]] lazy val ListClass = requiredClass[scala.collection.immutable.List[_]] + def List_cons = getMemberMethod(ListClass, nme.CONS) lazy val SeqClass = requiredClass[scala.collection.Seq[_]] lazy val JavaStringBuilderClass = requiredClass[java.lang.StringBuilder] lazy val JavaStringBufferClass = requiredClass[java.lang.StringBuffer] From 296e3167a8bd671860321d3cf0a71a761c3def9b Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Thu, 16 Aug 2018 00:01:48 +0100 Subject: [PATCH 1459/2793] add a simple test for optimised List.apply rewrite in cleanup --- .../scala/tools/nsc/transform/CleanUp.scala | 2 +- test/files/run/list-apply-eval.scala | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 test/files/run/list-apply-eval.scala diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 1dc4479809b2..dbb0b4b15e3f 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -31,7 +31,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { private val newStaticMembers = mutable.Buffer.empty[Tree] private val newStaticInits = mutable.Buffer.empty[Tree] private val symbolsStoredAsStatic = mutable.Map.empty[String, Symbol] - private var transformListApplyLimit = 16 + private var transformListApplyLimit = 8 private def reducingTransformListApply[A](depth: Int)(body: => A): A = { val saved = transformListApplyLimit transformListApplyLimit -= depth diff --git a/test/files/run/list-apply-eval.scala b/test/files/run/list-apply-eval.scala new file mode 100644 index 000000000000..6e012cdcd6ec --- /dev/null +++ b/test/files/run/list-apply-eval.scala @@ -0,0 +1,15 @@ +object Test { + var counter = 0 + def next = { + counter += 1 + counter.toString + } + def main(args: Array[String]) { + //List.apply is subject to an optimisation in cleanup + //ensure that the arguments are evaluated in the currect order + // Rewritten to: + // val myList: List = new collection.immutable.::(Test.this.next(), new collection.immutable.::(Test.this.next(), new collection.immutable.::(Test.this.next(), scala.collection.immutable.Nil))); + val myList = List(next, next, next) + assert(myList == List("1", "2", "3"), myList) + } +} From 73da4fc10d9aac48014b77a8d981f2ec7a2b03b6 Mon Sep 17 00:00:00 2001 From: Miles Sabin Date: Mon, 30 Apr 2018 13:09:12 +0100 Subject: [PATCH 1460/2793] Prune polymorphic implicits more aggressively In rankImplicits, before we attempt to fully typecheck the pending candidate implicit, we first attempt to partially instantiate type variables in both the candidate and the target type and check for compatibility. If the compatibility check fails we can immediately prune the the candidate without having to fully typecheck it. In the kinds of implicit searches typical of the inductive style found in shapeless and related libraries this can result in a drastic reduction in the search space and a corresponding reduction in compile times. As an added bonus users of shapeless and shapeless based libraries which use shapeless's Lazy type will see benefits immediately without needing to wait for and port to byname implicit arguments. 
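To make the "inductive style" concrete, here is a small, self-contained sketch of the kind of search that benefits (illustrative only; the type class and HList encoding below are stand-ins, not shapeless's actual definitions, and not part of this patch). Resolving an instance for the whole list considers the polymorphic `showHCons` candidate once per element, and each step previously required a full typecheck, implicit arguments included, before an unsuitable candidate could be discarded; `matchesPtInst` now prunes such candidates after a cheap partial instantiation of their result type:

```scala
// Minimal HList-style induction; all names here are hypothetical.
sealed trait HList
final case class ::[+H, +T <: HList](head: H, tail: T) extends HList
sealed trait HNil extends HList
case object HNil extends HNil

trait Show[A] { def show(a: A): String }
object Show {
  implicit val showInt: Show[Int]       = i => i.toString
  implicit val showString: Show[String] = s => s
  implicit val showHNil: Show[HNil]     = _ => ""
  // The inductive step: one polymorphic candidate considered at every element.
  implicit def showHCons[H, T <: HList](implicit sh: Show[H], st: Show[T]): Show[H :: T] =
    l => sh.show(l.head) + " " + st.show(l.tail)
}

object Demo {
  def show[A](a: A)(implicit s: Show[A]): String = s.show(a)
  // Resolves Show[Int :: String :: HNil] by applying showHCons twice, then showHNil.
  val rendered: String = show[Int :: String :: HNil](::(1, ::("two", HNil)))
}
```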
--- .../tools/nsc/typechecker/Implicits.scala | 54 ++++++++++++++++++- test/files/pos/prune-poly-bound.scala | 13 +++++ .../files/pos/prune-poly-f-bounded-view.scala | 19 +++++++ test/files/pos/prune-poly-infer-nothing.scala | 12 +++++ test/files/pos/prune-poly-view.scala | 30 +++++++++++ 5 files changed, 127 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/prune-poly-bound.scala create mode 100644 test/files/pos/prune-poly-f-bounded-view.scala create mode 100644 test/files/pos/prune-poly-infer-nothing.scala create mode 100644 test/files/pos/prune-poly-view.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 4c66b77a54dd..0e5bb1bbe72b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -565,6 +565,42 @@ trait Implicits { } } + private def matchesPtInst(info: ImplicitInfo): Boolean = { + def isViewLike = pt match { + case Function1(_, _) => true + case _ => false + } + + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstCalls) + info.tpe match { + case PolyType(tparams, restpe) => + try { + val allUndetparams = (undetParams ++ tparams).distinct + val tvars = allUndetparams map freshVar + val tp = ApproximateDependentMap(restpe) + val tpInstantiated = tp.instantiateTypeParams(allUndetparams, tvars) + if(!matchesPt(tpInstantiated, wildPt, allUndetparams)) { + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch1) + false + } else if(!isView && !isViewLike) { + // we can't usefully prune views any further because we would need to type an application + // of the view to the term as is done in the computation of itree2 in typedImplicit1. + val targs = solvedTypes(tvars, allUndetparams, allUndetparams map varianceInType(wildPt), upper = false, lubDepth(tpInstantiated :: wildPt :: Nil)) + val AdjustedTypeArgs(okParams, okArgs) = adjustTypeArgs(allUndetparams, tvars, targs) + val remainingUndet = allUndetparams diff okParams + val tpSubst = deriveTypeWithWildcards(remainingUndet)(tp.instantiateTypeParams(okParams, okArgs)) + if(!matchesPt(tpSubst, wildPt, remainingUndet)) { + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch2) + false + } else true + } else true + } catch { + case _: NoInstance => false + } + case _ => true + } + } + /** Capturing the overlap between isPlausiblyCompatible and normSubType. * This is a faithful translation of the code which was there, but it * seems likely the methods are intended to be even more similar than @@ -961,6 +997,13 @@ trait Implicits { * - find the most likely one * - if it matches, forget about all others it improves upon */ + + // the pt for views can have embedded unification type variables, BoundedWildcardTypes or + // Nothings which can't be solved for. Rather than attempt to patch things up later we + // just skip those cases altogether. 
+ lazy val wildPtNotInstantiable = + wildPt.exists { case _: BoundedWildcardType | _: TypeVar => true ; case tp if typeIsNothing(tp) => true; case _ => false } + @tailrec private def rankImplicits(pending: Infos, acc: List[(SearchResult, ImplicitInfo)]): List[(SearchResult, ImplicitInfo)] = pending match { case Nil => acc case firstPending :: otherPending => @@ -974,7 +1017,10 @@ trait Implicits { } ) - val typedFirstPending = typedImplicit(firstPending, ptChecked = true, isLocalToCallsite) + val typedFirstPending = + if(wildPtNotInstantiable || matchesPtInst(firstPending)) + typedImplicit(firstPending, ptChecked = true, isLocalToCallsite) + else SearchFailure // Pass the errors to `DivergentImplicitRecovery` so that it can note // the first `DivergentImplicitTypeError` that is being propagated @@ -1620,4 +1666,10 @@ trait ImplicitsStats { val matchesPtNanos = newSubTimer (" matchesPT", typerNanos) val implicitCacheAccs = newCounter ("implicit cache accesses", "typer") val implicitCacheHits = newSubCounter("implicit cache hits", implicitCacheAccs) + + val matchesPtInstCalls = newCounter ("implicits instantiated for pruning") + val matchesPtInstMismatch1 + = newSubCounter(" immediate mismatches", matchesPtInstCalls) + val matchesPtInstMismatch2 + = newSubCounter(" instantiated mismatches", matchesPtInstCalls) } diff --git a/test/files/pos/prune-poly-bound.scala b/test/files/pos/prune-poly-bound.scala new file mode 100644 index 000000000000..723c8733d0cc --- /dev/null +++ b/test/files/pos/prune-poly-bound.scala @@ -0,0 +1,13 @@ +class Base[T0] +class Derived[T1] extends Base[T1] + +class Foo[T2, U2] + +object Foo { + implicit def mkFoo[T3, U3 <: Base[T3]](implicit ev: U3 <:< Base[T3]) : Foo[U3, Base[T3]] = ??? +} + +object Test { + def foo[T4, U4](t: T4)(implicit ftu: Foo[T4, U4]): U4 = ??? + val bi: Base[Int] = foo(null.asInstanceOf[Derived[Int]]) +} diff --git a/test/files/pos/prune-poly-f-bounded-view.scala b/test/files/pos/prune-poly-f-bounded-view.scala new file mode 100644 index 000000000000..189a2df78e99 --- /dev/null +++ b/test/files/pos/prune-poly-f-bounded-view.scala @@ -0,0 +1,19 @@ +object Foo { + implicit def toBar[T <: Bar[T]](t: T): Baz = ??? +} + +import Foo._ + +trait Bar[T] + +class Baz { + def wibble = 23 +} + +class Quux extends Bar[Quux] { + def blah = this.wibble +} + +object Test { + (new Quux).blah +} diff --git a/test/files/pos/prune-poly-infer-nothing.scala b/test/files/pos/prune-poly-infer-nothing.scala new file mode 100644 index 000000000000..d88c62d3878a --- /dev/null +++ b/test/files/pos/prune-poly-infer-nothing.scala @@ -0,0 +1,12 @@ +object Test { + trait Pure[+A] + trait Stream[+F[_], +O] + object Stream { + implicit def covaryPure[F[_], O, O2 >: O](s: Stream[Pure, O]): Stream[F, O2] = ??? + def empty: Stream[Pure, Nothing] = ??? + } + + type EntityBody[+F[_]] = Stream[F, Byte] + + val EmptyBody: EntityBody[Nothing] = Stream.empty +} diff --git a/test/files/pos/prune-poly-view.scala b/test/files/pos/prune-poly-view.scala new file mode 100644 index 000000000000..e831294506fe --- /dev/null +++ b/test/files/pos/prune-poly-view.scala @@ -0,0 +1,30 @@ +object Test { + class Foo[T] + object Foo { + implicit def fromT[T](t: T): Foo[T] = ??? + } + + def bar[T](foo: Foo[T]) = ??? + + bar[Double](foo = 0) +} + +object Test2 { + class Foo[T] + object Foo { + implicit def fromT[T](t: T): Foo[T] = ??? + } + + def bar[T](foo: Foo[T]) = ??? 
+ + class C + object O extends C + + bar[C](foo = O) +} + +object Test3 { + implicit def toOption[T](v: T): Option[T] = Option(v) + val a: Int = 123 + val b: Option[Long] = a // Works under 2.12.6 but not with the implicit-poly-prune-2.12.x PR +} From 79b7f2a56427835c0a8375404fee460def5551b8 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 20 Jun 2018 13:21:05 +1000 Subject: [PATCH 1461/2793] Backport #6733 to 2.12.x Avoid performance problem after ASM upgrade in prod/cons analysis ASM 6.2 now creates a new Frame inside the loop in which `newExceptionValue` is called. We were including this frame in the case-class equality of the pseudo-instruction, `ExceptionProducer`, and upon receiving new instances each time the `ProdCons` analysis massively slowed down. This commit just captures the data we need: the stack top of the handler frame. Upgrade to scala-asm 6.2 See: https://github.com/scala/scala-asm/issues/5 Upstream changes in ASM: https://github.com/scala/scala-asm/compare/ASM_6_0...ASM_6_2 http://asm.ow2.io/versions.html The motivations, other than just keeping current, are: - support for Java 9/10/11 updates to the classfile format. - reducing needless String => Array[Char] conversions thanks to internal changes in ASM. This PR will fail to build until we publish artifact from scala/scala-asm. Includes a workaround for scala/bug#10418 Move to the standard way of defining a custom asm.Attribute It seems we don't need CustomAttr in our fork of scala-asm, we can just override Attribute.write. Customise label handling without needing to modify ASM directly Comment on our customizations to asm.tree.*Node --- src/compiler/scala/tools/asm/LabelAccess.java | 18 ------ .../tools/nsc/backend/jvm/AsmUtils.scala | 2 +- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 15 +++-- .../nsc/backend/jvm/BCodeSkelBuilder.scala | 2 +- .../scala/tools/nsc/backend/jvm/BTypes.scala | 2 + .../tools/nsc/backend/jvm/ClassNode1.java | 31 ++++++++++ .../tools/nsc/backend/jvm/LabelNode1.java | 23 +++++++ .../tools/nsc/backend/jvm/MethodNode1.java | 39 ++++++++++++ .../backend/jvm/analysis/BackendUtils.scala | 19 ++++-- .../jvm/analysis/ProdConsAnalyzerImpl.scala | 19 +++--- .../jvm/analysis/TypeFlowInterpreter.scala | 2 +- .../backend/jvm/opt/ByteCodeRepository.scala | 2 +- .../nsc/backend/jvm/opt/BytecodeUtils.scala | 2 +- .../tools/partest/nest/StreamCapture.scala | 61 +++++++++++++++++++ .../nsc/backend/jvm/ProdConsBenchmark.scala | 43 +++++++++++++ .../nsc/backend/jvm/opt/InlinerTest.scala | 2 +- .../scala/tools/testing/BytecodeTesting.scala | 4 +- versions.properties | 2 +- 18 files changed, 243 insertions(+), 45 deletions(-) delete mode 100644 src/compiler/scala/tools/asm/LabelAccess.java create mode 100644 src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java create mode 100644 src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java create mode 100644 src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java create mode 100644 src/partest/scala/tools/partest/nest/StreamCapture.scala create mode 100644 test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala diff --git a/src/compiler/scala/tools/asm/LabelAccess.java b/src/compiler/scala/tools/asm/LabelAccess.java deleted file mode 100644 index 29ed302b4f7f..000000000000 --- a/src/compiler/scala/tools/asm/LabelAccess.java +++ /dev/null @@ -1,18 +0,0 @@ -package scala.tools.asm; - -/** - * Temporary class to allow access to the package-private status field of class Label. 
- */ -public class LabelAccess { - public static boolean isLabelFlagSet(Label l, int f) { - return (l.status & f) != 0; - } - - public static void setLabelFlag(Label l, int f) { - l.status |= f; - } - - public static void clearLabelFlag(Label l, int f) { - l.status &= ~f; - } -} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala index e5bac42b66e9..f7b457e3a02f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala @@ -66,7 +66,7 @@ object AsmUtils { } def classFromBytes(bytes: Array[Byte]): ClassNode = { - val node = new ClassNode() + val node = new ClassNode1() new ClassReader(bytes).accept(node, ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES) node diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index c526306cecd6..c85155117750 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -12,6 +12,8 @@ import scala.tools.nsc.io.AbstractFile import GenBCode._ import BackendReporting._ import scala.reflect.internal.Flags +import scala.tools.asm.{ByteVector, ClassWriter} +import scala.reflect.internal.Flags import scala.tools.nsc.reporters.NoReporter /* @@ -359,9 +361,14 @@ abstract class BCodeHelpers extends BCodeIdiomatic { * can-multi-thread */ def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = { - val dest = new Array[Byte](len) - System.arraycopy(b, offset, dest, 0, len) - new asm.CustomAttr(name, dest) + new asm.Attribute(name) { + override def write(classWriter: ClassWriter, code: Array[Byte], + codeLength: Int, maxStack: Int, maxLocals: Int): asm.ByteVector = { + val byteVector = new asm.ByteVector(len) + byteVector.putByteArray(b, offset, len) + byteVector + } + } } /* @@ -957,7 +964,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { assert(moduleClass.companionClass == NoSymbol, moduleClass) val bType = mirrorClassClassBType(moduleClass) - val mirrorClass = new asm.tree.ClassNode + val mirrorClass = new ClassNode1 mirrorClass.visit( backendUtils.classfileVersion.get, bType.info.get.flags, diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index 65c1dd46f360..c3e9850a1e35 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -90,7 +90,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { isCZRemote = isRemote(claszSymbol) thisBType = classBTypeFromSymbol(claszSymbol) - cnode = new asm.tree.ClassNode() + cnode = new ClassNode1() initJClass(cnode) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index a1e7f18006fc..d2d1139a519a 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -761,6 +761,8 @@ abstract class BTypes { // finds the first common one. // MOST LIKELY the answer can be found here, see the comments and links by Miguel: // - https://github.com/scala/bug/issues/3872 + // @jz Wouldn't it be better to walk the superclass chain of both types in reverse (starting from Object), and + // finding the last common link? 
That would be O(N), whereas this looks O(N^2) firstCommonSuffix(this :: this.superClassesTransitive.orThrow, other :: other.superClassesTransitive.orThrow) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java b/src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java new file mode 100644 index 000000000000..b62374dcc53b --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java @@ -0,0 +1,31 @@ +/* NSC -- new Scala compiler + * Copyright 2018 LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc.backend.jvm; + +import scala.tools.asm.MethodVisitor; +import scala.tools.asm.Opcodes; +import scala.tools.asm.tree.ClassNode; +import scala.tools.asm.tree.MethodNode; + +/** + * A subclass of {@link ClassNode} to customize the representation of + * label nodes with {@link LabelNode1}. + */ +public class ClassNode1 extends ClassNode { + public ClassNode1() { + this(Opcodes.ASM6); + } + + public ClassNode1(int api) { + super(api); + } + + @Override + public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { + MethodNode method = new MethodNode1(access, name, descriptor, signature, exceptions); + methods.add(method); + return method; + } +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java b/src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java new file mode 100644 index 000000000000..5bb3c5835428 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java @@ -0,0 +1,23 @@ +/* NSC -- new Scala compiler + * Copyright 2018 LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc.backend.jvm; + +import scala.tools.asm.Label; +import scala.tools.asm.tree.ClassNode; +import scala.tools.asm.tree.LabelNode; + +/** + * A subclass of {@link LabelNode} to add user-definable flags. + */ +public class LabelNode1 extends LabelNode { + public LabelNode1() { + } + + public LabelNode1(Label label) { + super(label); + } + + public int flags; +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java b/src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java new file mode 100644 index 000000000000..9c735acdd65a --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java @@ -0,0 +1,39 @@ +/* NSC -- new Scala compiler + * Copyright 2018 LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc.backend.jvm; + +import scala.tools.asm.Label; +import scala.tools.asm.Opcodes; +import scala.tools.asm.tree.LabelNode; +import scala.tools.asm.tree.MethodNode; +/** + * A subclass of {@link MethodNode} to customize the representation of + * label nodes with {@link LabelNode1}. 
+ */ +public class MethodNode1 extends MethodNode { + public MethodNode1(int api, int access, String name, String descriptor, String signature, String[] exceptions) { + super(api, access, name, descriptor, signature, exceptions); + } + + public MethodNode1(int access, String name, String descriptor, String signature, String[] exceptions) { + this(Opcodes.ASM6, access, name, descriptor, signature, exceptions); + } + + public MethodNode1(int api) { + super(api); + } + + public MethodNode1() { + this(Opcodes.ASM6); + } + + @Override + protected LabelNode getLabelNode(Label label) { + if (!(label.info instanceof LabelNode)) { + label.info = new LabelNode1(label); + } + return (LabelNode) label.info; + } +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index d4d49b0ca0cf..9ace2e952981 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -13,7 +13,7 @@ import scala.tools.asm import scala.tools.asm.Opcodes._ import scala.tools.asm.tree._ import scala.tools.asm.tree.analysis._ -import scala.tools.asm.{Handle, Label, LabelAccess, Type} +import scala.tools.asm.{Handle, Label, Type} import scala.tools.nsc.backend.jvm.BTypes._ import scala.tools.nsc.backend.jvm.GenBCode._ import scala.tools.nsc.backend.jvm.analysis.BackendUtils._ @@ -189,7 +189,7 @@ abstract class BackendUtils extends PerRunInit { val javaLabelMap = labelMap.asJava val result = new InsnList var map = Map.empty[AbstractInsnNode, AbstractInsnNode] - var inlinedTargetHandles = mutable.ListBuffer[Handle]() + val inlinedTargetHandles = mutable.ListBuffer[Handle]() for (ins <- methodNode.instructions.iterator.asScala) { ins match { case callGraph.LambdaMetaFactoryCall(indy, _, _, _) => indy.bsmArgs match { @@ -588,9 +588,18 @@ object BackendUtils { def clearDceDone(method: MethodNode) = method.access &= ~ACC_DCE_DONE private val LABEL_REACHABLE_STATUS = 0x1000000 - def isLabelReachable(label: LabelNode) = LabelAccess.isLabelFlagSet(label.getLabel, LABEL_REACHABLE_STATUS) - def setLabelReachable(label: LabelNode) = LabelAccess.setLabelFlag(label.getLabel, LABEL_REACHABLE_STATUS) - def clearLabelReachable(label: LabelNode) = LabelAccess.clearLabelFlag(label.getLabel, LABEL_REACHABLE_STATUS) + private def isLabelFlagSet(l: LabelNode1, f: Int): Boolean = (l.flags & f) != 0 + + private def setLabelFlag(l: LabelNode1, f: Int): Unit = { + l.flags |= f + } + + private def clearLabelFlag(l: LabelNode1, f: Int): Unit = { + l.flags &= ~f + } + def isLabelReachable(label: LabelNode) = isLabelFlagSet(label.asInstanceOf[LabelNode1], LABEL_REACHABLE_STATUS) + def setLabelReachable(label: LabelNode) = setLabelFlag(label.asInstanceOf[LabelNode1], LABEL_REACHABLE_STATUS) + def clearLabelReachable(label: LabelNode) = clearLabelFlag(label.asInstanceOf[LabelNode1], LABEL_REACHABLE_STATUS) abstract class NestedClassesCollector[T] extends GenericSignatureVisitor { val innerClasses = mutable.Set.empty[T] diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala index 7d7aef9bf6e0..98e171cfd168 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala @@ -94,9 +94,9 @@ trait ProdConsAnalyzerImpl { } def consumersOfOutputsFrom(insn: 
AbstractInsnNode): Set[AbstractInsnNode] = insn match { - case _: UninitializedLocalProducer => Set.empty - case ParameterProducer(local) => consumersOfValueAt(methodNode.instructions.getFirst, local) - case ExceptionProducer(handlerLabel, handlerFrame) => consumersOfValueAt(handlerLabel, handlerFrame.stackTop) + case _: UninitializedLocalProducer => Set.empty + case ParameterProducer(local) => consumersOfValueAt(methodNode.instructions.getFirst, local) + case ExceptionProducer(handlerLabel, handlerStackTop) => consumersOfValueAt(handlerLabel, handlerStackTop) case _ => _consumersOfOutputsFrom.get(insn).map(v => v.indices.flatMap(v.apply)(collection.breakOut): Set[AbstractInsnNode]).getOrElse(Set.empty) } @@ -388,7 +388,7 @@ trait ProdConsAnalyzerImpl { private def outputValueSlots(insn: AbstractInsnNode): Seq[Int] = insn match { case ParameterProducer(local) => Seq(local) case UninitializedLocalProducer(local) => Seq(local) - case ExceptionProducer(_, frame) => Seq(frame.stackTop) + case ExceptionProducer(_, stackTop) => Seq(stackTop) case _ => if (insn.getOpcode == -1) return Seq.empty if (isStore(insn)) { @@ -453,11 +453,11 @@ abstract class InitialProducer extends AbstractInsnNode(-1) { override def accept(cv: MethodVisitor): Unit = throw new UnsupportedOperationException } -case class ParameterProducer(local: Int) extends InitialProducer -case class UninitializedLocalProducer(local: Int) extends InitialProducer -case class ExceptionProducer[V <: Value](handlerLabel: LabelNode, handlerFrame: Frame[V]) extends InitialProducer +case class ParameterProducer(local: Int) extends InitialProducer +case class UninitializedLocalProducer(local: Int) extends InitialProducer +case class ExceptionProducer[V <: Value](handlerLabel: LabelNode, handlerStackTop: Int) extends InitialProducer -class InitialProducerSourceInterpreter extends SourceInterpreter { +class InitialProducerSourceInterpreter extends SourceInterpreter(scala.tools.asm.Opcodes.ASM7_EXPERIMENTAL) { override def newParameterValue(isInstanceMethod: Boolean, local: Int, tp: Type): SourceValue = { new SourceValue(tp.getSize, ParameterProducer(local)) } @@ -467,6 +467,7 @@ class InitialProducerSourceInterpreter extends SourceInterpreter { } override def newExceptionValue(tryCatchBlockNode: TryCatchBlockNode, handlerFrame: Frame[_ <: Value], exceptionType: Type): SourceValue = { - new SourceValue(1, ExceptionProducer(tryCatchBlockNode.handler, handlerFrame)) + val handlerStackTop = handlerFrame.stackTop + 1 // +1 because this value is about to be pushed onto `handlerFrame`. 
+ new SourceValue(1, ExceptionProducer(tryCatchBlockNode.handler, handlerStackTop)) } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala index bcf9978c164a..9bb79eae24dc 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala @@ -5,7 +5,7 @@ package analysis import scala.tools.asm.Type import scala.tools.asm.tree.analysis.{BasicValue, BasicInterpreter} -abstract class TypeFlowInterpreter extends BasicInterpreter { +abstract class TypeFlowInterpreter extends BasicInterpreter(scala.tools.asm.Opcodes.ASM7_EXPERIMENTAL) { override def newValue(tp: Type) = { if (tp == null) super.newValue(tp) else if (isRef(tp)) new BasicValue(tp) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala index 1ac470883917..206b21a961b3 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala @@ -248,7 +248,7 @@ abstract class ByteCodeRepository extends PerRunInit { private def parseClass(internalName: InternalName): Either[ClassNotFound, ClassNode] = { val fullName = internalName.replace('/', '.') backendClassPath.findClassFile(fullName) map { classFile => - val classNode = new asm.tree.ClassNode() + val classNode = new ClassNode1 val classReader = new asm.ClassReader(classFile.toByteArray) // Passing the InlineInfoAttributePrototype makes the ClassReader invoke the specific `read` diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala index 5248fb6aae35..788070e79769 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -312,7 +312,7 @@ object BytecodeUtils { */ def newLabelNode: LabelNode = { val label = new Label - val labelNode = new LabelNode(label) + val labelNode = new LabelNode1(label) label.info = labelNode labelNode } diff --git a/src/partest/scala/tools/partest/nest/StreamCapture.scala b/src/partest/scala/tools/partest/nest/StreamCapture.scala new file mode 100644 index 000000000000..b24a4f9c768e --- /dev/null +++ b/src/partest/scala/tools/partest/nest/StreamCapture.scala @@ -0,0 +1,61 @@ +/* NEST (New Scala Test) + * Copyright 2007-2013 LAMP/EPFL + * @author Paul Phillips + */ +package scala.tools.partest +package nest + +import java.io.{Console => _, _} +import java.nio.charset.Charset + +object StreamCapture { + def savingSystem[T](body: => T): T = { + val savedOut = System.out + val savedErr = System.err + try body + finally { + System setErr savedErr + System setOut savedOut + } + } + + def capturingOutErr[A](output: OutputStream)(f: => A): A = { + import java.io._ + val charset = Charset.defaultCharset() + val printStream = new PrintStream(output, true, charset.name()) + savingSystem { + System.setOut(printStream) + System.setErr(printStream) + try { + scala.Console.withErr(printStream) { + scala.Console.withOut(printStream) { + f + } + } + } finally { + printStream.close() + } + } + } + + def withExtraProperties[A](extra: Map[String, String])(action: => A): A = { + val saved = System.getProperties() + val modified = new java.util.Properties() + // on Java 9, we need to cast our way 
around this: + // src/main/scala/scala/tools/partest/nest/StreamCapture.scala:44: ambiguous reference to overloaded definition, + // both method putAll in class Properties of type (x$1: java.util.Map[_, _])Unit + // and method putAll in class Hashtable of type (x$1: java.util.Map[_ <: Object, _ <: Object])Unit + // match argument types (java.util.Properties) + (modified: java.util.Hashtable[AnyRef, AnyRef]).putAll(saved) + extra.foreach { case (k, v) => modified.setProperty(k, v) } + // Trying to avoid other threads seeing the new properties object prior to the new entries + // https://github.com/scala/scala/pull/6391#issuecomment-371346171 + UnsafeAccess.U.storeFence() + System.setProperties(modified) + try { + action + } finally { + System.setProperties(saved) + } + } +} diff --git a/test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala b/test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala new file mode 100644 index 000000000000..761b1168576e --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala @@ -0,0 +1,43 @@ +package scala.tools.nsc +package backend.jvm + +import java.util.concurrent.TimeUnit + +import scala.tools.asm.tree.ClassNode +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import scala.collection.JavaConverters.asScalaIteratorConverter +import scala.tools.asm.tree.ClassNode + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class ProdConsBenchmark { + type G <: Global + var global: G = _ + private var classNode: ClassNode = _ + + @Setup(Level.Trial) def setup(): Unit = { + val settings = new Settings() + settings.usejavacp.value = true + val global = new Global(settings) + import global._ + this.global = global.asInstanceOf[G] + classNode = AsmUtils.readClass(global.classPath.findClassFile("scala.tools.nsc.typechecker.Implicits$ImplicitSearch").get.toByteArray) + } + + @Benchmark + def prodCons(bh: Blackhole): Unit = { + val global: G = this.global + import global.genBCode.postProcessor.backendUtils._ + for (m <- classNode.methods.iterator().asScala) { + bh.consume(new ProdConsAnalyzer(m, classNode.name)) + } + } +} + diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index d430cba1b29e..61fecada673e 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -1720,7 +1720,7 @@ class InlinerTest extends BytecodeTesting { """.stripMargin val warn = """T::m()I is annotated @inline but could not be inlined: - |The callee T::m()I contains the instruction INVOKESPECIAL T.impl$1 ()I + |The callee T::m()I contains the instruction INVOKESPECIAL T.impl$1 ()I (itf) |that would cause an IllegalAccessError when inlined into class C.""".stripMargin val List(a, c, t) = compileClasses(code, allowMessage = _.msg contains warn) assertInvoke(getMethod(c, "t"), "T", "m$") diff --git a/test/junit/scala/tools/testing/BytecodeTesting.scala b/test/junit/scala/tools/testing/BytecodeTesting.scala index def87db4713b..0ced131d29ed 100644 --- a/test/junit/scala/tools/testing/BytecodeTesting.scala +++ b/test/junit/scala/tools/testing/BytecodeTesting.scala @@ -11,7 +11,7 @@ import scala.reflect.io.VirtualDirectory import scala.tools.asm.Opcodes import 
scala.tools.asm.tree.{AbstractInsnNode, ClassNode, MethodNode} import scala.tools.cmd.CommandLineParser -import scala.tools.nsc.backend.jvm.AsmUtils +import scala.tools.nsc.backend.jvm.{AsmUtils, MethodNode1} import scala.tools.nsc.backend.jvm.AsmUtils._ import scala.tools.nsc.backend.jvm.opt.BytecodeUtils import scala.tools.nsc.io.AbstractFile @@ -142,7 +142,7 @@ object BytecodeTesting { throwsExceptions: Array[String] = null, handlers: List[ExceptionHandler] = Nil, localVars: List[LocalVariable] = Nil)(body: Instruction*): MethodNode = { - val node = new MethodNode(flags, name, descriptor, genericSignature, throwsExceptions) + val node = new MethodNode1(flags, name, descriptor, genericSignature, throwsExceptions) applyToMethod(node, Method(body.toList, handlers, localVars)) node } diff --git a/versions.properties b/versions.properties index ed01a92413cf..72fd78bfc3d5 100644 --- a/versions.properties +++ b/versions.properties @@ -23,5 +23,5 @@ scala-xml.version.number=1.0.6 scala-parser-combinators.version.number=1.0.7 scala-swing.version.number=2.0.3 partest.version.number=1.1.7 -scala-asm.version=6.0.0-scala-1 +scala-asm.version=6.2.0-scala-2 jline.version=2.14.6 From 53df273c6f952e260b8000981ecb92f7a9c6f294 Mon Sep 17 00:00:00 2001 From: Ismael Juma Date: Sat, 18 Aug 2018 09:44:09 -0700 Subject: [PATCH 1462/2793] Detect CallerSensitive in Java 9+ The annotation's package was changed in Java 9. This fix is already in the 2.13.x branch: https://github.com/scala/scala/pull/6889/files#diff-79df42960bfb7be4f216dd68c8d73e60R124 I am hoping that this will fix the following error when using the `-release` option: Class sun.reflect.CallerSensitive not found - continuing with a stub --- .../scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala index 5248fb6aae35..5ec695e080a4 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -120,7 +120,12 @@ object BytecodeUtils { def isNativeMethod(methodNode: MethodNode): Boolean = (methodNode.access & ACC_NATIVE) != 0 - def hasCallerSensitiveAnnotation(methodNode: MethodNode): Boolean = methodNode.visibleAnnotations != null && methodNode.visibleAnnotations.asScala.exists(_.desc == "Lsun/reflect/CallerSensitive;") + // cross-jdk + def hasCallerSensitiveAnnotation(methodNode: MethodNode): Boolean = + methodNode.visibleAnnotations != null && + methodNode.visibleAnnotations.stream.filter(ann => + ann.desc == "Lsun/reflect/CallerSensitive;" || ann.desc == "Ljdk/internal/reflect/CallerSensitive;" + ).findFirst.isPresent def isFinalClass(classNode: ClassNode): Boolean = (classNode.access & ACC_FINAL) != 0 From 0763168ca6501cf28e826e6a46688eb5b4bbb6d3 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Fri, 10 Aug 2018 22:32:24 -0400 Subject: [PATCH 1463/2793] [nomerge] Emit bridge method forwarders with BRIDGE flag Fixes scala/bug#11061 Ref scala/bug#10812 On 2.13.x branch #6531 removed the mirror class forwarders for bridge methods. I would like to do same in 2.12.x since Java 11-ea started to find them ambiguous as seen in akka/akka#25449 / scala/bug#11061. To keep binary compatibility, I am still emitting the forwarders for bridge methods, but with `ACC_BRIDGE` flag. 
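For context, a sketch of the shape behind scala/bug#11061 (the snippet below mirrors the bridgeFlag test added in this patch): the mirror class of such an object ends up with two static forwarders that differ only in return type, which is legal bytecode but which javac on Java 11 started to report as an ambiguous reference (per the linked tickets) when Java source calls `B.f()`, unless the `Object`-returning forwarder carries `ACC_BRIDGE`:

```scala
// Illustrative only; essentially the same shape as the new test below.
trait A { def f: Object = null }
object B extends A { override def f: String = "b" }
// The mirror class B then exposes both
//   f()Ljava/lang/String;  with flags 0x9  (PUBLIC | STATIC)
//   f()Ljava/lang/Object;  with flags 0x49 (PUBLIC | STATIC | BRIDGE, after this change)
// so a Java caller writing `B.f()` no longer sees an ambiguity once the
// Object-returning forwarder is marked as a bridge.
```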
--- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 22 +++++++++--- .../tools/nsc/backend/jvm/BytecodeTest.scala | 36 +++++++++++++++++++ 2 files changed, 54 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index c526306cecd6..83e8181805b0 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -790,7 +790,12 @@ abstract class BCodeHelpers extends BCodeIdiomatic { * * must-single-thread */ - private def addForwarder(isRemoteClass: Boolean, jclass: asm.ClassVisitor, moduleClass: Symbol, m: Symbol): Unit = { + private def addForwarder( + isRemoteClass: Boolean, + isBridge: Boolean, + jclass: asm.ClassVisitor, + moduleClass: Symbol, + m: Symbol): Unit = { def staticForwarderGenericSignature: String = { // scala/bug#3452 Static forwarder generation uses the same erased signature as the method if forwards to. // By rights, it should use the signature as-seen-from the module class, and add suitable @@ -814,8 +819,8 @@ abstract class BCodeHelpers extends BCodeIdiomatic { * and we don't know what classes might be subclassing the companion class. See scala/bug#4827. */ // TODO: evaluate the other flags we might be dropping on the floor here. - // TODO: ACC_SYNTHETIC ? val flags = GenBCode.PublicStatic | + (if (isBridge) asm.Opcodes.ACC_BRIDGE else 0) | (if (m.isVarargsMethod) asm.Opcodes.ACC_VARARGS else 0) | (if (m.isDeprecated) asm.Opcodes.ACC_DEPRECATED else 0) @@ -885,7 +890,11 @@ abstract class BCodeHelpers extends BCodeIdiomatic { log(s"No forwarder for non-public member $m") else { log(s"Adding static forwarder for '$m' from $jclassName to '$moduleClass'") - addForwarder(isRemoteClass, jclass, moduleClass, m) + addForwarder(isRemoteClass, + isBridge = m.isBridge, + jclass, + moduleClass, + m) } } } @@ -1161,7 +1170,12 @@ object BCodeHelpers { val ExcludedForwarderFlags = { import scala.tools.nsc.symtab.Flags._ // Should include DEFERRED but this breaks findMember. - SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | BridgeAndPrivateFlags | MACRO + // Note that BRIDGE is *not* excluded. Trying to exclude bridges by flag doesn't work, findMembers + // will then include the member from the parent (which the bridge overrides / implements). + // This caused scala/bug#11061 and scala/bug#10812. In 2.13, they are fixed by not emitting + // forwarders for bridges. But in 2.12 that's not binary compatible, so instead we continue to + // emit forwarders for bridges, but mark them with ACC_BRIDGE. 
+ SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | PRIVATE | MACRO } /** diff --git a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala index 3147bc90d14a..dd433db1dc7a 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala @@ -15,6 +15,42 @@ import scala.collection.JavaConverters._ class BytecodeTest extends BytecodeTesting { import compiler._ + @Test + def bridgeFlag(): Unit = { + val code = + """ A { def f: Object = null } + |object B extends A { override def f: String = "b" } + """.stripMargin + for (base <- List("trait", "class")) { + val List(a, bMirror, bModule) = compileClasses(base + code) + assertEquals("B", bMirror.name) + assertEquals(List("f()Ljava/lang/Object;0x49", "f()Ljava/lang/String;0x9"), + bMirror.methods.asScala + .filter(_.name == "f") + .map(m => m.name + m.desc + "0x" + Integer.toHexString(m.access)).toList.sorted) + } + } + + @Test + def varArg(): Unit = { + val code = + """ A { @annotation.varargs def f(i: Int*): Object = null } + |object B extends A { @annotation.varargs override def f(i: Int*): String = "b" } + """.stripMargin + for (base <- List("trait", "class")) { + val List(a, bMirror, bModule) = compileClasses(base + code) + assertEquals("B", bMirror.name) + assertEquals(List( + "f(Lscala/collection/Seq;)Ljava/lang/Object;0x49", + "f(Lscala/collection/Seq;)Ljava/lang/String;0x9", + "f([I)Ljava/lang/Object;0xc9", + "f([I)Ljava/lang/String;0x89"), + bMirror.methods.asScala + .filter(_.name == "f") + .map(m => m.name + m.desc + "0x" + Integer.toHexString(m.access)).toList.sorted) + } + } + @Test def t6288bJumpPosition(): Unit = { val code = From 3065bf6b833991f966736482527f836340f104c5 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 24 Jul 2018 10:00:20 +1000 Subject: [PATCH 1464/2793] Avoid needless storage/lookup for constants in Scope --- src/reflect/scala/reflect/internal/Scopes.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 8aa9a6d41e75..1ae62452f956 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -82,12 +82,12 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** size and mask of hash tables * todo: make hashtables grow? */ - private val HASHSIZE = 0x80 - private val HASHMASK = 0x7f + private final val HASHSIZE = 0x80 + private final val HASHMASK = 0x7f /** the threshold number of entries from which a hashtable is constructed. */ - private val MIN_HASH = 8 + private final val MIN_HASH = 8 /** Returns a new scope with the same content as this one. 
*/ def cloneScope: Scope = newScopeWith(this.toList: _*) From d8fc8606134196b65fcdd2bb21370f0a7be0fafc Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 30 Jul 2018 10:42:42 +1000 Subject: [PATCH 1465/2793] Speedup some symbol lookups in Definitions --- .../scala/reflect/internal/Definitions.scala | 32 ++++++++++++++++--- .../reflect/runtime/JavaUniverseForce.scala | 2 ++ 2 files changed, 29 insertions(+), 5 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 69370475a176..59ba8dc860e1 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -108,7 +108,7 @@ trait Definitions extends api.StandardDefinitions { ) /** Is symbol a numeric value class? */ - def isNumericValueClass(sym: Symbol) = ScalaNumericValueClasses contains sym + def isNumericValueClass(sym: Symbol) = ScalaNumericValueClassesSet contains sym def isGetClass(sym: Symbol) = ( sym.name == nme.getClass_ // this condition is for performance only, this is called from `Typer#stabilize`. @@ -151,6 +151,26 @@ trait Definitions extends api.StandardDefinitions { FloatClass, DoubleClass ) + lazy val ScalaValueClassesSet: SymbolSet = new SymbolSet(ScalaValueClasses) + lazy val ScalaNumericValueClassesSet: SymbolSet = new SymbolSet(ScalaNumericValueClasses) + final class SymbolSet(syms: List[Symbol]) { + private[this] val ids: Array[Symbol] = syms.toArray + private[this] val commonOwner = syms.map(_.rawowner).distinct match { + case common :: Nil => common + case _ => null + } + final def contains(sym: Symbol): Boolean = { + if (commonOwner != null && (commonOwner ne sym.rawowner)) + return false + val array = ids + var i = 0 + while (i < array.length) { + if (array(i) eq sym) return true + i += 1 + } + false + } + } def ScalaPrimitiveValueClasses: List[ClassSymbol] = ScalaValueClasses def underlyingOfValueClass(clazz: Symbol): Type = @@ -566,6 +586,8 @@ trait Definitions extends api.StandardDefinitions { private val offset = countFrom - init.size private def isDefinedAt(i: Int) = i < seq.length + offset && i >= offset val seq: IndexedSeq[ClassSymbol] = (init ++: countFrom.to(maxArity).map { i => getRequiredClass("scala." + name + i) }).toVector + private val symSet = new SymbolSet(seq.toList) + def contains(sym: Symbol): Boolean = symSet.contains(sym) def apply(i: Int) = if (isDefinedAt(i)) seq(i - offset) else NoSymbol def specificType(args: List[Type], others: Type*): Type = { val arity = args.length @@ -604,9 +626,9 @@ trait Definitions extends api.StandardDefinitions { else nme.genericWrapArray } - def isTupleSymbol(sym: Symbol) = TupleClass.seq contains unspecializedSymbol(sym) - def isFunctionSymbol(sym: Symbol) = FunctionClass.seq contains unspecializedSymbol(sym) - def isProductNSymbol(sym: Symbol) = ProductClass.seq contains unspecializedSymbol(sym) + def isTupleSymbol(sym: Symbol) = TupleClass contains unspecializedSymbol(sym) + def isFunctionSymbol(sym: Symbol) = FunctionClass contains unspecializedSymbol(sym) + def isProductNSymbol(sym: Symbol) = ProductClass contains unspecializedSymbol(sym) def unspecializedSymbol(sym: Symbol): Symbol = { if (sym hasFlag SPECIALIZED) { @@ -1376,7 +1398,7 @@ trait Definitions extends api.StandardDefinitions { private lazy val boxedValueClassesSet = boxedClass.values.toSet[Symbol] + BoxedUnitClass /** Is symbol a value class? 
*/ - def isPrimitiveValueClass(sym: Symbol) = ScalaValueClasses contains sym + def isPrimitiveValueClass(sym: Symbol) = ScalaValueClassesSet contains sym def isPrimitiveValueType(tp: Type) = isPrimitiveValueClass(tp.typeSymbol) /** Is symbol a boxed value class, e.g. java.lang.Integer? */ diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index c2751fea80ab..ef081c8055fd 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -468,6 +468,8 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.ScalaNumericValueClasses definitions.ScalaValueClassesNoUnit definitions.ScalaValueClasses + definitions.ScalaValueClassesSet + definitions.ScalaNumericValueClassesSet uncurry.VarargsSymbolAttachment uncurry.DesugaredParameterType From fe7d11567b318d99181e6bf14dbac2870d385002 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 30 Jul 2018 10:44:00 +1000 Subject: [PATCH 1466/2793] Avoid some virtual call overhead to get to Symbol.equals --- src/reflect/scala/reflect/internal/Symbols.scala | 2 +- .../scala/reflect/internal/transform/Erasure.scala | 12 ++++++------ 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index c5cee9c72398..2f43a550ac19 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -828,7 +828,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isDelambdafyFunction = isSynthetic && (name containsName tpnme.DELAMBDAFY_LAMBDA_CLASS_NAME) final def isDelambdafyTarget = isArtifact && isMethod && hasAttachment[DelambdafyTarget.type] final def isDefinedInPackage = effectiveOwner.isPackageClass - final def needsFlatClasses = phase.flatClasses && rawowner != NoSymbol && !rawowner.isPackageClass + final def needsFlatClasses = phase.flatClasses && (rawowner ne NoSymbol) && !rawowner.isPackageClass // TODO introduce a flag for these? 
final def isPatternTypeVariable: Boolean = diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index fff3ef59ae92..aab6d72e7493 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -121,12 +121,12 @@ trait Erasure { case st: SubType => apply(st.supertype) case tref @ TypeRef(pre, sym, args) => - if (sym == ArrayClass) + if (sym eq ArrayClass) if (unboundedGenericArrayLevel(tp) == 1) ObjectTpe else if (args.head.typeSymbol.isBottomClass) arrayType(ObjectTpe) else typeRef(apply(pre), sym, args map applyInArray) - else if (sym == AnyClass || sym == AnyValClass || sym == SingletonClass) ObjectTpe - else if (sym == UnitClass) BoxedUnitTpe + else if ((sym eq AnyClass) || (sym eq AnyValClass) || (sym eq SingletonClass)) ObjectTpe + else if (sym eq UnitClass) BoxedUnitTpe else if (sym.isRefinementClass) apply(mergeParents(tp.parents)) else if (sym.isDerivedValueClass) eraseDerivedValueClassRef(tref) else if (sym.isClass) eraseNormalClassRef(tref) @@ -148,15 +148,15 @@ trait Erasure { apply(atp) case ClassInfoType(parents, decls, clazz) => val newParents = - if (parents.isEmpty || clazz == ObjectClass || isPrimitiveValueClass(clazz)) Nil - else if (clazz == ArrayClass) ObjectTpe :: Nil + if (parents.isEmpty || (clazz eq ObjectClass) || isPrimitiveValueClass(clazz)) Nil + else if (clazz eq ArrayClass) ObjectTpe :: Nil else { val erasedParents = parents mapConserve this // drop first parent for traits -- it has been normalized to a class by now, // but we should drop that in bytecode if (clazz.hasFlag(Flags.TRAIT) && !clazz.hasFlag(Flags.JAVA)) - ObjectTpe :: erasedParents.tail.filter(_.typeSymbol != ObjectClass) + ObjectTpe :: erasedParents.tail.filter(_.typeSymbol ne ObjectClass) else erasedParents } if (newParents eq parents) tp From 3554a09b15b2b36b0435e6b5f3b94f1d0023dd28 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 20 Aug 2018 12:19:28 +1000 Subject: [PATCH 1467/2793] Optimize MethodType.resultType InstantiateDependentMethodType typically doesn't encounter singleton types, even after the initial fast path for trivial MethodTypes in resultType This commit defers some collection copying until the first time it is needed, and also switches to using an Array rather than a Vector. --- .../scala/reflect/internal/tpe/TypeMaps.scala | 26 ++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index bf0220e168e3..e378ffb41c60 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -924,9 +924,29 @@ private[internal] trait TypeMaps { /** Note: This map is needed even for non-dependent method types, despite what the name might imply. */ class InstantiateDependentMap(params: List[Symbol], actuals0: List[Type]) extends TypeMap with KeepOnlyTypeConstraints { - private val actuals = actuals0.toIndexedSeq - private val existentials = new Array[Symbol](actuals.size) - def existentialsNeeded: List[Symbol] = existentials.iterator.filter(_ ne null).toList + private[this] var _actuals: Array[Type] = _ + private[this] var _existentials: Array[Symbol] = _ + private def actuals: Array[Type] = { + if (_actuals eq null) { + // OPT: hand rolled actuals0.toArray to avoid intermediate object creation. 
+ val temp = new Array[Type](actuals0.size) + var i = 0 + var l = actuals0 + while (i < temp.length) { + temp(i) = l.head + l = l.tail // will not generated a NoSuchElementException because temp.size == actuals0.size + i += 1 + } + _actuals = temp + } + _actuals + } + private def existentials: Array[Symbol] = { + if (_existentials eq null) _existentials = new Array[Symbol](actuals.length) + _existentials + } + + def existentialsNeeded: List[Symbol] = if (_existentials eq null) Nil else existentials.iterator.filter(_ ne null).toList private object StableArgTp { // type of actual arg corresponding to param -- if the type is stable From 25be835eebbc39e38f600475a24ae76893d9bbe1 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 20 Aug 2018 15:44:59 +1000 Subject: [PATCH 1468/2793] Optimize caseFieldAccessors to avoid temporary object creation --- src/reflect/scala/reflect/internal/Symbols.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index c5cee9c72398..cfa0a73bd014 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -2080,9 +2080,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => // The slightly more principled approach of using the paramss of the // primary constructor leads to cycles in, for example, pos/t5084.scala. val primaryNames = constrParamAccessors map (_.name.dropLocal) + def nameStartsWithOrigDollar(name: Name, prefix: Name) = + name.startsWith(prefix) && name.length > prefix.length + 1 && name.charAt(prefix.length) == '$' caseFieldAccessorsUnsorted.sortBy { acc => primaryNames indexWhere { orig => - (acc.name == orig) || (acc.name startsWith (orig append "$")) + (acc.name == orig) || nameStartsWithOrigDollar(acc.name, orig) } } } From a2ffb7e113bfcd9e83e9a78052b5e14be8d701d2 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 20 Aug 2018 15:04:31 +1000 Subject: [PATCH 1469/2793] Optimize Constant.{equals,hashCode} - Avoid boxing of the raw bits of double/floats before using them in equals/hashcode - Avoid cooperative equality by directly calling .equals / .hashCode for other values. --- .../scala/reflect/internal/Constants.scala | 48 ++++++++++++------- 1 file changed, 30 insertions(+), 18 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Constants.scala b/src/reflect/scala/reflect/internal/Constants.scala index bb497956a8fb..89ee962d452d 100644 --- a/src/reflect/scala/reflect/internal/Constants.scala +++ b/src/reflect/scala/reflect/internal/Constants.scala @@ -82,7 +82,28 @@ trait Constants extends api.Constants { // !!! In what circumstance could `equalHashValue == that.equalHashValue && tag != that.tag` be true? override def equals(other: Any): Boolean = other match { case that: Constant => - this.tag == that.tag && equalHashValue == that.equalHashValue + this.tag == that.tag && { + // + // Consider two `NaN`s to be identical, despite non-equality + // Consider -0d to be distinct from 0d, despite equality + // + // We use the raw versions (i.e. `floatToRawIntBits` rather than `floatToIntBits`) + // to avoid treating different encodings of `NaN` as the same constant. + // You probably can't express different `NaN` varieties as compile time + // constants in regular Scala code, but it is conceivable that you could + // conjure them with a macro. 
+ // + this.tag match { + case NullTag => + true + case FloatTag => + floatToRawIntBits(value.asInstanceOf[Float]) == floatToRawIntBits(that.value.asInstanceOf[Float]) + case DoubleTag => + doubleToRawLongBits(value.asInstanceOf[Double]) == doubleToRawLongBits(that.value.asInstanceOf[Double]) + case _ => + this.value.equals(that.value) + } + } case _ => false } @@ -242,28 +263,19 @@ trait Constants extends api.Constants { def typeValue: Type = value.asInstanceOf[Type] def symbolValue: Symbol = value.asInstanceOf[Symbol] - /** - * Consider two `NaN`s to be identical, despite non-equality - * Consider -0d to be distinct from 0d, despite equality - * - * We use the raw versions (i.e. `floatToRawIntBits` rather than `floatToIntBits`) - * to avoid treating different encodings of `NaN` as the same constant. - * You probably can't express different `NaN` varieties as compile time - * constants in regular Scala code, but it is conceivable that you could - * conjure them with a macro. - */ - private def equalHashValue: Any = value match { - case f: Float => floatToRawIntBits(f) - case d: Double => doubleToRawLongBits(d) - case v => v - } - override def hashCode: Int = { import scala.util.hashing.MurmurHash3._ val seed = 17 var h = seed h = mix(h, tag.##) // include tag in the hash, otherwise 0, 0d, 0L, 0f collide. - h = mix(h, equalHashValue.##) + val valueHash = tag match { + case NullTag => 0 + // We could just use value.hashCode here, at the cost of a collition between different NaNs + case FloatTag => java.lang.Integer.hashCode(floatToRawIntBits(value.asInstanceOf[Float])) + case DoubleTag => java.lang.Long.hashCode(doubleToRawLongBits(value.asInstanceOf[Double])) + case _ => value.hashCode() + } + h = mix(h, valueHash) finalizeHash(h, length = 2) } } From fc47c26d64012d357564326e173acc71299a5a86 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 6 Aug 2018 14:44:36 +1000 Subject: [PATCH 1470/2793] Avoid a virtual call for Phase.erasedTypes I can't quite untangle the history to know why things are setup this way (maybe it goes back to the MSIL backend?). But with this small refactor we can avoid the virtual call overhead. 
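As a side note for readers unfamiliar with the trick, here is a minimal, hypothetical sketch of the refactoring applied below (`Stage`/`BytecodeStage` are made-up stand-ins for `Phase`/`BCodePhase`, not compiler code): the overridable member becomes a private field behind a `final` getter, and the subclass assigns a protected setter instead of overriding, so every read of the flag is a monomorphic call.

```
// Hypothetical stand-ins for Phase/BCodePhase; they only illustrate the pattern.
class Stage(val prev: Stage) {
  // Before: `def erased: Boolean` was overridden in a subclass, so each read was a virtual call.
  private[this] var _erased: Boolean = (prev ne null) && prev.erased
  // Subclasses flip the flag through a protected setter instead of overriding...
  protected def erased_=(value: Boolean): Unit = { _erased = value }
  // ...and the getter is final, so call sites bind to it without virtual dispatch.
  final def erased: Boolean = _erased
}

class BytecodeStage(prev: Stage) extends Stage(prev) {
  erased = true // replaces `override val erased = true`
}

object StageDemo {
  def main(args: Array[String]): Unit = {
    val parser = new Stage(null)
    println(parser.erased)                    // false
    println(new BytecodeStage(parser).erased) // true
  }
}
```

The diff below has the same shape: `Phase.erasedTypes` becomes a `final` accessor over a private var, and `BCodePhase` uses the protected `erasedTypes_=` setter rather than an override.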
--- src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala | 2 +- src/reflect/scala/reflect/internal/Phase.scala | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala index 3d826901d807..256090d77caa 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala @@ -62,7 +62,7 @@ abstract class GenBCode extends SubComponent { class BCodePhase(prev: Phase) extends StdPhase(prev) { override def description = "Generate bytecode from ASTs using the ASM library" - override val erasedTypes = true + erasedTypes = true def apply(unit: CompilationUnit): Unit = codeGen.genUnit(unit) diff --git a/src/reflect/scala/reflect/internal/Phase.scala b/src/reflect/scala/reflect/internal/Phase.scala index eb193adbf2b9..aa3ce8387247 100644 --- a/src/reflect/scala/reflect/internal/Phase.scala +++ b/src/reflect/scala/reflect/internal/Phase.scala @@ -41,8 +41,9 @@ abstract class Phase(val prev: Phase) { def checkable: Boolean = true // NOTE: sbt injects its own phases which extend this class, and not GlobalPhase, so we must implement this logic here - private val _erasedTypes = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "erasure" || prev.erasedTypes) - def erasedTypes: Boolean = _erasedTypes // overridden in back-end + private var _erasedTypes = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "erasure" || prev.erasedTypes) + protected def erasedTypes_=(value: Boolean): Unit = {_erasedTypes = value} + final def erasedTypes: Boolean = _erasedTypes // overridden in back-end final val flatClasses: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "flatten" || prev.flatClasses) final val specialized: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "specialize" || prev.specialized) final val refChecked: Boolean = ((prev ne null) && (prev ne NoPhase)) && (prev.name == "refchecks" || prev.refChecked) From 4b680b398af94202d6a066f365a1a0b89187a4be Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 7 Aug 2018 10:46:20 +1000 Subject: [PATCH 1471/2793] Move Shadower to outer level I want to get rid of this altogether, but in the meantime we can be more efficient and avoid some overhead by unnesting NoShadower. --- .../tools/nsc/typechecker/Implicits.scala | 40 +++++++++---------- 1 file changed, 20 insertions(+), 20 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 4c66b77a54dd..94c69543c5f7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -856,26 +856,7 @@ trait Implicits { * enclosing scope, and so on. */ class ImplicitComputation(iss: Infoss, isLocalToCallsite: Boolean) { - abstract class Shadower { - def addInfos(infos: Infos) - def isShadowed(name: Name): Boolean - } - private val shadower: Shadower = { - /** Used for exclude implicits from outer scopes that are shadowed by same-named implicits */ - final class LocalShadower extends Shadower { - val shadowed = util.HashSet[Name](512) - def addInfos(infos: Infos) { - infos.foreach(i => shadowed.addEntry(i.name)) - } - def isShadowed(name: Name) = shadowed(name) - } - /** Used for the implicits of expected type, when no shadowing checks are needed. 
*/ - object NoShadower extends Shadower { - def addInfos(infos: Infos) {} - def isShadowed(name: Name) = false - } - if (isLocalToCallsite) new LocalShadower else NoShadower - } + private val shadower: Shadower = if (isLocalToCallsite) new LocalShadower else NoShadower private var best: SearchResult = SearchFailure @@ -1592,6 +1573,25 @@ trait Implicits { } } } + + private abstract class Shadower { + def addInfos(infos: Infos): Unit + def isShadowed(name: Name): Boolean + } + + /** Used for exclude implicits from outer scopes that are shadowed by same-named implicits */ + private final class LocalShadower extends Shadower { + val shadowed = util.HashSet[Name](512) + def addInfos(infos: Infos): Unit = { + infos.foreach(i => shadowed.addEntry(i.name)) + } + def isShadowed(name: Name) = shadowed(name) + } + /** Used for the implicits of expected type, when no shadowing checks are needed. */ + private object NoShadower extends Shadower { + def addInfos(infos: Infos): Unit = {} + def isShadowed(name: Name) = false + } } trait ImplicitsStats { From 63bf292f214c7e69bc3455b9e1a4bad489ea1e40 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 7 Aug 2018 14:39:14 +1000 Subject: [PATCH 1472/2793] Avoid SeqFactory.unapplySeq in hot pattern matches. (cherry picked from commit 85d8ed8408d02c662819a9d58f62beeb1c0768d0) --- .../scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala | 4 ++-- src/compiler/scala/tools/nsc/transform/CleanUp.scala | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index b3d97e9afe94..4885083938e9 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -629,14 +629,14 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { genInvokeDynamicLambda(attachment) generatedType = methodBTypeFromSymbol(fun.symbol).returnType - case Apply(fun, List(expr)) if currentRun.runDefinitions.isBox(fun.symbol) => + case Apply(fun, expr :: Nil) if currentRun.runDefinitions.isBox(fun.symbol) => val nativeKind = typeToBType(fun.symbol.firstParam.info) genLoad(expr, nativeKind) val MethodNameAndType(mname, methodType) = srBoxesRuntimeBoxToMethods(nativeKind) bc.invokestatic(srBoxesRunTimeRef.internalName, mname, methodType.descriptor, itf = false, app.pos) generatedType = boxResultType(fun.symbol) - case Apply(fun, List(expr)) if currentRun.runDefinitions.isUnbox(fun.symbol) => + case Apply(fun, expr :: Nil) if currentRun.runDefinitions.isUnbox(fun.symbol) => genLoad(expr) val boxType = unboxResultType(fun.symbol) generatedType = boxType diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index dbb0b4b15e3f..81dc15db4c95 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -472,10 +472,10 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { // with just `ArrayValue(...).$asInstanceOf[...]` // // See scala/bug#6611; we must *only* do this for literal vararg arrays. 
- case Apply(appMeth, List(Apply(wrapRefArrayMeth, List(arg @ StripCast(ArrayValue(_, _)))), _)) + case Apply(appMeth, Apply(wrapRefArrayMeth, (arg @ StripCast(ArrayValue(_, _))) :: Nil) :: _ :: Nil) if wrapRefArrayMeth.symbol == currentRun.runDefinitions.Predef_wrapRefArray && appMeth.symbol == ArrayModule_genericApply => super.transform(arg) - case Apply(appMeth, List(elem0, Apply(wrapArrayMeth, List(rest @ ArrayValue(elemtpt, _))))) + case Apply(appMeth, elem0 :: Apply(wrapArrayMeth, (rest @ ArrayValue(elemtpt, _)) :: Nil) :: Nil) if wrapArrayMeth.symbol == Predef_wrapArray(elemtpt.tpe) && appMeth.symbol == ArrayModule_apply(elemtpt.tpe) => super.transform(treeCopy.ArrayValue(rest, rest.elemtpt, elem0 :: rest.elems)) From 94be00349d57b484f3fc339e64161b867ef45463 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 7 Mar 2017 15:28:25 +1000 Subject: [PATCH 1473/2793] Remove some indirections in Global and Typer This reduces the number of Java stack frames between the bottom of the stack and the start of typer, and, more importantly, at each level of recursion in the AST or in symbol completion. The intent is to make the stacks easier to visually scan in profilers and other tools that display them. I'm not expecting that performance will improve, the JVM probably does a decent jobs and inlining these chunks of the stack. (cherry picked from commit 3d69134a50cc1e010e135377c54a7375db13ddb9) --- src/compiler/scala/tools/nsc/Global.scala | 26 +++++++- .../tools/nsc/typechecker/Analyzer.scala | 8 ++- .../scala/tools/nsc/typechecker/Typers.scala | 64 +++++++++---------- .../nsc/typechecker/TypersTracking.scala | 10 +++ 4 files changed, 67 insertions(+), 41 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 4f0fa16cf52a..ee0b4e75fb1b 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -387,7 +387,9 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def run() { echoPhaseSummary(this) - currentRun.units foreach applyPhase + val units = currentRun.units + while (units.hasNext) + applyPhase(units.next()) } def apply(unit: CompilationUnit): Unit @@ -400,12 +402,17 @@ class Global(var currentSettings: Settings, reporter0: Reporter) reporter.cancelled || unit.isJava && this.id > maxJavaPhase } - final def withCurrentUnit(unit: CompilationUnit)(task: => Unit) { + private def beforeUnit(unit: CompilationUnit): Unit = { if ((unit ne null) && unit.exists) lastSeenSourceFile = unit.source if (settings.debug && (settings.verbose || currentRun.size < 5)) inform("[running phase " + name + " on " + unit + "]") + } + + @deprecated + final def withCurrentUnit(unit: CompilationUnit)(task: => Unit) { + beforeUnit(unit) if (!cancelled(unit)) { currentRun.informUnitStarting(this, unit) try withCurrentUnitNoLog(unit)(task) @@ -413,6 +420,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } } + @inline final def withCurrentUnitNoLog(unit: CompilationUnit)(task: => Unit) { val unit0 = currentUnit try { @@ -424,7 +432,19 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } } - final def applyPhase(unit: CompilationUnit) = withCurrentUnit(unit)(apply(unit)) + final def applyPhase(unit: CompilationUnit) = { + beforeUnit(unit) + if (!cancelled(unit)) { + currentRun.informUnitStarting(this, unit) + val unit0 = currentUnit + currentRun.currentUnit = unit + try apply(unit) + finally { + currentRun.currentUnit = unit0 + currentRun.advanceUnit() + } + } + } 
} // phaseName = "parser" diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index 5fc17c191477..19eb1fda2b89 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -80,7 +80,8 @@ trait Analyzer extends AnyRef val phaseName = "typer" val runsAfter = List[String]() val runsRightAfter = Some("packageobjects") - def newPhase(_prev: Phase): StdPhase = new StdPhase(_prev) { + def newPhase(prev: Phase): StdPhase = new TyperPhase(prev) + final class TyperPhase(prev: Phase) extends StdPhase(prev) { override def keepsTypeParams = false resetTyper() // the log accumulates entries over time, even though it should not (Adriaan, Martin said so). @@ -90,8 +91,9 @@ trait Analyzer extends AnyRef override def run() { val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.typerNanos) else null global.echoPhaseSummary(this) - for (unit <- currentRun.units) { - applyPhase(unit) + val units = currentRun.units + while (units.hasNext) { + applyPhase(units.next()) undoLog.clear() } // defensive measure in case the bookkeeping in deferred macro expansion is buggy diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 599c003d9da5..821bb5e5c885 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5531,7 +5531,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ => abort(s"unexpected tree in pattern mode: ${tree.getClass}\n$tree") } - def typedTypTree(tree: TypTree): Tree = tree match { + @inline def typedTypTree(tree: TypTree): Tree = tree match { case tree: TypeTree => typedTypeTree(tree) case tree: AppliedTypeTree => typedAppliedTypeTree(tree) case tree: TypeBoundsTree => typedTypeBoundsTree(tree) @@ -5543,7 +5543,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ => abort(s"unexpected type-representing tree: ${tree.getClass}\n$tree") } - def typedMemberDef(tree: MemberDef): Tree = tree match { + @inline def typedMemberDef(tree: MemberDef): Tree = tree match { case tree: ValDef => typedValDef(tree) case tree: DefDef => defDefTyper(tree).typedDefDef(tree) case tree: ClassDef => newTyper(context.makeNewScope(tree, sym)).typedClassDef(tree) @@ -5577,7 +5577,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } // Trees allowed in or out of pattern mode. 
- def typedInAnyMode(tree: Tree): Tree = tree match { + @inline def typedInAnyMode(tree: Tree): Tree = tree match { case tree: Ident => typedIdentOrWildcard(tree) case tree: Bind => typedBind(tree) case tree: Apply => typedApply(tree) @@ -5603,27 +5603,19 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typed(tree: Tree, mode: Mode, pt: Type): Tree = { lastTreeToTyper = tree - def body = ( - if (printTypings && !phase.erasedTypes && !noPrintTyping(tree)) - typingStack.nextTyped(tree, mode, pt, context)(typedInternal(tree, mode, pt)) - else - typedInternal(tree, mode, pt) - ) val statsEnabled = StatisticsStatics.areSomeHotStatsEnabled() && statistics.areHotStatsLocallyEnabled val startByType = if (statsEnabled) statistics.pushTimer(byTypeStack, byTypeNanos(tree.getClass)) else null if (statsEnabled) statistics.incCounter(visitsByType, tree.getClass) - try body - finally if (statsEnabled) statistics.popTimer(byTypeStack, startByType) - } + val shouldPrintTyping = printTypings && !phase.erasedTypes && !noPrintTyping(tree) + val shouldPopTypingStack = shouldPrintTyping && typingStack.beforeNextTyped(tree, mode, pt, context) + try { - private def typedInternal(tree: Tree, mode: Mode, pt: Type): Tree = { - val ptPlugins = pluginsPt(pt, this, tree, mode) - def retypingOk = ( - context.retyping - && (tree.tpe ne null) - && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins)) - ) - def runTyper(): Tree = { + val ptPlugins = pluginsPt(pt, this, tree, mode) + def retypingOk = ( + context.retyping + && (tree.tpe ne null) + && (tree.tpe.isErroneous || !(tree.tpe <:< ptPlugins)) + ) if (retypingOk) { tree.setType(null) if (tree.hasSymbolField) tree.symbol = NoSymbol @@ -5663,10 +5655,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (mode.inPatternMode && !mode.inPolyMode && result.isType) PatternMustBeValue(result, pt) - result - } + if (shouldPopTypingStack) typingStack.showPop(result) - try runTyper() catch { + result + } catch { case ex: CyclicReference if global.propagateCyclicReferences => throw ex case ex: TypeError => @@ -5677,10 +5669,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper setError(tree) case ex: Exception => // @M causes cyclic reference error - devWarning(s"exception when typing $tree, pt=$ptPlugins") + devWarning(s"exception when typing $tree, pt=$pt") if (context != null && context.unit.exists && tree != null) logError("AT: " + tree.pos, ex) throw ex + } finally { + if (shouldPopTypingStack) typingStack.pop(tree) + if (statsEnabled) statistics.popTimer(byTypeStack, startByType) } } @@ -5692,12 +5687,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper /** Types expression or definition `tree`. 
*/ - def typed(tree: Tree): Tree = { - val ret = typed(tree, context.defaultModeForTyped, WildcardType) - ret - } + @inline final def typed(tree: Tree): Tree = + typed(tree, context.defaultModeForTyped, WildcardType) - def typedByValueExpr(tree: Tree, pt: Type = WildcardType): Tree = typed(tree, EXPRmode | BYVALmode, pt) + @inline final def typedByValueExpr(tree: Tree, pt: Type = WildcardType): Tree = typed(tree, EXPRmode | BYVALmode, pt) def typedPos(pos: Position, mode: Mode, pt: Type)(tree: Tree) = typed(atPos(pos)(tree), mode, pt) def typedPos(pos: Position)(tree: Tree) = typed(atPos(pos)(tree)) @@ -5707,28 +5700,28 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper /** Types expression `tree` with given prototype `pt`. */ - def typed(tree: Tree, pt: Type): Tree = + @inline final def typed(tree: Tree, pt: Type): Tree = typed(tree, context.defaultModeForTyped, pt) - def typed(tree: Tree, mode: Mode): Tree = + @inline final def typed(tree: Tree, mode: Mode): Tree = typed(tree, mode, WildcardType) /** Types qualifier `tree` of a select node. * E.g. is tree occurs in a context like `tree.m`. */ - def typedQualifier(tree: Tree, mode: Mode, pt: Type): Tree = + @inline final def typedQualifier(tree: Tree, mode: Mode, pt: Type): Tree = typed(tree, PolyQualifierModes | mode.onlyTypePat, pt) // TR: don't set BYVALmode, since qualifier might end up as by-name param to an implicit /** Types qualifier `tree` of a select node. * E.g. is tree occurs in a context like `tree.m`. */ - def typedQualifier(tree: Tree, mode: Mode): Tree = + @inline final def typedQualifier(tree: Tree, mode: Mode): Tree = typedQualifier(tree, mode, WildcardType) - def typedQualifier(tree: Tree): Tree = typedQualifier(tree, NOmode, WildcardType) + @inline final def typedQualifier(tree: Tree): Tree = typedQualifier(tree, NOmode, WildcardType) /** Types function part of an application */ - def typedOperator(tree: Tree): Tree = typed(tree, OperatorModes) + @inline final def typedOperator(tree: Tree): Tree = typed(tree, OperatorModes) // the qualifier type of a supercall constructor is its first parent class private def typedSelectOrSuperQualifier(qual: Tree) = @@ -5845,6 +5838,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ => op } + @inline final def transformedOrTyped(tree: Tree, mode: Mode, pt: Type): Tree = { lookupTransformed(tree) match { case Some(tree1) => tree1 diff --git a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala index f2911fb98b16..ec889bd8301c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala @@ -138,6 +138,16 @@ trait TypersTracking { runWith(tree) { pushFn ; showPop(body) } ) + def beforeNextTyped(tree: Tree, mode: Mode, pt: Type, context: Context): Boolean = if (noPrintTyping(tree)) false else { + push(tree) + showPush(tree, mode, pt, context) + true + } + def afterNextTyped(tree: Tree, typedTree: Tree): Unit = { + showPop(typedTree) + pop(tree) + } + @inline final def printTyping(tree: Tree, s: => String) = { if (printTypings && !noPrintTyping(tree)) show(indented(s)) From 2ff34ca96b40bb1d327c6d077b0c08e65e82003d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 7 Aug 2018 14:27:27 +1000 Subject: [PATCH 1474/2793] Avoid expensive call to `imports` in implicit search (cherry picked from commit 2fae7d814bc9aa77a699c30c5940d16f82e476c8) --- 
.../scala/tools/nsc/typechecker/Contexts.scala | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index a4f191720ad5..b2562eef23ac 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -909,7 +909,7 @@ trait Contexts { self: Analyzer => /** @return None if a cycle is detected, or Some(infos) containing the in-scope implicits at this context */ private def implicits(nextOuter: Context): Option[List[ImplicitInfo]] = { - val imports = this.imports + val firstImport = this.firstImport if (owner != nextOuter.owner && owner.isClass && !owner.isPackageClass && !inSelfSuperCall) { if (!owner.isInitialized) None else savingEnclClass(this) { @@ -922,13 +922,14 @@ trait Contexts { self: Analyzer => debuglog("collect local implicits " + scope.toList)//DEBUG Some(collectImplicits(scope, NoPrefix)) } else if (firstImport != nextOuter.firstImport) { - assert(imports.tail.headOption == nextOuter.firstImport, (imports, nextOuter.imports)) - Some(collectImplicitImports(imports.head)) + if (isDeveloper) + assert(imports.tail.headOption == nextOuter.firstImport, (imports, nextOuter.imports)) + Some(collectImplicitImports(firstImport.get)) } else if (owner.isPackageClass) { // the corresponding package object may contain implicit members. val pre = owner.packageObject.typeOfThis Some(collectImplicits(pre.implicitMembers, pre)) - } else Some(Nil) + } else SomeNil } // @@ -1525,6 +1526,7 @@ trait Contexts { self: Analyzer => type ImportType = global.ImportType val ImportType = global.ImportType + private final val SomeNil = Some(Nil) } object ContextMode { From 100602f039a40ead63d647eadee107028529bb79 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 3 Aug 2018 10:08:46 +1000 Subject: [PATCH 1475/2793] Use eq in lookupEntry (cherry picked from commit ef3d9f384b848e3fab03f54a1233fa3f7b1685be) --- src/reflect/scala/reflect/internal/Scopes.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 8aa9a6d41e75..1a375e01166f 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -311,12 +311,12 @@ trait Scopes extends api.Scopes { self: SymbolTable => var e: ScopeEntry = null if (hashtable ne null) { e = hashtable(name.start & HASHMASK) - while ((e ne null) && e.sym.name != name) { + while ((e ne null) && (e.sym.name ne name)) { e = e.tail } } else { e = elems - while ((e ne null) && e.sym.name != name) { + while ((e ne null) && (e.sym.name ne name)) { e = e.next } } From 51ac7dc53b4799690c81bd6fca71a671fb62c488 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 7 Aug 2018 13:36:01 +1000 Subject: [PATCH 1476/2793] Make Name.start final (cherry picked from commit 31f6afbb32a5e3491e09fafe4b6e35237a7feb4d) --- src/reflect/scala/reflect/internal/Names.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index f22c197cadb0..eaffadb6b96c 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -191,7 +191,7 @@ trait Names extends api.Names { // compile loses track of this fact. 
/** Index into name table */ - def start: Int = index + final def start: Int = index /** The next name in the same hash bucket. */ def next: Name with ThisNameType From adbb8308dfb9a51eec61d2cc1ce75b14bfb9ed76 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 7 Aug 2018 13:36:24 +1000 Subject: [PATCH 1477/2793] Avoid List.equals in hot path (cherry picked from commit 4da0de224c52692c5e2374732b1ccd6b53c27009) --- src/reflect/scala/reflect/internal/tpe/TypeMaps.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index bf0220e168e3..5d734cfbdc18 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -220,8 +220,8 @@ private[internal] trait TypeMaps { */ protected def noChangeToSymbols(origSyms: List[Symbol]): Boolean = { @tailrec def loop(syms: List[Symbol]): Boolean = syms match { - case Nil => true case x :: xs => (x.info eq applyToSymbolInfo(x)) && loop(xs) + case _ => true } loop(origSyms) } From e0edb2a8a35bc093f9681228d04cd7c3b831967f Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 7 Aug 2018 13:50:48 +1000 Subject: [PATCH 1478/2793] Avoid double call to `dealias` (cherry picked from commit c602727a6c2b7adac97149f28e6fb9cc5404391a) --- src/reflect/scala/reflect/internal/Definitions.scala | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 69370475a176..409c1c7f7f6c 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -808,14 +808,19 @@ trait Definitions extends api.StandardDefinitions { } } } + def isVolatileTypeRef(tr: TypeRef) = { + val dealised = tr.dealias + if (dealised ne tr) isVolatile(dealised) + else if (tr.sym.isAbstractType) isVolatileAbstractType + else false + } tp match { case ThisType(_) => false case SingleType(_, sym) => isVolatile(tp.underlying) && (sym.hasVolatileType || !sym.isStable) case NullaryMethodType(restpe) => isVolatile(restpe) case PolyType(_, restpe) => isVolatile(restpe) - case TypeRef(_, _, _) if tp ne tp.dealias => isVolatile(tp.dealias) - case TypeRef(_, sym, _) if sym.isAbstractType => isVolatileAbstractType + case tr: TypeRef => isVolatileTypeRef(tr) case RefinedType(_, _) => isVolatileRefinedType case TypeVar(origin, _) => isVolatile(origin) case _: SimpleTypeProxy => isVolatile(tp.underlying) From 749ee88e751f3cbfa6601b75c06021f6b8ec720b Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 7 Aug 2018 13:56:55 +1000 Subject: [PATCH 1479/2793] Avoid repeated calls to Symbol.info (cherry picked from commit ab66e2618c4bc6c3fb6c335ff4f63edb6514b3df) --- src/compiler/scala/tools/nsc/transform/Erasure.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 50caff362608..33d869919083 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -824,8 +824,9 @@ abstract class Erasure extends InfoTransform case Ident(_) | Select(_, _) => if (tree1.symbol.isOverloaded) { val first = tree1.symbol.alternatives.head + val firstTpe = first.tpe val sym1 = tree1.symbol.filter { - alt => alt == first || !(first.tpe looselyMatches alt.tpe) + alt => alt 
== first || !(firstTpe looselyMatches alt.tpe) } if (tree.symbol ne sym1) { tree1 setSymbol sym1 setType sym1.tpe From 4bc8aa43401b54dc16f1219e8bab9c17718a0a51 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 7 Aug 2018 16:03:47 +1000 Subject: [PATCH 1480/2793] Remove hot assertion We want this method to be a tiny as possible so it is JVM inlined into call sites and free of any overhead. (cherry picked from commit 0586e022ccf736ad70c499a0939854f2176fbda4) --- src/reflect/scala/reflect/internal/SymbolTable.scala | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 9c2779f59412..93ff7dcf7d24 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -230,8 +230,6 @@ abstract class SymbolTable extends macros.Universe } final def phase_=(p: Phase) { - //System.out.println("setting phase to " + p) - assert((p ne null) && p != NoPhase, p) ph = p per = period(currentRunId, p.id) } From 49a5ebde961052b81716d243115a91089f2d9811 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Mon, 27 Aug 2018 07:22:39 -0400 Subject: [PATCH 1481/2793] Uncurry of Java varargs respects Java's universal trait nescience If `U` is a universal trait (i.e., one extending from `Any` rather than `AnyRef`), Java still sees it as a normal interface, and in a method with a generic varargs parameter ``, interprets that as declaring an array argument `U[]` after erasure. However, due to a pre-value-class implementation of `isUnboundedGeneric`, such a type parameter was considered unbounded, and therefore Scala considered its erasure to be `Object[]` instead. This causes a runtime `NoSuchMethodError`, of course. I moved `isUnboundedGeneric` from `Types` into `UnCurry`, since that's the only place it was used, and its proximity to `isBoundedGeneric`, which is neither defined as nor synonymous with `!isUnboundedGeneric` appeared likely to cause confusion. (_That_ method is only used in `SpecialiseTypes`, but I didn't want to change an unrelated file.) Running into this bug is probably penance for using Hibernate with Scala, but I promise I'm "just following orders". Fixes scala/bug#11109. 
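As a rough, Scala-only sketch (separate from the `t11109` test added below, and not part of the fix itself), the following illustrates the premise above: to the JVM a universal trait is an ordinary interface, so an array of it is a distinct runtime type from `Object[]`, which is why a Java varargs parameter bounded by such a trait must receive a `Universal[]`, and a call emitted against `Object[]` fails to link.

```
// Illustrative only; `Universal` mirrors the trait in the test below, `ErasureDemo` is made up.
trait Universal extends Any
object Universal extends Universal

object ErasureDemo {
  def main(args: Array[String]): Unit = {
    // A universal trait is just an interface as far as the JVM is concerned.
    println(classOf[Universal].isInterface) // true
    // An array of it therefore has its own runtime class, distinct from Object[].
    val arr: Array[Universal] = Array(Universal)
    println(arr.getClass.getName)                                // e.g. [LUniversal;
    println(classOf[Array[Universal]] == classOf[Array[AnyRef]]) // false
  }
}
```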
--- src/reflect/scala/reflect/internal/Types.scala | 8 ++------ .../scala/reflect/internal/transform/UnCurry.scala | 6 ++++++ test/files/run/t11109/JaVarArgs.java | 9 +++++++++ test/files/run/t11109/Test.scala | 7 +++++++ test/files/run/t11109/Universal.scala | 4 ++++ 5 files changed, 28 insertions(+), 6 deletions(-) create mode 100644 test/files/run/t11109/JaVarArgs.java create mode 100644 test/files/run/t11109/Test.scala create mode 100644 test/files/run/t11109/Universal.scala diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 701ae8ac0868..e42cc4b572dc 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1305,8 +1305,8 @@ trait Types case TypeBounds(_, _) => that <:< this case _ => lo <:< that && that <:< hi } - private def emptyLowerBound = typeIsNothing(lo) || lo.isWildcard - private def emptyUpperBound = typeIsAny(hi) || hi.isWildcard + def emptyLowerBound = typeIsNothing(lo) || lo.isWildcard + def emptyUpperBound = typeIsAny(hi) || hi.isWildcard def isEmptyBounds = emptyLowerBound && emptyUpperBound override def safeToString = scalaNotation(_.toString) @@ -4661,10 +4661,6 @@ trait Types try { explainSwitch = true; op } finally { explainSwitch = s } } - def isUnboundedGeneric(tp: Type) = tp match { - case t @ TypeRef(_, sym, _) => sym.isAbstractType && !(t <:< AnyRefTpe) - case _ => false - } def isBoundedGeneric(tp: Type) = tp match { case TypeRef(_, sym, _) if sym.isAbstractType => (tp <:< AnyRefTpe) case TypeRef(_, sym, _) => !isPrimitiveValueClass(sym) diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala index 3918723b5cd2..aa0b4d4fc71c 100644 --- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala +++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala @@ -56,6 +56,12 @@ trait UnCurry { } object DesugaredParameterType { + def isUnboundedGeneric(tp: Type) = tp match { + case t @ TypeRef(_, sym, _) if sym.isAbstractType => + sym.info.resultType.bounds.emptyUpperBound + case _ => false + } + def unapply(tpe: Type): Option[Type] = tpe match { case TypeRef(pre, ByNameParamClass, arg :: Nil) => Some(functionType(List(), arg)) diff --git a/test/files/run/t11109/JaVarArgs.java b/test/files/run/t11109/JaVarArgs.java new file mode 100644 index 000000000000..cecccf97551f --- /dev/null +++ b/test/files/run/t11109/JaVarArgs.java @@ -0,0 +1,9 @@ +// filter: Note: +package t11109; + +import java.io.*; + +public class JaVarArgs { + public void serialize(T... ts) {} + public void universalize(T... 
ts) {} +} \ No newline at end of file diff --git a/test/files/run/t11109/Test.scala b/test/files/run/t11109/Test.scala new file mode 100644 index 000000000000..be0ad9acdd35 --- /dev/null +++ b/test/files/run/t11109/Test.scala @@ -0,0 +1,7 @@ +import t11109._ + +object Test extends App { + val jva = new JaVarArgs + jva.serialize("asdf") + jva.universalize(Universal) +} \ No newline at end of file diff --git a/test/files/run/t11109/Universal.scala b/test/files/run/t11109/Universal.scala new file mode 100644 index 000000000000..e551fab8d0d0 --- /dev/null +++ b/test/files/run/t11109/Universal.scala @@ -0,0 +1,4 @@ +package t11109 + +trait Universal extends Any +object Universal extends Universal \ No newline at end of file From 42dac30eb55e72a12b853a777de0b9af75243899 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Sat, 18 Aug 2018 13:43:52 -0700 Subject: [PATCH 1482/2793] [nomerge] fix performance regression in mutable.HashMap#getOrElseUpdate the change in question originated in https://github.com/scala/collection-strawman/pull/484. it was correct at the time because `HashTable#addEntry0` has a threshold check but then when the change was backported to 2.12.x in https://github.com/scala/scala/pull/6828, the `HashTable#addEntry0`call was replaced with a call to `HashMap#addEntry0`, which doesn't check the threshold. so if the table is only ever updated using `getOrElseUpdate`, the table's load factor would just keep climbing, resulting in poor performance this was caught by my Project Euler solutions :-) [nomerge] since the problem is specific to the 2.12 code --- src/library/scala/collection/mutable/HashMap.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala index c32e9d2f7d35..396c8b6643f5 100644 --- a/src/library/scala/collection/mutable/HashMap.scala +++ b/src/library/scala/collection/mutable/HashMap.scala @@ -87,7 +87,7 @@ extends AbstractMap[A, B] // Repeat search // because evaluation of `default` can bring entry with `key` val secondEntry = findEntry(key, newEntryIndex) - if (secondEntry == null) addEntry0(e, newEntryIndex) + if (secondEntry == null) addEntry(e, newEntryIndex) else secondEntry.value = default default } From da62022a0249f992f7697ec2841472e387e09567 Mon Sep 17 00:00:00 2001 From: Martijn Hoekstra Date: Tue, 28 Aug 2018 12:29:47 +0200 Subject: [PATCH 1483/2793] updates documentation of PriotityQueue makes it clear that despite the name Queue, PriorityQueue does not guarantee FIFO ordering --- src/library/scala/collection/mutable/PriorityQueue.scala | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala index ce8bb1a3c425..5fe34b753394 100644 --- a/src/library/scala/collection/mutable/PriorityQueue.scala +++ b/src/library/scala/collection/mutable/PriorityQueue.scala @@ -15,6 +15,12 @@ import generic._ /** This class implements priority queues using a heap. * To prioritize elements of type A there must be an implicit * Ordering[A] available at creation. + * + * If multiple elements have the same priority in the ordering of this + * PriorityQueue, no guarantees are made regarding the order in which elements + * are returned by `dequeue` or `dequeueAll`. 
In particular, that means this + * class does not guarantee first in first out behaviour that may be + * incorrectly inferred from the Queue part of the name of this class. * * Only the `dequeue` and `dequeueAll` methods will return elements in priority * order (while removing elements from the heap). Standard collection methods From e164092a73835b170a6a706f7180008fb1793eb9 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 29 Aug 2018 20:39:24 +1000 Subject: [PATCH 1484/2793] Address binary incompatibilities - Restore default implementation of ClassTag.apply in 2.12.x - Whitelist the change to add a concrete implementation of toList in IndexedSeqOptimized. I tested the latter change for potential breakage with: ``` object Test { def main(args: Array[String]) { def s = new collection.immutable.WrappedString("") s.toList (s: collection.IndexedSeqOptimized[Char, Any]).toList (s: collection.GenTraversableOnce[Char]).toList } } ``` ``` $ qscalac sandbox/test.scala && scala-launch 2.12.0 Test $ scalac-launch 2.12.0 sandbox/test.scala && qscala Test ``` --- src/library/mima-filters/2.12.0.forwards.excludes | 1 + src/library/scala/reflect/ClassTag.scala | 15 ++++++++++++++- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/src/library/mima-filters/2.12.0.forwards.excludes b/src/library/mima-filters/2.12.0.forwards.excludes index 0b4cccf1eebf..d31109c69b7f 100644 --- a/src/library/mima-filters/2.12.0.forwards.excludes +++ b/src/library/mima-filters/2.12.0.forwards.excludes @@ -45,3 +45,4 @@ ProblemFilters.exclude[MissingFieldProblem]("scala.collection.immutable.Map#Map1 ProblemFilters.exclude[MissingFieldProblem]("scala.collection.immutable.Map#Map2.serialVersionUID") ProblemFilters.exclude[MissingFieldProblem]("scala.collection.immutable.Map#Map3.serialVersionUID") ProblemFilters.exclude[MissingFieldProblem]("scala.collection.immutable.Map#Map4.serialVersionUID") +ProblemFilters.exclude[DirectAbstractMethodProblem]("scala.collection.GenTraversableOnce.toList") \ No newline at end of file diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala index 4cb44a4f4045..4194ae0905a4 100644 --- a/src/library/scala/reflect/ClassTag.scala +++ b/src/library/scala/reflect/ClassTag.scala @@ -46,7 +46,20 @@ trait ClassTag[T] extends ClassManifestDeprecatedApis[T] with Equals with Serial def wrap: ClassTag[Array[T]] = ClassTag[Array[T]](arrayClass(runtimeClass)) /** Produces a new array with element type `T` and length `len` */ - override def newArray(len: Int): Array[T] + override def newArray(len: Int): Array[T] = { + runtimeClass match { + case java.lang.Byte.TYPE => new Array[Byte](len).asInstanceOf[Array[T]] + case java.lang.Short.TYPE => new Array[Short](len).asInstanceOf[Array[T]] + case java.lang.Character.TYPE => new Array[Char](len).asInstanceOf[Array[T]] + case java.lang.Integer.TYPE => new Array[Int](len).asInstanceOf[Array[T]] + case java.lang.Long.TYPE => new Array[Long](len).asInstanceOf[Array[T]] + case java.lang.Float.TYPE => new Array[Float](len).asInstanceOf[Array[T]] + case java.lang.Double.TYPE => new Array[Double](len).asInstanceOf[Array[T]] + case java.lang.Boolean.TYPE => new Array[Boolean](len).asInstanceOf[Array[T]] + case java.lang.Void.TYPE => new Array[Unit](len).asInstanceOf[Array[T]] + case _ => java.lang.reflect.Array.newInstance(runtimeClass, len).asInstanceOf[Array[T]] + } + } /** A ClassTag[T] can serve as an extractor that matches only objects of type T. 
* From aa7578210b74c3b051dbf49a9c3b5c2298d50f56 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Sat, 25 Aug 2018 10:44:56 -0400 Subject: [PATCH 1485/2793] Don't re-apply type maps to info of unchanged symbols The fast path in `mapOver` would avoid cloning symbols if the map had no effect on any of their infos. However, if a symbol with a changed info was found, it would use `cloneSymbolsAndModify` to apply itself to a fresh clone of those symbols, with the result that it would re-apply itself to the infos of symbols clone from symbols that were unchanged. This avoids that. In library/reflect/compiler, this avoids 51358 _direct_ repeated calls to `TypeMap#apply`. Since those may go off and do more work, it's more than that. --- .../scala/reflect/internal/tpe/TypeMaps.scala | 35 ++++++++++++------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index bf0220e168e3..ea14d7ba4747 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -208,22 +208,25 @@ private[internal] trait TypeMaps { /** Applies this map to the symbol's info, setting variance = Invariant * if necessary when the symbol is an alias. */ - private def applyToSymbolInfo(sym: Symbol): Type = { + private def applyToSymbolInfo(sym: Symbol, info: Type): Type = { if (trackVariance && !variance.isInvariant && sym.isAliasType) - withVariance(Invariant)(this(sym.info)) + withVariance(Invariant)(this(info)) else - this(sym.info) + this(info) } - /** Called by mapOver to determine whether the original symbols can - * be returned, or whether they must be cloned. + /** The index of the first symbol in `origSyms` which would have its info + * transformed by this type map. 
*/ - protected def noChangeToSymbols(origSyms: List[Symbol]): Boolean = { - @tailrec def loop(syms: List[Symbol]): Boolean = syms match { - case Nil => true - case x :: xs => (x.info eq applyToSymbolInfo(x)) && loop(xs) + protected def firstChangedSymbol(origSyms: List[Symbol]): Int = { + @tailrec def loop(i: Int, syms: List[Symbol]): Int = syms match { + case Nil => -1 + case x :: xs => + val info = x.info + if (applyToSymbolInfo(x, info) eq info) loop(i+1, xs) + else i } - loop(origSyms) + loop(0, origSyms) } /** Map this function over given scope */ @@ -236,10 +239,16 @@ private[internal] trait TypeMaps { /** Map this function over given list of symbols */ def mapOver(origSyms: List[Symbol]): List[Symbol] = { + val firstChange = firstChangedSymbol(origSyms) // fast path in case nothing changes due to map - if (noChangeToSymbols(origSyms)) origSyms - // map is not the identity --> do cloning properly - else cloneSymbolsAndModify(origSyms, TypeMap.this) + if (firstChange < 0) origSyms + else { + // map is not the identity --> do cloning properly + val cloned = cloneSymbols(origSyms) + // but we don't need to run the map again on the unchanged symbols + cloned.drop(firstChange).foreach(_ modifyInfo this) + cloned + } } def mapOver(annot: AnnotationInfo): AnnotationInfo = { From 228d21780d8ca5b5435894b1b07ef75914296074 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 30 Aug 2018 08:27:36 +1000 Subject: [PATCH 1486/2793] Make internal methods in TypeMap more private/final/inlinable --- src/reflect/scala/reflect/internal/tpe/TypeMaps.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index ea14d7ba4747..252f20f296a6 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -189,7 +189,7 @@ private[internal] trait TypeMaps { // throw new Error("mapOver inapplicable for " + tp); } - def withVariance[T](v: Variance)(body: => T): T = { + @inline final def withVariance[T](v: Variance)(body: => T): T = { val saved = variance variance = v try body finally variance = saved @@ -199,7 +199,7 @@ private[internal] trait TypeMaps { try body finally if (trackVariance) variance = variance.flip } - protected def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] = ( + protected final def mapOverArgs(args: List[Type], tparams: List[Symbol]): List[Type] = ( if (trackVariance) map2Conserve(args, tparams)((arg, tparam) => withVariance(variance * tparam.variance)(this(arg))) else @@ -218,13 +218,13 @@ private[internal] trait TypeMaps { /** The index of the first symbol in `origSyms` which would have its info * transformed by this type map. 
*/ - protected def firstChangedSymbol(origSyms: List[Symbol]): Int = { + private def firstChangedSymbol(origSyms: List[Symbol]): Int = { @tailrec def loop(i: Int, syms: List[Symbol]): Int = syms match { - case Nil => -1 case x :: xs => val info = x.info if (applyToSymbolInfo(x, info) eq info) loop(i+1, xs) else i + case Nil => -1 } loop(0, origSyms) } From e2f7ddc586aeea75bd6c07688482f51506a8f2f7 Mon Sep 17 00:00:00 2001 From: Darcy Shen Date: Fri, 31 Aug 2018 11:31:32 +0800 Subject: [PATCH 1487/2793] fix for equality of WrappedArray.ofRef --- .../collection/mutable/WrappedArray.scala | 2 +- .../collection/mutable/WrappedArrayTest.scala | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 test/junit/scala/collection/mutable/WrappedArrayTest.scala diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala index 5b6ec970b7d2..5adf334553cc 100644 --- a/src/library/scala/collection/mutable/WrappedArray.scala +++ b/src/library/scala/collection/mutable/WrappedArray.scala @@ -130,7 +130,7 @@ object WrappedArray { def update(index: Int, elem: T) { array(index) = elem } override def hashCode = MurmurHash3.wrappedArrayHash(array) override def equals(that: Any) = that match { - case that: ofRef[_] => Arrays.equals(array.asInstanceOf[Array[AnyRef]], that.array.asInstanceOf[Array[AnyRef]]) + case that: ofRef[_] => that.array.canEqual(array) && array.sameElements(that.array) case _ => super.equals(that) } } diff --git a/test/junit/scala/collection/mutable/WrappedArrayTest.scala b/test/junit/scala/collection/mutable/WrappedArrayTest.scala new file mode 100644 index 000000000000..0786b3f1c368 --- /dev/null +++ b/test/junit/scala/collection/mutable/WrappedArrayTest.scala @@ -0,0 +1,19 @@ +package scala.collection.mutable + +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 +import org.junit.Test + +@RunWith(classOf[JUnit4]) +class WrappedArrayTest { + @Test + def ofRefEquality(): Unit = { + def assertOfRef(left: Array[AnyRef], right: Array[AnyRef]): Unit = { + assert(new WrappedArray.ofRef(left) == new WrappedArray.ofRef(right)) + } + assertOfRef(Array(Int.box(65)), Array(Double.box(65.0))) + assertOfRef(Array(Double.box(65.0)), Array(Int.box(65))) + assertOfRef(Array(Int.box(65)), Array(Char.box('A'))) + assertOfRef(Array(Char.box('A')), Array(Int.box(65))) + } +} From 2537c32bc97ad5a13dfb8b063de4857a0f480fb0 Mon Sep 17 00:00:00 2001 From: Darcy Shen Date: Fri, 31 Aug 2018 14:36:52 +0800 Subject: [PATCH 1488/2793] remove the always true canEqual --- src/library/scala/collection/mutable/WrappedArray.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala index 5adf334553cc..ad4cab3e7400 100644 --- a/src/library/scala/collection/mutable/WrappedArray.scala +++ b/src/library/scala/collection/mutable/WrappedArray.scala @@ -130,7 +130,7 @@ object WrappedArray { def update(index: Int, elem: T) { array(index) = elem } override def hashCode = MurmurHash3.wrappedArrayHash(array) override def equals(that: Any) = that match { - case that: ofRef[_] => that.array.canEqual(array) && array.sameElements(that.array) + case that: ofRef[_] => array.sameElements(that.array) case _ => super.equals(that) } } From 6ea993a090a34e1c5c53ea38b2c416ae4d1c5d36 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 4 Sep 2018 13:59:44 +1000 Subject: [PATCH 1489/2793] [mergeforward] Avoid 
leaking constraints between typechecking implicit candidates - Predicate the backport of the optimization to avoid expensive type error creation to -Xsource:2.13, because side effects in explainVariance can be witnessed in type variables in the pt of an in-progress implicit search. - Fix this side-effect leakage with judicious use of undoLog around implicit candidate type checking, again under -Xsource:2.13 for bug compatibility. I haven't managed to get a standalone test case, I've been using Specs2 with: ``` import org.specs2.matcher.{Matcher, ValueCheck, OptionLikeCheckedMatcher, OptionLikeMatcher} trait Disj[A, B] class Repro { def returnValue[T](check: ValueCheck[T]): Any = null implicit def matcherIsValueCheck[T](m: Matcher[T]): ValueCheck[T] = ??? def overloaded[T](t: ValueCheck[T]): LeftDisjunctionCheckedMatcher[T] = null def overloaded[T]: OptionLikeMatcher[({type l[a]= Disj[a, _]})#l, T, T] = null trait LeftDisjunctionCheckedMatcher[T] extends OptionLikeCheckedMatcher[({type l[a]=Disj[a, _]})#l, T, T] returnValue(overloaded(null: Matcher[AnyRef])) } ``` Which fails to compile with the bug fix: ``` /Users/jz/code/specs2/scalaz/shared/src/main/scala/org/specs2/matcher/TaskMatchers.scala:13: error: type mismatch returnValue(overloaded(null: Matcher[AnyRef])) ^ ``` But used to sneak through compilation before: ``` Repro#8130.this.returnValue#15559[Disj#8127[AnyRef#1934, _]](Repro#8130.this.matcherIsValueCheck#15562[Disj#8127[AnyRef#1934, _]](Repro#8130.this.overloaded#15565[AnyRef#1934](Repro#8130.this.matcherIsValueCheck#15562[AnyRef#1934]((null: org#15.specs2#6588.matcher#6594.Matcher#7354[AnyRef#1934]))))) } } warning: there was one feature warning; re-run with -feature for details one warning found ``` --- src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 5 ++++- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 3 +++ 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 7052edf8082a..7aa71cfda051 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -106,7 +106,10 @@ trait ContextErrors { def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) } def typeErrorMsg(context: Context, found: Type, req: Type) = - if (context.openImplicits.nonEmpty && !settings.XlogImplicits.value) "type mismatch" + if (context.openImplicits.nonEmpty && !settings.XlogImplicits.value && settings.isScala213) + // OPT: avoid error string creation for errors that won't see the light of day, but predicate + // this on -Xsource:2.13 for bug compatibility with https://github.com/scala/scala/pull/7147#issuecomment-418233611 + "type mismatch" else "type mismatch" + foundReqMsg(found, req) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index de801a3a91aa..858f369eb22a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -969,7 +969,10 @@ trait Implicits { } ) + val mark = undoLog.log val typedFirstPending = typedImplicit(firstPending, ptChecked = true, isLocalToCallsite) + if (typedFirstPending.isFailure && settings.isScala213) + undoLog.undoTo(mark) // Don't accumulate constraints from typechecking or type error message creation for failed candidates // Pass 
the errors to `DivergentImplicitRecovery` so that it can note // the first `DivergentImplicitTypeError` that is being propagated From 720b8a6efbced706a9b062f9b952d0866e474ced Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 20 Aug 2018 12:32:58 +1000 Subject: [PATCH 1490/2793] Avoid temporary strings during classpath lookup Index the JAR metadata by dotted package name to avoid converting a dotted name to path on each lookup. (cherry picked from commit 6b3f3203b5696bc3b21df6f26bd1c394e84641d6) --- .../nsc/classpath/ZipArchiveFileLookup.scala | 3 +- src/reflect/scala/reflect/io/ZipArchive.scala | 36 +++++++++++++++---- 2 files changed, 31 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala index a433eacaae55..8ef36d1a5576 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala @@ -68,8 +68,7 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPa } private def findDirEntry(pkg: String): Option[archive.DirEntry] = { - val dirName = FileUtils.dirPath(pkg) + "/" - archive.allDirs.get(dirName) + archive.allDirsByDottedName.get(pkg) } protected def createFileEntry(file: FileZipArchive#Entry): FileEntryType diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 2ccb765d7899..5362f7adf436 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -62,6 +62,25 @@ object ZipArchive { if (front) path.substring(0, idx + 1) else path.substring(idx + 1) } + def pathToDotted(path: String): String = { + if (path == "/") "" + else { + val slashEnd = path.endsWith("/") + val len = path.length - (if (slashEnd) 1 else 0) + val result = new Array[Char](len) + var i = 0 + while (i < len) { + val char = path.charAt(i) + result(i) = if (char == '/') '.' else char + i += 1 + } + new String(result) + } + } + def dottedToPath(dotted: String): String = { + val sb = new java.lang.StringBuilder(dotted.length) + dotted.replace('.', '/') + "/" + } } import ZipArchive._ /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ @@ -101,7 +120,7 @@ abstract class ZipArchive(override val file: JFile, release: Option[String]) ext } } - private def ensureDir(dirs: mutable.Map[String, DirEntry], path: String, zipEntry: ZipEntry): DirEntry = + private def ensureDir(dirs: mutable.Map[String, DirEntry], path: String, zipEntry: ZipEntry): DirEntry = { //OPT inlined from getOrElseUpdate; saves ~50K closures on test run. 
// was: // dirs.getOrElseUpdate(path, { @@ -110,15 +129,17 @@ abstract class ZipArchive(override val file: JFile, release: Option[String]) ext // parent.entries(baseName(path)) = dir // dir // }) - dirs get path match { + val dotted = pathToDotted(path) + dirs get dotted match { case Some(v) => v case None => val parent = ensureDir(dirs, dirName(path), null) - val dir = new DirEntry(path) + val dir = new DirEntry(path) parent.entries(baseName(path)) = dir - dirs(path) = dir + dirs(dotted) = dir dir } + } protected def getDir(dirs: mutable.Map[String, DirEntry], entry: ZipEntry): DirEntry = { if (entry.isDirectory) ensureDir(dirs, entry.getName, entry) @@ -171,9 +192,9 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch override def sizeOption: Option[Int] = Some(zipEntry.getSize.toInt) } - lazy val (root, allDirs) = { + lazy val (root, allDirsByDottedName) = { val root = new DirEntry("/") - val dirs = mutable.HashMap[String, DirEntry]("/" -> root) + val dirs = mutable.HashMap[String, DirEntry]("" -> root) val zipFile = openZipFile() val enum = zipFile.entries() @@ -206,6 +227,9 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch (root, dirs) } + @deprecated("Use allDirsByDottedName after converting keys from relative paths to dotted names", "2.13") + lazy val allDirs: mutable.HashMap[String, DirEntry] = allDirsByDottedName.map { case (k, v) => (dottedToPath(k), v) } + def iterator: Iterator[Entry] = root.iterator def name = file.getName From 178c8b4ec5f3864580d0307537ce32cc7eea0fcf Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 29 Aug 2018 20:48:22 +1000 Subject: [PATCH 1491/2793] Use dotted keys in dirs map in other use sites of getDir (cherry picked from commit 11230a8f247277e386ff593287a8ca073e76cd48) --- src/reflect/scala/reflect/io/ZipArchive.scala | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index 5362f7adf436..a2b853e2c8f2 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -77,10 +77,6 @@ object ZipArchive { new String(result) } } - def dottedToPath(dotted: String): String = { - val sb = new java.lang.StringBuilder(dotted.length) - dotted.replace('.', '/') + "/" - } } import ZipArchive._ /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ @@ -228,7 +224,13 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch } @deprecated("Use allDirsByDottedName after converting keys from relative paths to dotted names", "2.13") - lazy val allDirs: mutable.HashMap[String, DirEntry] = allDirsByDottedName.map { case (k, v) => (dottedToPath(k), v) } + lazy val allDirs: mutable.HashMap[String, DirEntry] = { + def dottedToPath(dotted: String): String = { + val sb = new java.lang.StringBuilder(dotted.length) + dotted.replace('.', '/') + "/" + } + allDirsByDottedName.map { case (k, v) => (dottedToPath(k), v) } + } def iterator: Iterator[Entry] = root.iterator @@ -249,7 +251,7 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch final class URLZipArchive(val url: URL) extends ZipArchive(null) { def iterator: Iterator[Entry] = { val root = new DirEntry("/") - val dirs = mutable.HashMap[String, DirEntry]("/" -> root) + val dirs = mutable.HashMap[String, DirEntry]("" -> root) val in = new ZipInputStream(new 
ByteArrayInputStream(Streamable.bytes(input))) @tailrec def loop() { @@ -317,7 +319,7 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) { final class ManifestResources(val url: URL) extends ZipArchive(null) { def iterator = { val root = new DirEntry("/") - val dirs = mutable.HashMap[String, DirEntry]("/" -> root) + val dirs = mutable.HashMap[String, DirEntry]("" -> root) val manifest = new Manifest(input) val iter = manifest.getEntries().keySet().iterator().asScala.filter(_.endsWith(".class")).map(new ZipEntry(_)) From 30f599f6067ec31baf7a75f87849419e37f11a18 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 23 Aug 2018 12:46:21 +1000 Subject: [PATCH 1492/2793] Optimize check for enabled statistics by specializing machinery to boolean flags Rather than returning a `ConstantCallSite` of an object that wraps a true/false value, just directly put the boolean in the callsite. AFAICT this eliminates a cast and a pointer dereference, even in C2 optimized code. (cherry picked from commit 12ae86b6a92fdcf09db83de15ac18987c926c43f) --- .../internal/util/AlmostFinalValue.java | 36 +++++++------- .../internal/util/StatisticsStatics.java | 48 +++++++------------ 2 files changed, 36 insertions(+), 48 deletions(-) diff --git a/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java b/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java index ec4bf28f0b48..6001c6fb73bd 100644 --- a/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java +++ b/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java @@ -22,46 +22,46 @@ * we cannot do that if we make `Statistics` an object extending `MutableCallSite` * in Scala. We instead rely on the Java implementation that uses a boxed representation. */ -public class AlmostFinalValue { - private final AlmostFinalCallSite callsite = - new AlmostFinalCallSite<>(this); +public class AlmostFinalValue { + private final AlmostFinalCallSite callsite = + new AlmostFinalCallSite(this); - protected V initialValue() { - return null; + protected boolean initialValue() { + return false; } public MethodHandle createGetter() { return callsite.dynamicInvoker(); } - public void setValue(V value) { + public void setValue(boolean value) { callsite.setValue(value); } - private static class AlmostFinalCallSite extends MutableCallSite { - private Object value; + private static class AlmostFinalCallSite extends MutableCallSite { + private Boolean value; private SwitchPoint switchPoint; - private final AlmostFinalValue volatileFinalValue; + private final AlmostFinalValue volatileFinalValue; private final MethodHandle fallback; private final Object lock; - private static final Object NONE = new Object(); + private static final Boolean NONE = null; private static final MethodHandle FALLBACK; static { try { FALLBACK = MethodHandles.lookup().findVirtual(AlmostFinalCallSite.class, "fallback", - MethodType.methodType(Object.class)); + MethodType.methodType(Boolean.TYPE)); } catch (NoSuchMethodException|IllegalAccessException e) { throw new AssertionError(e.getMessage(), e); } } - AlmostFinalCallSite(AlmostFinalValue volatileFinalValue) { - super(MethodType.methodType(Object.class)); + AlmostFinalCallSite(AlmostFinalValue volatileFinalValue) { + super(MethodType.methodType(Boolean.TYPE)); Object lock = new Object(); MethodHandle fallback = FALLBACK.bindTo(this); synchronized(lock) { - value = NONE; + value = null; switchPoint = new SwitchPoint(); setTarget(fallback); } @@ -70,19 +70,19 @@ private static class AlmostFinalCallSite extends MutableCallSite 
{ this.fallback = fallback; } - Object fallback() { + boolean fallback() { synchronized(lock) { - Object value = this.value; + Boolean value = this.value; if (value == NONE) { value = volatileFinalValue.initialValue(); } - MethodHandle target = switchPoint.guardWithTest(MethodHandles.constant(Object.class, value), fallback); + MethodHandle target = switchPoint.guardWithTest(MethodHandles.constant(Boolean.TYPE, value), fallback); setTarget(target); return value; } } - void setValue(V value) { + void setValue(boolean value) { synchronized(lock) { SwitchPoint switchPoint = this.switchPoint; this.value = value; diff --git a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java index 3670af20588c..77b1a5a0deaa 100644 --- a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java +++ b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java @@ -9,59 +9,47 @@ * Its implementation delegates to {@link scala.reflect.internal.util.AlmostFinalValue}, * which helps performance (see docs to find out why). */ -public final class StatisticsStatics extends BooleanContainer { - public StatisticsStatics(boolean value) { - super(value); - } - - private static final AlmostFinalValue COLD_STATS = new AlmostFinalValue() { +public final class StatisticsStatics { + private static final AlmostFinalValue COLD_STATS = new AlmostFinalValue() { @Override - protected BooleanContainer initialValue() { - return new FalseContainer(); + protected boolean initialValue() { + return false; } }; - private static final AlmostFinalValue HOT_STATS = new AlmostFinalValue() { + private static final AlmostFinalValue HOT_STATS = new AlmostFinalValue() { @Override - protected BooleanContainer initialValue() { - return new FalseContainer(); + protected boolean initialValue() { + return false; } }; private static final MethodHandle COLD_STATS_GETTER = COLD_STATS.createGetter(); private static final MethodHandle HOT_STATS_GETTER = HOT_STATS.createGetter(); - public static boolean areSomeColdStatsEnabled() { - try { - return ((BooleanContainer)(Object) COLD_STATS_GETTER.invokeExact()).isEnabledNow(); - } catch (Throwable e) { - throw new AssertionError(e.getMessage(), e); - } + public static boolean areSomeColdStatsEnabled() throws Throwable { + return (boolean) COLD_STATS_GETTER.invokeExact(); } - public static boolean areSomeHotStatsEnabled() { - try { - return ((BooleanContainer)(Object) HOT_STATS_GETTER.invokeExact()).isEnabledNow(); - } catch (Throwable e) { - throw new AssertionError(e.getMessage(), e); - } + public static boolean areSomeHotStatsEnabled() throws Throwable { + return (boolean) HOT_STATS_GETTER.invokeExact(); } - public static void enableColdStats() { + public static void enableColdStats() throws Throwable { if (!areSomeColdStatsEnabled()) - COLD_STATS.setValue(new TrueContainer()); + COLD_STATS.setValue(true); } public static void disableColdStats() { - COLD_STATS.setValue(new FalseContainer()); + COLD_STATS.setValue(false); } - public static void enableHotStats() { + public static void enableHotStats() throws Throwable { if (!areSomeHotStatsEnabled()) - HOT_STATS.setValue(new TrueContainer()); + HOT_STATS.setValue(true); } public static void disableHotStats() { - HOT_STATS.setValue(new FalseContainer()); + HOT_STATS.setValue(false); } -} \ No newline at end of file +} From 089ed4b5bcf71c9a41875d8e037ea7d5db81abf1 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 28 Aug 2018 13:27:05 +1000 Subject: [PATCH 1493/2793] Optimize 
TypeRef.equals with fast paths for eq elements (cherry picked from commit 519a320622c7c255137e5925e17a7715bafb2176) --- src/reflect/scala/reflect/internal/Types.scala | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index e42cc4b572dc..47a6cfcf5994 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -7,7 +7,9 @@ package scala package reflect package internal -import scala.collection.{ mutable, immutable } +import java.util.Objects + +import scala.collection.{immutable, mutable} import scala.ref.WeakReference import mutable.ListBuffer import Flags._ @@ -2140,9 +2142,10 @@ trait Types } //OPT specialize equals override final def equals(other: Any): Boolean = { - other match { + if (this eq other.asInstanceOf[AnyRef]) true + else other match { case otherTypeRef: TypeRef => - pre.equals(otherTypeRef.pre) && sym.eq(otherTypeRef.sym) && sameElementsEquals(args, otherTypeRef.args) + Objects.equals(pre, otherTypeRef.pre) && sym.eq(otherTypeRef.sym) && sameElementsEquals(args, otherTypeRef.args) case _ => false } } From 5d34c2dfab4ac086079cc5b312187a81a031b61f Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 5 Sep 2018 10:36:29 +1000 Subject: [PATCH 1494/2793] Address binary incompatibility Refactor to make one new method private, and make the other new method package private with a whitelist exclusion on the grounds that scala-reflect and scala-compiler JARs are expected to be identically versioned. --- .../mima-filters/2.12.0.forwards.excludes | 3 +- src/reflect/scala/reflect/io/ZipArchive.scala | 44 +++++++++++-------- 2 files changed, 27 insertions(+), 20 deletions(-) diff --git a/src/reflect/mima-filters/2.12.0.forwards.excludes b/src/reflect/mima-filters/2.12.0.forwards.excludes index d39f24039a81..ee7ce7fb19e8 100644 --- a/src/reflect/mima-filters/2.12.0.forwards.excludes +++ b/src/reflect/mima-filters/2.12.0.forwards.excludes @@ -23,4 +23,5 @@ ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.Settin ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.FileZipArchive.this") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.this") -ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.getDir") \ No newline at end of file +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.getDir") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.FileZipArchive.allDirsByDottedName") \ No newline at end of file diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index a2b853e2c8f2..a7f74724491b 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -62,21 +62,6 @@ object ZipArchive { if (front) path.substring(0, idx + 1) else path.substring(idx + 1) } - def pathToDotted(path: String): String = { - if (path == "/") "" - else { - val slashEnd = path.endsWith("/") - val len = path.length - (if (slashEnd) 1 else 0) - val result = new Array[Char](len) - var i = 0 - while (i < len) { - val char = path.charAt(i) - result(i) = if (char == '/') '.' 
else char - i += 1 - } - new String(result) - } - } } import ZipArchive._ /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ @@ -116,6 +101,22 @@ abstract class ZipArchive(override val file: JFile, release: Option[String]) ext } } + private def pathToDotted(path: String): String = { + if (path == "/") "" + else { + val slashEnd = path.endsWith("/") + val len = path.length - (if (slashEnd) 1 else 0) + val result = new Array[Char](len) + var i = 0 + while (i < len) { + val char = path.charAt(i) + result(i) = if (char == '/') '.' else char + i += 1 + } + new String(result) + } + } + private def ensureDir(dirs: mutable.Map[String, DirEntry], path: String, zipEntry: ZipEntry): DirEntry = { //OPT inlined from getOrElseUpdate; saves ~50K closures on test run. // was: @@ -188,9 +189,14 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch override def sizeOption: Option[Int] = Some(zipEntry.getSize.toInt) } - lazy val (root, allDirsByDottedName) = { + private[scala] def allDirsByDottedName: collection.Map[String, DirEntry] = { + root // force + dirs + } + private[this] val dirs = mutable.HashMap[String, DirEntry]() + lazy val root: DirEntry = { val root = new DirEntry("/") - val dirs = mutable.HashMap[String, DirEntry]("" -> root) + dirs("") = root val zipFile = openZipFile() val enum = zipFile.entries() @@ -220,7 +226,7 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch } finally { if (ZipArchive.closeZipFile) zipFile.close() } - (root, dirs) + root } @deprecated("Use allDirsByDottedName after converting keys from relative paths to dotted names", "2.13") @@ -229,7 +235,7 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch val sb = new java.lang.StringBuilder(dotted.length) dotted.replace('.', '/') + "/" } - allDirsByDottedName.map { case (k, v) => (dottedToPath(k), v) } + dirs.map { case (k, v) => (dottedToPath(k), v) } } def iterator: Iterator[Entry] = root.iterator From 8bfc74557d68fd9aa94c807fd635e04189c69159 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Wed, 5 Sep 2018 12:15:45 +0200 Subject: [PATCH 1495/2793] Optimize non-sensical comparison check --- .../scala/tools/nsc/typechecker/RefChecks.scala | 12 ++++++++++-- src/reflect/scala/reflect/internal/Symbols.scala | 8 +++----- 2 files changed, 13 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index dd4699cef988..d817e0612996 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1042,6 +1042,14 @@ abstract class RefChecks extends Transform { && !isCaseEquals ) + def isEffectivelyFinalDeep(sym: Symbol): Boolean = ( + sym.isEffectivelyFinal + // If a parent of an intersection is final, the resulting type must effectively be final. + // (Any subclass of the refinement would have to be a subclass of that final parent.) + // OPT: this condition is not included in the standard isEffectivelyFinal check, as it's expensive + || sym.isRefinementClass && sym.info.parents.exists { _.typeSymbol.isEffectivelyFinal } + ) + // Have we already determined that the comparison is non-sensible? I mean, non-sensical? 
var isNonSensible = false @@ -1091,9 +1099,9 @@ abstract class RefChecks extends Transform { else if (isWarnable && !isCaseEquals) { if (isNew(qual)) // new X == y nonSensiblyNew() - else if (isNew(other) && (receiver.isEffectivelyFinal || isReferenceOp)) // object X ; X == new Y + else if (isNew(other) && (isEffectivelyFinalDeep(receiver) || isReferenceOp)) // object X ; X == new Y nonSensiblyNew() - else if (actual.isEffectivelyFinal && receiver.isEffectivelyFinal && !haveSubclassRelationship) { // object X, Y; X == Y + else if (isEffectivelyFinalDeep(actual) && isEffectivelyFinalDeep(receiver) && !haveSubclassRelationship) { // object X, Y; X == Y if (isEitherNullable) nonSensible("non-null ", false) else diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index a516f49e605d..2817d864a01b 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1015,11 +1015,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => (this hasFlag FINAL | PACKAGE) || isModuleOrModuleClass && (isTopLevel || !settings.overrideObjects) || isTerm && (isPrivate || isLocalToBlock || (hasAllFlags(notPRIVATE | METHOD) && !hasFlag(DEFERRED))) - || isClass && !isRefinementClass && originalOwner.isTerm && children.isEmpty // we track known subclasses of term-owned classes, use that infer finality - // don't look at owner for refinement classes (it's basically arbitrary) -- instead, - // it suffices for one parent of an intersection to be final, for the resulting type to be final - // any subclass of the refinement would have to be a subclass of that final parent, which is not allowed - || isRefinementClass && info.parents.exists { _.typeSymbol.isEffectivelyFinal } + // We track known subclasses of term-owned classes, use that to infer finality. + // However, don't look at owner for refinement classes (it's basically arbitrary). 
+ || isClass && !isRefinementClass && originalOwner.isTerm && children.isEmpty ) /** Is this symbol effectively final or a concrete term member of sealed class whose children do not override it */ final def isEffectivelyFinalOrNotOverridden: Boolean = isEffectivelyFinal || (isTerm && !isDeferred && isNotOverridden) From 23947cbfcab5b12fa36c7cd79bb36411749a6594 Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Wed, 20 Jun 2018 23:48:53 +0100 Subject: [PATCH 1496/2793] Optimise equals checking for Vector --- src/library/scala/collection/GenSeqLike.scala | 2 +- .../scala/collection/IterableLike.scala | 32 +++++++++++++++---- 2 files changed, 26 insertions(+), 8 deletions(-) diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala index 405d8d7e57ea..6828749f4b8c 100644 --- a/src/library/scala/collection/GenSeqLike.scala +++ b/src/library/scala/collection/GenSeqLike.scala @@ -474,7 +474,7 @@ trait GenSeqLike[+A, +Repr] extends Any with GenIterableLike[A, Repr] with Equal * this sequence in the same order, `false` otherwise */ override def equals(that: Any): Boolean = that match { - case that: GenSeq[_] => (that canEqual this) && (this sameElements that) + case that: GenSeq[_] => (that eq this.asInstanceOf[AnyRef]) || (that canEqual this) && (this sameElements that) case _ => false } diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala index 419206c226bb..8b4cd6634242 100644 --- a/src/library/scala/collection/IterableLike.scala +++ b/src/library/scala/collection/IterableLike.scala @@ -288,13 +288,31 @@ self => } def sameElements[B >: A](that: GenIterable[B]): Boolean = { - val these = this.iterator - val those = that.iterator - while (these.hasNext && those.hasNext) - if (these.next != those.next) - return false - - !these.hasNext && !those.hasNext + that match { + case thatVector: Vector[_] if this.isInstanceOf[Vector[_]] => + val thisVector = this.asInstanceOf[Vector[_]] + (thisVector eq thatVector) || { + var equal = thisVector.length == thatVector.length + if (equal) { + val length = thatVector.length + var index = 0 + while (index < length && equal) { + equal = thisVector(index) == thatVector(index) + index += 1 + } + } + equal + } + + case _ => + val these = this.iterator + val those = that.iterator + while (these.hasNext && those.hasNext) + if (these.next != those.next) + return false + + !these.hasNext && !those.hasNext + } } override /*TraversableLike*/ def toStream: Stream[A] = iterator.toStream From 562a806349c419fdb8a569a29fb96a5042d761b4 Mon Sep 17 00:00:00 2001 From: Darcy Shen Date: Fri, 7 Sep 2018 00:04:40 +0800 Subject: [PATCH 1497/2793] remove the equals method, revert ofRef part of #5551 --- src/library/scala/collection/mutable/WrappedArray.scala | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala index ad4cab3e7400..0bfc1ab5ae1b 100644 --- a/src/library/scala/collection/mutable/WrappedArray.scala +++ b/src/library/scala/collection/mutable/WrappedArray.scala @@ -129,10 +129,6 @@ object WrappedArray { def apply(index: Int): T = array(index).asInstanceOf[T] def update(index: Int, elem: T) { array(index) = elem } override def hashCode = MurmurHash3.wrappedArrayHash(array) - override def equals(that: Any) = that match { - case that: ofRef[_] => array.sameElements(that.array) - case _ => super.equals(that) - } } final class ofByte(val array: Array[Byte]) extends 
WrappedArray[Byte] with Serializable { From f9f2a119f6525ee5138b54be9a77e0d43140b5f6 Mon Sep 17 00:00:00 2001 From: xuwei-k <6b656e6a69@gmail.com> Date: Sun, 9 Sep 2018 09:21:14 +0900 Subject: [PATCH 1498/2793] update "Scala Language Specification" URL --- src/library/scala/Array.scala | 2 +- src/reflect/scala/reflect/api/Constants.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala index 0e51cd98bba3..d9aa6b2ad6ba 100644 --- a/src/library/scala/Array.scala +++ b/src/library/scala/Array.scala @@ -482,7 +482,7 @@ object Array extends FallbackArrayBuilding { * * @author Martin Odersky * @since 1.0 - * @see [[http://www.scala-lang.org/files/archive/spec/2.11/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.) + * @see [[http://www.scala-lang.org/files/archive/spec/2.12/ Scala Language Specification]], for in-depth information on the transformations the Scala compiler makes on Arrays (Sections 6.6 and 6.15 respectively.) * @see [[http://docs.scala-lang.org/sips/completed/scala-2-8-arrays.html "Scala 2.8 Arrays"]] the Scala Improvement Document detailing arrays since Scala 2.8. * @see [[http://docs.scala-lang.org/overviews/collections/arrays.html "The Scala 2.8 Collections' API"]] section on `Array` by Martin Odersky for more information. * @hideImplicitConversion scala.Predef.booleanArrayOps diff --git a/src/reflect/scala/reflect/api/Constants.scala b/src/reflect/scala/reflect/api/Constants.scala index d0afd2d4f9ff..776283f67068 100644 --- a/src/reflect/scala/reflect/api/Constants.scala +++ b/src/reflect/scala/reflect/api/Constants.scala @@ -95,7 +95,7 @@ trait Constants { * broken down or evaluated, such as "true", "0", "classOf[List]". Such values become parts of the Scala abstract * syntax tree representing the program. The constants * correspond to section 6.24 "Constant Expressions" of the - * [[http://www.scala-lang.org/files/archive/spec/2.11/ Scala Language Specification]]. + * [[http://www.scala-lang.org/files/archive/spec/2.12/ Scala Language Specification]]. * * Such constants are used to represent literals in abstract syntax trees (the [[scala.reflect.api.Trees#Literal]] node) * and literal arguments for Java class file annotations (the [[scala.reflect.api.Annotations#LiteralArgument]] class). From 0a8e00cb1872e1c032c5f57a447743d27790ba2e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 10 Sep 2018 17:11:12 +1000 Subject: [PATCH 1499/2793] [mergeforward] Make nested implicit type error suppression unconditional Similarly, make the type constrain undo bugfix unconditional. This is known to break specs2, I'd like to see if that's all we find. 
https://github.com/scala/scala/pull/7147#issuecomment-418233611 --- src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 5 ++--- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 7aa71cfda051..66763028f686 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -106,9 +106,8 @@ trait ContextErrors { def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) } def typeErrorMsg(context: Context, found: Type, req: Type) = - if (context.openImplicits.nonEmpty && !settings.XlogImplicits.value && settings.isScala213) - // OPT: avoid error string creation for errors that won't see the light of day, but predicate - // this on -Xsource:2.13 for bug compatibility with https://github.com/scala/scala/pull/7147#issuecomment-418233611 + if (context.openImplicits.nonEmpty && !settings.XlogImplicits.value) + // OPT: avoid error string creation for errors that won't see the light of day "type mismatch" else "type mismatch" + foundReqMsg(found, req) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 858f369eb22a..b6f6f6b67f36 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -971,7 +971,7 @@ trait Implicits { val mark = undoLog.log val typedFirstPending = typedImplicit(firstPending, ptChecked = true, isLocalToCallsite) - if (typedFirstPending.isFailure && settings.isScala213) + if (typedFirstPending.isFailure) undoLog.undoTo(mark) // Don't accumulate constraints from typechecking or type error message creation for failed candidates // Pass the errors to `DivergentImplicitRecovery` so that it can note From 17346b967f3ac6acd5de32e6bb076c845590b0f0 Mon Sep 17 00:00:00 2001 From: Dan Skells Date: Tue, 3 Apr 2018 00:23:36 +0100 Subject: [PATCH 1500/2793] Avoid double evalution of predicate in Scope.filter While still conserving the original scope when filtering is an identity. 
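In isolation the pattern looks roughly like the sketch below; the object and method names are illustrative only, not part of the patch, which applies the same idea to `Scope` via `sameLength` and `newScopeWith`:
```
object FilterConserve {
  // Evaluate the predicate exactly once per element; return the original
  // list unchanged when filtering removed nothing (an identity filter).
  def filterConserve[A](xs: List[A])(p: A => Boolean): List[A] = {
    val filtered = xs.filter(p)
    if (filtered.length == xs.length) xs else filtered
  }

  def main(args: Array[String]): Unit = {
    val syms = List("foo", "bar", "baz")
    assert(filterConserve(syms)(_ => true) eq syms)                     // conserved as-is
    assert(filterConserve(syms)(_.startsWith("b")) == List("bar", "baz"))
  }
}
```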
Added a TODO to show how to avoid a second iteration in 2.13.x --- .../scala/reflect/internal/Scopes.scala | 21 ++++++++++++------- 1 file changed, 13 insertions(+), 8 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 8aa9a6d41e75..f717eddb469d 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -407,14 +407,19 @@ trait Scopes extends api.Scopes { self: SymbolTable => override def foreach[U](p: Symbol => U): Unit = toList foreach p - override def filterNot(p: Symbol => Boolean): Scope = ( - if (toList exists p) newScopeWith(toList filterNot p: _*) - else this - ) - override def filter(p: Symbol => Boolean): Scope = ( - if (toList forall p) this - else newScopeWith(toList filter p: _*) - ) + // TODO in 2.13.x, s/sameLength(result, filtered)/result eq filtered/, taking advantage of + // the new conservation in List.filter/filterNot + override def filterNot(p: Symbol => Boolean): Scope = { + val result = toList + val filtered = result.filterNot(p) + if (sameLength(result, filtered)) this else newScopeWith(filtered: _*) + } + override def filter(p: Symbol => Boolean): Scope = { + val result = toList + val filtered = result.filter(p) + if (sameLength(result, filtered)) this else newScopeWith(filtered: _*) + } + @deprecated("use `toList.reverse` instead", "2.10.0") // Used in sbt 0.12.4 def reverse: List[Symbol] = toList.reverse From 5a72d7b7916977d2238df4a163968e3ed8482ee2 Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Thu, 16 Nov 2017 09:54:40 +0000 Subject: [PATCH 1501/2793] Support GitHub Flavored Markdown version of tables in Scaladoc. Based on GitHub Flavored Markdown Spec, https://github.github.com/gfm/#tables-extension- Version 0.28-gfm (2017-08-01) A table is a block element consisting of, * A header row * A delimiter row separating the header from the data * Zero or more data rows Restrictions, Rows must begin and end with pipe symbols A blank line required after table Limitations, Escaping of pipe symbols is not yet supported Inline markdown can be used in header and data cells, block markdown cannot be used. Example, /** * |Nibbles|Main|Desert| * |:--:|:---:|----| * |Bread|Yak|Vodka| * |Figs|Cheese on toast^three ways^|Coffee| */ trait RepastOptions The accepted markdown is intended to be a strict subset of all possible GHFM tables. 
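As an aside, the alignment handling carried by the delimiter row can be sketched independently of the `WikiParser` plumbing added below. The object, names, and fallback choice here are illustrative only, using regexes equivalent to the ones in the patch:
```
object DelimiterRowSketch {
  sealed trait Align
  case object AlignLeft   extends Align
  case object AlignCenter extends Align
  case object AlignRight  extends Align

  private val left   = "^:?-+$".r   // ---  or  :---
  private val center = "^:-+:$".r   // :---:
  private val right  = "^-+:$".r    // ---:

  // Map a delimiter row such as "|:---|:---:|---:|" to per-column alignments.
  def alignments(delimiterRow: String): List[Align] =
    delimiterRow.split('|').toList.map(_.trim).filter(_.nonEmpty).map {
      case center() => AlignCenter
      case right()  => AlignRight
      case left()   => AlignLeft
      case _        => AlignRight   // the real parser warns and picks a default here
    }

  def main(args: Array[String]): Unit =
    assert(alignments("|:---|:---:|---:|") == List(AlignLeft, AlignCenter, AlignRight))
}
```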
--- .../nsc/doc/base/CommentFactoryBase.scala | 292 ++++++++++++++- .../tools/nsc/doc/base/comment/Body.scala | 11 +- .../scala/tools/nsc/doc/html/HtmlPage.scala | 29 ++ .../nsc/doc/html/resource/lib/template.css | 32 ++ test/scaladoc/resources/tables-warnings.scala | 33 ++ test/scaladoc/resources/tables.scala | 218 +++++++++++ test/scaladoc/run/tables-warnings.check | 19 + test/scaladoc/run/tables-warnings.scala | 99 +++++ test/scaladoc/run/tables.check | 16 + test/scaladoc/run/tables.scala | 343 ++++++++++++++++++ 10 files changed, 1088 insertions(+), 4 deletions(-) create mode 100644 test/scaladoc/resources/tables-warnings.scala create mode 100644 test/scaladoc/resources/tables.scala create mode 100644 test/scaladoc/run/tables-warnings.check create mode 100644 test/scaladoc/run/tables-warnings.scala create mode 100644 test/scaladoc/run/tables.check create mode 100644 test/scaladoc/run/tables.scala diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala index b1bb842453c3..7b68514fd5df 100644 --- a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala @@ -1,5 +1,5 @@ /* NSC -- new Scala compiler - * Copyright 2007-2017 LAMP/EPFL + * Copyright 2007-2018 LAMP/EPFL * @author Manohar Jonnalagedda */ @@ -8,11 +8,13 @@ package doc package base import base.comment._ +import scala.annotation.tailrec import scala.collection._ import scala.util.matching.Regex import scala.reflect.internal.util.Position import scala.language.postfixOps + /** The comment parser transforms raw comment strings into `Comment` objects. * Call `parse` to run the parser. Note that the parser is stateless and * should only be built once for a given Scaladoc run. 
@@ -433,6 +435,9 @@ trait CommentFactoryBase { this: MemberLookupBase => protected final class WikiParser(val buffer: String, pos: Position, site: Symbol) extends CharReader(buffer) { wiki => var summaryParsed = false + // TODO: Convert to Char + private val TableCellStart = "|" + def document(): Body = { val blocks = new mutable.ListBuffer[Block] while (char != endOfText) @@ -442,7 +447,7 @@ trait CommentFactoryBase { this: MemberLookupBase => /* BLOCKS */ - /** {{{ block ::= code | title | hrule | listBlock | para }}} */ + /** {{{ block ::= code | title | hrule | listBlock | table | para }}} */ def block(): Block = { if (checkSkipInitWhitespace("{{{")) code() @@ -452,6 +457,8 @@ trait CommentFactoryBase { this: MemberLookupBase => hrule() else if (checkList) listBlock + else if (check(TableCellStart)) + table() else { para() } @@ -490,7 +497,7 @@ trait CommentFactoryBase { this: MemberLookupBase => jumpWhitespace() jump(style) val p = Paragraph(inline(isInlineEnd = false)) - blockEnded("end of list line ") + blockEnded("end of list line") Some(p) } @@ -544,6 +551,284 @@ trait CommentFactoryBase { this: MemberLookupBase => HorizontalRule() } + /** {{{ + * table ::= headerRow '\n' delimiterRow '\n' dataRows '\n' + * content ::= inline-content + * row ::= '|' { content '|' }+ + * headerRow ::= row + * dataRows ::= row* + * align ::= ':' '-'+ | '-'+ | '-'+ ':' | ':' '-'+ ':' + * delimiterRow :: = '|' { align '|' }+ + * }}} + */ + def table(): Block = { + + /* Helpers */ + + def peek(tag: String): Unit = { + val peek: String = buffer.substring(offset) + val limit = 60 + val limitedPeek = peek.substring(0, limit min peek.length) + println(s"peek: $tag: '$limitedPeek'") + } + + def nextIsCellStart = check(TableCellStart) + + /* Accumulated state */ + + var header: Option[Row] = None + + val rows = mutable.ListBuffer.empty[Row] + + val cells = mutable.ListBuffer.empty[Cell] + + def finalizeCells(): Unit = { + if (cells.nonEmpty) { + rows += Row(cells.toList) + } + cells.clear() + } + + def finalizeHeaderCells(): Unit = { + if (cells.nonEmpty) { + if (header.isDefined) { + reportError(pos, "more than one table header") + } else { + header = Some(Row(cells.toList)) + } + } + cells.clear() + } + + def checkAny(terminators: List[String]) = terminators.exists(check) + + def isEndOfText = char == endOfText + + def isNewline = char == endOfLine + + def skipNewline() = jump(endOfLine) + + def contentNonEmpty(content: Inline) = content != Text("") + + /** + * @param nextIsStartMark True if the next char is a cell mark prefix and not any non-cell mark. + * @param cellStartMark The char the cell start mark is based on + * @param finalizeRow Function to invoke when the row has been fully parsed + */ + def parseCells(nextIsStartMark: => Boolean, cellStartMark: Char, finalizeRow: () => Unit): Unit = { + /* The first sequence of cellStartMark characters defines the markdown for new cells. 
*/ + def parseStartMark() = { + if (!jump(cellStartMark)) { + peek("Expected startMark") + sys.error("Precondition violated: Expected startMark.") + } + cellStartMark.toString + } + + /* startMark is the only mark not requiring a newline first */ + def makeInlineTerminators(startMark: String) = startMark :: Nil + + val startPos = offset + + val startMark = parseStartMark() + + val inlineTerminators = makeInlineTerminators(startMark) + + val content = Paragraph(inline(isInlineEnd = checkAny(inlineTerminators))) + + parseCells0(content :: Nil, startMark, cellStartMark, inlineTerminators, nextIsStartMark, finalizeRow, startPos, offset) + } + + // Continue parsing a table row. + // + // After reading inline content the follow conditions will be encountered, + // + // Case : Next Chars + // .................. + // 1 : end-of-text + // 2 : '|' '\n' + // 3 : '|' + // 4 : '\n' + // + // Case 1. + // State : End of text + // Action: Store the current contents, close the row, report warning, stop parsing. + // + // Case 2. + // State : The cell separator followed by a newline + // Action: Store the current contents, skip the cell separator and newline, close the row, stop parsing. + // + // Case 3. + // State : The cell separator not followed by a newline + // Action: Store the current contents, skip the cell separator, continue parsing the row. + // + // Case 4. + // State : A newline followed by anything + // Action: Store the current contents, report warning, skip the newline, close the row, stop parsing. + // + @tailrec def parseCells0( + contents: List[Block], + startMark: String, + cellStartMark: Char, + inlineTerminators: List[String], + nextIsStartMark: => Boolean, + finalizeRow: () => Unit, + progressPreParse: Int, + progressPostParse: Int + ): Unit = { + + def isStartMarkNewline = check(startMark + endOfLine) + + def skipStartMarkNewline() = jump(startMark + endOfLine) + + def isStartMark = check(startMark) + + def skipStartMark() = jump(startMark) + + def isNewlineCellStart = check(endOfLine.toString + cellStartMark) + + def storeContents() = cells += Cell(contents.reverse) + + val startPos = offset + + // The ordering of the checks ensures the state checks are correct. + if (progressPreParse == progressPostParse) { + peek("no-progress-table-row-parsing") + sys.error("No progress while parsing table row") + } else if (isEndOfText) { + // peek("1: end-of-text") + // Case 1 + storeContents() + finalizeRow() + reportError(pos, "unclosed table row") + } else if (isStartMarkNewline) { + // peek("2/1: start-mark-new-line") + // Case 2 + storeContents() + finalizeRow() + skipStartMarkNewline() + // peek("2/2: start-mark-new-line") + } else if (isStartMark) { + // peek("3: start-mark") + // Case 3 + storeContents() + skipStartMark() + val content = inline(isInlineEnd = checkAny(inlineTerminators)) + // TrailingCellsEmpty produces empty content + val accContents = if (contentNonEmpty(content)) Paragraph(content) :: Nil else Nil + parseCells0(accContents, startMark, cellStartMark, inlineTerminators, nextIsStartMark, finalizeRow, startPos, offset) + } else if (isNewline) { + // peek("4: newline") + // Case 4 + /* Fix and continue as there is no option to not return a table at present. */ + reportError(pos, "missing trailing cell marker") + storeContents() + finalizeRow() + skipNewline() + } else { + // Case π√ⅈ + // When the impossible happens leave some clues. 
+ reportError(pos, "unexpected table row markdown") + peek("parseCell0") + storeContents() + finalizeRow() + } + } + + /* Parsing */ + + jumpWhitespace() + + parseCells(nextIsCellStart, TableCellStart(0), finalizeHeaderCells) + + while (nextIsCellStart) { + val initialOffset = offset + + parseCells(nextIsCellStart, TableCellStart(0), finalizeCells) + + /* Progress should always be made */ + if (offset == initialOffset) { + peek("no-progress-table-parsing") + sys.error("No progress while parsing table") + } + } + + /* Finalize */ + + /* Structural consistency checks */ + + /* Structural coercion */ + + // https://github.github.com/gfm/#tables-extension- + // TODO: The header row must match the delimiter row in the number of cells. If not, a table will not be recognized: + // TODO: Break at following block level element: The table is broken at the first empty line, or beginning of another block-level structure: + // TODO: Do not return a table when: The header row must match the delimiter row in the number of cells. If not, a table will not be recognized + + if (cells.nonEmpty) { + reportError(pos, s"Parsed and unused content: $cells") + } + assert(header.isDefined, "table header was not parsed") + val enforcedCellCount = header.get.cells.size + + def applyColumnCountConstraint(row: Row, defaultCell: Cell, rowType: String): Row = { + if (row.cells.size == enforcedCellCount) + row + else if (row.cells.size > enforcedCellCount) { + val excess = row.cells.size - enforcedCellCount + reportError(pos, s"Dropping $excess excess table $rowType cells from row.") + Row(row.cells.take(enforcedCellCount)) + } else { + val missing = enforcedCellCount - row.cells.size + Row(row.cells ++ List.fill(missing)(defaultCell)) + } + } + + // TODO: Abandon table parsing when the delimiter is missing instead of fixing and continuing. + val delimiterRow :: dataRows = if (rows.nonEmpty) + rows.toList + else { + reportError(pos, "Fixing missing delimiter row") + Row(Cell(Paragraph(Text("-")) :: Nil) :: Nil) :: Nil + } + + if (delimiterRow.cells.isEmpty) sys.error("TODO: Handle table with empty delimiter row") + + val constrainedDelimiterRow = applyColumnCountConstraint(delimiterRow, delimiterRow.cells(0), "delimiter") + + val constrainedDataRows = dataRows.toList.map(applyColumnCountConstraint(_, Cell(Nil), "data")) + + /* Convert the row following the header row to column options */ + + val leftAlignmentPattern = "^:?-++$".r + val centerAlignmentPattern = "^:-++:$".r + val rightAlignmentPattern = "^-++:$".r + + import ColumnOption._ + /* Encourage user to fix by defaulting to least ignorable fix. 
*/ + val defaultColumnOption = ColumnOptionRight + val columnOptions = constrainedDelimiterRow.cells.map { + alignmentSpecifier => + alignmentSpecifier.blocks match { + // TODO: Parse the second row without parsing inline markdown + // TODO: Save pos when delimiter row is parsed and use here in reported errors + case Paragraph(Text(as)) :: Nil => + as.trim match { + case leftAlignmentPattern(_*) => ColumnOptionLeft + case centerAlignmentPattern(_*) => ColumnOptionCenter + case rightAlignmentPattern(_*) => ColumnOptionRight + case x => + reportError(pos, s"Fixing invalid column alignment: $x") + defaultColumnOption + } + case x => + reportError(pos, s"Fixing invalid column alignment: $x") + defaultColumnOption + } + } + blockEnded("table") + Table(header.get, columnOptions, constrainedDataRows) + } + /** {{{ para ::= inline '\n' }}} */ def para(): Block = { val p = @@ -781,6 +1066,7 @@ trait CommentFactoryBase { this: MemberLookupBase => checkSkipInitWhitespace('=') || checkSkipInitWhitespace("{{{") || checkList || + check(TableCellStart) || checkSkipInitWhitespace('\u003D') } offset = poff diff --git a/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala index 2524fb75fb82..d60aa1be43ca 100644 --- a/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala @@ -1,5 +1,5 @@ /* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL + * Copyright 2007-2018 LAMP/EPFL * @author Manohar Jonnalagedda */ @@ -55,6 +55,15 @@ final case class UnorderedList(items: Seq[Block]) extends Block final case class OrderedList(items: Seq[Block], style: String) extends Block final case class DefinitionList(items: SortedMap[Inline, Block]) extends Block final case class HorizontalRule() extends Block +final case class Table(header: Row, columnOptions: Seq[ColumnOption], rows: Seq[Row]) extends Block +final case class ColumnOption(option: Char) { require(option == 'L' || option == 'C' || option == 'R') } +object ColumnOption { + val ColumnOptionLeft = ColumnOption('L') + val ColumnOptionCenter = ColumnOption('C') + val ColumnOptionRight = ColumnOption('R') +} +final case class Row(cells: Seq[Cell]) +final case class Cell(blocks: Seq[Block]) /** An section of text inside a block, possibly with formatting. */ sealed abstract class Inline diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala index e10050537430..aafd95ba1ba6 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala @@ -104,6 +104,7 @@ abstract class HtmlPage extends Page { thisPage =>
<dl>{items map { case (t, d) =>
  <dt>{ inlineToHtml(t) }</dt>
  <dd>{ blockToHtml(d) }</dd>
} }</dl>
case HorizontalRule() =>
<hr/>
+ case tbl: Table => tableToHtml(tbl) } def listItemsToHtml(items: Seq[Block]) = @@ -158,6 +159,34 @@ abstract class HtmlPage extends Page { thisPage => inlineToHtml(text) } + private def tableToHtml(table: Table): NodeSeq = { + + val Table(header, columnOptions, rows) = table + + val colClass = Map( + ColumnOption.ColumnOptionLeft -> "doctbl-left", + ColumnOption.ColumnOptionCenter -> "doctbl-center", + ColumnOption.ColumnOptionRight -> "doctbl-right" + ) + val cc = columnOptions.map(colClass) + + + + { (header.cells zip cc).map{ case (cell, cls) => } } + + { + if (rows.nonEmpty) { + { + rows.map { + row => { (row.cells zip cc).map{ case (cell, cls) => } } + } + } + + } + } +
<th class={cls}>{ cell.blocks.map(blockToHtml) }</th>
<td class={cls}>{ cell.blocks.map(blockToHtml) }</td>
+ } + def typeToHtml(tpes: List[model.TypeEntity], hasLinks: Boolean): NodeSeq = tpes match { case Nil => NodeSeq.Empty diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css index 412cc51bc652..ae285a702398 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/template.css @@ -819,6 +819,38 @@ div.fullcomment dl.paramcmts > dd { min-height: 15px; } +/* Author Content Table formatting */ + +.doctbl { + border-collapse: collapse; + margin: 1.0em 0em; +} + +.doctbl-left { + text-align: left; +} + +.doctbl-center { + text-align: center; +} + +.doctbl-right { + text-align: right; +} + +table.doctbl th { + border: 1px dotted #364550; + background-color: #c2d2dc; + padding: 5px; + color: #103a51; + font-weight: bold; +} + +table.doctbl td { + border: 1px dotted #364550; + padding: 5px; +} + /* Members filter tool */ #memberfilter { diff --git a/test/scaladoc/resources/tables-warnings.scala b/test/scaladoc/resources/tables-warnings.scala new file mode 100644 index 000000000000..bb8819ed5ae6 --- /dev/null +++ b/test/scaladoc/resources/tables-warnings.scala @@ -0,0 +1,33 @@ +package scala.test.scaladoc.tables.warnings { + + /** + * |Header| + * |-| + * |cell*/ + trait PrematureEndOfText + + /** + * |Unterminated| + * |-| + * |r1c1| + * |r2c1 + * |r3c1| + * + */ + trait MissingTrailingCellMark + + /** + * |colon-colon|middle-colon|random|center| + * |::-|-:-|??|:----------------:| + * |a|b|c|d| + * */ + trait InvalidColumnOptions + + /** + * |Sequence| + * |''---''| + * |9| + * */ + trait InvalidMarkdownUsingColumnOptions + +} \ No newline at end of file diff --git a/test/scaladoc/resources/tables.scala b/test/scaladoc/resources/tables.scala new file mode 100644 index 000000000000..8b7e4af21574 --- /dev/null +++ b/test/scaladoc/resources/tables.scala @@ -0,0 +1,218 @@ +package scala.test.scaladoc.tables { + + /** + * |First Header| + * |---| + * |Content Cell| + */ + trait Minimal + + /** + * |No Data Rows| + * |---| + */ + trait NoDataRows + + /** + * |First Header|Second Header|Third Header| + * |:---|:---:|---:| + * |Cell 1|Cell 2|Cell 3| + */ + trait ColumnOptionsAllTypes + + /** + * |First Header|Second Header|Third Header| + * |:----|:-----:|------:| + * |Cell 1|Cell 2|Cell 3| + */ + trait ColumnOptionsMoreThanThreeHyphens + + /** + * |First Header|Second Header|Third Header| + * |-|:--:|---:| + */ + trait ColumnOptionsHyphenRepetitions + + /** + * |First Header|Second Header| + * |:---:|:---:|----| + * |Pork|Veal|Yak| + * |Yam| + * + */ + trait HeaderConstraints + + /** + * |Edibles| + * |---| + * |Oranges __and__ Aubergines| + * |Peaches `or` Pears| + */ + trait CellsUsingMarkdown + + /** + * |'''Nibbles'''|''Main''|`Desert`| + * |:--:|:---:|----| + * |Bread|Yak|Vodka| + * |Figs|Cheese on toast^three ways^|Coffee| + */ + trait CellsUsingMarkdownInHeader + + /** + * |Header 1|Header 2|| + * |---|---|---| + * |Fig|| + * |Cherry||| + * |Walnut| + */ + trait TrailingCellsEmpty + + // Headers + + /** + * |Fruits, ,,Beverages,, and Vegetables|Semiconductors, ''Raptors'', and Poultry| + * |---|---| + * |Out of stock|7 left| + */ + trait HeadersUsingInlineMarkdown + + /** + * |Item|Price| + * |---|---:| + * |Rookworst|€ 15,00| + * |Apple Sauce|€ 5,00| + */ + trait Combined + + /** + * |Header| + * |---| + * |link| + */ + trait CellInlineMarkdown + + /** + * |Hill Dweller| + * |---| + * |Ant| + * + * |Hive Dweller| + * |---| + * |Bee| 
+ * + */ + trait MultipleTables1 + + /** + * |Hill Dweller| + * |---| + * |Ant| + * + * |Hive Dweller| + * |---| + * |Bee| + * + * |Forest Dweller| + * |---| + * |Cricket| + * + */ + trait MultipleTables2 + + /** + * |Hill Dweller| + * |---| + * |Ant| + * + * Ants are cool. + * + * |Hive Dweller| + * |---| + * |Bee| + * + * But bees are better. + */ + trait MixedContent + + /** + * Summary + * + * Paragraph text should end here. + * |type| + * |-| + * |nuttiest| + */ + trait ParagraphEnd + + // Known suboptimal behaviour. Candidates for improving later. + + /** + * |First \|Header| + * |---|---| + * |\|Content 1| + * |C\|ontent 2| + * |Content\| 3| + * |Content \|4| + * |Content 5\|| + */ + trait CellMarkerEscaped + + /** + * |Domain|Symbol|Operation|Extra| + * |---|:---:|---|---| + * |Bitwise| \| |Or|| + */ + trait CellMarkerEscapedTwice + + /** + * ||Header 1|Header 2| + * |---|---|---| + * |||Fig| + * ||Cherry|| + * |Walnut||| + */ + trait LeadingCellsEmpty + + // Should not lose r2c1 or warn + /** + * |Unstarted| + * |-| + * |r1c1| + * r2c1| + * |r3c1| + * + */ + trait MissingInitialCellMark + + /** + * |Split| + * |-| + * |Accidental + * newline| + * |~FIN~| + * + */ + trait SplitCellContent + + /** + * |Hill Dweller| + * |---| + * |Ant| + * Ants are cool. + * |Hive Dweller| + * |---| + * |Bee| + * But bees are better. + */ + trait MixedContentUnspaced + + // Should parse to table with a header, defaulted delimiter and no rows. + /** + * |Leading| + * |-| + * |whitespace before marks| + * |Not Yet Skipped|Maybe TO DO| + */ + trait LeadingWhitespaceNotSkipped + +} \ No newline at end of file diff --git a/test/scaladoc/run/tables-warnings.check b/test/scaladoc/run/tables-warnings.check new file mode 100644 index 000000000000..35d4d72ebd3b --- /dev/null +++ b/test/scaladoc/run/tables-warnings.check @@ -0,0 +1,19 @@ +newSource:3: warning: unclosed table row + /** + ^ +newSource:9: warning: missing trailing cell marker + /** + ^ +newSource:19: warning: Fixing invalid column alignment: ::- + /** + ^ +newSource:19: warning: Fixing invalid column alignment: -:- + /** + ^ +newSource:19: warning: Fixing invalid column alignment: ?? + /** + ^ +newSource:26: warning: Fixing invalid column alignment: List(Paragraph(Italic(Text(---)))) + /** + ^ +Done. 
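Before the model tests that follow, a rough sketch of the `Body` AST such a table comment is expected to produce may help orient the reader. The object name and the sample comment are illustrative only and assume the `Table`/`Row`/`Cell`/`ColumnOption` classes added earlier in this patch:
```
import scala.tools.nsc.doc.base.comment._
import ColumnOption._

object ExpectedTableSketch {
  // Expected model for a doc comment containing:
  //   |Item|Price|
  //   |:---|---:|
  //   |Fig|3|
  val expected: Table = Table(
    header        = Row(List(Cell(List(Paragraph(Text("Item")))), Cell(List(Paragraph(Text("Price")))))),
    columnOptions = List(ColumnOptionLeft, ColumnOptionRight),
    rows          = List(Row(List(Cell(List(Paragraph(Text("Fig")))), Cell(List(Paragraph(Text("3")))))))
  )
}
```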
diff --git a/test/scaladoc/run/tables-warnings.scala b/test/scaladoc/run/tables-warnings.scala new file mode 100644 index 000000000000..7a75557417ea --- /dev/null +++ b/test/scaladoc/run/tables-warnings.scala @@ -0,0 +1,99 @@ +import scala.tools.nsc.doc.model._ +import scala.tools.nsc.doc.base.comment._ +import scala.tools.partest.ScaladocModelTest +import ColumnOption._ + +// Test with: +// partest --verbose --srcpath scaladoc test/scaladoc/run/tables-warnings.scala + +object Test extends ScaladocModelTest { + + import access._ + + override def resourceFile = "tables-warnings.scala" + + def scaladocSettings = "" + + def testModel(rootPackage: Package): Unit = { + + val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("tables")._package("warnings") + + val printCommentName = false + + def withComment(commentNames: String*)(test: Comment => Unit) = { + commentNames foreach { + commentName => + if (printCommentName) { + println(commentName) + } + val comment = getComment(commentName, base) + test(comment) + } + } + + /* Compact table creation */ + + def pt(content: String): Paragraph = Paragraph(Text(content)) + + def c(contents: String*): Cell = Cell(contents.toList.map(pt)) + + def r(contents: String*): Row = Row(contents.toList.map(content => c(content))) + + withComment("PrematureEndOfText") { comment => + val header = r("Header") + val colOpts = ColumnOptionLeft :: Nil + val row = r("cell") + val rows = row :: Nil + assertTableEquals(Table(header, colOpts, rows), comment.body) + } + + withComment("MissingTrailingCellMark") { comment => + val header = r("Unterminated") + val colOpts = ColumnOptionLeft :: Nil + val rows = r("r1c1") :: r("r2c1") :: r("r3c1") :: Nil + assertTableEquals(Table(header, colOpts, rows), comment.body) + } + + withComment("InvalidColumnOptions") { comment => + val header = r("colon-colon", "middle-colon", "random", "center") + val colOpts = ColumnOptionRight :: ColumnOptionRight :: ColumnOptionRight :: ColumnOptionCenter :: Nil + val row = r("a", "b", "c", "d") + val rows = row :: Nil + assertTableEquals(Table(header, colOpts, rows), comment.body) + } + + withComment("InvalidMarkdownUsingColumnOptions") { comment => + val header = r("Sequence") + val colOpts = ColumnOptionRight :: Nil + val row = r("9") + val rows = row :: Nil + assertTableEquals(Table(header, colOpts, rows), comment.body) + } + } + + private def getComment(traitName: String, containingPackage: Package): Comment = { + containingPackage._trait(traitName).comment.get + } + + private def assertTableEquals(expectedTable: Table, actualBody: Body): Unit = { + actualBody.blocks.toList match { + case (actualTable: Table) :: Nil => + assert(expectedTable == actualTable, s"\n\nExpected:\n${multilineFormat(expectedTable)}\n\nActual:\n${multilineFormat(actualTable)}\n") + case _ => + val expectedBody = Body(List(expectedTable)) + assert(expectedBody == actualBody, s"Expected: $expectedBody, Actual: $actualBody") + } + } + + private def assertTableEquals(expectedTable: Table, actualBlock: Block): Unit = { + assert(expectedTable == actualBlock, s"Expected: $expectedTable, Actual: $actualBlock") + } + + private def multilineFormat(table: Table): String = { + "header : " + table.header + "\n" + + "columnOptions: " + table.columnOptions.size + "\n" + + (table.columnOptions mkString "\n") + "\n" + + "rows : " + table.rows.size + "\n" + + (table.rows mkString "\n") + } +} \ No newline at end of file diff --git a/test/scaladoc/run/tables.check b/test/scaladoc/run/tables.check new 
file mode 100644 index 000000000000..cccd38786dc4 --- /dev/null +++ b/test/scaladoc/run/tables.check @@ -0,0 +1,16 @@ +newSource:36: warning: Dropping 1 excess table delimiter cells from row. + /** + ^ +newSource:36: warning: Dropping 1 excess table data cells from row. + /** + ^ +newSource:160: warning: Dropping 1 excess table data cells from row. + /** + ^ +newSource:177: warning: no additional content on same line after table + /** + ^ +newSource:177: warning: Fixing missing delimiter row + /** + ^ +Done. diff --git a/test/scaladoc/run/tables.scala b/test/scaladoc/run/tables.scala new file mode 100644 index 000000000000..5685a70f8712 --- /dev/null +++ b/test/scaladoc/run/tables.scala @@ -0,0 +1,343 @@ +import scala.tools.nsc.doc.model._ +import scala.tools.nsc.doc.base.comment._ +import scala.tools.partest.ScaladocModelTest +import ColumnOption._ + +// Test with: +// partest --verbose --srcpath scaladoc test/scaladoc/run/tables.scala + +object Test extends ScaladocModelTest { + + import access._ + + override def resourceFile = "tables.scala" + + def scaladocSettings = "" + + def testModel(rootPackage: Package): Unit = { + + val base = rootPackage._package("scala")._package("test")._package("scaladoc")._package("tables") + + val allTests = true + val whitelist = Set[String]() + val blacklist = Set[String]() + val whitelistPrefix: Option[String] = None + val printCommentName = false + + def includeTest(commentName: String) = { + val whitelisted = whitelist(commentName) || whitelistPrefix.map(commentName startsWith _).getOrElse(false) + (allTests && !blacklist(commentName)) || whitelisted + } + + def withComment(commentNames: String*)(test: Comment => Unit) = { + commentNames foreach { + commentName => + if (includeTest(commentName)) { + if (printCommentName) { + println(commentName) + } + val comment = getComment(commentName, base) + test(comment) + } + } + } + + /* Compact table creation */ + + def pt(content: String): Paragraph = Paragraph(Text(content)) + + def c(contents: String*): Cell = Cell(contents.toList.map(pt)) + + def ci(content: Inline): Cell = Cell(Paragraph(content) :: Nil) + + /* None transforms to an empty block list */ + def r(contents: Any*): Row = { + val cells = contents.toList.map { + case "" => Cell(Nil) + case x: String => c(x) + case None => Cell(Nil) + } + Row(cells) + } + + withComment("Minimal") { comment => + val header = r("First Header") + val colOpts = ColumnOptionLeft :: Nil + val row = r("Content Cell") + assertTableEquals(Table(header, colOpts, row :: Nil), comment.body) + } + + withComment("NoDataRows") { comment => + val header = r("No Data Rows") + val colOpts = ColumnOptionLeft :: Nil + assertTableEquals(Table(header, colOpts, Nil), comment.body) + } + + withComment("ColumnOptionsAllTypes", "ColumnOptionsMoreThanThreeHyphens") { comment => + val header = r("First Header", "Second Header", "Third Header") + val colOpts = ColumnOptionLeft :: ColumnOptionCenter :: ColumnOptionRight :: Nil + val row = r("Cell 1", "Cell 2", "Cell 3") + assertTableEquals(Table(header, colOpts, row :: Nil), comment.body) + } + + withComment("ColumnOptionsHyphenRepetitions") { comment => + val header = r("First Header", "Second Header", "Third Header") + val colOpts = ColumnOptionLeft :: ColumnOptionCenter :: ColumnOptionRight :: Nil + assertTableEquals(Table(header, colOpts, Nil), comment.body) + } + + withComment("HeaderConstraints") { comment => + val header = r("First Header", "Second Header") + val colOpts = ColumnOptionCenter :: ColumnOptionCenter :: Nil + val row1 = 
r("Pork", "Veal") + val row2 = r("Yam", "") + val rows = row1 :: row2 :: Nil + assertTableEquals(Table(header, colOpts, rows), comment.body) + } + + withComment("CellsUsingMarkdown") { comment => + val header = r("Edibles") + val colOpts = ColumnOptionLeft :: Nil + + val cell1 = ci(Chain(List(Text("Oranges "), Underline(Text("and")), Text(" Aubergines")))) + + val cell2 = ci(Chain(List(Text("Peaches "), Monospace(Text("or")), Text(" Pears")))) + + val row1 = Row(cell1 :: Nil) + val row2 = Row(cell2 :: Nil) + val rows = row1 :: row2 :: Nil + assertTableEquals(Table(header, colOpts, rows), comment.body) + } + + withComment("CellsUsingMarkdownInHeader") { comment => + val header = { + val cell1 = ci(Bold(Text("Nibbles"))) + val cell2 = ci(Italic(Text("Main"))) + val cell3 = ci(Monospace(Text("Desert"))) + Row(cell1 :: cell2 :: cell3 :: Nil) + } + val colOpts = ColumnOptionCenter :: ColumnOptionCenter :: ColumnOptionLeft :: Nil + + val row1 = r("Bread", "Yak", "Vodka") + val row2 = { + val cell1 = c("Figs") + val cell2 = ci(Chain(Text("Cheese on toast") :: Superscript(Text("three ways")) :: Nil)) + val cell3 = c("Coffee") + Row(cell1 :: cell2 :: cell3 :: Nil) + } + val rows = row1 :: row2 :: Nil + assertTableEquals(Table(header, colOpts, rows), comment.body) + } + + withComment("TrailingCellsEmpty") { comment => + val header = r("Header 1", "Header 2", "") + val colOpts = ColumnOptionLeft :: ColumnOptionLeft :: ColumnOptionLeft :: Nil + + val row1 = r("Fig", "", "") + val row2 = r("Cherry", "", "") + val row3 = r("Walnut", "", "") + val rows = row1 :: row2 :: row3 :: Nil + assertTableEquals(Table(header, colOpts, rows), comment.body) + } + + withComment("LeadingCellsEmpty") { comment => + val nilCell = Cell(Nil) + val emptyCell = c("") + + val header = Row(emptyCell :: c("Header 1") :: c("Header 2") :: Nil) + val colOpts = ColumnOptionLeft :: ColumnOptionLeft :: ColumnOptionLeft :: Nil + + val row1 = Row(emptyCell :: nilCell :: c("Fig") :: Nil) + val row2 = Row(emptyCell :: c("Cherry") :: nilCell :: Nil) + val row3 = Row(c("Walnut") :: nilCell :: nilCell :: Nil) + val rows = row1 :: row2 :: row3 :: Nil + assertTableEquals(Table(header, colOpts, rows), comment.body) + } + + withComment("HeadersUsingInlineMarkdown") { comment => + val headerCell1 = ci( + Chain( + Text("Fruits, ") :: Subscript(Text("Beverages")) :: Text(" and Vegetables") :: Nil + ) + ) + val headerCell2 = ci( + Chain( + Text("Semiconductors, ") :: Italic(Text("Raptors")) :: Text(", and Poultry") :: Nil + ) + ) + + val header = Row(headerCell1 :: headerCell2 :: Nil) + val colOpts = ColumnOptionLeft :: ColumnOptionLeft :: Nil + + val row = r("Out of stock", "7 left") + val rows = row :: Nil + assertTableEquals(Table(header, colOpts, rows), comment.body) + } + + withComment("Combined") { comment => + + val header = r("Item", "Price") + val colOpts = ColumnOptionLeft :: ColumnOptionRight :: Nil + + val row1 = r("Rookworst", "€ 15,00") + val row2 = r("Apple Sauce", "€ 5,00") + val rows = row1 :: row2 :: Nil + assertTableEquals(Table(header, colOpts, rows), comment.body) + } + + withComment("CellInlineMarkdown") { comment => + + val header = r("Header") + val colOpts = ColumnOptionLeft :: Nil + + val row = Row(ci(HtmlTag("link")) :: Nil) + + val rows = row :: Nil + assertTableEquals(Table(header, colOpts, rows), comment.body) + } + + withComment("MultipleTables1") { comment => + + val colOpts = ColumnOptionLeft :: Nil + + val table1 = Table(r("Hill Dweller"), colOpts, r("Ant") :: Nil) + val table2 = Table(r("Hive Dweller"), colOpts, 
r("Bee") :: Nil) + + assertTablesEquals(table1 :: table2 :: Nil, comment.body) + } + + withComment("MultipleTables2") { comment => + + val colOpts = ColumnOptionLeft :: Nil + + val table1 = Table(r("Hill Dweller"), colOpts, r("Ant") :: Nil) + val table2 = Table(r("Hive Dweller"), colOpts, r("Bee") :: Nil) + val table3 = Table(r("Forest Dweller"), colOpts, r("Cricket") :: Nil) + + assertTablesEquals(table1 :: table2 :: table3 :: Nil, comment.body) + } + + { + val colOpts = ColumnOptionLeft :: Nil + + val table1 = Table(r("Hill Dweller"), colOpts, r("Ant") :: Nil) + val table2 = Table(r("Hive Dweller"), colOpts, r("Bee") :: Nil) + + val content1 = Paragraph(Chain(List(Summary(Chain(List(Text("Ants are cool"), Text("."))))))) + val content2 = pt("But bees are better.\n") + + val body = Body(table1 :: content1 :: table2 :: content2 :: Nil) + + withComment("MixedContent") { comment => + assertBodiesEquals(body, comment.body) + } + } + + withComment("ParagraphEnd") { comment => + + val summary = Paragraph(Chain(List(Summary(Text("Summary"))))) + val paragraph = pt("Paragraph text should end here.") + val header = r("type") + val colOpts = ColumnOptionLeft :: Nil + val table = Table(header, colOpts, r("nuttiest") :: Nil) + val expected = Body(List(summary, paragraph, table)) + + assertBodiesEquals(expected, comment.body) + } + + /* Deferred Enhancements. + * + * When these improvements are made corresponding test updates to any new or + * changed error messages and parsed content and would be included. + */ + + // Deferred pipe escape functionality. + withComment("CellMarkerEscaped") { comment => + val header = r("First \\", "Header") + val colOpts = ColumnOptionLeft :: ColumnOptionLeft :: Nil + + val row1 = r("\\", "Content 1") + val row2 = r("C\\", "ontent 2") + val row3 = r("Content\\", " 3") + val row4 = r("Content \\", "4") + val row5 = Row(Cell(List(Paragraph(Text("Content 5\\")))) :: Cell(Nil) :: Nil) + + val rows = row1 :: row2 :: row3 :: row4 :: row5 :: Nil + assertTableEquals(Table(header, colOpts, rows), comment.body) + } + + // Deferred pipe escape functionality. + withComment("CellMarkerEscapedTwice") { comment => + val header = r("Domain", "Symbol", "Operation", "Extra") + val colOpts = ColumnOptionLeft :: ColumnOptionCenter :: ColumnOptionLeft :: ColumnOptionLeft :: Nil + + val row = r("Bitwise", " \\", " ", "Or") + + val rows = row :: Nil + assertTableEquals(Table(header, colOpts, rows), comment.body) + } + + withComment("MissingInitialCellMark") { comment => + + val colOpts = ColumnOptionLeft :: Nil + + val table1 = Table(r("Unstarted"), colOpts, r("r1c1") :: Nil) + val table2 = Table(r("r3c1"), colOpts, Nil) + + assertTablesEquals(table1 :: table2 :: Nil, comment.body) + } + + // TODO: Add assertions for MixedContentUnspaced which is similar to MissingInitialCellMark + + withComment("SplitCellContent") { comment => + val header = r("Split") + val colOpts = ColumnOptionLeft :: Nil + val rows = r("Accidental\nnewline") :: r("~FIN~") :: Nil + assertTableEquals(Table(header, colOpts, rows), comment.body) + } + + // TODO: As a later enhancement skip whitespace before table marks to reduce rate of silently incorrect table markdown. 
+ /* Confirm current suboptimal behaviour */ + // TODO: Restore this test by updating the expected value + if (false) { + withComment("LeadingWhitespaceNotSkipped") { comment => + val colOpts = ColumnOptionLeft :: Nil + val table1 = Table(r("Leading"), colOpts, Nil) + val table2 = Table(r("whitespace before marks"), colOpts, Nil) + val body = Body(table1 :: table2 :: Nil) + assertBodiesEquals(body, comment.body) + } + } + } + + private def getComment(traitName: String, containingPackage: Package): Comment = { + containingPackage._trait(traitName).comment.get + } + + private def assertTableEquals(expectedTable: Table, actualBody: Body): Unit = { + actualBody.blocks.toList match { + case (actualTable: Table) :: Nil => + assert(expectedTable == actualTable, s"\n\nExpected:\n${multilineFormat(expectedTable)}\n\nActual:\n${multilineFormat(actualTable)}\n") + case _ => + val expectedBody = Body(List(expectedTable)) + assert(expectedBody == actualBody, s"Expected: $expectedBody, Actual: $actualBody") + } + } + + private def assertTablesEquals(expectedTables: Seq[Table], actualBody: Body): Unit = { + val expectedBody = Body(expectedTables) + assert(expectedBody == actualBody, s"Expected: $expectedBody, Actual: $actualBody") + } + + private def assertBodiesEquals(expectedBody: Body, actualBody: Body): Unit = { + assert(expectedBody == actualBody, s"Expected: $expectedBody, Actual: $actualBody") + } + + private def multilineFormat(table: Table): String = { + "header : " + table.header + "\n" + + "columnOptions: " + table.columnOptions.size + "\n" + + (table.columnOptions mkString "\n") + "\n" + + "rows : " + table.rows.size + "\n" + + (table.rows mkString "\n") + } +} \ No newline at end of file From e387cafecac6cdaea1b85cfc9fd207e4f1dfeff3 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 18 Sep 2018 15:53:36 +1000 Subject: [PATCH 1502/2793] Revert change that is binary incompatible with the 2.12.6 build of macro paradise --- src/compiler/scala/tools/nsc/Global.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 42b815750818..d43564e44e3a 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1142,7 +1142,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) val compiledFiles = new mutable.HashSet[String] /** A map from compiled top-level symbols to their source files */ - val symSource = new mutable.AnyRefMap[Symbol, AbstractFile] + val symSource = new mutable.HashMap[Symbol, AbstractFile] /** A map from compiled top-level symbols to their picklers */ val symData = new mutable.AnyRefMap[Symbol, PickleBuffer] From 036143bad49e07bf131386bf2df39ec889e24dc9 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 18 Sep 2018 16:05:59 +1000 Subject: [PATCH 1503/2793] Make some methods protected that are used by scalameta's Typer subclass --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index db9d863e11d3..2f828154c5d0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4126,7 +4126,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // import treeInfo.{isApplyDynamicName, DynamicUpdate, DynamicApplicationNamed} - private def 
acceptsApplyDynamic(tp: Type) = tp.typeSymbol isNonBottomSubClass DynamicClass + protected def acceptsApplyDynamic(tp: Type) = tp.typeSymbol isNonBottomSubClass DynamicClass /** Returns `Some(t)` if `name` can be selected dynamically on `qual`, `None` if not. * `t` specifies the type to be passed to the applyDynamic/selectDynamic call (unless it is NoType) @@ -4187,7 +4187,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * - simplest solution: have two method calls * */ - private def mkInvoke(context: Context, tree: Tree, qual: Tree, name: Name): Option[Tree] = { + protected def mkInvoke(context: Context, tree: Tree, qual: Tree, name: Name): Option[Tree] = { val cxTree = context.enclosingNonImportContext.tree // scala/bug#8364 debuglog(s"dyna.mkInvoke($cxTree, $tree, $qual, $name)") val treeInfo.Applied(treeSelection, _, _) = tree @@ -4236,7 +4236,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } } - private def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = silent(typeTree) orElse (err => DynamicRewriteError(tree, err.head)) + protected def wrapErrors(tree: Tree, typeTree: Typer => Tree): Tree = silent(typeTree) orElse (err => DynamicRewriteError(tree, err.head)) // // END: applyDynamic support // From f8e8fd950a5742ffd9ff9b2706498f609d2cc914 Mon Sep 17 00:00:00 2001 From: Andrei Baidarov Date: Tue, 18 Sep 2018 17:40:02 +0300 Subject: [PATCH 1504/2793] [nomerge] [#11153] add null-check in ConcatIterator.advance --- src/library/scala/collection/Iterator.scala | 2 +- test/junit/scala/collection/IteratorTest.scala | 8 ++++++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index d44528d3a427..3aa95568ec79 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -191,7 +191,7 @@ object Iterator { tail = tail.tail merge() if (currentHasNextChecked) true - else if (current.hasNext) { + else if ((current ne null) && current.hasNext) { currentHasNextChecked = true true } else advance() diff --git a/test/junit/scala/collection/IteratorTest.scala b/test/junit/scala/collection/IteratorTest.scala index 191db83c3f57..6a427bbdc078 100644 --- a/test/junit/scala/collection/IteratorTest.scala +++ b/test/junit/scala/collection/IteratorTest.scala @@ -325,6 +325,14 @@ class IteratorTest { assertSameElements(List(10,11,13), scan) assertSameElements(List(10,-1,-1,-11,11,-2,-2,-13,13,-3), results) } + // scala/bug#11153 + @Test def handleExhaustedConcatSubIterator(): Unit = { + val it = Iterator.empty ++ Iterator.empty + // exhaust and clear internal state + it.hasNext + val concat = Iterator.empty ++ it + while (concat.hasNext) concat.next() + } @Test def `scan trailing avoids extra hasNext`(): Unit = { val it = new AbstractIterator[Int] { var i = 0 From 4b476598a5e38f2d0c179db21aec3f464a8b0fee Mon Sep 17 00:00:00 2001 From: Mike Skells Date: Wed, 19 Sep 2018 23:59:01 +0100 Subject: [PATCH 1505/2793] make ConditionalReporting less lazy --- src/compiler/scala/tools/nsc/Reporting.scala | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala index 5635e678ded5..4bed54a153b8 100644 --- a/src/compiler/scala/tools/nsc/Reporting.scala +++ b/src/compiler/scala/tools/nsc/Reporting.scala @@ -26,16 +26,16 @@ trait Reporting extends scala.reflect.internal.Reporting { self: 
ast.Positions w protected def PerRunReporting = new PerRunReporting class PerRunReporting extends PerRunReportingBase { /** Collects for certain classes of warnings during this run. */ - private class ConditionalWarning(what: String, doReport: () => Boolean, setting: Settings#Setting) { + private class ConditionalWarning(what: String, doReport: Boolean, setting: Settings#Setting) { def this(what: String, booleanSetting: Settings#BooleanSetting) { - this(what, () => booleanSetting, booleanSetting) + this(what, booleanSetting.value, booleanSetting) } val warnings = mutable.LinkedHashMap[Position, (String, String)]() def warn(pos: Position, msg: String, since: String = "") = - if (doReport()) reporter.warning(pos, msg) + if (doReport) reporter.warning(pos, msg) else if (!(warnings contains pos)) warnings += ((pos, (msg, since))) def summarize() = - if (warnings.nonEmpty && (setting.isDefault || doReport())) { + if (warnings.nonEmpty && (setting.isDefault || doReport)) { val sinceAndAmount = mutable.TreeMap[String, Int]() warnings.valuesIterator.foreach { case (_, since) => val value = sinceAndAmount.get(since) @@ -65,7 +65,7 @@ trait Reporting extends scala.reflect.internal.Reporting { self: ast.Positions w private val _deprecationWarnings = new ConditionalWarning("deprecation", settings.deprecation) private val _uncheckedWarnings = new ConditionalWarning("unchecked", settings.unchecked) private val _featureWarnings = new ConditionalWarning("feature", settings.feature) - private val _inlinerWarnings = new ConditionalWarning("inliner", () => !settings.optWarningsSummaryOnly, settings.optWarnings) + private val _inlinerWarnings = new ConditionalWarning("inliner", !settings.optWarningsSummaryOnly, settings.optWarnings) private val _allConditionalWarnings = List(_deprecationWarnings, _uncheckedWarnings, _featureWarnings, _inlinerWarnings) // TODO: remove in favor of the overload that takes a Symbol, give that argument a default (NoSymbol) From 7bfbaa7bd1e1e55b6d2c815a5c87a1d562055f6f Mon Sep 17 00:00:00 2001 From: Diego Alonso Date: Thu, 20 Sep 2018 15:25:47 +0100 Subject: [PATCH 1506/2793] Bug t10156: add two files for the test bug case --- test/files/neg/t10156.check | 4 ++++ test/files/neg/t10156.scala | 5 +++++ 2 files changed, 9 insertions(+) create mode 100644 test/files/neg/t10156.check create mode 100644 test/files/neg/t10156.scala diff --git a/test/files/neg/t10156.check b/test/files/neg/t10156.check new file mode 100644 index 000000000000..e656cb3c25e8 --- /dev/null +++ b/test/files/neg/t10156.check @@ -0,0 +1,4 @@ +t10156.scala:4: error: could not find implicit value for parameter a: t10156.A + val z = x _ + ^ +one error found diff --git a/test/files/neg/t10156.scala b/test/files/neg/t10156.scala new file mode 100644 index 000000000000..a4a046108c38 --- /dev/null +++ b/test/files/neg/t10156.scala @@ -0,0 +1,5 @@ +object t10156 { + trait A + def x(implicit a: A) = a + val z = x _ +} \ No newline at end of file From 46db85852564f123697666da24ef216c5b2b29b3 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 20 Sep 2018 17:30:54 +0200 Subject: [PATCH 1507/2793] Un-deprecate linesIterator Java 11 introduces the `lines` method on `String`, which means we should probably avoid using that name, and go back to `linesIterator`. To allow compiling with -Xfatal-warnings on Java 11, we have no choice on 2.12 but to un-deprecate this method. Probably shouldn't immediately deprecate `lines`, since most users will be running on Java 8, and are thus not affected by this. 
Perhaps 2.13 is the right time frame for switching around the deprecation. --- src/library/scala/collection/immutable/StringLike.scala | 3 +-- test/files/run/repl-inline.check | 1 - 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala index fce0f073aaff..ff31ab449b5e 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -125,7 +125,7 @@ self => /** Return all lines in this string in an iterator, excluding trailing line * end characters; i.e., apply `.stripLineEnd` to all lines * returned by `linesWithSeparators`. - */ + */ // TODO: deprecate on 2.13 to avoid conflict on Java 11, which introduces `String::lines` (this is why `linesIterator` has been un-deprecated) def lines: Iterator[String] = linesWithSeparators map (line => new WrappedString(line).stripLineEnd) @@ -133,7 +133,6 @@ self => * end characters; i.e., apply `.stripLineEnd` to all lines * returned by `linesWithSeparators`. */ - @deprecated("use `lines` instead","2.11.0") def linesIterator: Iterator[String] = linesWithSeparators map (line => new WrappedString(line).stripLineEnd) diff --git a/test/files/run/repl-inline.check b/test/files/run/repl-inline.check index db729a67dd09..c6b363a86a74 100644 --- a/test/files/run/repl-inline.check +++ b/test/files/run/repl-inline.check @@ -1,4 +1,3 @@ -warning: there was one deprecation warning (since 2.11.0); re-run with -deprecation for details callerOfCaller: String g: String h: String From 45657a3d0d661ba8d79abdd8ad6a31d69b8a8dc5 Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Mon, 17 Sep 2018 20:10:55 +0100 Subject: [PATCH 1508/2793] Allow escaping Scaladoc table cell delimiter MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Interpret \| as content instead of cell delimiter following the GitHub Flavored Markdown Table Extension spec. 
For example this markdown defines a 2 column table, | Purpose | Command | | ------- | ------- | | Count instances | cut -f2 data.tsv \| sort \| uniq -c | ┌──────────────────┬───────────────────────────────────┐ │ Purpose │ Command │ ├──────────────────┼───────────────────────────────────┤ │ Count instances │ cut -f2 data.tsv | sort | uniq -c │ └──────────────────┴───────────────────────────────────┘ Fixes scala/bug#11161 --- .../nsc/doc/base/CommentFactoryBase.scala | 79 +++++++++---------- test/scaladoc/resources/tables.scala | 20 ++--- test/scaladoc/run/tables.check | 7 +- test/scaladoc/run/tables.scala | 38 ++++----- 4 files changed, 70 insertions(+), 74 deletions(-) diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala index 7b68514fd5df..3239735772c2 100644 --- a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala @@ -600,7 +600,12 @@ trait CommentFactoryBase { this: MemberLookupBase => cells.clear() } - def checkAny(terminators: List[String]) = terminators.exists(check) + val escapeChar = "\\" + + /* Poor man's negative lookbehind */ + def checkInlineEnd = check(TableCellStart) && !check(escapeChar, -1) + + def decodeEscapedCellMark(text: String) = text.replace(escapeChar + TableCellStart, TableCellStart) def isEndOfText = char == endOfText @@ -608,35 +613,35 @@ trait CommentFactoryBase { this: MemberLookupBase => def skipNewline() = jump(endOfLine) + def isStartMarkNewline = check(TableCellStart + endOfLine) + + def skipStartMarkNewline() = jump(TableCellStart + endOfLine) + + def isStartMark = check(TableCellStart) + + def skipStartMark() = jump(TableCellStart) + def contentNonEmpty(content: Inline) = content != Text("") /** - * @param nextIsStartMark True if the next char is a cell mark prefix and not any non-cell mark. - * @param cellStartMark The char the cell start mark is based on + * @param cellStartMark The char indicating the start or end of a cell * @param finalizeRow Function to invoke when the row has been fully parsed */ - def parseCells(nextIsStartMark: => Boolean, cellStartMark: Char, finalizeRow: () => Unit): Unit = { - /* The first sequence of cellStartMark characters defines the markdown for new cells. */ - def parseStartMark() = { + def parseCells(cellStartMark: String, finalizeRow: () => Unit): Unit = { + def jumpCellStartMark() = { if (!jump(cellStartMark)) { - peek("Expected startMark") - sys.error("Precondition violated: Expected startMark.") + peek(s"Expected $cellStartMark") + sys.error(s"Precondition violated: Expected $cellStartMark.") } - cellStartMark.toString } - /* startMark is the only mark not requiring a newline first */ - def makeInlineTerminators(startMark: String) = startMark :: Nil - val startPos = offset - val startMark = parseStartMark() - - val inlineTerminators = makeInlineTerminators(startMark) + jumpCellStartMark() - val content = Paragraph(inline(isInlineEnd = checkAny(inlineTerminators))) + val content = Paragraph(inline(isInlineEnd = checkInlineEnd, textTransform = decodeEscapedCellMark)) - parseCells0(content :: Nil, startMark, cellStartMark, inlineTerminators, nextIsStartMark, finalizeRow, startPos, offset) + parseCells0(content :: Nil, finalizeRow, startPos, offset) } // Continue parsing a table row. 
@@ -668,25 +673,11 @@ trait CommentFactoryBase { this: MemberLookupBase => // @tailrec def parseCells0( contents: List[Block], - startMark: String, - cellStartMark: Char, - inlineTerminators: List[String], - nextIsStartMark: => Boolean, finalizeRow: () => Unit, progressPreParse: Int, progressPostParse: Int ): Unit = { - def isStartMarkNewline = check(startMark + endOfLine) - - def skipStartMarkNewline() = jump(startMark + endOfLine) - - def isStartMark = check(startMark) - - def skipStartMark() = jump(startMark) - - def isNewlineCellStart = check(endOfLine.toString + cellStartMark) - def storeContents() = cells += Cell(contents.reverse) val startPos = offset @@ -713,10 +704,10 @@ trait CommentFactoryBase { this: MemberLookupBase => // Case 3 storeContents() skipStartMark() - val content = inline(isInlineEnd = checkAny(inlineTerminators)) + val content = inline(isInlineEnd = checkInlineEnd, textTransform = decodeEscapedCellMark) // TrailingCellsEmpty produces empty content val accContents = if (contentNonEmpty(content)) Paragraph(content) :: Nil else Nil - parseCells0(accContents, startMark, cellStartMark, inlineTerminators, nextIsStartMark, finalizeRow, startPos, offset) + parseCells0(accContents, finalizeRow, startPos, offset) } else if (isNewline) { // peek("4: newline") // Case 4 @@ -739,12 +730,12 @@ trait CommentFactoryBase { this: MemberLookupBase => jumpWhitespace() - parseCells(nextIsCellStart, TableCellStart(0), finalizeHeaderCells) + parseCells(TableCellStart, finalizeHeaderCells) while (nextIsCellStart) { val initialOffset = offset - parseCells(nextIsCellStart, TableCellStart(0), finalizeCells) + parseCells(TableCellStart, finalizeCells) /* Progress should always be made */ if (offset == initialOffset) { @@ -755,9 +746,7 @@ trait CommentFactoryBase { this: MemberLookupBase => /* Finalize */ - /* Structural consistency checks */ - - /* Structural coercion */ + /* Structural consistency checks and coercion */ // https://github.github.com/gfm/#tables-extension- // TODO: The header row must match the delimiter row in the number of cells. 
If not, a table will not be recognized: @@ -795,7 +784,7 @@ trait CommentFactoryBase { this: MemberLookupBase => val constrainedDelimiterRow = applyColumnCountConstraint(delimiterRow, delimiterRow.cells(0), "delimiter") - val constrainedDataRows = dataRows.toList.map(applyColumnCountConstraint(_, Cell(Nil), "data")) + val constrainedDataRows = dataRows.map(applyColumnCountConstraint(_, Cell(Nil), "data")) /* Convert the row following the header row to column options */ @@ -885,7 +874,7 @@ trait CommentFactoryBase { this: MemberLookupBase => list mkString "" } - def inline(isInlineEnd: => Boolean): Inline = { + def inline(isInlineEnd: => Boolean, textTransform: String => String = identity): Inline = { def inline0(): Inline = { if (char == safeTagMarker) { @@ -901,7 +890,7 @@ trait CommentFactoryBase { this: MemberLookupBase => else if (check("[[")) link() else { val str = readUntil { char == safeTagMarker || check("''") || char == '`' || check("__") || char == '^' || check(",,") || check("[[") || isInlineEnd || checkParaEnded || char == endOfLine } - Text(str) + Text(textTransform(str)) } } @@ -1106,6 +1095,14 @@ trait CommentFactoryBase { this: MemberLookupBase => ok } + final def check(chars: String, checkOffset: Int): Boolean = { + val poff = offset + offset += checkOffset + val ok = jump(chars) + offset = poff + ok + } + def checkSkipInitWhitespace(c: Char): Boolean = { val poff = offset jumpWhitespace() diff --git a/test/scaladoc/resources/tables.scala b/test/scaladoc/resources/tables.scala index 8b7e4af21574..820ad2ea5a20 100644 --- a/test/scaladoc/resources/tables.scala +++ b/test/scaladoc/resources/tables.scala @@ -144,15 +144,14 @@ package scala.test.scaladoc.tables { */ trait ParagraphEnd - // Known suboptimal behaviour. Candidates for improving later. - /** - * |First \|Header| - * |---|---| - * |\|Content 1| - * |C\|ontent 2| - * |Content\| 3| - * |Content \|4| + * |First \|Header|Second\| Header|Third\|Head\er| + * |:---:|:---|-:| + * |a\|b|cd|ef| + * |\|Content 1||| + * |C\|ontent 2||| + * |Content\| 3||| + * |Content \|4|\|\||\|\|\|\|| * |Content 5\|| */ trait CellMarkerEscaped @@ -161,8 +160,11 @@ package scala.test.scaladoc.tables { * |Domain|Symbol|Operation|Extra| * |---|:---:|---|---| * |Bitwise| \| |Or|| + * |Strange|\|\\||???|\N| */ - trait CellMarkerEscapedTwice + trait CellMarkerEscapeEscapesOnlyMarker + + // Known suboptimal behaviour. Candidates for improving later. /** * ||Header 1|Header 2| diff --git a/test/scaladoc/run/tables.check b/test/scaladoc/run/tables.check index cccd38786dc4..8bbb25e4d1b7 100644 --- a/test/scaladoc/run/tables.check +++ b/test/scaladoc/run/tables.check @@ -4,13 +4,10 @@ newSource:36: warning: Dropping 1 excess table delimiter cells from row. newSource:36: warning: Dropping 1 excess table data cells from row. /** ^ -newSource:160: warning: Dropping 1 excess table data cells from row. +newSource:179: warning: no additional content on same line after table /** ^ -newSource:177: warning: no additional content on same line after table - /** - ^ -newSource:177: warning: Fixing missing delimiter row +newSource:179: warning: Fixing missing delimiter row /** ^ Done. diff --git a/test/scaladoc/run/tables.scala b/test/scaladoc/run/tables.scala index 5685a70f8712..719207af3d8f 100644 --- a/test/scaladoc/run/tables.scala +++ b/test/scaladoc/run/tables.scala @@ -245,38 +245,38 @@ object Test extends ScaladocModelTest { assertBodiesEquals(expected, comment.body) } - /* Deferred Enhancements. 
- * - * When these improvements are made corresponding test updates to any new or - * changed error messages and parsed content and would be included. - */ - - // Deferred pipe escape functionality. withComment("CellMarkerEscaped") { comment => - val header = r("First \\", "Header") - val colOpts = ColumnOptionLeft :: ColumnOptionLeft :: Nil + val header = r("First |Header", "Second| Header", "Third|Head\\er") + val colOpts = ColumnOptionCenter :: ColumnOptionLeft :: ColumnOptionRight :: Nil - val row1 = r("\\", "Content 1") - val row2 = r("C\\", "ontent 2") - val row3 = r("Content\\", " 3") - val row4 = r("Content \\", "4") - val row5 = Row(Cell(List(Paragraph(Text("Content 5\\")))) :: Cell(Nil) :: Nil) + val row1 = r("a|b", "cd", "ef") + val row2 = r("|Content 1", "", "") + val row3 = r("C|ontent 2", "", "") + val row4 = r("Content| 3", "", "") + val row5 = r("Content |4", "||", "||||") + val row6 = Row(Cell(List(Paragraph(Text("Content 5|")))) :: Cell(Nil) :: Cell(Nil) :: Nil) - val rows = row1 :: row2 :: row3 :: row4 :: row5 :: Nil + val rows = row1 :: row2 :: row3 :: row4 :: row5 :: row6 :: Nil assertTableEquals(Table(header, colOpts, rows), comment.body) } - // Deferred pipe escape functionality. - withComment("CellMarkerEscapedTwice") { comment => + withComment("CellMarkerEscapeEscapesOnlyMarker") { comment => val header = r("Domain", "Symbol", "Operation", "Extra") val colOpts = ColumnOptionLeft :: ColumnOptionCenter :: ColumnOptionLeft :: ColumnOptionLeft :: Nil - val row = r("Bitwise", " \\", " ", "Or") + val row1 = r("Bitwise", " | ", "Or", "") + val row2 = r("Strange", raw"|\|", "???", raw"\N") - val rows = row :: Nil + val rows = row1 :: row2 :: Nil assertTableEquals(Table(header, colOpts, rows), comment.body) } + /* Deferred Enhancements. + * + * When these improvements are made corresponding test updates to any new or + * changed error messages and parsed content and would be included. + */ + withComment("MissingInitialCellMark") { comment => val colOpts = ColumnOptionLeft :: Nil From eefee10b7b709da9df2a05dfa5e3d5bd05ab3554 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 26 Sep 2018 16:43:45 +1000 Subject: [PATCH 1509/2793] Optimize Any.## In https://github.com/scala/scala/pull/5098, some duplicated code was consolidated in `scala.runtime`, including changing the code gen for Any.## to call `Statics.anyHash`. This appears to have caused a performance regression because `anyHash` does consecutive type tests for types that require cooperative equality to do more than call `Object.hashCode`. It is faster to optimize for the common case of non-numeric types by adding a fast path for `! x instanceOf Number`. 
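For illustration, the shape of the dispatch after this change, sketched in Scala (the patch itself is to the Java code in `scala.runtime.Statics`; `AnyHashSketch` and `numberHash` are names used only for this sketch, while `Statics.longHash`/`doubleHash`/`floatHash` are the existing helpers):

```scala
import scala.runtime.Statics

object AnyHashSketch {
  // Slow path, reached only for boxed numbers: these need the cooperative-equality
  // hashes so that e.g. 1, 1L and 1.0 all produce the same hash value.
  private def numberHash(n: java.lang.Number): Int = n match {
    case l: java.lang.Long   => Statics.longHash(l.longValue)
    case d: java.lang.Double => Statics.doubleHash(d.doubleValue)
    case f: java.lang.Float  => Statics.floatHash(f.floatValue)
    case _                   => n.hashCode
  }

  // Fast path first: a single Number test lets the common, non-numeric case fall
  // straight through to Object.hashCode instead of a chain of per-type tests.
  def anyHash(x: Any): Int = x match {
    case null                => 0
    case n: java.lang.Number => numberHash(n)
    case _                   => x.hashCode
  }
}
```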
--- src/library/scala/runtime/Statics.java | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/library/scala/runtime/Statics.java b/src/library/scala/runtime/Statics.java index 62390cb9d030..83e0ec6bd7d2 100644 --- a/src/library/scala/runtime/Statics.java +++ b/src/library/scala/runtime/Statics.java @@ -103,12 +103,20 @@ public static int anyHash(Object x) { if (x == null) return 0; + if (x instanceof java.lang.Number) { + return anyHashNumber((java.lang.Number) x); + } + + return x.hashCode(); + } + + private static int anyHashNumber(Number x) { if (x instanceof java.lang.Long) return longHash(((java.lang.Long)x).longValue()); - + if (x instanceof java.lang.Double) return doubleHash(((java.lang.Double)x).doubleValue()); - + if (x instanceof java.lang.Float) return floatHash(((java.lang.Float)x).floatValue()); From 014facccbef9127f0170910b71280064ac308a65 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 25 Sep 2018 15:50:41 +0200 Subject: [PATCH 1510/2793] Consider invariant type params in isPlausiblySubType Adapt a variation on the isSubArg check used in full subtyping. Also reduce the amount of negation in there, polish a bit. Follow up for 9d25000 --- .../tools/nsc/typechecker/Implicits.scala | 62 +++++++++---------- 1 file changed, 31 insertions(+), 31 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 8b3e1003c875..b4405f006ae0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -653,38 +653,38 @@ trait Implicits { loop(tp0, pt0) } - /** This expresses more cleanly in the negative: there's a linear path - * to a final true or false. - */ - private def isPlausiblySubType(tp1: Type, tp2: Type): Boolean = !isImpossibleSubType(tp1, tp2) - private def isImpossibleSubType(tp1: Type, tp2: Type): Boolean = tp1.dealiasWiden match { - // We can only rule out a subtype relationship if the left hand - // side is a class, else we may not know enough. - case tr1 @ TypeRef(_, sym1, args1) if sym1.isClass => - def typeRefHasMember(tp: TypeRef, name: Name) = { - tp.baseClasses.exists(_.info.decls.lookupEntry(name) != null) - } - - def existentialUnderlying(t: Type) = t match { - case et: ExistentialType => et.underlying - case tp => tp - } - val tp2Bounds = existentialUnderlying(tp2.dealiasWiden.bounds.hi) - tp2Bounds match { - case TypeRef(_, sym2, args2) if sym2 ne SingletonClass => - val impossible = if ((sym1 eq sym2) && (args1 ne Nil)) !corresponds3(sym1.typeParams, args1, args2) {(tparam, arg1, arg2) => - if (tparam.isCovariant) isPlausiblySubType(arg1, arg2) else isPlausiblySubType(arg2, arg1) - } else { - ((sym1 eq ByNameParamClass) != (sym2 eq ByNameParamClass)) || (sym2.isClass && !(sym1 isWeakSubClass sym2)) + private def isImpossibleSubType(tp1: Type, tp2: Type): Boolean = !isPlausiblySubType(tp1, tp2) + private def isPlausiblySubType(tp1: Type, tp2: Type): Boolean = + tp1.dealiasWiden match { + // We only know enough to rule out a subtype relationship if the left hand side is a class. + case tr1@TypeRef(_, sym1, args1) if sym1.isClass => + val tp2Wide = + tp2.dealiasWiden.bounds.hi match { + case et: ExistentialType => et.underlying // OPT meant as cheap approximation of skolemizeExistential? 
+ case tp => tp } - impossible - case RefinedType(parents, decls) => - val impossible = decls.nonEmpty && !typeRefHasMember(tr1, decls.head.name) // opt avoid full call to .member - impossible - case _ => false - } - case _ => false - } + tp2Wide match { + case TypeRef(_, sym2, args2) if sym2 ne SingletonClass => + // The order of these two checks can be material for performance (scala/bug#8478) + def isSubArg(tparam: Symbol, t1: Type, t2: Type) = + (!tparam.isContravariant || isPlausiblySubType(t2, t1)) && + (!tparam.isCovariant || isPlausiblySubType(t1, t2)) + + if ((sym1 eq sym2) && (args1 ne Nil)) corresponds3(sym1.typeParams, args1, args2)(isSubArg) + else (sym1 eq ByNameParamClass) == (sym2 eq ByNameParamClass) && (!sym2.isClass || (sym1 isWeakSubClass sym2)) + case RefinedType(parents, decls) => + // OPT avoid full call to .member + decls.isEmpty || { + // Do any of the base classes of the class on the left declare the first member in the refinement on the right? + // (We randomly pick the first member as a good candidate for eliminating this subtyping pair.) + val firstDeclName = decls.head.name + tr1.baseClasses.exists(_.info.decls.lookupEntry(firstDeclName) != null) + } + + case _ => true + } + case _ => true + } private def typedImplicit0(info: ImplicitInfo, ptChecked: Boolean, isLocalToCallsite: Boolean): SearchResult = { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(plausiblyCompatibleImplicits) From 251ede1a4bc80d250565881f33d2e380d3640c9c Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Wed, 26 Sep 2018 09:56:04 +0200 Subject: [PATCH 1511/2793] Revert "[mergeforward] Make nested implicit type error suppression unconditional" This reverts commit 0a8e00cb1872e1c032c5f57a447743d27790ba2e. --- src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 5 +++-- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 66763028f686..7aa71cfda051 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -106,8 +106,9 @@ trait ContextErrors { def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) } def typeErrorMsg(context: Context, found: Type, req: Type) = - if (context.openImplicits.nonEmpty && !settings.XlogImplicits.value) - // OPT: avoid error string creation for errors that won't see the light of day + if (context.openImplicits.nonEmpty && !settings.XlogImplicits.value && settings.isScala213) + // OPT: avoid error string creation for errors that won't see the light of day, but predicate + // this on -Xsource:2.13 for bug compatibility with https://github.com/scala/scala/pull/7147#issuecomment-418233611 "type mismatch" else "type mismatch" + foundReqMsg(found, req) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index b4405f006ae0..9be8927d51f2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1018,7 +1018,7 @@ trait Implicits { if(wildPtNotInstantiable || matchesPtInst(firstPending)) typedImplicit(firstPending, ptChecked = true, isLocalToCallsite) else SearchFailure - if (typedFirstPending.isFailure) + if (typedFirstPending.isFailure && settings.isScala213) 
undoLog.undoTo(mark) // Don't accumulate constraints from typechecking or type error message creation for failed candidates // Pass the errors to `DivergentImplicitRecovery` so that it can note From 33bbe7aeb147305e9748b42ab3fdda8dc25c43bc Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 27 Sep 2018 15:23:25 +0200 Subject: [PATCH 1512/2793] Bump version to 2.12.8, restarr onto 2.12.7 --- build.sbt | 2 +- versions.properties | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/build.sbt b/build.sbt index 2b8c8c62b66e..0745a1eb0d7c 100644 --- a/build.sbt +++ b/build.sbt @@ -106,7 +106,7 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( // should not be set directly. It is the same as the Maven version and derived automatically from `baseVersion` and // `baseVersionSuffix`. globalVersionSettings -baseVersion in Global := "2.12.7" +baseVersion in Global := "2.12.8" baseVersionSuffix in Global := "SNAPSHOT" mimaReferenceVersion in Global := Some("2.12.0") diff --git a/versions.properties b/versions.properties index 72fd78bfc3d5..a3bee7a23d7f 100644 --- a/versions.properties +++ b/versions.properties @@ -1,5 +1,5 @@ # Scala version used for bootstrapping (see README.md) -starr.version=2.12.6 +starr.version=2.12.7 # The scala.binary.version determines how modules are resolved. It is set as follows: # - After 2.x.0 is released, the binary version is 2.x From 3093fc6b96ca66e12db68018539355273fa49385 Mon Sep 17 00:00:00 2001 From: Miles Sabin Date: Mon, 1 Oct 2018 13:26:33 +0100 Subject: [PATCH 1513/2793] Don't reject views with result types which are TypeVars On the matchesPtInst fast path views are pruned without first being applied. This can result in a false negative in HasMethodMatching if the view has a result type which is a not fully instantiated TypeVar. The fix is to fall back to the slow path in that case. Fixes scala/bug#11174. 
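For orientation, a stripped-down version of the affected pattern (a sketch only, modelled on the regression test t11174.scala added below; the names here are illustrative):

```scala
import scala.language.{higherKinds, implicitConversions}

object ViewFastPathSketch {
  trait CtorType[P] { def foo(p: P): P = p }
  class Props[P] extends CtorType[P]

  trait Component[CT[p] <: CtorType[p]]

  // The candidate view: its result type CT[Int] still involves the unsolved type
  // variable CT when the fast path asks whether the result could have a `foo` member.
  implicit def ctor[CT[p] <: CtorType[p]](c: Component[CT]): CT[Int] = ???

  // Compiles only if the view is not pruned on the fast path (with this fix it is
  // routed to the slow path instead of being rejected outright).
  def use(c: Component[Props]): Int = c.foo(23)
}
```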
--- .../tools/nsc/typechecker/Implicits.scala | 45 +++++++++++++------ test/files/pos/t11174.scala | 18 ++++++++ 2 files changed, 49 insertions(+), 14 deletions(-) create mode 100644 test/files/pos/t11174.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 9be8927d51f2..0147b910eacb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -566,23 +566,39 @@ trait Implicits { } private def matchesPtInst(info: ImplicitInfo): Boolean = { + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstCalls) + def isViewLike = pt match { case Function1(_, _) => true case _ => false } - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstCalls) - info.tpe match { - case PolyType(tparams, restpe) => - try { - val allUndetparams = (undetParams ++ tparams).distinct - val tvars = allUndetparams map freshVar - val tp = ApproximateDependentMap(restpe) - val tpInstantiated = tp.instantiateTypeParams(allUndetparams, tvars) + info.tpe match { + case PolyType(tparams, restpe) => + try { + val allUndetparams = (undetParams ++ tparams).distinct + val tvars = allUndetparams map freshVar + val tp = ApproximateDependentMap(restpe) + val tpInstantiated = tp.instantiateTypeParams(allUndetparams, tvars) + + if(isView || isViewLike) { + tpInstantiated match { + case MethodType(_, tv: TypeVar) if !tv.instValid => + // views with result types which have an uninstantiated type variable as their outer type + // constructor might not match correctly against the view template until they have been + // fully applied so we fall back to the slow path. + true + case _ => + matchesPt(tpInstantiated, wildPt, allUndetparams) || { + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch1) + false + } + } + } else { if(!matchesPt(tpInstantiated, wildPt, allUndetparams)) { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch1) false - } else if(!isView && !isViewLike) { + } else { // we can't usefully prune views any further because we would need to type an application // of the view to the term as is done in the computation of itree2 in typedImplicit1. val targs = solvedTypes(tvars, allUndetparams, allUndetparams map varianceInType(wildPt), upper = false, lubDepth(tpInstantiated :: wildPt :: Nil)) @@ -593,12 +609,13 @@ trait Implicits { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch2) false } else true - } else true - } catch { - case _: NoInstance => false + } } - case _ => true - } + } catch { + case _: NoInstance => false + } + case _ => true + } } /** Capturing the overlap between isPlausiblyCompatible and normSubType. diff --git a/test/files/pos/t11174.scala b/test/files/pos/t11174.scala new file mode 100644 index 000000000000..eac254adbfb1 --- /dev/null +++ b/test/files/pos/t11174.scala @@ -0,0 +1,18 @@ +trait CtorType[P] +class Props[P] extends CtorType[P] { + def foo(props: P): P = ??? +} + +object Generic { + implicit def toComponentCtor[CT[p] <: CtorType[p]](c: ComponentSimple[CT]): CT[Int] = ??? + + trait ComponentSimple[CT[p] <: CtorType[p]] +} + +object Test { + import Generic._ + + val c: ComponentSimple[Props] = ??? 
+ toComponentCtor(c).foo(23) + c.foo(23) +} From 5f48045483bdfaab8dc382ac5ef597257fd199f8 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 2 Oct 2018 15:17:33 +1000 Subject: [PATCH 1514/2793] Add the name of recently added ContextMode to the name map --- src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 1fd78e478858..5eae827baa2b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1654,7 +1654,8 @@ object ContextMode { SuperInit -> "SuperInit", SecondTry -> "SecondTry", TypeConstructorAllowed -> "TypeConstructorAllowed", - SuppressDeadArgWarning -> "SuppressDeadArgWarning" + SuppressDeadArgWarning -> "SuppressDeadArgWarning", + DiagUsedDefaults -> "DiagUsedDefaults" ) } From edd8c781890dbce64509fa3844fadf28c68bc281 Mon Sep 17 00:00:00 2001 From: Miles Sabin Date: Tue, 2 Oct 2018 21:26:55 +0100 Subject: [PATCH 1515/2793] When matching views instantiate TypeVars to their bounds Implicits to satisfy views are matched against a search template. To match correctly against the template, TypeVars in the candidates type are replaced by their upper bounds once those bounds have been solved as far as possible against the template. --- .../tools/nsc/typechecker/Implicits.scala | 65 ++++++++++--------- test/files/pos/t11174b.scala | 32 +++++++++ test/files/pos/t11174c.scala | 18 +++++ 3 files changed, 85 insertions(+), 30 deletions(-) create mode 100644 test/files/pos/t11174b.scala create mode 100644 test/files/pos/t11174c.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 0147b910eacb..af3ace435dc9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -566,39 +566,45 @@ trait Implicits { } private def matchesPtInst(info: ImplicitInfo): Boolean = { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstCalls) - def isViewLike = pt match { case Function1(_, _) => true case _ => false } - info.tpe match { - case PolyType(tparams, restpe) => - try { - val allUndetparams = (undetParams ++ tparams).distinct - val tvars = allUndetparams map freshVar - val tp = ApproximateDependentMap(restpe) - val tpInstantiated = tp.instantiateTypeParams(allUndetparams, tvars) - - if(isView || isViewLike) { - tpInstantiated match { - case MethodType(_, tv: TypeVar) if !tv.instValid => - // views with result types which have an uninstantiated type variable as their outer type - // constructor might not match correctly against the view template until they have been - // fully applied so we fall back to the slow path. 
- true - case _ => - matchesPt(tpInstantiated, wildPt, allUndetparams) || { - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch1) - false - } + object tvarToHiBoundMap extends TypeMap { + def apply(tp: Type): Type = tp match { + case tv@TypeVar(_, constr) if !constr.instValid => + val upper = glb(constr.hiBounds) + if(tv.typeArgs.isEmpty) upper + else appliedType(upper, tv.typeArgs) + case _ => mapOver(tp) + } + } + + if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstCalls) + info.tpe match { + case PolyType(tparams, restpe) => + try { + val allUndetparams = (undetParams ++ tparams).distinct + val tvars = allUndetparams map freshVar + val tp = ApproximateDependentMap(restpe) + val tpInstantiated = { + val tpInstantiated0 = tp.instantiateTypeParams(allUndetparams, tvars) + if(!isView) tpInstantiated0 + else { + // Implicits to satisfy views are matched against a search template. To + // match correctly against the template, TypeVars in the candidates type + // are replaced by their upper bounds once those bounds have solved as + // far as possible against the template. + normSubType(tpInstantiated0, wildPt) + tvarToHiBoundMap(tpInstantiated0) + } } - } else { + if(!matchesPt(tpInstantiated, wildPt, allUndetparams)) { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch1) false - } else { + } else if(!isView && !isViewLike) { // we can't usefully prune views any further because we would need to type an application // of the view to the term as is done in the computation of itree2 in typedImplicit1. val targs = solvedTypes(tvars, allUndetparams, allUndetparams map varianceInType(wildPt), upper = false, lubDepth(tpInstantiated :: wildPt :: Nil)) @@ -609,13 +615,12 @@ trait Implicits { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch2) false } else true - } + } else true + } catch { + case _: NoInstance => false } - } catch { - case _: NoInstance => false - } - case _ => true - } + case _ => true + } } /** Capturing the overlap between isPlausiblyCompatible and normSubType. diff --git a/test/files/pos/t11174b.scala b/test/files/pos/t11174b.scala new file mode 100644 index 000000000000..07e05f838fae --- /dev/null +++ b/test/files/pos/t11174b.scala @@ -0,0 +1,32 @@ +class From { + class To[T] { + def foo(t: T): T = t + } +} + +object Test { + implicit def conv[T](x: From): x.To[T] = ??? + + val from: From = ??? + from.foo(23) +} + +/* +class From { + type To[T] +} + +class FromSub extends From { + class To[T] { + def foo(t: T): T = t + } +} + +object Test { + implicit def conv[T](x: From): x.To[T] = ??? + + val from: FromSub = ??? + conv(from).foo(23) + //from.foo(23) +} +*/ diff --git a/test/files/pos/t11174c.scala b/test/files/pos/t11174c.scala new file mode 100644 index 000000000000..75a48e5e60e1 --- /dev/null +++ b/test/files/pos/t11174c.scala @@ -0,0 +1,18 @@ +trait CtorType +class Props extends CtorType { + def foo(props: Int): Int = ??? +} + +object Generic { + implicit def toComponentCtor[CT <: CtorType](c: ComponentSimple[CT]): CT = ??? + + trait ComponentSimple[CT <: CtorType] +} + +object Test { + import Generic._ + + val c: ComponentSimple[Props] = ??? 
+ toComponentCtor(c).foo(23) + c.foo(23) +} From 5933c1ab4cbf05ea5fc23849eafb139c0216b0e9 Mon Sep 17 00:00:00 2001 From: Miles Sabin Date: Thu, 4 Oct 2018 11:10:40 +0100 Subject: [PATCH 1516/2793] Take views off the fast path --- .../tools/nsc/typechecker/Implicits.scala | 43 +++---------------- test/files/pos/t11174b.scala | 20 --------- 2 files changed, 7 insertions(+), 56 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index af3ace435dc9..96c067c38b7f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -566,21 +566,6 @@ trait Implicits { } private def matchesPtInst(info: ImplicitInfo): Boolean = { - def isViewLike = pt match { - case Function1(_, _) => true - case _ => false - } - - object tvarToHiBoundMap extends TypeMap { - def apply(tp: Type): Type = tp match { - case tv@TypeVar(_, constr) if !constr.instValid => - val upper = glb(constr.hiBounds) - if(tv.typeArgs.isEmpty) upper - else appliedType(upper, tv.typeArgs) - case _ => mapOver(tp) - } - } - if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstCalls) info.tpe match { case PolyType(tparams, restpe) => @@ -588,25 +573,11 @@ trait Implicits { val allUndetparams = (undetParams ++ tparams).distinct val tvars = allUndetparams map freshVar val tp = ApproximateDependentMap(restpe) - val tpInstantiated = { - val tpInstantiated0 = tp.instantiateTypeParams(allUndetparams, tvars) - if(!isView) tpInstantiated0 - else { - // Implicits to satisfy views are matched against a search template. To - // match correctly against the template, TypeVars in the candidates type - // are replaced by their upper bounds once those bounds have solved as - // far as possible against the template. - normSubType(tpInstantiated0, wildPt) - tvarToHiBoundMap(tpInstantiated0) - } - } - + val tpInstantiated = tp.instantiateTypeParams(allUndetparams, tvars) if(!matchesPt(tpInstantiated, wildPt, allUndetparams)) { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch1) false - } else if(!isView && !isViewLike) { - // we can't usefully prune views any further because we would need to type an application - // of the view to the term as is done in the computation of itree2 in typedImplicit1. + } else { val targs = solvedTypes(tvars, allUndetparams, allUndetparams map varianceInType(wildPt), upper = false, lubDepth(tpInstantiated :: wildPt :: Nil)) val AdjustedTypeArgs(okParams, okArgs) = adjustTypeArgs(allUndetparams, tvars, targs) val remainingUndet = allUndetparams diff okParams @@ -615,7 +586,7 @@ trait Implicits { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch2) false } else true - } else true + } } catch { case _: NoInstance => false } @@ -1015,9 +986,9 @@ trait Implicits { * - if it matches, forget about all others it improves upon */ - // the pt for views can have embedded unification type variables, BoundedWildcardTypes or - // Nothings which can't be solved for. Rather than attempt to patch things up later we - // just skip those cases altogether. + // the pt can have embedded unification type variables, BoundedWildcardTypes or Nothings + // which can't be solved for. Rather than attempt to patch things up later we just skip + // those cases altogether. 
lazy val wildPtNotInstantiable = wildPt.exists { case _: BoundedWildcardType | _: TypeVar => true ; case tp if typeIsNothing(tp) => true; case _ => false } @@ -1037,7 +1008,7 @@ trait Implicits { val mark = undoLog.log val typedFirstPending = - if(wildPtNotInstantiable || matchesPtInst(firstPending)) + if(isView || wildPtNotInstantiable || matchesPtInst(firstPending)) typedImplicit(firstPending, ptChecked = true, isLocalToCallsite) else SearchFailure if (typedFirstPending.isFailure && settings.isScala213) diff --git a/test/files/pos/t11174b.scala b/test/files/pos/t11174b.scala index 07e05f838fae..3537e6d16160 100644 --- a/test/files/pos/t11174b.scala +++ b/test/files/pos/t11174b.scala @@ -10,23 +10,3 @@ object Test { val from: From = ??? from.foo(23) } - -/* -class From { - type To[T] -} - -class FromSub extends From { - class To[T] { - def foo(t: T): T = t - } -} - -object Test { - implicit def conv[T](x: From): x.To[T] = ??? - - val from: FromSub = ??? - conv(from).foo(23) - //from.foo(23) -} -*/ From 7b02474c232d7397c0b50461dffc0113a0c411e9 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Tue, 2 Oct 2018 13:58:57 -0400 Subject: [PATCH 1517/2793] [nomerge] Apache License Version 2.0 See https://www.scala-lang.org/news/license-change.html --- LICENSE | 231 +++++++++++++++++++++++++++++----- NOTICE | 15 +++ build.sbt | 44 +++++-- doc/LICENSE.md | 36 ++---- project/GenerateAnyVals.scala | 18 +-- project/plugins.sbt | 2 + src/build/genprod.scala | 38 +++--- 7 files changed, 303 insertions(+), 81 deletions(-) diff --git a/LICENSE b/LICENSE index 57f166ceab73..8a51149ff94b 100644 --- a/LICENSE +++ b/LICENSE @@ -1,28 +1,203 @@ -Copyright (c) 2002-2018 EPFL -Copyright (c) 2011-2018 Lightbend, Inc. - -All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - * Neither the name of the EPFL nor the names of its contributors - may be used to endorse or promote products derived from this software - without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR -CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, -EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright (c) 2002-2018 EPFL + Copyright (c) 2011-2018 Lightbend, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/NOTICE b/NOTICE index a15b912aa44f..2c4ab263d386 100644 --- a/NOTICE +++ b/NOTICE @@ -1 +1,16 @@ +Scala +Copyright (c) 2002-2018 EPFL +Copyright (c) 2011-2018 Lightbend, Inc. + +Scala includes software developed at +LAMP/EPFL (https://lamp.epfl.ch/) and +Lightbend, Inc. (https://www.lightbend.com/). + +Licensed under the Apache License, Version 2.0 (the "License"). +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + This software includes projects with other licenses -- see `doc/LICENSE.md`. diff --git a/build.sbt b/build.sbt index 0745a1eb0d7c..fa233d7d6a68 100644 --- a/build.sbt +++ b/build.sbt @@ -106,14 +106,30 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( // should not be set directly. It is the same as the Maven version and derived automatically from `baseVersion` and // `baseVersionSuffix`. globalVersionSettings -baseVersion in Global := "2.12.8" +baseVersion in Global := "2.12.8" baseVersionSuffix in Global := "SNAPSHOT" +organization in ThisBuild := "org.scala-lang" +homepage in ThisBuild := Some(url("https://www.scala-lang.org")) +startYear in ThisBuild := Some(2002) +licenses in ThisBuild += (("Apache-2.0", url("https://www.apache.org/licenses/LICENSE-2.0"))) +headerLicense in ThisBuild := Some(HeaderLicense.Custom( + s"""Scala (${(homepage in ThisBuild).value.get}) + | + |Copyright EPFL and Lightbend, Inc. + | + |Licensed under Apache License 2.0 + |(http://www.apache.org/licenses/LICENSE-2.0). + | + |See the NOTICE file distributed with this work for + |additional information regarding copyright ownership. 
+ |""".stripMargin +)) + mimaReferenceVersion in Global := Some("2.12.0") -scalaVersion in Global := versionProps("starr.version") +scalaVersion in Global := versionProps("starr.version") lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings ++ Seq[Setting[_]]( - organization := "org.scala-lang", // we don't cross build Scala itself crossPaths := false, // do not add Scala library jar as a dependency automatically @@ -191,9 +207,6 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings + "-doc-source-url", s"https://github.com/scala/scala/tree/${versionProperties.value.githubTree}€{FILE_PATH}.scala#L1" ), incOptions := (incOptions in LocalProject("root")).value, - homepage := Some(url("http://www.scala-lang.org")), - startYear := Some(2002), - licenses += (("BSD 3-Clause", url("http://www.scala-lang.org/license.html"))), apiURL := Some(url("http://www.scala-lang.org/api/" + versionProperties.value.mavenVersion + "/")), pomIncludeRepository := { _ => false }, pomExtra := { @@ -224,6 +237,7 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings + case None => base } }, + headerLicense := (headerLicense in ThisBuild).value, // Remove auto-generated manifest attributes packageOptions in Compile in packageBin := Seq.empty, packageOptions in Compile in packageSrc := Seq.empty, @@ -593,7 +607,23 @@ lazy val scalap = configureAsSubproject(project) "/project/name" -> Scalap, "/project/description" -> bytecode analysis tool, "/project/properties" -> scala.xml.Text("") - ) + ), + headerLicense := Some(HeaderLicense.Custom( + s"""Scala classfile decoder (${(homepage in ThisBuild).value.get}) + | + |Copyright EPFL and Lightbend, Inc. + | + |Licensed under Apache License 2.0 + |(http://www.apache.org/licenses/LICENSE-2.0). + | + |See the NOTICE file distributed with this work for + |additional information regarding copyright ownership. + |""".stripMargin)), + (headerSources in Compile) ~= { xs => + val excluded = Set("Memoisable.scala", "Result.scala", "Rule.scala", "Rules.scala", "SeqRule.scala") + xs filter { x => !excluded(x.getName) } + }, + (headerResources in Compile) := Nil ) .dependsOn(compiler) diff --git a/doc/LICENSE.md b/doc/LICENSE.md index fd489c64b7ac..cd337666c94d 100644 --- a/doc/LICENSE.md +++ b/doc/LICENSE.md @@ -1,4 +1,4 @@ -Scala is licensed under the [BSD 3-Clause License](http://opensource.org/licenses/BSD-3-Clause). +Scala is licensed under the [Apache License Version 2.0](https://www.apache.org/licenses/LICENSE-2.0). ## Scala License @@ -8,29 +8,17 @@ Copyright (c) 2011-2018 Lightbend, Inc. All rights reserved. -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, - this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - * Neither the name of the EPFL nor the names of its contributors - may be used to endorse or promote products derived from this software - without specific prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR -CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, -EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, -PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR -PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF -LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. # Other Licenses diff --git a/project/GenerateAnyVals.scala b/project/GenerateAnyVals.scala index b8078c607b6b..18c9f20d7c20 100644 --- a/project/GenerateAnyVals.scala +++ b/project/GenerateAnyVals.scala @@ -271,13 +271,17 @@ import scala.language.implicitConversions""" } trait GenerateAnyValTemplates { - def headerTemplate = """/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ + def headerTemplate = """/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ // DO NOT EDIT, CHANGES WILL BE LOST // This auto-generated code can be modified in "project/GenerateAnyVals.scala". diff --git a/project/plugins.sbt b/project/plugins.sbt index 351c52084c6a..7a95a915a763 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -29,3 +29,5 @@ libraryDependencies ++= Seq( concurrentRestrictions in Global := Seq( Tags.limitAll(1) // workaround for https://github.com/sbt/sbt/issues/2970 ) + +addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.0.0") diff --git a/src/build/genprod.scala b/src/build/genprod.scala index a45dc752cc6b..f5485a9eb46c 100644 --- a/src/build/genprod.scala +++ b/src/build/genprod.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /** This program generates the ProductN, TupleN, FunctionN, * and AbstractFunctionN, where 0 <= N <= MaxArity. 
@@ -57,14 +61,18 @@ object genprod extends App { def packageDef = "scala" def imports = "" - def header = """ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ + def header = """/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT.%s package %s From 8baf5c38415cb6f46a0203c0f83878bf49e84290 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 5 Oct 2018 11:05:51 +0200 Subject: [PATCH 1518/2793] [nomerge] license headers for generated sources Re-generated by `generateSources` sbt task, and the JavaUniverseForce test (test/files/run/t6240-universe-code-gen.scala) Also update headers under library-aux and in some test files, as the `headerCreate` task won't touch those. --- src/library-aux/scala/Any.scala | 18 +++++++++------- src/library-aux/scala/AnyRef.scala | 18 +++++++++------- src/library-aux/scala/Nothing.scala | 18 +++++++++------- src/library-aux/scala/Null.scala | 18 +++++++++------- src/library/scala/Boolean.scala | 18 +++++++++------- src/library/scala/Byte.scala | 18 +++++++++------- src/library/scala/Char.scala | 18 +++++++++------- src/library/scala/Double.scala | 18 +++++++++------- src/library/scala/Float.scala | 18 +++++++++------- src/library/scala/Function0.scala | 21 ++++++++++++------- src/library/scala/Function1.scala | 19 ++++++++++------- src/library/scala/Function10.scala | 19 ++++++++++------- src/library/scala/Function11.scala | 19 ++++++++++------- src/library/scala/Function12.scala | 19 ++++++++++------- src/library/scala/Function13.scala | 19 ++++++++++------- src/library/scala/Function14.scala | 19 ++++++++++------- src/library/scala/Function15.scala | 19 ++++++++++------- src/library/scala/Function16.scala | 19 ++++++++++------- src/library/scala/Function17.scala | 19 ++++++++++------- src/library/scala/Function18.scala | 19 ++++++++++------- src/library/scala/Function19.scala | 19 ++++++++++------- src/library/scala/Function2.scala | 19 ++++++++++------- src/library/scala/Function20.scala | 19 ++++++++++------- src/library/scala/Function21.scala | 19 ++++++++++------- src/library/scala/Function22.scala | 19 ++++++++++------- src/library/scala/Function3.scala | 19 ++++++++++------- src/library/scala/Function4.scala | 19 ++++++++++------- src/library/scala/Function5.scala | 19 ++++++++++------- src/library/scala/Function6.scala | 19 ++++++++++------- src/library/scala/Function7.scala | 19 ++++++++++------- src/library/scala/Function8.scala | 19 ++++++++++------- src/library/scala/Function9.scala | 19 ++++++++++------- src/library/scala/Int.scala | 18 +++++++++------- src/library/scala/Long.scala | 18 +++++++++------- src/library/scala/Product1.scala | 19 ++++++++++------- src/library/scala/Product10.scala | 19 ++++++++++------- src/library/scala/Product11.scala | 19 ++++++++++------- src/library/scala/Product12.scala | 19 ++++++++++------- src/library/scala/Product13.scala | 19 ++++++++++------- src/library/scala/Product14.scala | 19 ++++++++++------- src/library/scala/Product15.scala | 19 ++++++++++------- src/library/scala/Product16.scala | 19 ++++++++++------- src/library/scala/Product17.scala | 19 ++++++++++------- 
src/library/scala/Product18.scala | 19 ++++++++++------- src/library/scala/Product19.scala | 19 ++++++++++------- src/library/scala/Product2.scala | 19 ++++++++++------- src/library/scala/Product20.scala | 19 ++++++++++------- src/library/scala/Product21.scala | 19 ++++++++++------- src/library/scala/Product22.scala | 19 ++++++++++------- src/library/scala/Product3.scala | 19 ++++++++++------- src/library/scala/Product4.scala | 19 ++++++++++------- src/library/scala/Product5.scala | 19 ++++++++++------- src/library/scala/Product6.scala | 19 ++++++++++------- src/library/scala/Product7.scala | 19 ++++++++++------- src/library/scala/Product8.scala | 19 ++++++++++------- src/library/scala/Product9.scala | 19 ++++++++++------- src/library/scala/Short.scala | 18 +++++++++------- src/library/scala/Tuple1.scala | 19 ++++++++++------- src/library/scala/Tuple10.scala | 19 ++++++++++------- src/library/scala/Tuple11.scala | 19 ++++++++++------- src/library/scala/Tuple12.scala | 19 ++++++++++------- src/library/scala/Tuple13.scala | 19 ++++++++++------- src/library/scala/Tuple14.scala | 19 ++++++++++------- src/library/scala/Tuple15.scala | 19 ++++++++++------- src/library/scala/Tuple16.scala | 19 ++++++++++------- src/library/scala/Tuple17.scala | 19 ++++++++++------- src/library/scala/Tuple18.scala | 19 ++++++++++------- src/library/scala/Tuple19.scala | 19 ++++++++++------- src/library/scala/Tuple2.scala | 19 ++++++++++------- src/library/scala/Tuple20.scala | 19 ++++++++++------- src/library/scala/Tuple21.scala | 19 ++++++++++------- src/library/scala/Tuple22.scala | 19 ++++++++++------- src/library/scala/Tuple3.scala | 19 ++++++++++------- src/library/scala/Tuple4.scala | 19 ++++++++++------- src/library/scala/Tuple5.scala | 19 ++++++++++------- src/library/scala/Tuple6.scala | 19 ++++++++++------- src/library/scala/Tuple7.scala | 19 ++++++++++------- src/library/scala/Tuple8.scala | 19 ++++++++++------- src/library/scala/Tuple9.scala | 19 ++++++++++------- src/library/scala/Unit.scala | 18 +++++++++------- .../scala/runtime/AbstractFunction0.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction1.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction10.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction11.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction12.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction13.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction14.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction15.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction16.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction17.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction18.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction19.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction2.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction20.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction21.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction22.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction3.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction4.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction5.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction6.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction7.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction8.scala | 19 ++++++++++------- .../scala/runtime/AbstractFunction9.scala | 19 ++++++++++------- 
.../reflect/runtime/JavaUniverseForce.scala | 12 +++++++++++ test/files/pos/spec-Function1.scala | 18 +++++++++------- test/files/pos/t5644/BoxesRunTime.java | 20 ++++++++++-------- test/files/run/t6240-universe-code-gen.scala | 14 ++++++++++++- .../scala/util/control/ExceptionTest.scala | 18 +++++++++------- test/scaladoc/resources/doc-root/Any.scala | 18 +++++++++------- test/scaladoc/resources/doc-root/AnyRef.scala | 18 +++++++++------- .../scaladoc/resources/doc-root/Nothing.scala | 18 +++++++++------- test/scaladoc/resources/doc-root/Null.scala | 18 +++++++++------- 112 files changed, 1326 insertions(+), 774 deletions(-) diff --git a/src/library-aux/scala/Any.scala b/src/library-aux/scala/Any.scala index 68ec04e9c44c..f0d55190a669 100644 --- a/src/library-aux/scala/Any.scala +++ b/src/library-aux/scala/Any.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library-aux/scala/AnyRef.scala b/src/library-aux/scala/AnyRef.scala index 67090bae473b..fabb1a7f51a9 100644 --- a/src/library-aux/scala/AnyRef.scala +++ b/src/library-aux/scala/AnyRef.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library-aux/scala/Nothing.scala b/src/library-aux/scala/Nothing.scala index 57f6fac3f9e3..d52353e3f314 100644 --- a/src/library-aux/scala/Nothing.scala +++ b/src/library-aux/scala/Nothing.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library-aux/scala/Null.scala b/src/library-aux/scala/Null.scala index 931beb2d1a7e..66f544dc6374 100644 --- a/src/library-aux/scala/Null.scala +++ b/src/library-aux/scala/Null.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/Boolean.scala b/src/library/scala/Boolean.scala index 017f10a28334..a208f863346f 100644 --- a/src/library/scala/Boolean.scala +++ b/src/library/scala/Boolean.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ // DO NOT EDIT, CHANGES WILL BE LOST // This auto-generated code can be modified in "project/GenerateAnyVals.scala". diff --git a/src/library/scala/Byte.scala b/src/library/scala/Byte.scala index 9a51e9e45dad..25e668033061 100644 --- a/src/library/scala/Byte.scala +++ b/src/library/scala/Byte.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ // DO NOT EDIT, CHANGES WILL BE LOST // This auto-generated code can be modified in "project/GenerateAnyVals.scala". diff --git a/src/library/scala/Char.scala b/src/library/scala/Char.scala index ff3246f7d605..ad88d1721fdc 100644 --- a/src/library/scala/Char.scala +++ b/src/library/scala/Char.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ // DO NOT EDIT, CHANGES WILL BE LOST // This auto-generated code can be modified in "project/GenerateAnyVals.scala". diff --git a/src/library/scala/Double.scala b/src/library/scala/Double.scala index 08bcb9fefcdd..fb90a6e291d4 100644 --- a/src/library/scala/Double.scala +++ b/src/library/scala/Double.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ // DO NOT EDIT, CHANGES WILL BE LOST // This auto-generated code can be modified in "project/GenerateAnyVals.scala". 
diff --git a/src/library/scala/Float.scala b/src/library/scala/Float.scala index 01fdbc00e48c..af4d3d0a7e98 100644 --- a/src/library/scala/Float.scala +++ b/src/library/scala/Float.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ // DO NOT EDIT, CHANGES WILL BE LOST // This auto-generated code can be modified in "project/GenerateAnyVals.scala". diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala index cfcc7b3726fe..82f464564b23 100644 --- a/src/library/scala/Function0.scala +++ b/src/library/scala/Function0.scala @@ -1,12 +1,17 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. -// genprod generated these sources at: Wed May 30 22:17:36 CEST 2018 +// genprod generated these sources at: Fri Oct 05 11:04:52 CEST 2018 package scala diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala index 572901c6f357..e9b67af8502d 100644 --- a/src/library/scala/Function1.scala +++ b/src/library/scala/Function1.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function10.scala b/src/library/scala/Function10.scala index 7789970a4418..2bf6ecf08065 100644 --- a/src/library/scala/Function10.scala +++ b/src/library/scala/Function10.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Function11.scala b/src/library/scala/Function11.scala index d4276f3fd109..1842fbca065e 100644 --- a/src/library/scala/Function11.scala +++ b/src/library/scala/Function11.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function12.scala b/src/library/scala/Function12.scala index dfa8bcfce66b..080d4b4c4156 100644 --- a/src/library/scala/Function12.scala +++ b/src/library/scala/Function12.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function13.scala b/src/library/scala/Function13.scala index 5404c208bf31..448b96e2fdfe 100644 --- a/src/library/scala/Function13.scala +++ b/src/library/scala/Function13.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function14.scala b/src/library/scala/Function14.scala index 3145290bcfa2..2f6569a1c3a0 100644 --- a/src/library/scala/Function14.scala +++ b/src/library/scala/Function14.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Function15.scala b/src/library/scala/Function15.scala index 309ef53e71c3..0af3fb8bc378 100644 --- a/src/library/scala/Function15.scala +++ b/src/library/scala/Function15.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function16.scala b/src/library/scala/Function16.scala index c4cb107e872e..cf883964dcfd 100644 --- a/src/library/scala/Function16.scala +++ b/src/library/scala/Function16.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function17.scala b/src/library/scala/Function17.scala index 005ae2ab79dc..4dc75ffe2604 100644 --- a/src/library/scala/Function17.scala +++ b/src/library/scala/Function17.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function18.scala b/src/library/scala/Function18.scala index 371630dae319..f8705fd1c208 100644 --- a/src/library/scala/Function18.scala +++ b/src/library/scala/Function18.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Function19.scala b/src/library/scala/Function19.scala index 95c60a467e50..b4c661886e87 100644 --- a/src/library/scala/Function19.scala +++ b/src/library/scala/Function19.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala index e2c094ea4022..0d02341c40ec 100644 --- a/src/library/scala/Function2.scala +++ b/src/library/scala/Function2.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function20.scala b/src/library/scala/Function20.scala index a93f999d44fb..b1523f689392 100644 --- a/src/library/scala/Function20.scala +++ b/src/library/scala/Function20.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function21.scala b/src/library/scala/Function21.scala index 7ebbb0679843..c41374058d36 100644 --- a/src/library/scala/Function21.scala +++ b/src/library/scala/Function21.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Function22.scala b/src/library/scala/Function22.scala index e5a3d83fb95d..175cc0d30887 100644 --- a/src/library/scala/Function22.scala +++ b/src/library/scala/Function22.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function3.scala b/src/library/scala/Function3.scala index 850290d244bf..a9479f28e049 100644 --- a/src/library/scala/Function3.scala +++ b/src/library/scala/Function3.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function4.scala b/src/library/scala/Function4.scala index c9ac6df32ea0..ab7b89016dd4 100644 --- a/src/library/scala/Function4.scala +++ b/src/library/scala/Function4.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function5.scala b/src/library/scala/Function5.scala index 360a460306f2..9c822db6cba1 100644 --- a/src/library/scala/Function5.scala +++ b/src/library/scala/Function5.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Function6.scala b/src/library/scala/Function6.scala index d30877e7658b..109f53626d80 100644 --- a/src/library/scala/Function6.scala +++ b/src/library/scala/Function6.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function7.scala b/src/library/scala/Function7.scala index b19caf2b5004..bc0ae325c92f 100644 --- a/src/library/scala/Function7.scala +++ b/src/library/scala/Function7.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function8.scala b/src/library/scala/Function8.scala index 3aff0b034cf6..5bae2f6f378c 100644 --- a/src/library/scala/Function8.scala +++ b/src/library/scala/Function8.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Function9.scala b/src/library/scala/Function9.scala index f80ccf48f9b7..cc2f55c79d30 100644 --- a/src/library/scala/Function9.scala +++ b/src/library/scala/Function9.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Int.scala b/src/library/scala/Int.scala index cda015063a17..0d791adae651 100644 --- a/src/library/scala/Int.scala +++ b/src/library/scala/Int.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ // DO NOT EDIT, CHANGES WILL BE LOST // This auto-generated code can be modified in "project/GenerateAnyVals.scala". diff --git a/src/library/scala/Long.scala b/src/library/scala/Long.scala index 84e6f09da354..862a2b190f32 100644 --- a/src/library/scala/Long.scala +++ b/src/library/scala/Long.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ // DO NOT EDIT, CHANGES WILL BE LOST // This auto-generated code can be modified in "project/GenerateAnyVals.scala". diff --git a/src/library/scala/Product1.scala b/src/library/scala/Product1.scala index 3b0194e41f18..41e97a9005b8 100644 --- a/src/library/scala/Product1.scala +++ b/src/library/scala/Product1.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product10.scala b/src/library/scala/Product10.scala index 8826d95007e5..a170baf72dc0 100644 --- a/src/library/scala/Product10.scala +++ b/src/library/scala/Product10.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Product11.scala b/src/library/scala/Product11.scala index 2a846fff4e22..d67fcb33ac4e 100644 --- a/src/library/scala/Product11.scala +++ b/src/library/scala/Product11.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product12.scala b/src/library/scala/Product12.scala index 87419048d626..f1113f34a681 100644 --- a/src/library/scala/Product12.scala +++ b/src/library/scala/Product12.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product13.scala b/src/library/scala/Product13.scala index a944279a2eff..08b93a102f2e 100644 --- a/src/library/scala/Product13.scala +++ b/src/library/scala/Product13.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product14.scala b/src/library/scala/Product14.scala index 098721f21637..664d9cf5abed 100644 --- a/src/library/scala/Product14.scala +++ b/src/library/scala/Product14.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Product15.scala b/src/library/scala/Product15.scala index ef550c80d2a8..c34f9c0311e2 100644 --- a/src/library/scala/Product15.scala +++ b/src/library/scala/Product15.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product16.scala b/src/library/scala/Product16.scala index dd32e2f63741..f990d3ae7a0d 100644 --- a/src/library/scala/Product16.scala +++ b/src/library/scala/Product16.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product17.scala b/src/library/scala/Product17.scala index e97cc5189ef9..858ffb6f789f 100644 --- a/src/library/scala/Product17.scala +++ b/src/library/scala/Product17.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product18.scala b/src/library/scala/Product18.scala index 1266b77a9f52..eb76d6657104 100644 --- a/src/library/scala/Product18.scala +++ b/src/library/scala/Product18.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Product19.scala b/src/library/scala/Product19.scala index 4bf5dcf23ebd..6f2bad96293a 100644 --- a/src/library/scala/Product19.scala +++ b/src/library/scala/Product19.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product2.scala b/src/library/scala/Product2.scala index 93144abeb3c3..fa3c648a207c 100644 --- a/src/library/scala/Product2.scala +++ b/src/library/scala/Product2.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product20.scala b/src/library/scala/Product20.scala index a1dfd469add8..0893588f9669 100644 --- a/src/library/scala/Product20.scala +++ b/src/library/scala/Product20.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product21.scala b/src/library/scala/Product21.scala index 4f01277ad3df..463b022ad8fc 100644 --- a/src/library/scala/Product21.scala +++ b/src/library/scala/Product21.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Product22.scala b/src/library/scala/Product22.scala index cef8d3040282..dd251cd60a3f 100644 --- a/src/library/scala/Product22.scala +++ b/src/library/scala/Product22.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product3.scala b/src/library/scala/Product3.scala index 7da324106d36..ec73f4e15c22 100644 --- a/src/library/scala/Product3.scala +++ b/src/library/scala/Product3.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product4.scala b/src/library/scala/Product4.scala index 88e5dea9d3b2..1eb820b8f14d 100644 --- a/src/library/scala/Product4.scala +++ b/src/library/scala/Product4.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product5.scala b/src/library/scala/Product5.scala index d8c3ffc190aa..3b3c0aca2338 100644 --- a/src/library/scala/Product5.scala +++ b/src/library/scala/Product5.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Product6.scala b/src/library/scala/Product6.scala index ab50d678fc56..7c77df212139 100644 --- a/src/library/scala/Product6.scala +++ b/src/library/scala/Product6.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product7.scala b/src/library/scala/Product7.scala index efdeb142d18b..05a4be35fbe4 100644 --- a/src/library/scala/Product7.scala +++ b/src/library/scala/Product7.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product8.scala b/src/library/scala/Product8.scala index 743c0ac4858d..3bf83f34f301 100644 --- a/src/library/scala/Product8.scala +++ b/src/library/scala/Product8.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Product9.scala b/src/library/scala/Product9.scala index 8d04213cd93f..895032834ef6 100644 --- a/src/library/scala/Product9.scala +++ b/src/library/scala/Product9.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Short.scala b/src/library/scala/Short.scala index 94dea784d3af..334e60ae893f 100644 --- a/src/library/scala/Short.scala +++ b/src/library/scala/Short.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ // DO NOT EDIT, CHANGES WILL BE LOST // This auto-generated code can be modified in "project/GenerateAnyVals.scala". diff --git a/src/library/scala/Tuple1.scala b/src/library/scala/Tuple1.scala index 5544a5f63d9c..a9e6eb31d777 100644 --- a/src/library/scala/Tuple1.scala +++ b/src/library/scala/Tuple1.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple10.scala b/src/library/scala/Tuple10.scala index c57acb7c6e7f..f2447ca8ab6f 100644 --- a/src/library/scala/Tuple10.scala +++ b/src/library/scala/Tuple10.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple11.scala b/src/library/scala/Tuple11.scala index 06360e6679ba..63f7ebe23fc6 100644 --- a/src/library/scala/Tuple11.scala +++ b/src/library/scala/Tuple11.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Tuple12.scala b/src/library/scala/Tuple12.scala index e223de104d95..808c91c14cd2 100644 --- a/src/library/scala/Tuple12.scala +++ b/src/library/scala/Tuple12.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple13.scala b/src/library/scala/Tuple13.scala index 56e12b96b676..7a5b9d6b5ff4 100644 --- a/src/library/scala/Tuple13.scala +++ b/src/library/scala/Tuple13.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple14.scala b/src/library/scala/Tuple14.scala index 53dd4d79bbf3..7c26a5d23048 100644 --- a/src/library/scala/Tuple14.scala +++ b/src/library/scala/Tuple14.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple15.scala b/src/library/scala/Tuple15.scala index 0aca96d00da0..dd2de9347f61 100644 --- a/src/library/scala/Tuple15.scala +++ b/src/library/scala/Tuple15.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Tuple16.scala b/src/library/scala/Tuple16.scala index d4c0c318070f..fba6bc957ef5 100644 --- a/src/library/scala/Tuple16.scala +++ b/src/library/scala/Tuple16.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple17.scala b/src/library/scala/Tuple17.scala index 47df6cb59f07..1600d31da5bd 100644 --- a/src/library/scala/Tuple17.scala +++ b/src/library/scala/Tuple17.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple18.scala b/src/library/scala/Tuple18.scala index 464b08fb2840..b7d30b5c0941 100644 --- a/src/library/scala/Tuple18.scala +++ b/src/library/scala/Tuple18.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple19.scala b/src/library/scala/Tuple19.scala index d64b3920b441..b1d55c889a45 100644 --- a/src/library/scala/Tuple19.scala +++ b/src/library/scala/Tuple19.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala index 5faa4e982150..ec00f2254f55 100644 --- a/src/library/scala/Tuple2.scala +++ b/src/library/scala/Tuple2.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple20.scala b/src/library/scala/Tuple20.scala index a96c41baa551..cc6493d54774 100644 --- a/src/library/scala/Tuple20.scala +++ b/src/library/scala/Tuple20.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple21.scala b/src/library/scala/Tuple21.scala index 6f240d1fba7f..0b4be8c55ce2 100644 --- a/src/library/scala/Tuple21.scala +++ b/src/library/scala/Tuple21.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple22.scala b/src/library/scala/Tuple22.scala index 681b709f6578..9fab34a4e013 100644 --- a/src/library/scala/Tuple22.scala +++ b/src/library/scala/Tuple22.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala index 86f8f7e1a4b3..368f11569ad7 100644 --- a/src/library/scala/Tuple3.scala +++ b/src/library/scala/Tuple3.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple4.scala b/src/library/scala/Tuple4.scala index f3266c262c41..0abe4119fc67 100644 --- a/src/library/scala/Tuple4.scala +++ b/src/library/scala/Tuple4.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple5.scala b/src/library/scala/Tuple5.scala index e6ac0a62452c..aac3687b511c 100644 --- a/src/library/scala/Tuple5.scala +++ b/src/library/scala/Tuple5.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple6.scala b/src/library/scala/Tuple6.scala index cf69b9c10a6a..41a01471615c 100644 --- a/src/library/scala/Tuple6.scala +++ b/src/library/scala/Tuple6.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala diff --git a/src/library/scala/Tuple7.scala b/src/library/scala/Tuple7.scala index 4f0200fe238f..38f04b2c2926 100644 --- a/src/library/scala/Tuple7.scala +++ b/src/library/scala/Tuple7.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple8.scala b/src/library/scala/Tuple8.scala index ebd9f7025264..9a94e80dbb7a 100644 --- a/src/library/scala/Tuple8.scala +++ b/src/library/scala/Tuple8.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Tuple9.scala b/src/library/scala/Tuple9.scala index 854fe97b4401..f4296d0dcd67 100644 --- a/src/library/scala/Tuple9.scala +++ b/src/library/scala/Tuple9.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala diff --git a/src/library/scala/Unit.scala b/src/library/scala/Unit.scala index eb6d1d0ddffa..440a131e0672 100644 --- a/src/library/scala/Unit.scala +++ b/src/library/scala/Unit.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ // DO NOT EDIT, CHANGES WILL BE LOST // This auto-generated code can be modified in "project/GenerateAnyVals.scala". 
diff --git a/src/library/scala/runtime/AbstractFunction0.scala b/src/library/scala/runtime/AbstractFunction0.scala index 1e677e800877..1d0658dd13ac 100644 --- a/src/library/scala/runtime/AbstractFunction0.scala +++ b/src/library/scala/runtime/AbstractFunction0.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction1.scala b/src/library/scala/runtime/AbstractFunction1.scala index 178280cb4600..c4db90a4c750 100644 --- a/src/library/scala/runtime/AbstractFunction1.scala +++ b/src/library/scala/runtime/AbstractFunction1.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction10.scala b/src/library/scala/runtime/AbstractFunction10.scala index 776f52238d20..0c4a08cbfac7 100644 --- a/src/library/scala/runtime/AbstractFunction10.scala +++ b/src/library/scala/runtime/AbstractFunction10.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction11.scala b/src/library/scala/runtime/AbstractFunction11.scala index 76cd8fbb3c92..26dc92a984d8 100644 --- a/src/library/scala/runtime/AbstractFunction11.scala +++ b/src/library/scala/runtime/AbstractFunction11.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. 
package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction12.scala b/src/library/scala/runtime/AbstractFunction12.scala index 10066ed4b356..f52587200984 100644 --- a/src/library/scala/runtime/AbstractFunction12.scala +++ b/src/library/scala/runtime/AbstractFunction12.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction13.scala b/src/library/scala/runtime/AbstractFunction13.scala index 6c3a45734ce3..f5280e525973 100644 --- a/src/library/scala/runtime/AbstractFunction13.scala +++ b/src/library/scala/runtime/AbstractFunction13.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction14.scala b/src/library/scala/runtime/AbstractFunction14.scala index bf2b6736f405..af6784cffaf2 100644 --- a/src/library/scala/runtime/AbstractFunction14.scala +++ b/src/library/scala/runtime/AbstractFunction14.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction15.scala b/src/library/scala/runtime/AbstractFunction15.scala index 5136f666c827..aac162fac06a 100644 --- a/src/library/scala/runtime/AbstractFunction15.scala +++ b/src/library/scala/runtime/AbstractFunction15.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. 
See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction16.scala b/src/library/scala/runtime/AbstractFunction16.scala index dbafab83015c..699842586255 100644 --- a/src/library/scala/runtime/AbstractFunction16.scala +++ b/src/library/scala/runtime/AbstractFunction16.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction17.scala b/src/library/scala/runtime/AbstractFunction17.scala index 9c36dbf5d8d9..2531a748d885 100644 --- a/src/library/scala/runtime/AbstractFunction17.scala +++ b/src/library/scala/runtime/AbstractFunction17.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction18.scala b/src/library/scala/runtime/AbstractFunction18.scala index 30eee9586fbe..14d0dd72046a 100644 --- a/src/library/scala/runtime/AbstractFunction18.scala +++ b/src/library/scala/runtime/AbstractFunction18.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction19.scala b/src/library/scala/runtime/AbstractFunction19.scala index 14baf5f1eba3..13d633113954 100644 --- a/src/library/scala/runtime/AbstractFunction19.scala +++ b/src/library/scala/runtime/AbstractFunction19.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction2.scala b/src/library/scala/runtime/AbstractFunction2.scala index 223ade99838f..b39793a9817d 100644 --- a/src/library/scala/runtime/AbstractFunction2.scala +++ b/src/library/scala/runtime/AbstractFunction2.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction20.scala b/src/library/scala/runtime/AbstractFunction20.scala index f5c29571bf97..4debd7473893 100644 --- a/src/library/scala/runtime/AbstractFunction20.scala +++ b/src/library/scala/runtime/AbstractFunction20.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction21.scala b/src/library/scala/runtime/AbstractFunction21.scala index 15feea3a669a..523a42f4ebea 100644 --- a/src/library/scala/runtime/AbstractFunction21.scala +++ b/src/library/scala/runtime/AbstractFunction21.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction22.scala b/src/library/scala/runtime/AbstractFunction22.scala index d77369ff0125..7c77f05e7f03 100644 --- a/src/library/scala/runtime/AbstractFunction22.scala +++ b/src/library/scala/runtime/AbstractFunction22.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction3.scala b/src/library/scala/runtime/AbstractFunction3.scala index f8635092140e..829125342d3c 100644 --- a/src/library/scala/runtime/AbstractFunction3.scala +++ b/src/library/scala/runtime/AbstractFunction3.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction4.scala b/src/library/scala/runtime/AbstractFunction4.scala index 5927015ef8b1..6f479f3395ae 100644 --- a/src/library/scala/runtime/AbstractFunction4.scala +++ b/src/library/scala/runtime/AbstractFunction4.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction5.scala b/src/library/scala/runtime/AbstractFunction5.scala index 411e1e14bf63..50ad931bdd77 100644 --- a/src/library/scala/runtime/AbstractFunction5.scala +++ b/src/library/scala/runtime/AbstractFunction5.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction6.scala b/src/library/scala/runtime/AbstractFunction6.scala index 411c30d480d3..e60229bb03df 100644 --- a/src/library/scala/runtime/AbstractFunction6.scala +++ b/src/library/scala/runtime/AbstractFunction6.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction7.scala b/src/library/scala/runtime/AbstractFunction7.scala index 498f98633ae7..1f6eae1291fa 100644 --- a/src/library/scala/runtime/AbstractFunction7.scala +++ b/src/library/scala/runtime/AbstractFunction7.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction8.scala b/src/library/scala/runtime/AbstractFunction8.scala index c6d320b8874d..06677c3e3923 100644 --- a/src/library/scala/runtime/AbstractFunction8.scala +++ b/src/library/scala/runtime/AbstractFunction8.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/library/scala/runtime/AbstractFunction9.scala b/src/library/scala/runtime/AbstractFunction9.scala index 34bd9d710721..863e73f18177 100644 --- a/src/library/scala/runtime/AbstractFunction9.scala +++ b/src/library/scala/runtime/AbstractFunction9.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // GENERATED CODE: DO NOT EDIT. See scala.Function0 for timestamp. package scala.runtime diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index ef081c8055fd..7c5dfe17296b 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // Generated Code, validated by run/t6240-universe-code-gen.scala package scala.reflect package runtime diff --git a/test/files/pos/spec-Function1.scala b/test/files/pos/spec-Function1.scala index 5a115501d8b8..8428adedbd70 100644 --- a/test/files/pos/spec-Function1.scala +++ b/test/files/pos/spec-Function1.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ // generated by genprod on Wed Apr 23 10:06:16 CEST 2008 (with fancy comment) (with extra methods) diff --git a/test/files/pos/t5644/BoxesRunTime.java b/test/files/pos/t5644/BoxesRunTime.java index 2b931519aaca..915760c57199 100644 --- a/test/files/pos/t5644/BoxesRunTime.java +++ b/test/files/pos/t5644/BoxesRunTime.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime; diff --git a/test/files/run/t6240-universe-code-gen.scala b/test/files/run/t6240-universe-code-gen.scala index 80b60bab7e3c..e5a49921577c 100644 --- a/test/files/run/t6240-universe-code-gen.scala +++ b/test/files/run/t6240-universe-code-gen.scala @@ -25,7 +25,19 @@ object Test extends App { } val code = - s"""|// Generated Code, validated by run/t6240-universe-code-gen.scala + s"""|/* + | * Scala (https://www.scala-lang.org) + | * + | * Copyright EPFL and Lightbend, Inc. + | * + | * Licensed under Apache License 2.0 + | * (http://www.apache.org/licenses/LICENSE-2.0). + | * + | * See the NOTICE file distributed with this work for + | * additional information regarding copyright ownership. + | */ + | + |// Generated Code, validated by run/t6240-universe-code-gen.scala |package scala.reflect |package runtime | diff --git a/test/junit/scala/util/control/ExceptionTest.scala b/test/junit/scala/util/control/ExceptionTest.scala index 5211d31839f4..a93d2e2c26d9 100644 --- a/test/junit/scala/util/control/ExceptionTest.scala +++ b/test/junit/scala/util/control/ExceptionTest.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2016-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.util diff --git a/test/scaladoc/resources/doc-root/Any.scala b/test/scaladoc/resources/doc-root/Any.scala index fd4c287b4ff2..c582ad607c56 100644 --- a/test/scaladoc/resources/doc-root/Any.scala +++ b/test/scaladoc/resources/doc-root/Any.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/test/scaladoc/resources/doc-root/AnyRef.scala b/test/scaladoc/resources/doc-root/AnyRef.scala index 7cdc3d1adae4..e1883be5da2b 100644 --- a/test/scaladoc/resources/doc-root/AnyRef.scala +++ b/test/scaladoc/resources/doc-root/AnyRef.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/test/scaladoc/resources/doc-root/Nothing.scala b/test/scaladoc/resources/doc-root/Nothing.scala index 57f6fac3f9e3..d52353e3f314 100644 --- a/test/scaladoc/resources/doc-root/Nothing.scala +++ b/test/scaladoc/resources/doc-root/Nothing.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/test/scaladoc/resources/doc-root/Null.scala b/test/scaladoc/resources/doc-root/Null.scala index 931beb2d1a7e..66f544dc6374 100644 --- a/test/scaladoc/resources/doc-root/Null.scala +++ b/test/scaladoc/resources/doc-root/Null.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala From 515916c42f8f2f5c476ffabd6774e5a903feda10 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 5 Oct 2018 11:13:07 +0200 Subject: [PATCH 1519/2793] [nomerge] Auto-update headers in `src/` Using the `headerCreate` sbt task, now enforced in CI using `headerCheck`. 
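For context, here is a minimal sketch of how the sbt-header plugin's `headerCreate`/`headerCheck` tasks named above can be wired into a build. Only the task names come from this commit message; the plugin version, the `HeaderLicense.Custom` text, and the exact settings below are illustrative assumptions, not this repository's actual build configuration.

```
// project/plugins.sbt -- plugin version is illustrative
addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.2.0")

// build.sbt -- the plugin's autoImport brings headerLicense/HeaderLicense into scope.
// The banner text mirrors the header being introduced by this patch.
headerLicense := Some(HeaderLicense.Custom(
  """Scala (https://www.scala-lang.org)
    |
    |Copyright EPFL and Lightbend, Inc.
    |
    |Licensed under Apache License 2.0
    |(http://www.apache.org/licenses/LICENSE-2.0).
    |
    |See the NOTICE file distributed with this work for
    |additional information regarding copyright ownership.""".stripMargin
))
```

With a setting like this, `sbt headerCreate` rewrites the banner at the top of each managed source file (which is how a bulk update such as this one can be generated), and `sbt headerCheck` fails the build when any file is missing the banner, so a CI step can run it to keep the headers enforced.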
--- .travis.yml | 2 +- .../compiler/DefaultMacroCompiler.scala | 12 +++++++++++ .../reflect/macros/compiler/Errors.scala | 12 +++++++++++ .../reflect/macros/compiler/Resolvers.scala | 12 +++++++++++ .../reflect/macros/compiler/Validators.scala | 12 +++++++++++ .../reflect/macros/contexts/Aliases.scala | 12 +++++++++++ .../reflect/macros/contexts/Context.scala | 12 +++++++++++ .../reflect/macros/contexts/Enclosures.scala | 12 +++++++++++ .../scala/reflect/macros/contexts/Evals.scala | 12 +++++++++++ .../reflect/macros/contexts/ExprUtils.scala | 12 +++++++++++ .../reflect/macros/contexts/FrontEnds.scala | 12 +++++++++++ .../macros/contexts/Infrastructure.scala | 12 +++++++++++ .../reflect/macros/contexts/Internals.scala | 12 +++++++++++ .../scala/reflect/macros/contexts/Names.scala | 12 +++++++++++ .../reflect/macros/contexts/Parsers.scala | 12 +++++++++++ .../reflect/macros/contexts/Reifiers.scala | 13 +++++++++--- .../reflect/macros/contexts/Traces.scala | 12 +++++++++++ .../reflect/macros/contexts/Typers.scala | 12 +++++++++++ .../macros/runtime/AbortMacroException.scala | 12 +++++++++++ .../runtime/JavaReflectionRuntimes.scala | 12 +++++++++++ .../macros/runtime/MacroRuntimes.scala | 12 +++++++++++ .../reflect/macros/runtime/package.scala | 12 +++++++++++ .../scala/reflect/macros/util/Helpers.scala | 12 +++++++++++ .../scala/reflect/macros/util/Traces.scala | 12 +++++++++++ .../scala/reflect/quasiquotes/Holes.scala | 12 +++++++++++ .../scala/reflect/quasiquotes/Parsers.scala | 12 +++++++++++ .../reflect/quasiquotes/Placeholders.scala | 12 +++++++++++ .../reflect/quasiquotes/Quasiquotes.scala | 12 +++++++++++ .../scala/reflect/quasiquotes/Reifiers.scala | 12 +++++++++++ src/compiler/scala/reflect/reify/Errors.scala | 12 +++++++++++ src/compiler/scala/reflect/reify/Phases.scala | 12 +++++++++++ .../scala/reflect/reify/Reifier.scala | 12 +++++++++++ src/compiler/scala/reflect/reify/States.scala | 12 +++++++++++ .../scala/reflect/reify/Taggers.scala | 12 +++++++++++ .../reify/codegen/GenAnnotationInfos.scala | 12 +++++++++++ .../reflect/reify/codegen/GenNames.scala | 12 +++++++++++ .../reflect/reify/codegen/GenPositions.scala | 12 +++++++++++ .../reflect/reify/codegen/GenSymbols.scala | 12 +++++++++++ .../reflect/reify/codegen/GenTrees.scala | 12 +++++++++++ .../reflect/reify/codegen/GenTypes.scala | 12 +++++++++++ .../reflect/reify/codegen/GenUtils.scala | 12 +++++++++++ .../scala/reflect/reify/package.scala | 12 +++++++++++ .../reflect/reify/phases/Calculate.scala | 12 +++++++++++ .../reflect/reify/phases/Metalevels.scala | 12 +++++++++++ .../scala/reflect/reify/phases/Reify.scala | 12 +++++++++++ .../scala/reflect/reify/phases/Reshape.scala | 12 +++++++++++ .../reflect/reify/utils/Extractors.scala | 12 +++++++++++ .../reflect/reify/utils/NodePrinters.scala | 14 ++++++++++--- .../reflect/reify/utils/StdAttachments.scala | 12 +++++++++++ .../reflect/reify/utils/SymbolTables.scala | 12 +++++++++++ .../scala/reflect/reify/utils/Utils.scala | 12 +++++++++++ .../scala/tools/ant/ClassloadVerify.scala | 18 ++++++++++------- src/compiler/scala/tools/ant/FastScalac.scala | 18 ++++++++++------- .../scala/tools/ant/Pack200Task.scala | 18 ++++++++++------- src/compiler/scala/tools/ant/Same.scala | 18 ++++++++++------- .../scala/tools/ant/ScalaMatchingTask.scala | 18 ++++++++++------- src/compiler/scala/tools/ant/ScalaTool.scala | 18 ++++++++++------- src/compiler/scala/tools/ant/Scalac.scala | 18 ++++++++++------- .../scala/tools/ant/ScalacShared.scala | 18 ++++++++++------- 
.../scala/tools/ant/sabbus/Break.scala | 19 ++++++++++-------- .../tools/ant/sabbus/CompilationFailure.scala | 19 ++++++++++-------- .../scala/tools/ant/sabbus/Compiler.scala | 19 ++++++++++-------- .../scala/tools/ant/sabbus/Compilers.scala | 19 ++++++++++-------- .../tools/ant/sabbus/ForeignCompiler.scala | 19 ++++++++++-------- .../scala/tools/ant/sabbus/Make.scala | 19 ++++++++++-------- .../scala/tools/ant/sabbus/ScalacFork.scala | 18 ++++++++++------- .../scala/tools/ant/sabbus/Settings.scala | 18 ++++++++++------- .../scala/tools/ant/sabbus/TaskArgs.scala | 19 ++++++++++-------- src/compiler/scala/tools/ant/sabbus/Use.scala | 19 ++++++++++-------- .../scala/tools/cmd/CommandLine.scala | 13 +++++++++--- .../scala/tools/cmd/CommandLineParser.scala | 14 ++++++++++--- src/compiler/scala/tools/cmd/FromString.scala | 13 +++++++++--- src/compiler/scala/tools/cmd/Instance.scala | 13 +++++++++--- .../scala/tools/cmd/Interpolation.scala | 13 +++++++++--- src/compiler/scala/tools/cmd/Meta.scala | 13 +++++++++--- src/compiler/scala/tools/cmd/Opt.scala | 13 +++++++++--- src/compiler/scala/tools/cmd/Property.scala | 13 +++++++++--- src/compiler/scala/tools/cmd/Reference.scala | 13 +++++++++--- src/compiler/scala/tools/cmd/Spec.scala | 13 +++++++++--- src/compiler/scala/tools/cmd/package.scala | 13 +++++++++--- .../ClassPathMemoryConsumptionTester.scala | 11 +++++++++- .../scala/tools/nsc/CompilationUnits.scala | 13 +++++++++--- .../scala/tools/nsc/CompileClient.scala | 13 +++++++++--- .../scala/tools/nsc/CompileServer.scala | 13 +++++++++--- .../scala/tools/nsc/CompileSocket.scala | 13 +++++++++--- .../scala/tools/nsc/CompilerCommand.scala | 13 +++++++++--- .../scala/tools/nsc/ConsoleWriter.scala | 13 +++++++++--- src/compiler/scala/tools/nsc/Driver.scala | 12 +++++++++++ src/compiler/scala/tools/nsc/EvalLoop.scala | 13 +++++++++--- .../tools/nsc/GenericRunnerCommand.scala | 13 +++++++++--- .../tools/nsc/GenericRunnerSettings.scala | 13 +++++++++--- src/compiler/scala/tools/nsc/Global.scala | 13 +++++++++--- .../scala/tools/nsc/GlobalSymbolLoaders.scala | 13 +++++++++--- src/compiler/scala/tools/nsc/Main.scala | 14 ++++++++++--- src/compiler/scala/tools/nsc/MainBench.scala | 13 +++++++++--- .../scala/tools/nsc/MainTokenMetric.scala | 13 +++++++++--- .../scala/tools/nsc/NewLinePrintWriter.scala | 13 +++++++++--- .../scala/tools/nsc/ObjectRunner.scala | 14 +++++++++---- .../tools/nsc/OfflineCompilerCommand.scala | 13 +++++++++--- src/compiler/scala/tools/nsc/Parsing.scala | 13 +++++++++--- .../scala/tools/nsc/PhaseAssembly.scala | 14 +++++++++---- src/compiler/scala/tools/nsc/Properties.scala | 13 +++++++++--- src/compiler/scala/tools/nsc/Reporting.scala | 13 +++++++++--- .../scala/tools/nsc/ScriptRunner.scala | 13 +++++++++--- src/compiler/scala/tools/nsc/Settings.scala | 13 +++++++++--- .../scala/tools/nsc/SubComponent.scala | 13 +++++++++--- .../scala/tools/nsc/ast/DocComments.scala | 13 +++++++++--- .../scala/tools/nsc/ast/NodePrinters.scala | 13 +++++++++--- .../scala/tools/nsc/ast/Positions.scala | 12 +++++++++++ .../scala/tools/nsc/ast/Printers.scala | 13 +++++++++--- .../scala/tools/nsc/ast/TreeBrowsers.scala | 13 +++++++++--- .../scala/tools/nsc/ast/TreeDSL.scala | 12 ++++++++--- .../scala/tools/nsc/ast/TreeGen.scala | 13 +++++++++--- .../scala/tools/nsc/ast/TreeInfo.scala | 13 +++++++++--- src/compiler/scala/tools/nsc/ast/Trees.scala | 13 +++++++++--- .../tools/nsc/ast/parser/BracePair.scala | 14 ++++++++++--- .../tools/nsc/ast/parser/BracePatch.scala | 14 ++++++++++--- 
.../scala/tools/nsc/ast/parser/Change.scala | 14 ++++++++++--- .../tools/nsc/ast/parser/CommonTokens.scala | 12 +++++++++++ .../tools/nsc/ast/parser/MarkupParsers.scala | 13 +++++++++--- .../scala/tools/nsc/ast/parser/Parsers.scala | 13 +++++++++--- .../scala/tools/nsc/ast/parser/Patch.scala | 14 ++++++++++--- .../scala/tools/nsc/ast/parser/Scanners.scala | 14 ++++++++++--- .../nsc/ast/parser/SymbolicXMLBuilder.scala | 13 +++++++++--- .../tools/nsc/ast/parser/SyntaxAnalyzer.scala | 13 +++++++++--- .../scala/tools/nsc/ast/parser/Tokens.scala | 13 +++++++++--- .../tools/nsc/ast/parser/TreeBuilder.scala | 13 +++++++++--- .../ast/parser/xml/MarkupParserCommon.scala | 18 ++++++++++------- .../tools/nsc/ast/parser/xml/Utility.scala | 18 ++++++++++------- .../tools/nsc/backend/JavaPlatform.scala | 13 +++++++++--- .../scala/tools/nsc/backend/Platform.scala | 13 +++++++++--- .../tools/nsc/backend/ScalaPrimitives.scala | 13 +++++++++--- .../tools/nsc/backend/jvm/AsmUtils.scala | 13 +++++++++--- .../nsc/backend/jvm/BCodeBodyBuilder.scala | 13 +++++++++--- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 13 +++++++++--- .../nsc/backend/jvm/BCodeIdiomatic.scala | 13 +++++++++--- .../nsc/backend/jvm/BCodeSkelBuilder.scala | 14 +++++++++---- .../nsc/backend/jvm/BCodeSyncAndTry.scala | 14 +++++++++---- .../scala/tools/nsc/backend/jvm/BTypes.scala | 13 +++++++++--- .../nsc/backend/jvm/BTypesFromClassfile.scala | 12 +++++++++++ .../nsc/backend/jvm/BTypesFromSymbols.scala | 13 +++++++++--- .../nsc/backend/jvm/BackendReporting.scala | 12 +++++++++++ .../tools/nsc/backend/jvm/BackendStats.scala | 13 +++++++++--- .../tools/nsc/backend/jvm/ClassNode1.java | 14 ++++++++++--- .../nsc/backend/jvm/ClassfileWriters.scala | 12 +++++++++++ .../scala/tools/nsc/backend/jvm/CodeGen.scala | 12 +++++++++++ .../tools/nsc/backend/jvm/CoreBTypes.scala | 12 +++++++++++ .../tools/nsc/backend/jvm/GenBCode.scala | 13 +++++++++--- .../backend/jvm/GeneratedClassHandler.scala | 12 +++++++++++ .../tools/nsc/backend/jvm/LabelNode1.java | 14 ++++++++++--- .../tools/nsc/backend/jvm/MethodNode1.java | 14 ++++++++++--- .../tools/nsc/backend/jvm/PerRunInit.scala | 12 +++++++++++ .../tools/nsc/backend/jvm/PostProcessor.scala | 12 +++++++++++ .../jvm/PostProcessorFrontendAccess.scala | 12 +++++++++++ .../backend/jvm/analysis/AliasingFrame.scala | 12 +++++++++++ .../backend/jvm/analysis/BackendUtils.scala | 12 +++++++++++ .../jvm/analysis/InstructionStackEffect.scala | 12 +++++++++++ .../jvm/analysis/NullnessAnalyzer.scala | 12 +++++++++++ .../jvm/analysis/ProdConsAnalyzerImpl.scala | 13 +++++++++--- .../jvm/analysis/TypeFlowInterpreter.scala | 12 +++++++++++ .../nsc/backend/jvm/analysis/package.scala | 12 +++++++++++ .../tools/nsc/backend/jvm/opt/BoxUnbox.scala | 13 +++++++++--- .../backend/jvm/opt/ByteCodeRepository.scala | 13 +++++++++--- .../nsc/backend/jvm/opt/BytecodeUtils.scala | 13 +++++++++--- .../tools/nsc/backend/jvm/opt/CallGraph.scala | 13 +++++++++--- .../backend/jvm/opt/ClosureOptimizer.scala | 13 +++++++++--- .../tools/nsc/backend/jvm/opt/CopyProp.scala | 13 +++++++++--- .../backend/jvm/opt/InlineInfoAttribute.scala | 13 +++++++++--- .../tools/nsc/backend/jvm/opt/Inliner.scala | 13 +++++++++--- .../backend/jvm/opt/InlinerHeuristics.scala | 13 +++++++++--- .../tools/nsc/backend/jvm/opt/LocalOpt.scala | 13 +++++++++--- .../tools/nsc/backend/jvm/opt/LruMap.scala | 12 +++++++++++ .../nsc/classpath/AggregateClassPath.scala | 11 +++++++++- .../scala/tools/nsc/classpath/ClassPath.scala | 11 +++++++++- 
.../nsc/classpath/ClassPathFactory.scala | 11 +++++++++- .../nsc/classpath/DirectoryClassPath.scala | 11 +++++++++- .../scala/tools/nsc/classpath/FileUtils.scala | 11 +++++++++- .../nsc/classpath/PackageNameUtils.scala | 11 +++++++++- .../classpath/VirtualDirectoryClassPath.scala | 12 +++++++++++ .../ZipAndJarFileLookupFactory.scala | 11 +++++++++- .../nsc/classpath/ZipArchiveFileLookup.scala | 11 +++++++++- src/compiler/scala/tools/nsc/io/Jar.scala | 13 +++++++++--- src/compiler/scala/tools/nsc/io/Socket.scala | 13 +++++++++--- .../scala/tools/nsc/io/SourceReader.scala | 14 +++++++++---- src/compiler/scala/tools/nsc/io/package.scala | 13 +++++++++--- .../scala/tools/nsc/javac/JavaParsers.scala | 14 ++++++++++--- .../scala/tools/nsc/javac/JavaScanners.scala | 13 +++++++++--- .../scala/tools/nsc/javac/JavaTokens.scala | 13 +++++++++--- src/compiler/scala/tools/nsc/package.scala | 13 +++++++++--- .../scala/tools/nsc/plugins/Plugin.scala | 13 +++++++++--- .../tools/nsc/plugins/PluginComponent.scala | 14 +++++++++---- .../tools/nsc/plugins/PluginDescription.scala | 13 +++++++++--- .../scala/tools/nsc/plugins/Plugins.scala | 14 +++++++++---- .../nsc/profile/ExtendedThreadMxBean.java | 12 +++++++++++ .../tools/nsc/profile/ExternalToolHook.java | 12 +++++++++++ .../scala/tools/nsc/profile/Profiler.scala | 12 +++++++++++ .../tools/nsc/profile/ProfilerPlugin.scala | 12 +++++++++++ .../tools/nsc/profile/ThreadPoolFactory.scala | 12 +++++++++++ .../nsc/reporters/AbstractReporter.scala | 13 +++++++++--- .../tools/nsc/reporters/ConsoleReporter.scala | 13 +++++++++--- .../nsc/reporters/LimitingReporter.scala | 12 +++++++++++ .../tools/nsc/reporters/NoReporter.scala | 12 +++++++++++ .../scala/tools/nsc/reporters/Reporter.scala | 13 +++++++++--- .../tools/nsc/reporters/StoreReporter.scala | 13 +++++++++--- .../tools/nsc/settings/AbsScalaSettings.scala | 13 +++++++++--- .../tools/nsc/settings/AbsSettings.scala | 13 +++++++++--- .../tools/nsc/settings/FscSettings.scala | 13 +++++++++--- .../tools/nsc/settings/MutableSettings.scala | 14 ++++++++++--- .../tools/nsc/settings/ScalaSettings.scala | 14 ++++++++++--- .../tools/nsc/settings/ScalaVersion.scala | 14 ++++++++++--- .../nsc/settings/StandardScalaSettings.scala | 13 +++++++++--- .../scala/tools/nsc/settings/Warnings.scala | 13 +++++++++--- .../tools/nsc/symtab/BrowsingLoaders.scala | 13 +++++++++--- .../tools/nsc/symtab/SymbolLoaders.scala | 13 +++++++++--- .../scala/tools/nsc/symtab/SymbolTable.scala | 13 +++++++++--- .../tools/nsc/symtab/SymbolTrackers.scala | 13 +++++++++--- .../symtab/classfile/AbstractFileReader.scala | 14 +++++++++---- .../symtab/classfile/ClassfileParser.scala | 13 +++++++++--- .../tools/nsc/symtab/classfile/Pickler.scala | 13 +++++++++--- .../tools/nsc/symtab/classfile/package.scala | 12 +++++++++++ .../scala/tools/nsc/symtab/package.scala | 12 +++++++++++ .../nsc/transform/AccessorSynthesis.scala | 12 +++++++++++ .../scala/tools/nsc/transform/CleanUp.scala | 13 +++++++++--- .../tools/nsc/transform/Constructors.scala | 13 +++++++++--- .../tools/nsc/transform/Delambdafy.scala | 12 +++++++++++ .../scala/tools/nsc/transform/Erasure.scala | 13 +++++++++--- .../tools/nsc/transform/ExplicitOuter.scala | 13 +++++++++--- .../nsc/transform/ExtensionMethods.scala | 14 ++++++++++--- .../scala/tools/nsc/transform/Fields.scala | 13 +++++++++--- .../scala/tools/nsc/transform/Flatten.scala | 13 +++++++++--- .../tools/nsc/transform/InfoTransform.scala | 13 +++++++++--- .../tools/nsc/transform/LambdaLift.scala | 13 +++++++++--- 
.../scala/tools/nsc/transform/Mixin.scala | 12 ++++++++--- .../tools/nsc/transform/OverridingPairs.scala | 13 +++++++++--- .../tools/nsc/transform/PostErasure.scala | 14 ++++++++++--- .../tools/nsc/transform/SampleTransform.scala | 13 +++++++++--- .../tools/nsc/transform/SpecializeTypes.scala | 13 +++++++++--- .../scala/tools/nsc/transform/Statics.scala | 12 +++++++++++ .../scala/tools/nsc/transform/TailCalls.scala | 13 +++++++++--- .../scala/tools/nsc/transform/Transform.scala | 13 +++++++++--- .../transform/TypeAdaptingTransformer.scala | 12 +++++++++++ .../nsc/transform/TypingTransformers.scala | 13 +++++++++--- .../scala/tools/nsc/transform/UnCurry.scala | 13 +++++++++--- .../tools/nsc/transform/patmat/Logic.scala | 12 ++++++++--- .../nsc/transform/patmat/MatchAnalysis.scala | 12 ++++++++--- .../nsc/transform/patmat/MatchCodeGen.scala | 12 ++++++++--- .../tools/nsc/transform/patmat/MatchCps.scala | 12 ++++++++--- .../transform/patmat/MatchOptimization.scala | 12 ++++++++--- .../transform/patmat/MatchTranslation.scala | 12 ++++++++--- .../transform/patmat/MatchTreeMaking.scala | 12 ++++++++--- .../nsc/transform/patmat/MatchWarnings.scala | 12 ++++++++--- .../transform/patmat/PatternExpansion.scala | 13 +++++++++--- .../transform/patmat/PatternMatching.scala | 12 ++++++++--- .../tools/nsc/transform/patmat/Solving.scala | 12 ++++++++--- .../tools/nsc/typechecker/Adaptations.scala | 13 +++++++++--- .../tools/nsc/typechecker/Analyzer.scala | 13 +++++++++--- .../nsc/typechecker/AnalyzerPlugins.scala | 13 +++++++++--- .../tools/nsc/typechecker/Checkable.scala | 13 +++++++++--- .../nsc/typechecker/ConstantFolder.scala | 13 +++++++++--- .../tools/nsc/typechecker/ContextErrors.scala | 13 +++++++++--- .../tools/nsc/typechecker/Contexts.scala | 13 +++++++++--- .../nsc/typechecker/DestructureTypes.scala | 15 ++++++++++---- .../tools/nsc/typechecker/Duplicators.scala | 13 +++++++++--- .../tools/nsc/typechecker/EtaExpansion.scala | 13 +++++++++--- .../tools/nsc/typechecker/Implicits.scala | 13 +++++++++--- .../scala/tools/nsc/typechecker/Infer.scala | 13 +++++++++--- .../scala/tools/nsc/typechecker/Macros.scala | 12 +++++++++++ .../nsc/typechecker/MethodSynthesis.scala | 14 ++++++++++--- .../scala/tools/nsc/typechecker/Namers.scala | 13 +++++++++--- .../tools/nsc/typechecker/NamesDefaults.scala | 13 +++++++++--- .../tools/nsc/typechecker/PatternTypers.scala | 13 +++++++++--- .../tools/nsc/typechecker/RefChecks.scala | 13 +++++++++--- .../nsc/typechecker/StdAttachments.scala | 12 +++++++++++ .../nsc/typechecker/SuperAccessors.scala | 11 ++++++++++ .../nsc/typechecker/SyntheticMethods.scala | 13 +++++++++--- .../scala/tools/nsc/typechecker/Tags.scala | 12 +++++++++++ .../tools/nsc/typechecker/TreeCheckers.scala | 13 +++++++++--- .../nsc/typechecker/TypeDiagnostics.scala | 13 +++++++++--- .../tools/nsc/typechecker/TypeStrings.scala | 13 +++++++++--- .../scala/tools/nsc/typechecker/Typers.scala | 13 +++++++++--- .../nsc/typechecker/TypersTracking.scala | 13 +++++++++--- .../tools/nsc/typechecker/Unapplies.scala | 13 +++++++++--- .../tools/nsc/util/CharArrayReader.scala | 13 +++++++++--- .../scala/tools/nsc/util/ClassPath.scala | 14 +++++++++---- .../scala/tools/nsc/util/DocStrings.scala | 13 +++++++++--- .../scala/tools/nsc/util/Exceptional.scala | 12 +++++++++++ .../scala/tools/nsc/util/InterruptReq.scala | 12 +++++++++++ .../tools/nsc/util/JavaCharArrayReader.scala | 13 +++++++++--- .../scala/tools/nsc/util/ShowPickled.scala | 13 +++++++++--- .../scala/tools/nsc/util/SimpleTracer.scala | 12 +++++++++++ 
.../scala/tools/nsc/util/StackTracing.scala | 12 +++++++++-- .../scala/tools/nsc/util/WorkScheduler.scala | 12 +++++++++++ .../scala/tools/nsc/util/package.scala | 13 +++++++++--- .../scala/tools/reflect/FastTrack.scala | 12 +++++++++++ .../tools/reflect/FormatInterpolator.scala | 12 +++++++++++ .../scala/tools/reflect/FrontEnd.scala | 12 +++++++++++ .../scala/tools/reflect/ReflectGlobal.scala | 12 +++++++++++ .../scala/tools/reflect/ReflectMain.scala | 12 +++++++++++ .../scala/tools/reflect/ReflectSetup.scala | 12 +++++++++++ .../scala/tools/reflect/StdTags.scala | 12 +++++++++++ .../scala/tools/reflect/ToolBox.scala | 12 +++++++++++ .../scala/tools/reflect/ToolBoxFactory.scala | 12 +++++++++++ .../tools/reflect/WrappedProperties.scala | 13 +++++++++--- .../scala/tools/reflect/package.scala | 13 +++++++++--- .../scala/tools/util/PathResolver.scala | 13 +++++++++--- .../scala/tools/util/SocketServer.scala | 18 ++++++++++------- .../scala/tools/util/VerifyClass.scala | 12 +++++++++++ .../nsc/ScalaCompilerOptionsExporter.scala | 12 +++++++++++ .../nsc/interactive/CompilerControl.scala | 14 ++++++++++--- .../tools/nsc/interactive/ContextTrees.scala | 14 ++++++++++--- .../scala/tools/nsc/interactive/Global.scala | 14 ++++++++++--- .../nsc/interactive/InteractiveReporter.scala | 14 ++++++++++--- .../scala/tools/nsc/interactive/Lexer.scala | 12 +++++++++++ .../scala/tools/nsc/interactive/Main.scala | 13 +++++++++--- .../scala/tools/nsc/interactive/Pickler.scala | 12 +++++++++++ .../tools/nsc/interactive/Picklers.scala | 14 ++++++++++--- .../PresentationCompilerThread.scala | 15 ++++++++++---- .../tools/nsc/interactive/PrettyWriter.scala | 12 +++++++++++ .../scala/tools/nsc/interactive/REPL.scala | 14 ++++++++++--- .../nsc/interactive/RangePositions.scala | 13 +++++++++--- .../tools/nsc/interactive/Replayer.scala | 12 +++++++++++ .../tools/nsc/interactive/Response.scala | 14 ++++++++++--- .../interactive/RichCompilationUnits.scala | 14 ++++++++++--- .../interactive/tests/InteractiveTest.scala | 14 ++++++++++--- .../tests/InteractiveTestSettings.scala | 12 +++++++++++ .../tools/nsc/interactive/tests/Tester.scala | 14 ++++++++++--- .../interactive/tests/core/AskCommand.scala | 14 ++++++++++--- .../interactive/tests/core/CoreTestDefs.scala | 12 +++++++++++ .../core/PresentationCompilerInstance.scala | 12 +++++++++++ ...sentationCompilerRequestsWorkingMode.scala | 12 +++++++++++ .../core/PresentationCompilerTestDef.scala | 12 +++++++++++ .../nsc/interactive/tests/core/Reporter.scala | 12 +++++++++++ .../tests/core/SourcesCollector.scala | 12 +++++++++++ .../interactive/tests/core/TestMarker.scala | 12 +++++++++++ .../tests/core/TestResources.scala | 12 +++++++++++ .../interactive/tests/core/TestSettings.scala | 12 +++++++++++ src/library/scala/AnyVal.scala | 18 ++++++++++------- src/library/scala/AnyValCompanion.scala | 18 ++++++++++------- src/library/scala/App.scala | 18 ++++++++++------- src/library/scala/Array.scala | 18 ++++++++++------- src/library/scala/Cloneable.scala | 18 ++++++++++------- src/library/scala/Console.scala | 18 ++++++++++------- src/library/scala/DelayedInit.scala | 18 ++++++++++------- src/library/scala/Dynamic.scala | 18 ++++++++++------- src/library/scala/Enumeration.scala | 18 ++++++++++------- src/library/scala/Equals.scala | 18 ++++++++++------- src/library/scala/Function.scala | 20 ++++++++++--------- src/library/scala/Immutable.scala | 20 ++++++++++--------- src/library/scala/MatchError.scala | 20 ++++++++++--------- src/library/scala/Mutable.scala | 20 
++++++++++--------- src/library/scala/NotImplementedError.scala | 20 ++++++++++--------- src/library/scala/NotNull.scala | 18 ++++++++++------- src/library/scala/Option.scala | 18 ++++++++++------- src/library/scala/PartialFunction.scala | 18 ++++++++++------- src/library/scala/Predef.scala | 18 ++++++++++------- src/library/scala/Product.scala | 18 ++++++++++------- src/library/scala/Proxy.scala | 18 ++++++++++------- src/library/scala/Responder.scala | 19 ++++++++++-------- src/library/scala/SerialVersionUID.scala | 18 ++++++++++------- src/library/scala/Serializable.scala | 18 ++++++++++------- src/library/scala/Specializable.scala | 18 ++++++++++------- src/library/scala/StringContext.scala | 18 ++++++++++------- src/library/scala/Symbol.scala | 18 ++++++++++------- src/library/scala/UninitializedError.scala | 20 ++++++++++--------- .../scala/UninitializedFieldError.scala | 20 ++++++++++--------- src/library/scala/annotation/Annotation.scala | 18 ++++++++++------- .../annotation/ClassfileAnnotation.scala | 18 ++++++++++------- .../scala/annotation/StaticAnnotation.scala | 18 ++++++++++------- .../scala/annotation/TypeConstraint.scala | 18 ++++++++++------- src/library/scala/annotation/bridge.scala | 18 ++++++++++------- .../scala/annotation/compileTimeOnly.scala | 12 +++++++++++ src/library/scala/annotation/elidable.scala | 18 ++++++++++------- .../scala/annotation/implicitAmbiguous.scala | 12 +++++++++++ .../scala/annotation/implicitNotFound.scala | 18 ++++++++++------- .../scala/annotation/meta/beanGetter.scala | 19 +++++++++++------- .../scala/annotation/meta/beanSetter.scala | 19 +++++++++++------- .../annotation/meta/companionClass.scala | 19 +++++++++++------- .../annotation/meta/companionMethod.scala | 19 +++++++++++------- .../annotation/meta/companionObject.scala | 19 +++++++++++------- src/library/scala/annotation/meta/field.scala | 19 +++++++++++------- .../scala/annotation/meta/getter.scala | 19 +++++++++++------- .../annotation/meta/languageFeature.scala | 19 +++++++++++------- .../scala/annotation/meta/package.scala | 12 +++++++++++ src/library/scala/annotation/meta/param.scala | 19 +++++++++++------- .../scala/annotation/meta/setter.scala | 19 +++++++++++------- src/library/scala/annotation/migration.scala | 18 ++++++++++------- .../scala/annotation/showAsInfix.scala | 12 +++++++++++ src/library/scala/annotation/strictfp.scala | 18 ++++++++++------- src/library/scala/annotation/switch.scala | 19 +++++++++++------- src/library/scala/annotation/tailrec.scala | 18 ++++++++++------- .../unchecked/uncheckedStable.scala | 19 +++++++++++------- .../unchecked/uncheckedVariance.scala | 19 +++++++++++------- .../scala/annotation/unspecialized.scala | 18 ++++++++++------- src/library/scala/annotation/varargs.scala | 18 ++++++++++------- src/library/scala/beans/BeanDescription.scala | 19 ++++++++++-------- src/library/scala/beans/BeanDisplayName.scala | 19 ++++++++++-------- src/library/scala/beans/BeanInfo.scala | 18 ++++++++++------- src/library/scala/beans/BeanInfoSkip.scala | 19 ++++++++++-------- src/library/scala/beans/BeanProperty.scala | 18 ++++++++++------- .../scala/beans/BooleanBeanProperty.scala | 18 ++++++++++------- src/library/scala/beans/ScalaBeanInfo.scala | 19 ++++++++++-------- src/library/scala/collection/BitSet.scala | 20 ++++++++++--------- src/library/scala/collection/BitSetLike.scala | 20 ++++++++++--------- .../scala/collection/BufferedIterator.scala | 20 ++++++++++--------- .../collection/CustomParallelizable.scala | 18 ++++++++++------- 
src/library/scala/collection/DefaultMap.scala | 18 ++++++++++------- .../scala/collection/GenIterable.scala | 18 ++++++++++------- .../scala/collection/GenIterableLike.scala | 18 ++++++++++------- src/library/scala/collection/GenMap.scala | 18 ++++++++++------- src/library/scala/collection/GenMapLike.scala | 18 ++++++++++------- src/library/scala/collection/GenSeq.scala | 18 ++++++++++------- src/library/scala/collection/GenSeqLike.scala | 18 ++++++++++------- src/library/scala/collection/GenSet.scala | 19 ++++++++++-------- src/library/scala/collection/GenSetLike.scala | 18 ++++++++++------- .../scala/collection/GenTraversable.scala | 18 ++++++++++------- .../scala/collection/GenTraversableLike.scala | 18 ++++++++++------- .../scala/collection/GenTraversableOnce.scala | 18 ++++++++++------- src/library/scala/collection/IndexedSeq.scala | 18 ++++++++++------- .../scala/collection/IndexedSeqLike.scala | 18 ++++++++++------- .../collection/IndexedSeqOptimized.scala | 18 ++++++++++------- src/library/scala/collection/Iterable.scala | 20 ++++++++++--------- .../scala/collection/IterableLike.scala | 18 ++++++++++------- .../scala/collection/IterableProxy.scala | 18 ++++++++++------- .../scala/collection/IterableProxyLike.scala | 20 ++++++++++--------- .../scala/collection/IterableView.scala | 20 ++++++++++--------- .../scala/collection/IterableViewLike.scala | 18 ++++++++++------- src/library/scala/collection/Iterator.scala | 18 ++++++++++------- .../scala/collection/JavaConversions.scala | 18 ++++++++++------- .../scala/collection/JavaConverters.scala | 18 ++++++++++------- src/library/scala/collection/LinearSeq.scala | 20 ++++++++++--------- .../scala/collection/LinearSeqLike.scala | 18 ++++++++++------- .../scala/collection/LinearSeqOptimized.scala | 18 ++++++++++------- src/library/scala/collection/Map.scala | 18 ++++++++++------- src/library/scala/collection/MapLike.scala | 18 ++++++++++------- src/library/scala/collection/MapProxy.scala | 18 ++++++++++------- .../scala/collection/MapProxyLike.scala | 18 ++++++++++------- src/library/scala/collection/Parallel.scala | 18 ++++++++++------- .../scala/collection/Parallelizable.scala | 18 ++++++++++------- src/library/scala/collection/Searching.scala | 18 ++++++++++------- src/library/scala/collection/Seq.scala | 18 ++++++++++------- .../scala/collection/SeqExtractors.scala | 12 +++++++++++ src/library/scala/collection/SeqLike.scala | 18 ++++++++++------- src/library/scala/collection/SeqProxy.scala | 20 ++++++++++--------- .../scala/collection/SeqProxyLike.scala | 20 ++++++++++--------- src/library/scala/collection/SeqView.scala | 20 ++++++++++--------- .../scala/collection/SeqViewLike.scala | 18 ++++++++++------- src/library/scala/collection/Set.scala | 18 ++++++++++------- src/library/scala/collection/SetLike.scala | 18 ++++++++++------- src/library/scala/collection/SetProxy.scala | 18 ++++++++++------- .../scala/collection/SetProxyLike.scala | 18 ++++++++++------- src/library/scala/collection/SortedMap.scala | 18 ++++++++++------- .../scala/collection/SortedMapLike.scala | 18 ++++++++++------- src/library/scala/collection/SortedSet.scala | 19 ++++++++++-------- .../scala/collection/SortedSetLike.scala | 19 ++++++++++-------- .../scala/collection/Traversable.scala | 18 ++++++++++------- .../scala/collection/TraversableLike.scala | 18 ++++++++++------- .../scala/collection/TraversableOnce.scala | 18 ++++++++++------- .../scala/collection/TraversableProxy.scala | 20 ++++++++++--------- .../collection/TraversableProxyLike.scala | 20 
++++++++++--------- .../scala/collection/TraversableView.scala | 18 ++++++++++------- .../collection/TraversableViewLike.scala | 18 ++++++++++------- .../collection/concurrent/BasicNode.java | 18 ++++++++++------- .../collection/concurrent/CNodeBase.java | 18 ++++++++++------- .../scala/collection/concurrent/Gen.java | 18 ++++++++++------- .../collection/concurrent/INodeBase.java | 18 ++++++++++------- .../scala/collection/concurrent/MainNode.java | 18 ++++++++++------- .../scala/collection/concurrent/Map.scala | 18 ++++++++++------- .../scala/collection/concurrent/TrieMap.scala | 18 ++++++++++------- .../collection/convert/AsJavaConverters.scala | 18 ++++++++++------- .../convert/AsScalaConverters.scala | 18 ++++++++++------- .../collection/convert/DecorateAsJava.scala | 18 ++++++++++------- .../collection/convert/DecorateAsScala.scala | 18 ++++++++++------- .../scala/collection/convert/Decorators.scala | 18 ++++++++++------- .../convert/ImplicitConversions.scala | 18 ++++++++++------- .../scala/collection/convert/WrapAsJava.scala | 18 ++++++++++------- .../collection/convert/WrapAsScala.scala | 18 ++++++++++------- .../scala/collection/convert/Wrappers.scala | 18 ++++++++++------- .../scala/collection/convert/package.scala | 18 ++++++++++------- .../collection/generic/BitOperations.scala | 18 ++++++++++------- .../collection/generic/BitSetFactory.scala | 20 ++++++++++--------- .../collection/generic/CanBuildFrom.scala | 19 ++++++++++-------- .../collection/generic/CanCombineFrom.scala | 18 ++++++++++------- .../generic/ClassTagTraversableFactory.scala | 18 ++++++++++------- .../scala/collection/generic/Clearable.scala | 18 ++++++++++------- .../collection/generic/FilterMonadic.scala | 18 ++++++++++------- .../collection/generic/GenMapFactory.scala | 18 ++++++++++------- .../collection/generic/GenSeqFactory.scala | 20 ++++++++++--------- .../collection/generic/GenSetFactory.scala | 20 ++++++++++--------- .../generic/GenTraversableFactory.scala | 19 ++++++++++-------- .../generic/GenericClassTagCompanion.scala | 18 ++++++++++------- .../GenericClassTagTraversableTemplate.scala | 18 ++++++++++------- .../collection/generic/GenericCompanion.scala | 18 ++++++++++------- .../generic/GenericOrderedCompanion.scala | 18 ++++++++++------- .../GenericOrderedTraversableTemplate.scala | 20 ++++++++++--------- .../generic/GenericParCompanion.scala | 18 ++++++++++------- .../generic/GenericParTemplate.scala | 18 ++++++++++------- .../generic/GenericSeqCompanion.scala | 18 ++++++++++------- .../generic/GenericSetTemplate.scala | 18 ++++++++++------- .../generic/GenericTraversableTemplate.scala | 20 ++++++++++--------- .../scala/collection/generic/Growable.scala | 18 ++++++++++------- .../collection/generic/HasNewBuilder.scala | 19 +++++++++++------- .../collection/generic/HasNewCombiner.scala | 18 ++++++++++------- .../generic/ImmutableMapFactory.scala | 19 ++++++++++-------- .../generic/ImmutableSetFactory.scala | 18 ++++++++++------- .../generic/ImmutableSortedMapFactory.scala | 20 ++++++++++--------- .../generic/ImmutableSortedSetFactory.scala | 20 ++++++++++--------- .../generic/IndexedSeqFactory.scala | 18 ++++++++++------- .../scala/collection/generic/IsSeqLike.scala | 18 ++++++++++------- .../generic/IsTraversableLike.scala | 18 ++++++++++------- .../generic/IsTraversableOnce.scala | 18 ++++++++++------- .../generic/IterableForwarder.scala | 18 ++++++++++------- .../scala/collection/generic/MapFactory.scala | 18 ++++++++++------- .../generic/MutableMapFactory.scala | 20 ++++++++++--------- 
.../generic/MutableSetFactory.scala | 18 ++++++++++------- .../generic/MutableSortedMapFactory.scala | 12 +++++++++++ .../generic/MutableSortedSetFactory.scala | 18 ++++++++++------- .../generic/OrderedTraversableFactory.scala | 19 ++++++++++-------- .../scala/collection/generic/ParFactory.scala | 18 ++++++++++------- .../collection/generic/ParMapFactory.scala | 18 ++++++++++------- .../collection/generic/ParSetFactory.scala | 18 ++++++++++------- .../scala/collection/generic/SeqFactory.scala | 20 ++++++++++--------- .../collection/generic/SeqForwarder.scala | 18 ++++++++++------- .../scala/collection/generic/SetFactory.scala | 20 ++++++++++--------- .../scala/collection/generic/Shrinkable.scala | 18 ++++++++++------- .../scala/collection/generic/Signalling.scala | 18 ++++++++++------- .../scala/collection/generic/Sizing.scala | 18 ++++++++++------- .../collection/generic/SliceInterval.scala | 18 ++++++++++------- .../scala/collection/generic/Sorted.scala | 18 ++++++++++------- .../collection/generic/SortedMapFactory.scala | 20 ++++++++++--------- .../collection/generic/SortedSetFactory.scala | 20 ++++++++++--------- .../collection/generic/Subtractable.scala | 19 ++++++++++-------- .../generic/TraversableFactory.scala | 19 ++++++++++-------- .../generic/TraversableForwarder.scala | 18 ++++++++++------- .../scala/collection/generic/package.scala | 12 +++++++++++ .../scala/collection/immutable/BitSet.scala | 20 ++++++++++--------- .../collection/immutable/DefaultMap.scala | 18 ++++++++++------- .../scala/collection/immutable/HashMap.scala | 18 ++++++++++------- .../scala/collection/immutable/HashSet.scala | 20 ++++++++++--------- .../collection/immutable/IndexedSeq.scala | 19 ++++++++++-------- .../scala/collection/immutable/IntMap.scala | 18 ++++++++++------- .../scala/collection/immutable/Iterable.scala | 20 ++++++++++--------- .../collection/immutable/LinearSeq.scala | 20 ++++++++++--------- .../scala/collection/immutable/List.scala | 18 ++++++++++------- .../scala/collection/immutable/ListMap.scala | 18 ++++++++++------- .../scala/collection/immutable/ListSet.scala | 18 ++++++++++------- .../scala/collection/immutable/LongMap.scala | 18 ++++++++++------- .../scala/collection/immutable/Map.scala | 19 ++++++++++-------- .../scala/collection/immutable/MapLike.scala | 18 ++++++++++------- .../scala/collection/immutable/MapProxy.scala | 20 ++++++++++--------- .../collection/immutable/NumericRange.scala | 18 ++++++++++------- .../scala/collection/immutable/PagedSeq.scala | 20 ++++++++++--------- .../scala/collection/immutable/Queue.scala | 18 ++++++++++------- .../scala/collection/immutable/Range.scala | 19 ++++++++++-------- .../collection/immutable/RedBlackTree.scala | 20 ++++++++++--------- .../scala/collection/immutable/Seq.scala | 20 ++++++++++--------- .../scala/collection/immutable/Set.scala | 20 ++++++++++--------- .../scala/collection/immutable/SetProxy.scala | 20 ++++++++++--------- .../collection/immutable/SortedMap.scala | 20 ++++++++++--------- .../collection/immutable/SortedSet.scala | 20 ++++++++++--------- .../scala/collection/immutable/Stack.scala | 18 ++++++++++------- .../scala/collection/immutable/Stream.scala | 18 ++++++++++------- .../collection/immutable/StreamView.scala | 12 +++++++++++ .../collection/immutable/StreamViewLike.scala | 12 +++++++++++ .../collection/immutable/StringLike.scala | 18 ++++++++++------- .../collection/immutable/StringOps.scala | 18 ++++++++++------- .../collection/immutable/Traversable.scala | 20 ++++++++++--------- 
.../scala/collection/immutable/TreeMap.scala | 18 ++++++++++------- .../scala/collection/immutable/TreeSet.scala | 20 ++++++++++--------- .../collection/immutable/TrieIterator.scala | 18 ++++++++++------- .../scala/collection/immutable/Vector.scala | 18 ++++++++++------- .../collection/immutable/WrappedString.scala | 20 ++++++++++--------- .../scala/collection/mutable/AnyRefMap.scala | 12 +++++++++++ .../collection/mutable/ArrayBuffer.scala | 20 ++++++++++--------- .../collection/mutable/ArrayBuilder.scala | 18 ++++++++++------- .../scala/collection/mutable/ArrayLike.scala | 18 ++++++++++------- .../scala/collection/mutable/ArrayOps.scala | 18 ++++++++++------- .../scala/collection/mutable/ArraySeq.scala | 20 ++++++++++--------- .../scala/collection/mutable/ArrayStack.scala | 18 ++++++++++------- .../scala/collection/mutable/BitSet.scala | 20 ++++++++++--------- .../scala/collection/mutable/Buffer.scala | 20 ++++++++++--------- .../scala/collection/mutable/BufferLike.scala | 20 ++++++++++--------- .../collection/mutable/BufferProxy.scala | 18 ++++++++++------- .../scala/collection/mutable/Builder.scala | 19 ++++++++++-------- .../scala/collection/mutable/Cloneable.scala | 20 ++++++++++--------- .../collection/mutable/DefaultEntry.scala | 18 ++++++++++------- .../collection/mutable/DefaultMapModel.scala | 20 ++++++++++--------- .../collection/mutable/DoubleLinkedList.scala | 20 ++++++++++--------- .../mutable/DoubleLinkedListLike.scala | 20 ++++++++++--------- .../collection/mutable/FlatHashTable.scala | 18 ++++++++++------- .../collection/mutable/GrowingBuilder.scala | 18 ++++++++++------- .../scala/collection/mutable/HashEntry.scala | 19 +++++++++++------- .../scala/collection/mutable/HashMap.scala | 18 ++++++++++------- .../scala/collection/mutable/HashSet.scala | 20 ++++++++++--------- .../scala/collection/mutable/HashTable.scala | 20 ++++++++++--------- .../scala/collection/mutable/History.scala | 20 ++++++++++--------- .../mutable/ImmutableMapAdaptor.scala | 20 ++++++++++--------- .../mutable/ImmutableSetAdaptor.scala | 18 ++++++++++------- .../scala/collection/mutable/IndexedSeq.scala | 20 ++++++++++--------- .../collection/mutable/IndexedSeqLike.scala | 18 ++++++++++------- .../mutable/IndexedSeqOptimized.scala | 18 ++++++++++------- .../collection/mutable/IndexedSeqView.scala | 20 ++++++++++--------- .../scala/collection/mutable/Iterable.scala | 19 +++++++++++------- .../collection/mutable/LazyBuilder.scala | 18 ++++++++++------- .../scala/collection/mutable/LinearSeq.scala | 20 ++++++++++--------- .../collection/mutable/LinkedEntry.scala | 20 ++++++++++--------- .../collection/mutable/LinkedHashMap.scala | 20 ++++++++++--------- .../collection/mutable/LinkedHashSet.scala | 19 ++++++++++-------- .../scala/collection/mutable/LinkedList.scala | 20 ++++++++++--------- .../collection/mutable/LinkedListLike.scala | 18 ++++++++++------- .../scala/collection/mutable/ListBuffer.scala | 18 ++++++++++------- .../scala/collection/mutable/ListMap.scala | 20 ++++++++++--------- .../scala/collection/mutable/LongMap.scala | 12 +++++++++++ .../scala/collection/mutable/Map.scala | 20 ++++++++++--------- .../scala/collection/mutable/MapBuilder.scala | 19 ++++++++++-------- .../scala/collection/mutable/MapLike.scala | 19 ++++++++++-------- .../scala/collection/mutable/MapProxy.scala | 18 ++++++++++------- .../scala/collection/mutable/MultiMap.scala | 20 ++++++++++--------- .../collection/mutable/MutableList.scala | 18 ++++++++++------- .../collection/mutable/ObservableBuffer.scala | 20 
++++++++++--------- .../collection/mutable/ObservableMap.scala | 20 ++++++++++--------- .../collection/mutable/ObservableSet.scala | 20 ++++++++++--------- .../collection/mutable/OpenHashMap.scala | 18 ++++++++++------- .../collection/mutable/PriorityQueue.scala | 18 ++++++++++------- .../scala/collection/mutable/Publisher.scala | 20 ++++++++++--------- .../scala/collection/mutable/Queue.scala | 20 ++++++++++--------- .../scala/collection/mutable/QueueProxy.scala | 20 ++++++++++--------- .../collection/mutable/RedBlackTree.scala | 12 +++++++++++ .../collection/mutable/ResizableArray.scala | 18 ++++++++++------- .../collection/mutable/ReusableBuilder.scala | 19 ++++++++++-------- .../mutable/RevertibleHistory.scala | 20 ++++++++++--------- .../scala/collection/mutable/Seq.scala | 20 ++++++++++--------- .../scala/collection/mutable/SeqLike.scala | 18 ++++++++++------- .../scala/collection/mutable/Set.scala | 20 ++++++++++--------- .../scala/collection/mutable/SetBuilder.scala | 18 ++++++++++------- .../scala/collection/mutable/SetLike.scala | 18 ++++++++++------- .../scala/collection/mutable/SetProxy.scala | 18 ++++++++++------- .../scala/collection/mutable/SortedMap.scala | 12 +++++++++++ .../scala/collection/mutable/SortedSet.scala | 18 ++++++++++------- .../scala/collection/mutable/Stack.scala | 20 ++++++++++--------- .../scala/collection/mutable/StackProxy.scala | 18 ++++++++++------- .../collection/mutable/StringBuilder.scala | 18 ++++++++++------- .../scala/collection/mutable/Subscriber.scala | 18 ++++++++++------- .../mutable/SynchronizedBuffer.scala | 20 ++++++++++--------- .../collection/mutable/SynchronizedMap.scala | 18 ++++++++++------- .../mutable/SynchronizedQueue.scala | 20 ++++++++++--------- .../collection/mutable/SynchronizedSet.scala | 19 ++++++++++-------- .../mutable/SynchronizedStack.scala | 20 ++++++++++--------- .../collection/mutable/Traversable.scala | 20 ++++++++++--------- .../scala/collection/mutable/TreeMap.scala | 12 +++++++++++ .../scala/collection/mutable/TreeSet.scala | 18 ++++++++++------- .../scala/collection/mutable/Undoable.scala | 20 ++++++++++--------- .../collection/mutable/UnrolledBuffer.scala | 18 ++++++++++------- .../collection/mutable/WeakHashMap.scala | 18 ++++++++++------- .../collection/mutable/WrappedArray.scala | 20 ++++++++++--------- .../mutable/WrappedArrayBuilder.scala | 20 ++++++++++--------- src/library/scala/collection/package.scala | 18 ++++++++++------- .../scala/collection/parallel/Combiner.scala | 18 ++++++++++------- .../collection/parallel/ParIterable.scala | 18 ++++++++++------- .../collection/parallel/ParIterableLike.scala | 18 ++++++++++------- .../scala/collection/parallel/ParMap.scala | 18 ++++++++++------- .../collection/parallel/ParMapLike.scala | 18 ++++++++++------- .../scala/collection/parallel/ParSeq.scala | 18 ++++++++++------- .../collection/parallel/ParSeqLike.scala | 18 ++++++++++------- .../scala/collection/parallel/ParSet.scala | 18 ++++++++++------- .../collection/parallel/ParSetLike.scala | 18 ++++++++++------- .../collection/parallel/PreciseSplitter.scala | 18 ++++++++++------- .../collection/parallel/RemainsIterator.scala | 18 ++++++++++------- .../scala/collection/parallel/Splitter.scala | 18 ++++++++++------- .../collection/parallel/TaskSupport.scala | 18 ++++++++++------- .../scala/collection/parallel/Tasks.scala | 18 ++++++++++------- .../parallel/immutable/ParHashMap.scala | 18 ++++++++++------- .../parallel/immutable/ParHashSet.scala | 18 ++++++++++------- .../parallel/immutable/ParIterable.scala | 18 
++++++++++------- .../parallel/immutable/ParMap.scala | 18 ++++++++++------- .../parallel/immutable/ParRange.scala | 18 ++++++++++------- .../parallel/immutable/ParSeq.scala | 18 ++++++++++------- .../parallel/immutable/ParSet.scala | 18 ++++++++++------- .../parallel/immutable/ParVector.scala | 18 ++++++++++------- .../parallel/immutable/package.scala | 18 ++++++++++------- .../parallel/mutable/LazyCombiner.scala | 18 ++++++++++------- .../parallel/mutable/ParArray.scala | 19 ++++++++++-------- .../parallel/mutable/ParFlatHashTable.scala | 18 ++++++++++------- .../parallel/mutable/ParHashMap.scala | 18 ++++++++++------- .../parallel/mutable/ParHashSet.scala | 18 ++++++++++------- .../parallel/mutable/ParHashTable.scala | 18 ++++++++++------- .../parallel/mutable/ParIterable.scala | 18 ++++++++++------- .../collection/parallel/mutable/ParMap.scala | 18 ++++++++++------- .../parallel/mutable/ParMapLike.scala | 18 ++++++++++------- .../collection/parallel/mutable/ParSeq.scala | 18 ++++++++++------- .../collection/parallel/mutable/ParSet.scala | 18 ++++++++++------- .../parallel/mutable/ParSetLike.scala | 18 ++++++++++------- .../parallel/mutable/ParTrieMap.scala | 18 ++++++++++------- .../mutable/ResizableParArrayCombiner.scala | 18 ++++++++++------- .../mutable/UnrolledParArrayCombiner.scala | 18 ++++++++++------- .../collection/parallel/mutable/package.scala | 18 ++++++++++------- .../scala/collection/parallel/package.scala | 18 ++++++++++------- .../scala/collection/script/Location.scala | 18 ++++++++++------- .../scala/collection/script/Message.scala | 18 ++++++++++------- .../scala/collection/script/Scriptable.scala | 18 ++++++++++------- src/library/scala/compat/Platform.scala | 18 ++++++++++------- src/library/scala/concurrent/Awaitable.scala | 18 ++++++++++------- .../scala/concurrent/BatchingExecutor.scala | 18 ++++++++++------- .../scala/concurrent/BlockContext.scala | 18 ++++++++++------- src/library/scala/concurrent/Channel.scala | 20 ++++++++++--------- .../scala/concurrent/DelayedLazyVal.scala | 18 ++++++++++------- .../scala/concurrent/ExecutionContext.scala | 18 ++++++++++------- src/library/scala/concurrent/Future.scala | 18 ++++++++++------- .../scala/concurrent/JavaConversions.scala | 18 ++++++++++------- src/library/scala/concurrent/Lock.scala | 20 ++++++++++--------- src/library/scala/concurrent/Promise.scala | 18 ++++++++++------- .../scala/concurrent/SyncChannel.scala | 18 ++++++++++------- src/library/scala/concurrent/SyncVar.scala | 18 ++++++++++------- .../scala/concurrent/duration/Deadline.scala | 18 ++++++++++------- .../scala/concurrent/duration/Duration.scala | 18 ++++++++++------- .../duration/DurationConversions.scala | 18 ++++++++++------- .../scala/concurrent/duration/package.scala | 12 +++++++++++ .../scala/concurrent/forkjoin/package.scala | 18 ++++++++++------- .../impl/ExecutionContextImpl.scala | 18 ++++++++++------- .../scala/concurrent/impl/Promise.scala | 18 ++++++++++------- src/library/scala/concurrent/package.scala | 18 ++++++++++------- src/library/scala/deprecated.scala | 18 ++++++++++------- src/library/scala/deprecatedInheritance.scala | 18 ++++++++++------- src/library/scala/deprecatedName.scala | 18 ++++++++++------- src/library/scala/deprecatedOverriding.scala | 18 ++++++++++------- src/library/scala/inline.scala | 20 ++++++++++--------- src/library/scala/io/AnsiColor.scala | 12 +++++++++++ src/library/scala/io/BufferedSource.scala | 18 ++++++++++------- src/library/scala/io/Codec.scala | 18 ++++++++++------- 
src/library/scala/io/Position.scala | 18 ++++++++++------- src/library/scala/io/Source.scala | 18 ++++++++++------- src/library/scala/io/StdIn.scala | 12 +++++++++++ src/library/scala/language.scala | 20 ++++++++++--------- src/library/scala/languageFeature.scala | 20 ++++++++++--------- src/library/scala/math/BigDecimal.scala | 19 ++++++++++-------- src/library/scala/math/BigInt.scala | 18 ++++++++++------- src/library/scala/math/Equiv.scala | 18 ++++++++++------- src/library/scala/math/Fractional.scala | 18 ++++++++++------- src/library/scala/math/Integral.scala | 18 ++++++++++------- src/library/scala/math/Numeric.scala | 18 ++++++++++------- src/library/scala/math/Ordered.scala | 18 ++++++++++------- src/library/scala/math/Ordering.scala | 18 ++++++++++------- src/library/scala/math/PartialOrdering.scala | 18 ++++++++++------- src/library/scala/math/PartiallyOrdered.scala | 20 ++++++++++--------- src/library/scala/math/ScalaNumber.java | 18 ++++++++++------- .../scala/math/ScalaNumericConversions.scala | 18 ++++++++++------- src/library/scala/math/package.scala | 18 ++++++++++------- src/library/scala/native.scala | 20 ++++++++++--------- src/library/scala/noinline.scala | 20 ++++++++++--------- src/library/scala/package.scala | 19 ++++++++++-------- src/library/scala/ref/PhantomReference.scala | 19 ++++++++++-------- src/library/scala/ref/Reference.scala | 18 ++++++++++------- src/library/scala/ref/ReferenceQueue.scala | 19 ++++++++++-------- src/library/scala/ref/ReferenceWrapper.scala | 19 ++++++++++-------- src/library/scala/ref/SoftReference.scala | 19 ++++++++++-------- src/library/scala/ref/WeakReference.scala | 19 ++++++++++-------- .../reflect/ClassManifestDeprecatedApis.scala | 18 ++++++++++------- src/library/scala/reflect/ClassTag.scala | 12 +++++++++++ src/library/scala/reflect/Manifest.scala | 18 ++++++++++------- .../scala/reflect/NameTransformer.scala | 18 ++++++++++------- src/library/scala/reflect/NoManifest.scala | 18 ++++++++++------- src/library/scala/reflect/OptManifest.scala | 18 ++++++++++------- .../scala/reflect/ScalaLongSignature.java | 12 +++++++++++ src/library/scala/reflect/ScalaSignature.java | 12 +++++++++++ .../reflect/macros/internal/macroImpl.scala | 12 +++++++++++ src/library/scala/reflect/package.scala | 12 +++++++++++ src/library/scala/remote.scala | 18 ++++++++++------- .../runtime/AbstractPartialFunction.scala | 18 ++++++++++------- src/library/scala/runtime/BooleanRef.java | 20 ++++++++++--------- src/library/scala/runtime/BoxedUnit.java | 20 ++++++++++--------- src/library/scala/runtime/BoxesRunTime.java | 20 ++++++++++--------- src/library/scala/runtime/ByteRef.java | 20 ++++++++++--------- src/library/scala/runtime/CharRef.java | 20 ++++++++++--------- src/library/scala/runtime/DoubleRef.java | 20 ++++++++++--------- src/library/scala/runtime/FloatRef.java | 20 ++++++++++--------- src/library/scala/runtime/IntRef.java | 20 ++++++++++--------- .../scala/runtime/LambdaDeserialize.java | 12 +++++++++++ .../scala/runtime/LambdaDeserializer.scala | 12 +++++++++++ src/library/scala/runtime/LazyRef.scala | 18 ++++++++++------- src/library/scala/runtime/LongRef.java | 20 ++++++++++--------- src/library/scala/runtime/MethodCache.scala | 18 ++++++++++------- .../scala/runtime/NonLocalReturnControl.scala | 18 ++++++++++------- src/library/scala/runtime/Nothing$.scala | 18 ++++++++++------- src/library/scala/runtime/Null$.scala | 18 ++++++++++------- src/library/scala/runtime/ObjectRef.java | 20 ++++++++++--------- 
src/library/scala/runtime/RichBoolean.scala | 18 ++++++++++------- src/library/scala/runtime/RichByte.scala | 18 ++++++++++------- src/library/scala/runtime/RichChar.scala | 18 ++++++++++------- src/library/scala/runtime/RichDouble.scala | 18 ++++++++++------- src/library/scala/runtime/RichException.scala | 18 ++++++++++------- src/library/scala/runtime/RichFloat.scala | 18 ++++++++++------- src/library/scala/runtime/RichInt.scala | 18 ++++++++++------- src/library/scala/runtime/RichLong.scala | 18 ++++++++++------- src/library/scala/runtime/RichShort.scala | 18 ++++++++++------- .../scala/runtime/ScalaNumberProxy.scala | 18 ++++++++++------- src/library/scala/runtime/ScalaRunTime.scala | 18 ++++++++++------- .../scala/runtime/SeqCharSequence.scala | 18 ++++++++++------- src/library/scala/runtime/ShortRef.java | 20 ++++++++++--------- src/library/scala/runtime/Statics.java | 12 +++++++++++ src/library/scala/runtime/StringAdd.scala | 18 ++++++++++------- src/library/scala/runtime/StringFormat.scala | 18 ++++++++++------- .../scala/runtime/StructuralCallSite.java | 12 +++++++++++ src/library/scala/runtime/SymbolLiteral.java | 12 +++++++++++ src/library/scala/runtime/TraitSetter.java | 12 +++++++++++ src/library/scala/runtime/Tuple2Zipped.scala | 18 ++++++++++------- src/library/scala/runtime/Tuple3Zipped.scala | 18 ++++++++++------- .../scala/runtime/VolatileBooleanRef.java | 20 ++++++++++--------- .../scala/runtime/VolatileByteRef.java | 20 ++++++++++--------- .../scala/runtime/VolatileCharRef.java | 20 ++++++++++--------- .../scala/runtime/VolatileDoubleRef.java | 20 ++++++++++--------- .../scala/runtime/VolatileFloatRef.java | 20 ++++++++++--------- src/library/scala/runtime/VolatileIntRef.java | 20 ++++++++++--------- .../scala/runtime/VolatileLongRef.java | 20 ++++++++++--------- .../scala/runtime/VolatileObjectRef.java | 20 ++++++++++--------- .../scala/runtime/VolatileShortRef.java | 20 ++++++++++--------- .../runtime/java8/JFunction0$mcB$sp.java | 11 ++++++++++ .../runtime/java8/JFunction0$mcC$sp.java | 11 ++++++++++ .../runtime/java8/JFunction0$mcD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction0$mcF$sp.java | 11 ++++++++++ .../runtime/java8/JFunction0$mcI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction0$mcJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction0$mcS$sp.java | 11 ++++++++++ .../runtime/java8/JFunction0$mcV$sp.java | 11 ++++++++++ .../runtime/java8/JFunction0$mcZ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcDD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcDF$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcDI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcDJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcFD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcFF$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcFI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcFJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcID$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcIF$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcII$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcIJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcJD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcJF$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcJI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcJJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcVD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcVF$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcVI$sp.java | 11 
++++++++++ .../runtime/java8/JFunction1$mcVJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcZD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcZF$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcZI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction1$mcZJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcDDD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcDDI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcDDJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcDID$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcDII$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcDIJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcDJD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcDJI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcDJJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcFDD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcFDI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcFDJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcFID$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcFII$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcFIJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcFJD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcFJI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcFJJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcIDD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcIDI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcIDJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcIID$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcIII$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcIIJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcIJD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcIJI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcIJJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcJDD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcJDI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcJDJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcJID$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcJII$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcJIJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcJJD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcJJI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcJJJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcVDD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcVDI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcVDJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcVID$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcVII$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcVIJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcVJD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcVJI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcVJJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcZDD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcZDI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcZDJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcZID$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcZII$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcZIJ$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcZJD$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcZJI$sp.java | 11 ++++++++++ .../runtime/java8/JFunction2$mcZJJ$sp.java | 11 ++++++++++ src/library/scala/runtime/package.scala | 12 +++++++++++ 
src/library/scala/specialized.scala | 18 ++++++++++------- src/library/scala/sys/BooleanProp.scala | 18 ++++++++++------- src/library/scala/sys/Prop.scala | 18 ++++++++++------- src/library/scala/sys/PropImpl.scala | 18 ++++++++++------- .../scala/sys/ShutdownHookThread.scala | 18 ++++++++++------- src/library/scala/sys/SystemProperties.scala | 18 ++++++++++------- src/library/scala/sys/package.scala | 18 ++++++++++------- src/library/scala/sys/process/BasicIO.scala | 18 ++++++++++------- src/library/scala/sys/process/Process.scala | 18 ++++++++++------- .../scala/sys/process/ProcessBuilder.scala | 18 ++++++++++------- .../sys/process/ProcessBuilderImpl.scala | 18 ++++++++++------- src/library/scala/sys/process/ProcessIO.scala | 18 ++++++++++------- .../scala/sys/process/ProcessImpl.scala | 18 ++++++++++------- .../scala/sys/process/ProcessLogger.scala | 18 ++++++++++------- src/library/scala/sys/process/package.scala | 18 ++++++++++------- src/library/scala/text/Document.scala | 18 ++++++++++------- src/library/scala/throws.scala | 18 ++++++++++------- src/library/scala/transient.scala | 18 ++++++++++------- src/library/scala/unchecked.scala | 18 ++++++++++------- src/library/scala/util/DynamicVariable.scala | 18 ++++++++++------- src/library/scala/util/Either.scala | 18 ++++++++++------- src/library/scala/util/MurmurHash.scala | 18 ++++++++++------- src/library/scala/util/Properties.scala | 19 ++++++++++-------- src/library/scala/util/Random.scala | 18 ++++++++++------- src/library/scala/util/Sorting.scala | 18 ++++++++++------- src/library/scala/util/Try.scala | 18 ++++++++++------- src/library/scala/util/control/Breaks.scala | 18 ++++++++++------- .../scala/util/control/ControlThrowable.scala | 18 ++++++++++------- .../scala/util/control/Exception.scala | 18 ++++++++++------- .../scala/util/control/NoStackTrace.scala | 18 ++++++++++------- src/library/scala/util/control/NonFatal.scala | 18 ++++++++++------- .../scala/util/control/TailCalls.scala | 18 ++++++++++------- .../scala/util/hashing/ByteswapHashing.scala | 18 ++++++++++------- src/library/scala/util/hashing/Hashing.scala | 18 ++++++++++------- .../scala/util/hashing/MurmurHash3.scala | 18 ++++++++++------- src/library/scala/util/hashing/package.scala | 18 ++++++++++------- src/library/scala/util/matching/Regex.scala | 18 ++++++++++------- src/library/scala/volatile.scala | 18 ++++++++++------- .../scala/tools/partest/ASMConverters.scala | 12 +++++++++++ .../scala/tools/partest/AsmNode.scala | 12 +++++++++++ .../scala/tools/partest/BytecodeTest.scala | 12 +++++++++++ .../scala/tools/partest/IcodeComparison.scala | 13 +++++++++--- .../scala/tools/partest/JavapTest.scala | 11 ++++++++++ .../scala/tools/partest/ParserTest.scala | 12 +++++++++-- .../scala/tools/partest/ReplTest.scala | 13 +++++++++--- .../tools/partest/ScaladocJavaModelTest.scala | 12 +++++++++++ .../tools/partest/ScaladocModelTest.scala | 13 +++++++++--- .../scala/tools/partest/ScriptTest.scala | 12 +++++++++-- .../scala/tools/partest/SigTest.scala | 13 +++++++++--- .../tools/partest/StubErrorMessageTest.scala | 12 +++++++++++ .../scala/tools/partest/Util.scala | 12 +++++++++++ .../instrumented/Instrumentation.scala | 13 +++++++++--- .../tools/partest/instrumented/Profiler.java | 13 +++++++++--- .../scala/reflect/api/Annotations.scala | 12 +++++++++++ src/reflect/scala/reflect/api/Constants.scala | 13 +++++++++--- src/reflect/scala/reflect/api/Exprs.scala | 13 +++++++++--- src/reflect/scala/reflect/api/FlagSets.scala | 12 +++++++++++ 
.../scala/reflect/api/ImplicitTags.scala | 12 +++++++++++ src/reflect/scala/reflect/api/Internals.scala | 12 +++++++++++ .../scala/reflect/api/JavaUniverse.scala | 12 +++++++++++ src/reflect/scala/reflect/api/Liftables.scala | 12 +++++++++++ src/reflect/scala/reflect/api/Mirror.scala | 12 +++++++++++ src/reflect/scala/reflect/api/Mirrors.scala | 12 +++++++++++ src/reflect/scala/reflect/api/Names.scala | 12 +++++++++++ src/reflect/scala/reflect/api/Position.scala | 12 +++++++++++ src/reflect/scala/reflect/api/Positions.scala | 12 +++++++++++ src/reflect/scala/reflect/api/Printers.scala | 12 +++++++++++ .../scala/reflect/api/Quasiquotes.scala | 12 +++++++++++ src/reflect/scala/reflect/api/Scopes.scala | 12 +++++++++++ .../reflect/api/StandardDefinitions.scala | 14 ++++++++++--- .../scala/reflect/api/StandardLiftables.scala | 12 +++++++++++ .../scala/reflect/api/StandardNames.scala | 16 +++++++++++---- src/reflect/scala/reflect/api/Symbols.scala | 12 +++++++++++ .../scala/reflect/api/TreeCreator.scala | 12 +++++++++++ src/reflect/scala/reflect/api/Trees.scala | 14 ++++++++++--- .../scala/reflect/api/TypeCreator.scala | 12 +++++++++++ src/reflect/scala/reflect/api/TypeTags.scala | 13 +++++++++--- src/reflect/scala/reflect/api/Types.scala | 12 +++++++++++ src/reflect/scala/reflect/api/Universe.scala | 12 +++++++++++ src/reflect/scala/reflect/api/package.scala | 12 +++++++++++ .../reflect/internal/AnnotationCheckers.scala | 13 +++++++++--- .../reflect/internal/AnnotationInfos.scala | 13 +++++++++--- .../scala/reflect/internal/BaseTypeSeqs.scala | 14 ++++++++++--- .../reflect/internal/CapturedVariables.scala | 12 +++++++++++ .../scala/reflect/internal/Chars.scala | 14 ++++++++++--- .../reflect/internal/ClassfileConstants.scala | 13 +++++++++--- .../scala/reflect/internal/Constants.scala | 13 +++++++++--- .../scala/reflect/internal/Definitions.scala | 13 +++++++++--- .../scala/reflect/internal/Depth.scala | 12 +++++++++++ .../internal/ExistentialsAndSkolems.scala | 13 +++++++++--- .../scala/reflect/internal/FatalError.scala | 14 ++++++++++--- .../scala/reflect/internal/FlagSets.scala | 12 +++++++++++ .../scala/reflect/internal/Flags.scala | 13 +++++++++--- .../scala/reflect/internal/FreshNames.scala | 12 +++++++++-- .../scala/reflect/internal/HasFlags.scala | 12 +++++++++++ .../scala/reflect/internal/Importers.scala | 12 +++++++++++ .../reflect/internal/InfoTransformers.scala | 13 +++++++++--- .../scala/reflect/internal/Internals.scala | 12 +++++++++++ .../reflect/internal/JDK9Reflectors.java | 12 +++++++++++ .../internal/JMethodOrConstructor.scala | 14 ++++++++++--- .../scala/reflect/internal/JavaAccFlags.scala | 14 ++++++++++--- .../scala/reflect/internal/Kinds.scala | 13 +++++++++--- .../scala/reflect/internal/Mirrors.scala | 13 +++++++++--- .../internal/MissingRequirementError.scala | 13 +++++++++--- src/reflect/scala/reflect/internal/Mode.scala | 13 +++++++++--- .../scala/reflect/internal/Names.scala | 13 +++++++++--- .../scala/reflect/internal/Phase.scala | 13 +++++++++--- .../scala/reflect/internal/Positions.scala | 12 +++++++++++ .../scala/reflect/internal/Precedence.scala | 12 +++++++++++ .../scala/reflect/internal/Printers.scala | 13 +++++++++--- .../reflect/internal/PrivateWithin.scala | 12 +++++++++++ .../reflect/internal/ReificationSupport.scala | 12 +++++++++++ .../scala/reflect/internal/Reporting.scala | 13 +++++++++--- .../scala/reflect/internal/Required.scala | 12 +++++++++++ .../scala/reflect/internal/Scopes.scala | 13 +++++++++--- .../reflect/internal/StdAttachments.scala | 12 
+++++++++++ .../scala/reflect/internal/StdCreators.scala | 12 +++++++++++ .../scala/reflect/internal/StdNames.scala | 13 +++++++++--- .../scala/reflect/internal/SymbolPairs.scala | 13 +++++++++--- .../scala/reflect/internal/SymbolTable.scala | 13 +++++++++--- .../scala/reflect/internal/Symbols.scala | 14 ++++++++++++- .../scala/reflect/internal/TreeGen.scala | 12 +++++++++++ .../scala/reflect/internal/TreeInfo.scala | 13 +++++++++--- .../scala/reflect/internal/Trees.scala | 13 +++++++++--- .../reflect/internal/TypeDebugging.scala | 13 +++++++++--- .../scala/reflect/internal/Types.scala | 13 +++++++++--- .../scala/reflect/internal/Variance.scala | 13 +++++++++--- .../scala/reflect/internal/Variances.scala | 13 +++++++++--- .../internal/annotations/package.scala | 12 +++++++++++ .../annotations/uncheckedBounds.scala | 12 +++++++++++ .../internal/pickling/ByteCodecs.scala | 19 +++++++++++------- .../internal/pickling/PickleBuffer.scala | 13 +++++++++--- .../internal/pickling/PickleFormat.scala | 12 +++++++++++ .../internal/pickling/Translations.scala | 13 +++++++++--- .../reflect/internal/pickling/UnPickler.scala | 13 +++++++++--- .../internal/settings/AbsSettings.scala | 13 +++++++++--- .../internal/settings/MutableSettings.scala | 14 ++++++++++--- .../reflect/internal/tpe/CommonOwners.scala | 12 +++++++++++ .../reflect/internal/tpe/FindMembers.scala | 14 ++++++++++--- .../scala/reflect/internal/tpe/GlbLubs.scala | 12 +++++++++++ .../reflect/internal/tpe/TypeComparers.scala | 12 +++++++++++ .../internal/tpe/TypeConstraints.scala | 12 +++++++++++ .../scala/reflect/internal/tpe/TypeMaps.scala | 12 +++++++++++ .../reflect/internal/tpe/TypeToStrings.scala | 12 +++++++++++ .../reflect/internal/transform/Erasure.scala | 12 +++++++++++ .../internal/transform/PostErasure.scala | 12 +++++++++++ .../internal/transform/Transforms.scala | 12 +++++++++++ .../reflect/internal/transform/UnCurry.scala | 12 +++++++++++ .../util/AbstractFileClassLoader.scala | 12 +++++++++-- .../internal/util/AlmostFinalValue.java | 12 +++++++++++ .../internal/util/BooleanContainer.java | 12 +++++++++++ .../reflect/internal/util/Collections.scala | 13 +++++++++--- .../internal/util/FreshNameCreator.scala | 13 +++++++++--- .../scala/reflect/internal/util/HashSet.scala | 13 +++++++++--- .../reflect/internal/util/JavaClearable.scala | 12 +++++++++++ .../scala/reflect/internal/util/Origins.scala | 13 +++++++++--- .../internal/util/OwnerOnlyChmod.scala | 14 ++++++++++--- .../reflect/internal/util/Position.scala | 13 +++++++++--- .../internal/util/ScalaClassLoader.scala | 13 +++++++++--- .../scala/reflect/internal/util/Set.scala | 14 ++++++++++--- .../reflect/internal/util/SourceFile.scala | 13 +++++++++--- .../reflect/internal/util/Statistics.scala | 12 +++++++++++ .../internal/util/StatisticsStatics.java | 12 +++++++++++ .../reflect/internal/util/StringOps.scala | 18 ++++++++++------- .../util/StripMarginInterpolator.scala | 12 +++++++++++ .../reflect/internal/util/TableDef.scala | 12 +++++++++++ .../reflect/internal/util/ThreeValues.scala | 12 +++++++++++ .../internal/util/TraceSymbolActivity.scala | 12 +++++++++++ .../reflect/internal/util/TriState.scala | 12 +++++++++++ .../reflect/internal/util/WeakHashSet.scala | 12 +++++++++++ .../scala/reflect/internal/util/package.scala | 12 +++++++++++ .../scala/reflect/io/AbstractFile.scala | 14 +++++++++---- src/reflect/scala/reflect/io/Directory.scala | 18 ++++++++++------- src/reflect/scala/reflect/io/File.scala | 18 ++++++++++------- .../reflect/io/FileOperationException.scala | 19 
++++++++++-------- src/reflect/scala/reflect/io/IOStats.scala | 12 +++++++++++ .../scala/reflect/io/NoAbstractFile.scala | 13 +++++++++--- src/reflect/scala/reflect/io/Path.scala | 13 +++++++++--- src/reflect/scala/reflect/io/PlainFile.scala | 13 +++++++++--- src/reflect/scala/reflect/io/Streamable.scala | 13 +++++++++--- .../scala/reflect/io/VirtualDirectory.scala | 12 +++++++++-- .../scala/reflect/io/VirtualFile.scala | 13 +++++++++--- src/reflect/scala/reflect/io/ZipArchive.scala | 13 +++++++++--- .../scala/reflect/macros/Aliases.scala | 12 +++++++++++ .../scala/reflect/macros/Attachments.scala | 12 +++++++++++ .../scala/reflect/macros/Enclosures.scala | 12 +++++++++++ src/reflect/scala/reflect/macros/Evals.scala | 12 +++++++++++ .../scala/reflect/macros/ExprUtils.scala | 12 +++++++++++ .../scala/reflect/macros/FrontEnds.scala | 12 +++++++++++ .../scala/reflect/macros/Infrastructure.scala | 12 +++++++++++ .../scala/reflect/macros/Internals.scala | 12 +++++++++++ src/reflect/scala/reflect/macros/Names.scala | 12 +++++++++++ .../scala/reflect/macros/Parsers.scala | 12 +++++++++++ .../scala/reflect/macros/Reifiers.scala | 12 +++++++++++ src/reflect/scala/reflect/macros/Typers.scala | 12 +++++++++++ .../scala/reflect/macros/Universe.scala | 12 +++++++++++ .../reflect/macros/blackbox/Context.scala | 12 +++++++++++ .../scala/reflect/macros/package.scala | 12 +++++++++++ .../reflect/macros/whitebox/Context.scala | 12 +++++++++++ src/reflect/scala/reflect/runtime/Gil.scala | 12 +++++++++++ .../scala/reflect/runtime/JavaMirrors.scala | 12 +++++++++++ .../scala/reflect/runtime/JavaUniverse.scala | 12 +++++++++++ .../scala/reflect/runtime/ReflectSetup.scala | 12 +++++++++++ .../reflect/runtime/ReflectionUtils.scala | 13 +++++++++--- .../scala/reflect/runtime/Settings.scala | 12 +++++++++++ .../scala/reflect/runtime/SymbolLoaders.scala | 12 +++++++++++ .../scala/reflect/runtime/SymbolTable.scala | 12 +++++++++++ .../reflect/runtime/SynchronizedOps.scala | 12 +++++++++++ .../reflect/runtime/SynchronizedSymbols.scala | 12 +++++++++++ .../reflect/runtime/SynchronizedTypes.scala | 12 +++++++++++ .../reflect/runtime/ThreadLocalStorage.scala | 12 +++++++++++ .../scala/reflect/runtime/TwoWayCache.scala | 12 +++++++++++ .../scala/reflect/runtime/TwoWayCaches.scala | 12 +++++++++++ .../scala/reflect/runtime/package.scala | 12 +++++++++++ .../interpreter/jline/FileBackedHistory.scala | 13 +++++++++--- .../interpreter/jline/JLineDelimiter.scala | 13 +++++++++--- .../nsc/interpreter/jline/JLineHistory.scala | 13 +++++++++--- .../nsc/interpreter/jline/JLineReader.scala | 17 ++++++++++------ src/repl/scala/tools/nsc/Interpreter.scala | 12 +++++++++++ .../scala/tools/nsc/InterpreterLoop.scala | 12 +++++++++++ .../scala/tools/nsc/MainGenericRunner.scala | 13 +++++++++--- .../interpreter/AbstractFileClassLoader.scala | 12 +++++++++++ .../AbstractOrMissingHandler.scala | 13 +++++++++--- .../tools/nsc/interpreter/CommandLine.scala | 13 +++++++++--- .../tools/nsc/interpreter/Completion.scala | 13 +++++++++--- .../tools/nsc/interpreter/ExprTyper.scala | 13 +++++++++--- .../tools/nsc/interpreter/IBindings.java | 13 +++++++++--- .../scala/tools/nsc/interpreter/ILoop.scala | 14 ++++++++++--- .../scala/tools/nsc/interpreter/IMain.scala | 13 +++++++++--- .../tools/nsc/interpreter/ISettings.scala | 13 +++++++++--- .../scala/tools/nsc/interpreter/Imports.scala | 13 +++++++++--- .../nsc/interpreter/InteractiveReader.scala | 13 +++++++++--- .../tools/nsc/interpreter/JavapClass.scala | 14 ++++++++++--- 
.../scala/tools/nsc/interpreter/Logger.scala | 13 +++++++++--- .../tools/nsc/interpreter/LoopCommands.scala | 13 +++++++++--- .../nsc/interpreter/MemberHandlers.scala | 13 +++++++++--- .../tools/nsc/interpreter/NamedParam.scala | 13 +++++++++--- .../scala/tools/nsc/interpreter/Naming.scala | 13 +++++++++--- .../scala/tools/nsc/interpreter/Parsed.scala | 13 +++++++++--- .../scala/tools/nsc/interpreter/Pasted.scala | 13 +++++++++--- .../scala/tools/nsc/interpreter/Phased.scala | 13 +++++++++--- .../scala/tools/nsc/interpreter/Power.scala | 13 +++++++++--- .../interpreter/PresentationCompilation.scala | 14 ++++++++++--- .../PresentationCompilerCompleter.scala | 14 ++++++++++--- .../tools/nsc/interpreter/ReplConfig.scala | 13 +++++++++--- .../scala/tools/nsc/interpreter/ReplDir.scala | 13 +++++++++--- .../tools/nsc/interpreter/ReplGlobal.scala | 13 +++++++++--- .../tools/nsc/interpreter/ReplProps.scala | 13 +++++++++--- .../tools/nsc/interpreter/ReplReporter.scala | 13 +++++++++--- .../tools/nsc/interpreter/ReplStrings.scala | 13 +++++++++--- .../tools/nsc/interpreter/ReplVals.scala | 13 +++++++++--- .../scala/tools/nsc/interpreter/Results.scala | 13 +++++++++--- .../tools/nsc/interpreter/RichClass.scala | 13 +++++++++--- .../tools/nsc/interpreter/Scripted.scala | 13 ++++++++++-- .../tools/nsc/interpreter/SimpleReader.scala | 13 +++++++++--- .../tools/nsc/interpreter/StdReplTags.scala | 12 +++++++++++ .../tools/nsc/interpreter/Tabulators.scala | 13 +++++++++--- .../scala/tools/nsc/interpreter/package.scala | 13 +++++++++--- .../nsc/interpreter/session/History.scala | 13 +++++++++--- .../interpreter/session/SimpleHistory.scala | 13 +++++++++--- .../nsc/interpreter/session/package.scala | 13 +++++++++--- src/scaladoc/scala/tools/ant/Scaladoc.scala | 18 ++++++++++------- src/scaladoc/scala/tools/nsc/ScalaDoc.scala | 14 +++++++++---- .../scala/tools/nsc/doc/DocFactory.scala | 13 +++++++++--- .../scala/tools/nsc/doc/DocParser.scala | 13 +++++++++--- src/scaladoc/scala/tools/nsc/doc/Index.scala | 13 +++++++++--- .../tools/nsc/doc/ScaladocAnalyzer.scala | 13 +++++++++--- .../scala/tools/nsc/doc/ScaladocGlobal.scala | 13 +++++++++--- .../scala/tools/nsc/doc/Settings.scala | 13 +++++++++--- .../scala/tools/nsc/doc/Uncompilable.scala | 13 +++++++++--- .../scala/tools/nsc/doc/Universe.scala | 13 +++++++++--- .../nsc/doc/base/CommentFactoryBase.scala | 13 +++++++++--- .../scala/tools/nsc/doc/base/LinkTo.scala | 12 +++++++++-- .../tools/nsc/doc/base/MemberLookupBase.scala | 12 +++++++++++ .../tools/nsc/doc/base/comment/Body.scala | 13 +++++++++--- .../tools/nsc/doc/base/comment/Comment.scala | 13 +++++++++--- .../tools/nsc/doc/doclet/Generator.scala | 12 +++++++++++ .../tools/nsc/doc/doclet/Universer.scala | 12 +++++++++++ .../scala/tools/nsc/doc/html/Doclet.scala | 13 +++++++++--- .../tools/nsc/doc/html/HtmlFactory.scala | 13 +++++++++--- .../scala/tools/nsc/doc/html/HtmlPage.scala | 13 +++++++++--- .../scala/tools/nsc/doc/html/Page.scala | 13 +++++++++--- .../scala/tools/nsc/doc/html/SyntaxHigh.scala | 13 +++++++++--- .../tools/nsc/doc/html/page/Entity.scala | 13 +++++++++--- .../tools/nsc/doc/html/page/IndexScript.scala | 13 +++++++++--- .../scala/tools/nsc/doc/html/page/JSON.scala | 12 +++++++++++ .../html/page/diagram/DiagramGenerator.scala | 14 ++++++++++--- .../doc/html/page/diagram/DiagramStats.scala | 13 ++++++++++-- .../page/diagram/DotDiagramGenerator.scala | 14 ++++++++++--- .../nsc/doc/html/page/diagram/DotRunner.scala | 12 +++++++++++ .../tools/nsc/doc/model/CommentFactory.scala | 13 
+++++++++--- .../scala/tools/nsc/doc/model/Entity.scala | 14 +++++++++---- .../nsc/doc/model/IndexModelFactory.scala | 13 +++++++++--- .../tools/nsc/doc/model/MemberLookup.scala | 12 +++++++++++ .../tools/nsc/doc/model/ModelFactory.scala | 12 ++++++++++- .../model/ModelFactoryImplicitSupport.scala | 12 +++++++---- .../doc/model/ModelFactoryTypeSupport.scala | 12 ++++++++++- .../tools/nsc/doc/model/TreeEntity.scala | 13 +++++++++--- .../tools/nsc/doc/model/TreeFactory.scala | 12 +++++++++++ .../tools/nsc/doc/model/TypeEntity.scala | 13 +++++++++--- .../tools/nsc/doc/model/ValueArgument.scala | 13 +++++++++--- .../tools/nsc/doc/model/Visibility.scala | 13 +++++++++--- .../tools/nsc/doc/model/diagram/Diagram.scala | 12 +++++++++++ .../diagram/DiagramDirectiveParser.scala | 12 +++++++++++ .../doc/model/diagram/DiagramFactory.scala | 12 +++++++++++ src/scalap/scala/tools/scalap/Arguments.scala | 18 ++++++++++------- .../scala/tools/scalap/ByteArrayReader.scala | 18 ++++++++++------- src/scalap/scala/tools/scalap/Classfile.scala | 18 ++++++++++------- .../scala/tools/scalap/Classfiles.scala | 18 ++++++++++------- .../scala/tools/scalap/CodeWriter.scala | 18 ++++++++++------- src/scalap/scala/tools/scalap/Decode.scala | 17 ++++++++++------ .../scala/tools/scalap/JavaWriter.scala | 18 ++++++++++------- src/scalap/scala/tools/scalap/Main.scala | 17 ++++++++++------ .../scala/tools/scalap/MetaParser.scala | 18 ++++++++++------- .../scala/tools/scalap/Properties.scala | 18 ++++++++++------- .../rules/scalasig/ClassFileParser.scala | 12 +++++++++++ .../scalap/scalax/rules/scalasig/Flags.scala | 12 +++++++++++ .../scalax/rules/scalasig/ScalaSig.scala | 18 ++++++++++------- .../rules/scalasig/ScalaSigPrinter.scala | 18 ++++++++++------- .../scalasig/SourceFileAttributeParser.scala | 12 +++++++++++ .../scalap/scalax/rules/scalasig/Symbol.scala | 12 +++++++++++ .../scalap/scalax/rules/scalasig/Type.scala | 12 +++++++++++ .../tools/scalap/scalax/util/StringUtil.scala | 12 +++++++++++ 1225 files changed, 13438 insertions(+), 4985 deletions(-) diff --git a/.travis.yml b/.travis.yml index e678559fce58..e83fd018e548 100644 --- a/.travis.yml +++ b/.travis.yml @@ -39,7 +39,7 @@ jobs: - stage: build if: type = pull_request script: - - sbt -warn setupPublishCore generateBuildCharacterPropertiesFile publishLocal + - sbt -warn setupPublishCore generateBuildCharacterPropertiesFile headerCheck publishLocal - STARR=`cat buildcharacter.properties | grep ^maven.version.number | cut -d= -f2` && echo $STARR - sbt -Dstarr.version=$STARR -warn setupValidateTest test:compile info testAll diff --git a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala index a3bf894b25a8..db1891ca44bd 100644 --- a/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala +++ b/src/compiler/scala/reflect/macros/compiler/DefaultMacroCompiler.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.reflect.macros package compiler diff --git a/src/compiler/scala/reflect/macros/compiler/Errors.scala b/src/compiler/scala/reflect/macros/compiler/Errors.scala index 98fd091e9cd5..07cb00776c72 100644 --- a/src/compiler/scala/reflect/macros/compiler/Errors.scala +++ b/src/compiler/scala/reflect/macros/compiler/Errors.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package compiler diff --git a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala index d3f49390ea60..d1a2f0ba433b 100644 --- a/src/compiler/scala/reflect/macros/compiler/Resolvers.scala +++ b/src/compiler/scala/reflect/macros/compiler/Resolvers.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package compiler diff --git a/src/compiler/scala/reflect/macros/compiler/Validators.scala b/src/compiler/scala/reflect/macros/compiler/Validators.scala index 97b8196ec950..cb8cf79640bf 100644 --- a/src/compiler/scala/reflect/macros/compiler/Validators.scala +++ b/src/compiler/scala/reflect/macros/compiler/Validators.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package compiler diff --git a/src/compiler/scala/reflect/macros/contexts/Aliases.scala b/src/compiler/scala/reflect/macros/contexts/Aliases.scala index cc64d97d85ac..5035d2e99ce2 100644 --- a/src/compiler/scala/reflect/macros/contexts/Aliases.scala +++ b/src/compiler/scala/reflect/macros/contexts/Aliases.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package contexts diff --git a/src/compiler/scala/reflect/macros/contexts/Context.scala b/src/compiler/scala/reflect/macros/contexts/Context.scala index f3dd29d8b265..e0c1b71ca95a 100644 --- a/src/compiler/scala/reflect/macros/contexts/Context.scala +++ b/src/compiler/scala/reflect/macros/contexts/Context.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.reflect.macros package contexts diff --git a/src/compiler/scala/reflect/macros/contexts/Enclosures.scala b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala index df99daa2c935..19ce230d0dd9 100644 --- a/src/compiler/scala/reflect/macros/contexts/Enclosures.scala +++ b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package contexts diff --git a/src/compiler/scala/reflect/macros/contexts/Evals.scala b/src/compiler/scala/reflect/macros/contexts/Evals.scala index a715af986c2d..74f1d7ed3878 100644 --- a/src/compiler/scala/reflect/macros/contexts/Evals.scala +++ b/src/compiler/scala/reflect/macros/contexts/Evals.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package contexts diff --git a/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala b/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala index 4846325d1e22..857386f1ceca 100644 --- a/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala +++ b/src/compiler/scala/reflect/macros/contexts/ExprUtils.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package contexts diff --git a/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala b/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala index fda05de09cea..34f16de3855c 100644 --- a/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala +++ b/src/compiler/scala/reflect/macros/contexts/FrontEnds.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package contexts diff --git a/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala index 7088058145d9..c6dfc56d62e4 100644 --- a/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala +++ b/src/compiler/scala/reflect/macros/contexts/Infrastructure.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.reflect.macros package contexts diff --git a/src/compiler/scala/reflect/macros/contexts/Internals.scala b/src/compiler/scala/reflect/macros/contexts/Internals.scala index 8c784d7e54eb..d4713f540507 100644 --- a/src/compiler/scala/reflect/macros/contexts/Internals.scala +++ b/src/compiler/scala/reflect/macros/contexts/Internals.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package contexts diff --git a/src/compiler/scala/reflect/macros/contexts/Names.scala b/src/compiler/scala/reflect/macros/contexts/Names.scala index 8af8888a56a7..39983fb0f439 100644 --- a/src/compiler/scala/reflect/macros/contexts/Names.scala +++ b/src/compiler/scala/reflect/macros/contexts/Names.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package contexts diff --git a/src/compiler/scala/reflect/macros/contexts/Parsers.scala b/src/compiler/scala/reflect/macros/contexts/Parsers.scala index cc3f01e53b4a..9b019cdaec02 100644 --- a/src/compiler/scala/reflect/macros/contexts/Parsers.scala +++ b/src/compiler/scala/reflect/macros/contexts/Parsers.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package contexts diff --git a/src/compiler/scala/reflect/macros/contexts/Reifiers.scala b/src/compiler/scala/reflect/macros/contexts/Reifiers.scala index 010829f6abb9..b9dc58295dca 100644 --- a/src/compiler/scala/reflect/macros/contexts/Reifiers.scala +++ b/src/compiler/scala/reflect/macros/contexts/Reifiers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Gilles Dubochet +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.reflect.macros diff --git a/src/compiler/scala/reflect/macros/contexts/Traces.scala b/src/compiler/scala/reflect/macros/contexts/Traces.scala index df47f6ba816a..6487adec728d 100644 --- a/src/compiler/scala/reflect/macros/contexts/Traces.scala +++ b/src/compiler/scala/reflect/macros/contexts/Traces.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.reflect.macros package contexts diff --git a/src/compiler/scala/reflect/macros/contexts/Typers.scala b/src/compiler/scala/reflect/macros/contexts/Typers.scala index a0dfbf5df109..a36f530af32d 100644 --- a/src/compiler/scala/reflect/macros/contexts/Typers.scala +++ b/src/compiler/scala/reflect/macros/contexts/Typers.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package contexts diff --git a/src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala b/src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala index 4e4d88c0be09..d41e2993f18e 100644 --- a/src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala +++ b/src/compiler/scala/reflect/macros/runtime/AbortMacroException.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package runtime diff --git a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala index be114efbc008..37d3c4ce213d 100644 --- a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala +++ b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package runtime diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala index 7e700a524c37..73520dffb925 100644 --- a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala +++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package runtime diff --git a/src/compiler/scala/reflect/macros/runtime/package.scala b/src/compiler/scala/reflect/macros/runtime/package.scala index 9ef820076029..e580d2eb12b1 100644 --- a/src/compiler/scala/reflect/macros/runtime/package.scala +++ b/src/compiler/scala/reflect/macros/runtime/package.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.reflect.macros package object runtime { diff --git a/src/compiler/scala/reflect/macros/util/Helpers.scala b/src/compiler/scala/reflect/macros/util/Helpers.scala index 961c41dab5c7..81e8be07597e 100644 --- a/src/compiler/scala/reflect/macros/util/Helpers.scala +++ b/src/compiler/scala/reflect/macros/util/Helpers.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package util diff --git a/src/compiler/scala/reflect/macros/util/Traces.scala b/src/compiler/scala/reflect/macros/util/Traces.scala index 2dffc6874563..1bee131b8e0e 100644 --- a/src/compiler/scala/reflect/macros/util/Traces.scala +++ b/src/compiler/scala/reflect/macros/util/Traces.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.macros package util diff --git a/src/compiler/scala/reflect/quasiquotes/Holes.scala b/src/compiler/scala/reflect/quasiquotes/Holes.scala index d61ac343d309..c9039fcbaaff 100644 --- a/src/compiler/scala/reflect/quasiquotes/Holes.scala +++ b/src/compiler/scala/reflect/quasiquotes/Holes.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect package quasiquotes diff --git a/src/compiler/scala/reflect/quasiquotes/Parsers.scala b/src/compiler/scala/reflect/quasiquotes/Parsers.scala index d1af1fab3f5c..815618a8496d 100644 --- a/src/compiler/scala/reflect/quasiquotes/Parsers.scala +++ b/src/compiler/scala/reflect/quasiquotes/Parsers.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect package quasiquotes diff --git a/src/compiler/scala/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/reflect/quasiquotes/Placeholders.scala index bc4f95427519..d2f1cb46f15f 100644 --- a/src/compiler/scala/reflect/quasiquotes/Placeholders.scala +++ b/src/compiler/scala/reflect/quasiquotes/Placeholders.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.reflect package quasiquotes diff --git a/src/compiler/scala/reflect/quasiquotes/Quasiquotes.scala b/src/compiler/scala/reflect/quasiquotes/Quasiquotes.scala index 72e6000e9fe6..f112e7ccade5 100644 --- a/src/compiler/scala/reflect/quasiquotes/Quasiquotes.scala +++ b/src/compiler/scala/reflect/quasiquotes/Quasiquotes.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect package quasiquotes diff --git a/src/compiler/scala/reflect/quasiquotes/Reifiers.scala b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala index 68c270d33aba..a149862aa052 100644 --- a/src/compiler/scala/reflect/quasiquotes/Reifiers.scala +++ b/src/compiler/scala/reflect/quasiquotes/Reifiers.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect package quasiquotes diff --git a/src/compiler/scala/reflect/reify/Errors.scala b/src/compiler/scala/reflect/reify/Errors.scala index 35d0ad62c5fe..012eca623c19 100644 --- a/src/compiler/scala/reflect/reify/Errors.scala +++ b/src/compiler/scala/reflect/reify/Errors.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify import scala.reflect.macros.ReificationException diff --git a/src/compiler/scala/reflect/reify/Phases.scala b/src/compiler/scala/reflect/reify/Phases.scala index 4572caeb3693..9f89d420c3e3 100644 --- a/src/compiler/scala/reflect/reify/Phases.scala +++ b/src/compiler/scala/reflect/reify/Phases.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify import phases._ diff --git a/src/compiler/scala/reflect/reify/Reifier.scala b/src/compiler/scala/reflect/reify/Reifier.scala index e6c2dd1e6274..bbc3a0de8849 100644 --- a/src/compiler/scala/reflect/reify/Reifier.scala +++ b/src/compiler/scala/reflect/reify/Reifier.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.reflect.reify import scala.tools.nsc.Global diff --git a/src/compiler/scala/reflect/reify/States.scala b/src/compiler/scala/reflect/reify/States.scala index 65f3f424e8c6..c24d8752fd28 100644 --- a/src/compiler/scala/reflect/reify/States.scala +++ b/src/compiler/scala/reflect/reify/States.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify trait States { diff --git a/src/compiler/scala/reflect/reify/Taggers.scala b/src/compiler/scala/reflect/reify/Taggers.scala index 0863ee38f9c9..b829183e3711 100644 --- a/src/compiler/scala/reflect/reify/Taggers.scala +++ b/src/compiler/scala/reflect/reify/Taggers.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify import scala.reflect.macros.{ReificationException, UnexpectedReificationException, TypecheckException} diff --git a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala index 089f07de0655..83356aa19c23 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenAnnotationInfos.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify package codegen diff --git a/src/compiler/scala/reflect/reify/codegen/GenNames.scala b/src/compiler/scala/reflect/reify/codegen/GenNames.scala index 4266c6f8d623..d083eb17daf0 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenNames.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenNames.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify package codegen diff --git a/src/compiler/scala/reflect/reify/codegen/GenPositions.scala b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala index 1d151c5135f2..429ee203027d 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenPositions.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenPositions.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.reflect.reify package codegen diff --git a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala index be5f545e4ad5..cac858d57f9f 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenSymbols.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify package codegen diff --git a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala index 7753f36e8f82..2949cff99817 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenTrees.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenTrees.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify package codegen diff --git a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala index b3e6f529e055..d3bcaf7676ca 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenTypes.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenTypes.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify package codegen diff --git a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala index 242e5d60b3c2..5a7b7450b435 100644 --- a/src/compiler/scala/reflect/reify/codegen/GenUtils.scala +++ b/src/compiler/scala/reflect/reify/codegen/GenUtils.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify package codegen diff --git a/src/compiler/scala/reflect/reify/package.scala b/src/compiler/scala/reflect/reify/package.scala index 8102bd7170c0..b647e9d202df 100644 --- a/src/compiler/scala/reflect/reify/package.scala +++ b/src/compiler/scala/reflect/reify/package.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package reflect diff --git a/src/compiler/scala/reflect/reify/phases/Calculate.scala b/src/compiler/scala/reflect/reify/phases/Calculate.scala index a0035d73d675..a79d3a47d130 100644 --- a/src/compiler/scala/reflect/reify/phases/Calculate.scala +++ b/src/compiler/scala/reflect/reify/phases/Calculate.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify package phases diff --git a/src/compiler/scala/reflect/reify/phases/Metalevels.scala b/src/compiler/scala/reflect/reify/phases/Metalevels.scala index f5766bc63edf..c1f3af723d93 100644 --- a/src/compiler/scala/reflect/reify/phases/Metalevels.scala +++ b/src/compiler/scala/reflect/reify/phases/Metalevels.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify package phases diff --git a/src/compiler/scala/reflect/reify/phases/Reify.scala b/src/compiler/scala/reflect/reify/phases/Reify.scala index 93f6f99d813b..02cf4ec09bf8 100644 --- a/src/compiler/scala/reflect/reify/phases/Reify.scala +++ b/src/compiler/scala/reflect/reify/phases/Reify.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify package phases diff --git a/src/compiler/scala/reflect/reify/phases/Reshape.scala b/src/compiler/scala/reflect/reify/phases/Reshape.scala index 727eee8a8b9e..b90985760152 100644 --- a/src/compiler/scala/reflect/reify/phases/Reshape.scala +++ b/src/compiler/scala/reflect/reify/phases/Reshape.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify package phases diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala b/src/compiler/scala/reflect/reify/utils/Extractors.scala index 4ec4de28c450..e439b7b3eed4 100644 --- a/src/compiler/scala/reflect/reify/utils/Extractors.scala +++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.reflect.reify package utils diff --git a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala index a5c4c7e0a329..ad11ae8c74f0 100644 --- a/src/compiler/scala/reflect/reify/utils/NodePrinters.scala +++ b/src/compiler/scala/reflect/reify/utils/NodePrinters.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.reflect.reify package utils diff --git a/src/compiler/scala/reflect/reify/utils/StdAttachments.scala b/src/compiler/scala/reflect/reify/utils/StdAttachments.scala index 0b9cf58c8991..e36622925186 100644 --- a/src/compiler/scala/reflect/reify/utils/StdAttachments.scala +++ b/src/compiler/scala/reflect/reify/utils/StdAttachments.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify package utils diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala index 5800e88fe1fa..9c398f323bb7 100644 --- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala +++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify package utils diff --git a/src/compiler/scala/reflect/reify/utils/Utils.scala b/src/compiler/scala/reflect/reify/utils/Utils.scala index e1213f932cf0..a609a336f201 100644 --- a/src/compiler/scala/reflect/reify/utils/Utils.scala +++ b/src/compiler/scala/reflect/reify/utils/Utils.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.reify package utils diff --git a/src/compiler/scala/tools/ant/ClassloadVerify.scala b/src/compiler/scala/tools/ant/ClassloadVerify.scala index 73555b83d1ea..acc15d5f3437 100644 --- a/src/compiler/scala/tools/ant/ClassloadVerify.scala +++ b/src/compiler/scala/tools/ant/ClassloadVerify.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.ant diff --git a/src/compiler/scala/tools/ant/FastScalac.scala b/src/compiler/scala/tools/ant/FastScalac.scala index 3b62c493d363..b8bf3a053f75 100644 --- a/src/compiler/scala/tools/ant/FastScalac.scala +++ b/src/compiler/scala/tools/ant/FastScalac.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.ant diff --git a/src/compiler/scala/tools/ant/Pack200Task.scala b/src/compiler/scala/tools/ant/Pack200Task.scala index df162d734abb..653e5328efbe 100644 --- a/src/compiler/scala/tools/ant/Pack200Task.scala +++ b/src/compiler/scala/tools/ant/Pack200Task.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.ant diff --git a/src/compiler/scala/tools/ant/Same.scala b/src/compiler/scala/tools/ant/Same.scala index 6036b238b660..d265a7f01ef8 100644 --- a/src/compiler/scala/tools/ant/Same.scala +++ b/src/compiler/scala/tools/ant/Same.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package tools.ant diff --git a/src/compiler/scala/tools/ant/ScalaMatchingTask.scala b/src/compiler/scala/tools/ant/ScalaMatchingTask.scala index 43b9010509d3..b9fe9b4d91a6 100644 --- a/src/compiler/scala/tools/ant/ScalaMatchingTask.scala +++ b/src/compiler/scala/tools/ant/ScalaMatchingTask.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.tools.ant diff --git a/src/compiler/scala/tools/ant/ScalaTool.scala b/src/compiler/scala/tools/ant/ScalaTool.scala index 67879d6de397..f2ff15d355f7 100644 --- a/src/compiler/scala/tools/ant/ScalaTool.scala +++ b/src/compiler/scala/tools/ant/ScalaTool.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.ant diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala index 511572f6f3f0..26b0f79c0bb7 100644 --- a/src/compiler/scala/tools/ant/Scalac.scala +++ b/src/compiler/scala/tools/ant/Scalac.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.ant diff --git a/src/compiler/scala/tools/ant/ScalacShared.scala b/src/compiler/scala/tools/ant/ScalacShared.scala index 2c88d871ab43..cb1c91cc7b64 100644 --- a/src/compiler/scala/tools/ant/ScalacShared.scala +++ b/src/compiler/scala/tools/ant/ScalacShared.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.ant diff --git a/src/compiler/scala/tools/ant/sabbus/Break.scala b/src/compiler/scala/tools/ant/sabbus/Break.scala index b170ceaed8a8..bce500fc19cb 100644 --- a/src/compiler/scala/tools/ant/sabbus/Break.scala +++ b/src/compiler/scala/tools/ant/sabbus/Break.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package tools.ant.sabbus diff --git a/src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala b/src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala index 8032d5ee754d..081cb10861f0 100644 --- a/src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala +++ b/src/compiler/scala/tools/ant/sabbus/CompilationFailure.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.ant.sabbus diff --git a/src/compiler/scala/tools/ant/sabbus/Compiler.scala b/src/compiler/scala/tools/ant/sabbus/Compiler.scala index 81cd1f31961a..64252ff5eb6a 100644 --- a/src/compiler/scala/tools/ant/sabbus/Compiler.scala +++ b/src/compiler/scala/tools/ant/sabbus/Compiler.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.ant.sabbus diff --git a/src/compiler/scala/tools/ant/sabbus/Compilers.scala b/src/compiler/scala/tools/ant/sabbus/Compilers.scala index a0aad49f2067..5d71bdb27304 100644 --- a/src/compiler/scala/tools/ant/sabbus/Compilers.scala +++ b/src/compiler/scala/tools/ant/sabbus/Compilers.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.ant.sabbus diff --git a/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala b/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala index 13b6f107a681..a89985214a53 100644 --- a/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala +++ b/src/compiler/scala/tools/ant/sabbus/ForeignCompiler.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.tools.ant.sabbus diff --git a/src/compiler/scala/tools/ant/sabbus/Make.scala b/src/compiler/scala/tools/ant/sabbus/Make.scala index 027a828f03d9..f14ca934eaf6 100644 --- a/src/compiler/scala/tools/ant/sabbus/Make.scala +++ b/src/compiler/scala/tools/ant/sabbus/Make.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package tools.ant.sabbus diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala index c31f55c9b60d..bd3c350290dc 100644 --- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala +++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package tools.ant diff --git a/src/compiler/scala/tools/ant/sabbus/Settings.scala b/src/compiler/scala/tools/ant/sabbus/Settings.scala index a86af73fe373..768b3a009122 100644 --- a/src/compiler/scala/tools/ant/sabbus/Settings.scala +++ b/src/compiler/scala/tools/ant/sabbus/Settings.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.ant.sabbus diff --git a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala index b061bcf7fb4a..531014dc3d40 100644 --- a/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala +++ b/src/compiler/scala/tools/ant/sabbus/TaskArgs.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.tools.ant.sabbus diff --git a/src/compiler/scala/tools/ant/sabbus/Use.scala b/src/compiler/scala/tools/ant/sabbus/Use.scala index cb514e35b380..1021ca7614ca 100644 --- a/src/compiler/scala/tools/ant/sabbus/Use.scala +++ b/src/compiler/scala/tools/ant/sabbus/Use.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package tools.ant diff --git a/src/compiler/scala/tools/cmd/CommandLine.scala b/src/compiler/scala/tools/cmd/CommandLine.scala index 629a700f07fa..d87fbc1fe843 100644 --- a/src/compiler/scala/tools/cmd/CommandLine.scala +++ b/src/compiler/scala/tools/cmd/CommandLine.scala @@ -1,6 +1,13 @@ -/* NEST (New Scala Test) - * Copyright 2007-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools diff --git a/src/compiler/scala/tools/cmd/CommandLineParser.scala b/src/compiler/scala/tools/cmd/CommandLineParser.scala index 3a5db0042187..5fcc59314bb2 100644 --- a/src/compiler/scala/tools/cmd/CommandLineParser.scala +++ b/src/compiler/scala/tools/cmd/CommandLineParser.scala @@ -1,7 +1,15 @@ -/* NEST (New Scala Test) - * Copyright 2007-2018 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.cmd import scala.annotation.tailrec diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/compiler/scala/tools/cmd/FromString.scala index ab49c7507c61..7eb20e43bbc2 100644 --- a/src/compiler/scala/tools/cmd/FromString.scala +++ b/src/compiler/scala/tools/cmd/FromString.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools diff --git a/src/compiler/scala/tools/cmd/Instance.scala b/src/compiler/scala/tools/cmd/Instance.scala index 0e64e1e0cacb..fefce38f5bf3 100644 --- a/src/compiler/scala/tools/cmd/Instance.scala +++ b/src/compiler/scala/tools/cmd/Instance.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools diff --git a/src/compiler/scala/tools/cmd/Interpolation.scala b/src/compiler/scala/tools/cmd/Interpolation.scala index d1c798b62181..7d3ebd501d8c 100644 --- a/src/compiler/scala/tools/cmd/Interpolation.scala +++ b/src/compiler/scala/tools/cmd/Interpolation.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/cmd/Meta.scala b/src/compiler/scala/tools/cmd/Meta.scala index 806e0c799ace..d913de51b32b 100644 --- a/src/compiler/scala/tools/cmd/Meta.scala +++ b/src/compiler/scala/tools/cmd/Meta.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools diff --git a/src/compiler/scala/tools/cmd/Opt.scala b/src/compiler/scala/tools/cmd/Opt.scala index 70756c5bb2b9..28f1677fc978 100644 --- a/src/compiler/scala/tools/cmd/Opt.scala +++ b/src/compiler/scala/tools/cmd/Opt.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools diff --git a/src/compiler/scala/tools/cmd/Property.scala b/src/compiler/scala/tools/cmd/Property.scala index 18bedd6f7e17..0a1ffff7e8e9 100644 --- a/src/compiler/scala/tools/cmd/Property.scala +++ b/src/compiler/scala/tools/cmd/Property.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools diff --git a/src/compiler/scala/tools/cmd/Reference.scala b/src/compiler/scala/tools/cmd/Reference.scala index 25a16b1e3ee9..c837bee156ec 100644 --- a/src/compiler/scala/tools/cmd/Reference.scala +++ b/src/compiler/scala/tools/cmd/Reference.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools diff --git a/src/compiler/scala/tools/cmd/Spec.scala b/src/compiler/scala/tools/cmd/Spec.scala index 069a7a89a1b8..9f890f4f73c5 100644 --- a/src/compiler/scala/tools/cmd/Spec.scala +++ b/src/compiler/scala/tools/cmd/Spec.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools diff --git a/src/compiler/scala/tools/cmd/package.scala b/src/compiler/scala/tools/cmd/package.scala index 9754becf10e5..59eda1d6983b 100644 --- a/src/compiler/scala/tools/cmd/package.scala +++ b/src/compiler/scala/tools/cmd/package.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala b/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala index 2faf6c6272ec..1e9349e94417 100644 --- a/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala +++ b/src/compiler/scala/tools/nsc/ClassPathMemoryConsumptionTester.scala @@ -1,6 +1,15 @@ /* - * Copyright (c) 2014 Contributor. All rights reserved. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc import scala.io.StdIn.readLine diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index 5c84748b9509..6a3b014d3102 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/CompileClient.scala b/src/compiler/scala/tools/nsc/CompileClient.scala index f25950447302..67c6824962b1 100644 --- a/src/compiler/scala/tools/nsc/CompileClient.scala +++ b/src/compiler/scala/tools/nsc/CompileClient.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala diff --git a/src/compiler/scala/tools/nsc/CompileServer.scala b/src/compiler/scala/tools/nsc/CompileServer.scala index 3cd9ce61f969..3757146dbfd1 100644 --- a/src/compiler/scala/tools/nsc/CompileServer.scala +++ b/src/compiler/scala/tools/nsc/CompileServer.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/CompileSocket.scala b/src/compiler/scala/tools/nsc/CompileSocket.scala index d0083059fcac..2886b1c9f05f 100644 --- a/src/compiler/scala/tools/nsc/CompileSocket.scala +++ b/src/compiler/scala/tools/nsc/CompileSocket.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala index 319fc2cacba8..89c311cdf493 100644 --- a/src/compiler/scala/tools/nsc/CompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/ConsoleWriter.scala b/src/compiler/scala/tools/nsc/ConsoleWriter.scala index 6c16d19d2c78..d311471190be 100644 --- a/src/compiler/scala/tools/nsc/ConsoleWriter.scala +++ b/src/compiler/scala/tools/nsc/ConsoleWriter.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2006-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/Driver.scala b/src/compiler/scala/tools/nsc/Driver.scala index b30744c4dfb2..bcdeaa57a7f4 100644 --- a/src/compiler/scala/tools/nsc/Driver.scala +++ b/src/compiler/scala/tools/nsc/Driver.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package tools.nsc diff --git a/src/compiler/scala/tools/nsc/EvalLoop.scala b/src/compiler/scala/tools/nsc/EvalLoop.scala index 73f4b9a1199a..443c9bbf400a 100644 --- a/src/compiler/scala/tools/nsc/EvalLoop.scala +++ b/src/compiler/scala/tools/nsc/EvalLoop.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala index 830d466556dc..9bfd798240b6 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerCommand.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Lex Spoon +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala index 332467fce2db..cb26b4d9d666 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2006-2013 LAMP/EPFL - * @author Lex Spoon +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index d43564e44e3a..79358c172dfa 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala b/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala index 69215482302b..9000a820e2b5 100644 --- a/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/GlobalSymbolLoaders.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala diff --git a/src/compiler/scala/tools/nsc/Main.scala b/src/compiler/scala/tools/nsc/Main.scala index e2cf49907b70..0a10667687f5 100644 --- a/src/compiler/scala/tools/nsc/Main.scala +++ b/src/compiler/scala/tools/nsc/Main.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools package nsc diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala index c5575b8a4c5c..34914c3734d9 100644 --- a/src/compiler/scala/tools/nsc/MainBench.scala +++ b/src/compiler/scala/tools/nsc/MainBench.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/MainTokenMetric.scala b/src/compiler/scala/tools/nsc/MainTokenMetric.scala index 84eb688b632b..7fb1677420c7 100644 --- a/src/compiler/scala/tools/nsc/MainTokenMetric.scala +++ b/src/compiler/scala/tools/nsc/MainTokenMetric.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala b/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala index 2b4cd801bbcf..e4ab36c35225 100644 --- a/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala +++ b/src/compiler/scala/tools/nsc/NewLinePrintWriter.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/ObjectRunner.scala b/src/compiler/scala/tools/nsc/ObjectRunner.scala index 8e01418e8b32..4f351908f5f6 100644 --- a/src/compiler/scala/tools/nsc/ObjectRunner.scala +++ b/src/compiler/scala/tools/nsc/ObjectRunner.scala @@ -1,9 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Lex Spoon +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ - package scala.tools.nsc import java.net.URL diff --git a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala index 899aa93a3b0a..a36715067a03 100644 --- a/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/OfflineCompilerCommand.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/Parsing.scala b/src/compiler/scala/tools/nsc/Parsing.scala index 9e5999ce4f04..e2aee496ef83 100644 --- a/src/compiler/scala/tools/nsc/Parsing.scala +++ b/src/compiler/scala/tools/nsc/Parsing.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL, Typesafe Inc. - * @author Adriaan Moors +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/PhaseAssembly.scala b/src/compiler/scala/tools/nsc/PhaseAssembly.scala index 660a079e2360..8883d4a107bb 100644 --- a/src/compiler/scala/tools/nsc/PhaseAssembly.scala +++ b/src/compiler/scala/tools/nsc/PhaseAssembly.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Anders Bach Nielsen - * @version 1.0 +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/Properties.scala b/src/compiler/scala/tools/nsc/Properties.scala index 873f26f51016..a9f345f0d180 100644 --- a/src/compiler/scala/tools/nsc/Properties.scala +++ b/src/compiler/scala/tools/nsc/Properties.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2006-2013 LAMP/EPFL - * @author Stephane Micheloud +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/Reporting.scala b/src/compiler/scala/tools/nsc/Reporting.scala index 4bed54a153b8..95687853d180 100644 --- a/src/compiler/scala/tools/nsc/Reporting.scala +++ b/src/compiler/scala/tools/nsc/Reporting.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL, Typesafe Inc. - * @author Adriaan Moors +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index 41db2bb4fdba..b6c2fcd7d959 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/Settings.scala b/src/compiler/scala/tools/nsc/Settings.scala index b64f27859f90..19ad250116eb 100644 --- a/src/compiler/scala/tools/nsc/Settings.scala +++ b/src/compiler/scala/tools/nsc/Settings.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/SubComponent.scala b/src/compiler/scala/tools/nsc/SubComponent.scala index b21d156145cc..6489eed3347a 100644 --- a/src/compiler/scala/tools/nsc/SubComponent.scala +++ b/src/compiler/scala/tools/nsc/SubComponent.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/ast/DocComments.scala b/src/compiler/scala/tools/nsc/ast/DocComments.scala index 5d8943444c4b..c2e8f8e01ed8 100644 --- a/src/compiler/scala/tools/nsc/ast/DocComments.scala +++ b/src/compiler/scala/tools/nsc/ast/DocComments.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala index 53a37428d770..0c43f37b0fa3 100644 --- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala +++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/ast/Positions.scala b/src/compiler/scala/tools/nsc/ast/Positions.scala index beab801edfe9..d1b9e54a3f20 100644 --- a/src/compiler/scala/tools/nsc/ast/Positions.scala +++ b/src/compiler/scala/tools/nsc/ast/Positions.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package ast diff --git a/src/compiler/scala/tools/nsc/ast/Printers.scala b/src/compiler/scala/tools/nsc/ast/Printers.scala index 8b37948e9ba3..c12993c01603 100644 --- a/src/compiler/scala/tools/nsc/ast/Printers.scala +++ b/src/compiler/scala/tools/nsc/ast/Printers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala index 105bdee2563f..44380a32e065 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala index 9e1498cf3e30..e539bba97e91 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL +/* + * Scala (https://www.scala-lang.org) * - * @author Paul Phillips + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 0ba7dad971d3..8d0210a45390 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala index 60558479265a..fa336c0b64f2 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeInfo.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeInfo.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index 80f4ac9f1c18..6af6d0ea1ea1 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala b/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala index d5fae97eb861..8fbdec3db35c 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/BracePair.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc package ast.parser diff --git a/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala b/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala index a573ddfeb197..4838d59b7cfc 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/BracePatch.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc package ast.parser diff --git a/src/compiler/scala/tools/nsc/ast/parser/Change.scala b/src/compiler/scala/tools/nsc/ast/parser/Change.scala index 57dc48a75a4f..664cc9879c47 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Change.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Change.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc.ast.parser abstract class Change diff --git a/src/compiler/scala/tools/nsc/ast/parser/CommonTokens.scala b/src/compiler/scala/tools/nsc/ast/parser/CommonTokens.scala index 5fcb02814b36..090c517054f7 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/CommonTokens.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/CommonTokens.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package ast.parser diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala index 46d533b03727..3619755cf844 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Burak Emir +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala index 1a76c229cbdd..c7f0c0f65980 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Parsers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ //todo: allow infix type patterns diff --git a/src/compiler/scala/tools/nsc/ast/parser/Patch.scala b/src/compiler/scala/tools/nsc/ast/parser/Patch.scala index 0829b1aad954..618d594a7fe3 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Patch.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Patch.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc.ast.parser class Patch(off: Int, change: Change) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index c9fe0c6ab62c..a95cb85f5824 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc package ast.parser diff --git a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala index c3c3ee9d471b..6a26e96f4c0d 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SymbolicXMLBuilder.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Burak Emir +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala index b4b6f25dc999..308abe7f3972 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala index e624aec88cb4..56dbf3db7494 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Tokens.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index 396f1c637ee4..ea7e9f1b0cc5 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala b/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala index 82dce9f1f8ed..7c197f174224 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/xml/MarkupParserCommon.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.nsc.ast.parser.xml diff --git a/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala b/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala index 64b9db52510a..911ae51fee96 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/xml/Utility.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.nsc.ast.parser.xml diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala index dc63b335ccc7..ff11f434710b 100644 --- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala +++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/Platform.scala b/src/compiler/scala/tools/nsc/backend/Platform.scala index e464768bb361..a69e79d4c4f1 100644 --- a/src/compiler/scala/tools/nsc/backend/Platform.scala +++ b/src/compiler/scala/tools/nsc/backend/Platform.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala index c18f220d9523..ab739e1868bb 100644 --- a/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala +++ b/src/compiler/scala/tools/nsc/backend/ScalaPrimitives.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala index f7b457e3a02f..403001f4515b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.backend.jvm diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index 4885083938e9..eb2e1631614b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2012 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index f44bd0b58ffd..65e22eec0de1 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2012 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala index 7385011eac05..c88600f9aa55 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2012 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index c3e9850a1e35..1643d6ac4b10 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -1,9 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2012 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ - package scala.tools.nsc package backend package jvm diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala index 65129d5d9647..94a590ed2d10 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSyncAndTry.scala @@ -1,9 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2012 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ - package scala package tools.nsc package backend diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index d2d1139a519a..43a589b032ab 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala index 095e5911313a..5b127b3fd789 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.tools.nsc.backend.jvm import scala.annotation.switch diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index c919c81a346c..1ed9e168aba5 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala index 16441336427d..26012df1e04f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendReporting.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package backend.jvm diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala index 9f4af0b7993e..6388a41bd4b1 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BackendStats.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java b/src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java index b62374dcc53b..5a4874d7d90e 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2018 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc.backend.jvm; import scala.tools.asm.MethodVisitor; diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala index a477ec70c23d..8109add34c40 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.backend.jvm import java.io.{BufferedOutputStream, DataOutputStream, FileOutputStream, IOException} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala index 743d3ebe8754..bc090f145b8d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package backend.jvm diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala index c30ef7cd7ba2..17d548af5cdd 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CoreBTypes.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package backend.jvm diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala index 256090d77caa..cfd1274a3b3a 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenBCode.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2012 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala index a5284611dad7..ae7d772bd629 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package backend.jvm diff --git a/src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java b/src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java index 5bb3c5835428..b119ed90625a 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java +++ b/src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2018 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc.backend.jvm; import scala.tools.asm.Label; diff --git a/src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java b/src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java index 9c735acdd65a..0c8cfbd3a889 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java +++ b/src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2018 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc.backend.jvm; import scala.tools.asm.Label; diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PerRunInit.scala b/src/compiler/scala/tools/nsc/backend/jvm/PerRunInit.scala index a27fe22653ac..69eb97565d44 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PerRunInit.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PerRunInit.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.backend.jvm import scala.collection.mutable.ListBuffer diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index c3b249ad2b93..237ab1951be1 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package backend.jvm diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala index 317b2873e0b9..7de4431d00b9 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessorFrontendAccess.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.tools.nsc package backend.jvm diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala index db14c1fe683e..6eb12e107afe 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/AliasingFrame.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package backend.jvm package analysis diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index 4a2369486ac5..40543b2fce4c 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package backend.jvm package analysis diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala index dd19ad594f76..f0c21f090269 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/InstructionStackEffect.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package backend.jvm package analysis diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala index 384445d92060..f55bd730c0e7 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.tools.nsc package backend.jvm package analysis diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala index 98e171cfd168..8e29f5082c10 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2015 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala index 9bb79eae24dc..7adc5f28cd42 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package backend.jvm package analysis diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala index 999c686aac8b..00702da6cb84 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/package.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.backend.jvm /** diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala index 967779f677e1..70866382da06 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BoxUnbox.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala index 206b21a961b3..a74982f68d65 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala index cf653a449cec..6036c720756b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala index e0c7ae4f3231..11fd4df644e1 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala index b420182cd64c..b3f6765abc7f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2015 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala index 78d9a27b0004..d7f478cd88dd 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CopyProp.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala index 7bc4ea239296..b4590aabb764 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala index 30cff49a2e39..38712e41a645 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/Inliner.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala index 04f1b24e30d8..6654c2ddf6f7 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlinerHeuristics.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala index 3d0da4edd1e3..8a46aea9248f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LocalOpt.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/LruMap.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/LruMap.scala index 2569ee707c46..6d49db505438 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/LruMap.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/LruMap.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.backend.jvm.opt import scala.collection.mutable.Map diff --git a/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala index fb1119a71ea2..68fb3000b8c0 100644 --- a/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala @@ -1,6 +1,15 @@ /* - * Copyright (c) 2014 Contributor. All rights reserved. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc.classpath import java.net.URL diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPath.scala b/src/compiler/scala/tools/nsc/classpath/ClassPath.scala index 6ad4142977eb..1a65c230ab80 100644 --- a/src/compiler/scala/tools/nsc/classpath/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/ClassPath.scala @@ -1,6 +1,15 @@ /* - * Copyright (c) 2014 Contributor. All rights reserved. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc.classpath import scala.reflect.io.AbstractFile diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala index 2fb1bd6ea421..fa9166483594 100644 --- a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala @@ -1,6 +1,15 @@ /* - * Copyright (c) 2014 Contributor. All rights reserved. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ + package scala.tools.nsc.classpath import scala.reflect.io.{AbstractFile, VirtualDirectory} diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala index 5f32fa4359e9..9f51672e79a6 100644 --- a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala @@ -1,6 +1,15 @@ /* - * Copyright (c) 2014 Contributor. All rights reserved. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc.classpath import java.io.File diff --git a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala index e32ee5015d69..059a83da796c 100644 --- a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala +++ b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala @@ -1,6 +1,15 @@ /* - * Copyright (c) 2014 Contributor. All rights reserved. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc.classpath import java.io.{File => JFile, FileFilter} diff --git a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala index 14ac12e041bf..c589bcc6598a 100644 --- a/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala +++ b/src/compiler/scala/tools/nsc/classpath/PackageNameUtils.scala @@ -1,6 +1,15 @@ /* - * Copyright (c) 2014 Contributor. All rights reserved. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc.classpath import scala.tools.nsc.util.ClassPath.RootPackage diff --git a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala index 6fefaf0da08e..5b157e9b386e 100644 --- a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.classpath import scala.tools.nsc.util.ClassRepresentation diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 716eeaaa1eaa..6f8b9a55c0cd 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -1,6 +1,15 @@ /* - * Copyright (c) 2014 Contributor. All rights reserved. 
+ * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc.classpath import java.io.File diff --git a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala index 8ef36d1a5576..32ec4cde4485 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala @@ -1,6 +1,15 @@ /* - * Copyright (c) 2014 Contributor. All rights reserved. + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc.classpath import java.io.File diff --git a/src/compiler/scala/tools/nsc/io/Jar.scala b/src/compiler/scala/tools/nsc/io/Jar.scala index 779f546f69e0..e95d48b5e8c0 100644 --- a/src/compiler/scala/tools/nsc/io/Jar.scala +++ b/src/compiler/scala/tools/nsc/io/Jar.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/io/Socket.scala b/src/compiler/scala/tools/nsc/io/Socket.scala index a17517da2ee4..0953f3dfa7c6 100644 --- a/src/compiler/scala/tools/nsc/io/Socket.scala +++ b/src/compiler/scala/tools/nsc/io/Socket.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala index 5ac79f357b04..88d8091d2e35 100644 --- a/src/compiler/scala/tools/nsc/io/SourceReader.scala +++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala @@ -1,9 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ - package scala.tools.nsc package io diff --git a/src/compiler/scala/tools/nsc/io/package.scala b/src/compiler/scala/tools/nsc/io/package.scala index 5f2f90c28495..3a0502ae6169 100644 --- a/src/compiler/scala/tools/nsc/io/package.scala +++ b/src/compiler/scala/tools/nsc/io/package.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index 3ef75679eeda..d609898d6f99 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + //todo: allow infix type patterns diff --git a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala index af9b63c8ae51..a25c51eaf358 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaScanners.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaScanners.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala index 9b31e6e8a29b..855fe19e6706 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaTokens.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaTokens.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/package.scala b/src/compiler/scala/tools/nsc/package.scala index 817a4a5c8854..46cd59b63625 100644 --- a/src/compiler/scala/tools/nsc/package.scala +++ b/src/compiler/scala/tools/nsc/package.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index 5b0b77dffb26..b76f67ccf6aa 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Lex Spoon +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala index a6df08c331b3..1424a0420be6 100644 --- a/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala +++ b/src/compiler/scala/tools/nsc/plugins/PluginComponent.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Lex Spoon - * Updated by Anders Bach Nielsen +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala index bf78c93fcc93..83d5d238bde5 100644 --- a/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala +++ b/src/compiler/scala/tools/nsc/plugins/PluginDescription.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Lex Spoon +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 021d9e48244e..bba855ba541a 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Lex Spoon - * Updated by Anders Bach Nielsen +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/profile/ExtendedThreadMxBean.java b/src/compiler/scala/tools/nsc/profile/ExtendedThreadMxBean.java index 36e6e2c70ae7..1d5cf4bc3e4e 100644 --- a/src/compiler/scala/tools/nsc/profile/ExtendedThreadMxBean.java +++ b/src/compiler/scala/tools/nsc/profile/ExtendedThreadMxBean.java @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.profile; import javax.management.ObjectName; diff --git a/src/compiler/scala/tools/nsc/profile/ExternalToolHook.java b/src/compiler/scala/tools/nsc/profile/ExternalToolHook.java index a716483a56d1..b8ee01090407 100644 --- a/src/compiler/scala/tools/nsc/profile/ExternalToolHook.java +++ b/src/compiler/scala/tools/nsc/profile/ExternalToolHook.java @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.profile; /** diff --git a/src/compiler/scala/tools/nsc/profile/Profiler.scala b/src/compiler/scala/tools/nsc/profile/Profiler.scala index d0931071b3a1..87654e8e8baa 100644 --- a/src/compiler/scala/tools/nsc/profile/Profiler.scala +++ b/src/compiler/scala/tools/nsc/profile/Profiler.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.profile import java.io.{FileWriter, PrintWriter} diff --git a/src/compiler/scala/tools/nsc/profile/ProfilerPlugin.scala b/src/compiler/scala/tools/nsc/profile/ProfilerPlugin.scala index 9418771558ff..97073f448259 100644 --- a/src/compiler/scala/tools/nsc/profile/ProfilerPlugin.scala +++ b/src/compiler/scala/tools/nsc/profile/ProfilerPlugin.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.profile import scala.tools.nsc.{Phase, Settings} diff --git a/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala b/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala index 33d8cefde10b..822a7317d284 100644 --- a/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala +++ b/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.profile import java.util.concurrent.ThreadPoolExecutor.AbortPolicy diff --git a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala index c3ac5d647d05..a7c7961ce61f 100644 --- a/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/AbstractReporter.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2002-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala index c2cbaf81ac52..c0b4e5e91200 100644 --- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2002-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala b/src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala index 46f35d1d7439..569713e49975 100644 --- a/src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/LimitingReporter.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package reporters diff --git a/src/compiler/scala/tools/nsc/reporters/NoReporter.scala b/src/compiler/scala/tools/nsc/reporters/NoReporter.scala index 26335bd6c4cf..83959040cb3e 100644 --- a/src/compiler/scala/tools/nsc/reporters/NoReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/NoReporter.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.reporters import scala.reflect.internal.util.Position diff --git a/src/compiler/scala/tools/nsc/reporters/Reporter.scala b/src/compiler/scala/tools/nsc/reporters/Reporter.scala index 91a28f61f970..0117e8daa148 100644 --- a/src/compiler/scala/tools/nsc/reporters/Reporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/Reporter.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2002-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala index 735ad89c8221..21de3d2b6c18 100644 --- a/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/StoreReporter.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2002-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala index ad03b5fafb1e..dd5500589ba1 100644 --- a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala index 08fa56d8e90f..64eeb8717a9b 100644 --- a/src/compiler/scala/tools/nsc/settings/AbsSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/AbsSettings.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/settings/FscSettings.scala b/src/compiler/scala/tools/nsc/settings/FscSettings.scala index d6013e0b004d..d4bea5180922 100644 --- a/src/compiler/scala/tools/nsc/settings/FscSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/FscSettings.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index bddef769be99..1fabe3ff6f29 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + // $Id$ package scala.tools diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index a0fbedc03924..2783b74a9d59 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + // $Id$ package scala diff --git a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala index c38de753c8f5..7870ac960f6c 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaVersion.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author James Iry +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + // $Id$ package scala diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala index f197a4930da5..5d2b8ac953b1 100644 --- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/settings/Warnings.scala b/src/compiler/scala/tools/nsc/settings/Warnings.scala index c274687fd4f2..1a024868d468 100644 --- a/src/compiler/scala/tools/nsc/settings/Warnings.scala +++ b/src/compiler/scala/tools/nsc/settings/Warnings.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala index 1051dc7afbca..c6c82f9c5a72 100644 --- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 85ea78c912a7..6444823efced 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala index 2101a65cb1ac..d562c715e493 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolTable.scala @@ -1,6 +1,13 @@ -/* NSC -- new scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala index daaa625164ab..102fe0549093 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolTrackers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala index 17e3b08ec295..a8d673663e8d 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala @@ -1,9 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ - package scala.tools.nsc package symtab package classfile diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index f99b85b7cfdd..a778fbcf035f 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 7fc9ec14f98e..76b91ba067ca 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/package.scala b/src/compiler/scala/tools/nsc/symtab/classfile/package.scala index 1f9a823bb489..ffe00c3c13bf 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/package.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/package.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.symtab package object classfile { diff --git a/src/compiler/scala/tools/nsc/symtab/package.scala b/src/compiler/scala/tools/nsc/symtab/package.scala index 0e6719f225a1..7a1dedec3fa3 100644 --- a/src/compiler/scala/tools/nsc/symtab/package.scala +++ b/src/compiler/scala/tools/nsc/symtab/package.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package object symtab { diff --git a/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala b/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala index 851482af6e59..823ac0eb9343 100644 --- a/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala +++ b/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + // Copyright 2005-2017 LAMP/EPFL and Lightbend, Inc package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 81dc15db4c95..5fd6fbc402c6 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index b3e2e7ae6ba3..64ef325824eb 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index 9093826050d9..a5ca807db36a 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package transform diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 33d869919083..931ca8e1ac20 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 85a6fa220088..1412c2088f91 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index b97e54f10f81..eccc415615a7 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc package transform diff --git a/src/compiler/scala/tools/nsc/transform/Fields.scala b/src/compiler/scala/tools/nsc/transform/Fields.scala index 029b7b951b4d..4ad8c81bcd1c 100644 --- a/src/compiler/scala/tools/nsc/transform/Fields.scala +++ b/src/compiler/scala/tools/nsc/transform/Fields.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index d1c82bb99192..ec1a9861162b 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala index dc321e26ca96..66ad8f319ecd 100644 --- a/src/compiler/scala/tools/nsc/transform/InfoTransform.scala +++ b/src/compiler/scala/tools/nsc/transform/InfoTransform.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala index cf3b4b649605..0b551e094e6c 100644 --- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala +++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 1ee9feec31ac..76f03d4b2fed 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2016 LAMP/EPFL and Lightbend, Inc +/* + * Scala (https://www.scala-lang.org) * - * @author Martin Odersky + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index e159b07a7385..ceda2c30f600 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/transform/PostErasure.scala b/src/compiler/scala/tools/nsc/transform/PostErasure.scala index 32987fed8ca6..9eb381e76851 100644 --- a/src/compiler/scala/tools/nsc/transform/PostErasure.scala +++ b/src/compiler/scala/tools/nsc/transform/PostErasure.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc package transform diff --git a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala index 4c1705e3864c..26e0347be4f5 100644 --- a/src/compiler/scala/tools/nsc/transform/SampleTransform.scala +++ b/src/compiler/scala/tools/nsc/transform/SampleTransform.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index c7458a9ef38e..bddaf1e8bdb3 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Iulian Dragos +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/transform/Statics.scala b/src/compiler/scala/tools/nsc/transform/Statics.scala index 776805fd9f1c..6c19fda625ef 100644 --- a/src/compiler/scala/tools/nsc/transform/Statics.scala +++ b/src/compiler/scala/tools/nsc/transform/Statics.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package transform diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala index 664aef41c0a2..507285efccc4 100644 --- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala +++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala @@ -1,6 +1,13 @@ -/* NSC -- new scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Iulian Dragos +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/transform/Transform.scala b/src/compiler/scala/tools/nsc/transform/Transform.scala index 4e69fbce8b4f..3bf69c53795b 100644 --- a/src/compiler/scala/tools/nsc/transform/Transform.scala +++ b/src/compiler/scala/tools/nsc/transform/Transform.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala index ff3e4aeedac7..78b1191e0855 100644 --- a/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala +++ b/src/compiler/scala/tools/nsc/transform/TypeAdaptingTransformer.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package transform diff --git a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala index 97e46d5fd8f7..d1722c2d0325 100644 --- a/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala +++ b/src/compiler/scala/tools/nsc/transform/TypingTransformers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 8a466ca3305d..231293a8ad91 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala index 12129884d98f..ac3de202439e 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Logic.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). * - * Copyright 2011-2013 LAMP/EPFL - * @author Adriaan Moors + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index 27fdfe806b94..b33148bd017c 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler +/* + * Scala (https://www.scala-lang.org) * - * Copyright 2011-2013 LAMP/EPFL - * @author Adriaan Moors + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.transform.patmat diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala index 7b8a5fd31a50..f11d07ad9851 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler +/* + * Scala (https://www.scala-lang.org) * - * Copyright 2011-2013 LAMP/EPFL - * @author Adriaan Moors + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc.transform.patmat diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala index 0d08120e4395..0b5c089dbfc7 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCps.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler +/* + * Scala (https://www.scala-lang.org) * - * Copyright 2011-2013 LAMP/EPFL - * @author Adriaan Moors + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.transform.patmat diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala index de41991c90ab..b02bdfa152f6 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler +/* + * Scala (https://www.scala-lang.org) * - * Copyright 2011-2013 LAMP/EPFL - * @author Adriaan Moors + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.transform.patmat diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index c8e27c2640ee..6db93de2c6dd 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler +/* + * Scala (https://www.scala-lang.org) * - * Copyright 2011-2013 LAMP/EPFL - * @author Adriaan Moors + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.transform.patmat diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 53f27b15e880..4a6731744dcd 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler +/* + * Scala (https://www.scala-lang.org) * - * Copyright 2011-2013 LAMP/EPFL - * @author Adriaan Moors + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc.transform.patmat diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala index 3f27d18e6437..f39488a62867 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchWarnings.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler +/* + * Scala (https://www.scala-lang.org) * - * Copyright 2011-2013 LAMP/EPFL - * @author Adriaan Moors + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.transform.patmat diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala index e56110cb6bb2..aac303f81768 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index 89853e59511f..01c742a3e6e0 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler +/* + * Scala (https://www.scala-lang.org) * - * Copyright 2011-2013 LAMP/EPFL - * @author Adriaan Moors + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.transform.patmat diff --git a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala index ecd2211441c8..4bcb5f3673fc 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/Solving.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler +/* + * Scala (https://www.scala-lang.org) * - * Copyright 2011-2017 LAMP/EPFL - * @author Adriaan Moors + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc.transform.patmat diff --git a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala index c9e828f47b21..fcdf60501ccb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Adaptations.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index 74b154eb21bc..63147ea26488 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala index 1ec9de99b4bb..4c089196f0da 100644 --- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala +++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index ce9923ee7f05..83b3c1e56bcd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala index 8b6240907613..3b2afa914f3e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ConstantFolder.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 7aa71cfda051..f13e0fbc2546 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 1fd78e478858..df0fdbfa3de2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2017 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala index 1f1ccbe359c6..3069d4818f9d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala +++ b/src/compiler/scala/tools/nsc/typechecker/DestructureTypes.scala @@ -1,7 +1,14 @@ -/* NSC -- new Scala compiler -* Copyright 2005-2013 LAMP/EPFL -* @author Paul Phillips -*/ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.nsc package typechecker diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index ea827395044c..213ae2785261 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala index fe0d6a24f5cb..8e38f0bedbe5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala +++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 9be8927d51f2..c9142c4beaf4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ //todo: rewrite or disallow new T where T is a mixin (currently: not a member of T) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index affc06fafa39..4c32bf9678d5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 4cb9c2ca39d7..1755042d339e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package typechecker diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 40e07acbc1f0..20535e89f413 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc package typechecker diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 806025c026c8..5a1b73ace9c0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 0dbb0e860b25..57ddca32cfc5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index 100480a6d29f..cbecec507201 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index d817e0612996..372e0ed8ed81 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala index 524f27559772..1441823ea16f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala +++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package typechecker diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index 6ba13fd56b70..4f2010d66ee1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* NSC -- new Scala compiler * Copyright 2005-2013 LAMP/EPFL diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 6b4ea13ddf91..e33de6477aee 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/Tags.scala b/src/compiler/scala/tools/nsc/typechecker/Tags.scala index 31171d91586a..5a3bfa198a11 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Tags.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Tags.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package typechecker diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index 50743a922a4e..2c78af2272e9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 314b856dab28..e3a6d5adb1bb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala index 63e41971dbcd..0b0bd0910cf0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeStrings.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 2f828154c5d0..d59bf5d6f7fa 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ // Added: Sat Oct 7 16:08:21 2006 diff --git a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala index ec889bd8301c..8bc1822c50d8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypersTracking.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala index 0945c68add20..e4862d6872f6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala index e6f95eb0d619..6dac04412fdc 100644 --- a/src/compiler/scala/tools/nsc/util/CharArrayReader.scala +++ b/src/compiler/scala/tools/nsc/util/CharArrayReader.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/util/ClassPath.scala b/src/compiler/scala/tools/nsc/util/ClassPath.scala index ebfc17183b47..827c7ce5dbd7 100644 --- a/src/compiler/scala/tools/nsc/util/ClassPath.scala +++ b/src/compiler/scala/tools/nsc/util/ClassPath.scala @@ -1,9 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2006-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ - package scala.tools.nsc package util diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala index 501546b8f601..a0205f50efde 100644 --- a/src/compiler/scala/tools/nsc/util/DocStrings.scala +++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2006-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/compiler/scala/tools/nsc/util/Exceptional.scala b/src/compiler/scala/tools/nsc/util/Exceptional.scala index 1608ffa42589..903a92e009f9 100644 --- a/src/compiler/scala/tools/nsc/util/Exceptional.scala +++ b/src/compiler/scala/tools/nsc/util/Exceptional.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package util diff --git a/src/compiler/scala/tools/nsc/util/InterruptReq.scala b/src/compiler/scala/tools/nsc/util/InterruptReq.scala index b1b81d09522f..ddb1f3353c93 100644 --- a/src/compiler/scala/tools/nsc/util/InterruptReq.scala +++ b/src/compiler/scala/tools/nsc/util/InterruptReq.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.tools.nsc package util diff --git a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala index 58a544246538..2a506f0e373d 100644 --- a/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala +++ b/src/compiler/scala/tools/nsc/util/JavaCharArrayReader.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/util/ShowPickled.scala b/src/compiler/scala/tools/nsc/util/ShowPickled.scala index b804bfb84253..b67f2df20177 100644 --- a/src/compiler/scala/tools/nsc/util/ShowPickled.scala +++ b/src/compiler/scala/tools/nsc/util/ShowPickled.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala index 4e1cf02a6ef4..af49114e52f5 100644 --- a/src/compiler/scala/tools/nsc/util/SimpleTracer.scala +++ b/src/compiler/scala/tools/nsc/util/SimpleTracer.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package util diff --git a/src/compiler/scala/tools/nsc/util/StackTracing.scala b/src/compiler/scala/tools/nsc/util/StackTracing.scala index c6749a13f328..43bcd21fff5c 100644 --- a/src/compiler/scala/tools/nsc/util/StackTracing.scala +++ b/src/compiler/scala/tools/nsc/util/StackTracing.scala @@ -1,5 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.util diff --git a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala index 4f7a9ff8786b..064d00df6282 100644 --- a/src/compiler/scala/tools/nsc/util/WorkScheduler.scala +++ b/src/compiler/scala/tools/nsc/util/WorkScheduler.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.tools.nsc package util diff --git a/src/compiler/scala/tools/nsc/util/package.scala b/src/compiler/scala/tools/nsc/util/package.scala index 80e82c85d8e8..33cbd6628524 100644 --- a/src/compiler/scala/tools/nsc/util/package.scala +++ b/src/compiler/scala/tools/nsc/util/package.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/reflect/FastTrack.scala b/src/compiler/scala/tools/reflect/FastTrack.scala index dc26c9306618..fcb204d52253 100644 --- a/src/compiler/scala/tools/reflect/FastTrack.scala +++ b/src/compiler/scala/tools/reflect/FastTrack.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools package reflect diff --git a/src/compiler/scala/tools/reflect/FormatInterpolator.scala b/src/compiler/scala/tools/reflect/FormatInterpolator.scala index 857b733f59f5..ad7084e56dfb 100644 --- a/src/compiler/scala/tools/reflect/FormatInterpolator.scala +++ b/src/compiler/scala/tools/reflect/FormatInterpolator.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.reflect import scala.reflect.macros.runtime.Context diff --git a/src/compiler/scala/tools/reflect/FrontEnd.scala b/src/compiler/scala/tools/reflect/FrontEnd.scala index 6591962d34c2..93c62c2c8a3f 100644 --- a/src/compiler/scala/tools/reflect/FrontEnd.scala +++ b/src/compiler/scala/tools/reflect/FrontEnd.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools package reflect diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala index b80524df2b37..e1cf834c6fb6 100644 --- a/src/compiler/scala/tools/reflect/ReflectGlobal.scala +++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.tools package reflect diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala index 7d8291069928..3abd5f390761 100644 --- a/src/compiler/scala/tools/reflect/ReflectMain.scala +++ b/src/compiler/scala/tools/reflect/ReflectMain.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools package reflect diff --git a/src/compiler/scala/tools/reflect/ReflectSetup.scala b/src/compiler/scala/tools/reflect/ReflectSetup.scala index f18c114d62c2..daea54a79d73 100644 --- a/src/compiler/scala/tools/reflect/ReflectSetup.scala +++ b/src/compiler/scala/tools/reflect/ReflectSetup.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools package reflect diff --git a/src/compiler/scala/tools/reflect/StdTags.scala b/src/compiler/scala/tools/reflect/StdTags.scala index ee352c5e02ce..db4c386a9189 100644 --- a/src/compiler/scala/tools/reflect/StdTags.scala +++ b/src/compiler/scala/tools/reflect/StdTags.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools package reflect diff --git a/src/compiler/scala/tools/reflect/ToolBox.scala b/src/compiler/scala/tools/reflect/ToolBox.scala index fc3b78e37cf1..1e31f2fcfdc7 100644 --- a/src/compiler/scala/tools/reflect/ToolBox.scala +++ b/src/compiler/scala/tools/reflect/ToolBox.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools package reflect diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 61166f4239b8..56f032e8d5a4 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package tools package reflect diff --git a/src/compiler/scala/tools/reflect/WrappedProperties.scala b/src/compiler/scala/tools/reflect/WrappedProperties.scala index 3a9ebf028f2b..ae68965b4fa8 100644 --- a/src/compiler/scala/tools/reflect/WrappedProperties.scala +++ b/src/compiler/scala/tools/reflect/WrappedProperties.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2006-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools diff --git a/src/compiler/scala/tools/reflect/package.scala b/src/compiler/scala/tools/reflect/package.scala index 1055894121bd..86c2938c274a 100644 --- a/src/compiler/scala/tools/reflect/package.scala +++ b/src/compiler/scala/tools/reflect/package.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index 4a8f1c47f809..1ad471e40f8b 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2006-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/compiler/scala/tools/util/SocketServer.scala b/src/compiler/scala/tools/util/SocketServer.scala index acf406c676cf..bc1668f75c80 100644 --- a/src/compiler/scala/tools/util/SocketServer.scala +++ b/src/compiler/scala/tools/util/SocketServer.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package tools.util diff --git a/src/compiler/scala/tools/util/VerifyClass.scala b/src/compiler/scala/tools/util/VerifyClass.scala index a76586df1714..93f65564d2da 100644 --- a/src/compiler/scala/tools/util/VerifyClass.scala +++ b/src/compiler/scala/tools/util/VerifyClass.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.tools.util import scala.tools.nsc.io._ diff --git a/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala b/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala index 45221343c8df..89c924aa7e0a 100644 --- a/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala +++ b/src/compilerOptionsExporter/scala/tools/nsc/ScalaCompilerOptionsExporter.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc import com.fasterxml.jackson.annotation._ diff --git a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala index d143301f326f..4ad122148993 100644 --- a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala +++ b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc package interactive diff --git a/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala index 2d513f7e429c..5da3a0f1538a 100644 --- a/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala +++ b/src/interactive/scala/tools/nsc/interactive/ContextTrees.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc package interactive diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index a65216e920fb..082a9b825b41 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ + package scala.tools.nsc package interactive diff --git a/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala b/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala index 013b152e96c0..713545d4cba8 100644 --- a/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala +++ b/src/interactive/scala/tools/nsc/interactive/InteractiveReporter.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc package interactive diff --git a/src/interactive/scala/tools/nsc/interactive/Lexer.scala b/src/interactive/scala/tools/nsc/interactive/Lexer.scala index 7daf24c20420..39ee494ce62d 100644 --- a/src/interactive/scala/tools/nsc/interactive/Lexer.scala +++ b/src/interactive/scala/tools/nsc/interactive/Lexer.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.interactive import java.io.Reader diff --git a/src/interactive/scala/tools/nsc/interactive/Main.scala b/src/interactive/scala/tools/nsc/interactive/Main.scala index 7796c656709a..f69e35a38df7 100644 --- a/src/interactive/scala/tools/nsc/interactive/Main.scala +++ b/src/interactive/scala/tools/nsc/interactive/Main.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools diff --git a/src/interactive/scala/tools/nsc/interactive/Pickler.scala b/src/interactive/scala/tools/nsc/interactive/Pickler.scala index c7cd33fc0656..13c1d3d88811 100644 --- a/src/interactive/scala/tools/nsc/interactive/Pickler.scala +++ b/src/interactive/scala/tools/nsc/interactive/Pickler.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.interactive import Lexer._ diff --git a/src/interactive/scala/tools/nsc/interactive/Picklers.scala b/src/interactive/scala/tools/nsc/interactive/Picklers.scala index 1f89e6d3aa7f..4577f68dfefd 100644 --- a/src/interactive/scala/tools/nsc/interactive/Picklers.scala +++ b/src/interactive/scala/tools/nsc/interactive/Picklers.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc package interactive diff --git a/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala b/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala index a2d8e5d49a32..32f090aa2fa4 100644 --- a/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala +++ b/src/interactive/scala/tools/nsc/interactive/PresentationCompilerThread.scala @@ -1,8 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky - * @author Iulian Dragos +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc.interactive /** A presentation compiler thread. This is a lightweight class, delegating most diff --git a/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala b/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala index d7dadcc6a822..5a965c2431f0 100644 --- a/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala +++ b/src/interactive/scala/tools/nsc/interactive/PrettyWriter.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.interactive import java.io.Writer diff --git a/src/interactive/scala/tools/nsc/interactive/REPL.scala b/src/interactive/scala/tools/nsc/interactive/REPL.scala index e9cec3197546..3ffd8ecd3862 100644 --- a/src/interactive/scala/tools/nsc/interactive/REPL.scala +++ b/src/interactive/scala/tools/nsc/interactive/REPL.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala package tools.nsc package interactive diff --git a/src/interactive/scala/tools/nsc/interactive/RangePositions.scala b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala index 410f919daa04..2686ab337947 100644 --- a/src/interactive/scala/tools/nsc/interactive/RangePositions.scala +++ b/src/interactive/scala/tools/nsc/interactive/RangePositions.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/interactive/scala/tools/nsc/interactive/Replayer.scala b/src/interactive/scala/tools/nsc/interactive/Replayer.scala index 0e3e2493fe4b..0f7d439132ab 100644 --- a/src/interactive/scala/tools/nsc/interactive/Replayer.scala +++ b/src/interactive/scala/tools/nsc/interactive/Replayer.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.interactive import java.io.{Reader, Writer} diff --git a/src/interactive/scala/tools/nsc/interactive/Response.scala b/src/interactive/scala/tools/nsc/interactive/Response.scala index 3e84c83e55b3..5df96f440ea5 100644 --- a/src/interactive/scala/tools/nsc/interactive/Response.scala +++ b/src/interactive/scala/tools/nsc/interactive/Response.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc package interactive diff --git a/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala index b82888b2aa08..27361f9a367f 100644 --- a/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala +++ b/src/interactive/scala/tools/nsc/interactive/RichCompilationUnits.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc package interactive diff --git a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala index 00096dd359d8..77ebab667ee0 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTest.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ + package scala.tools.nsc package interactive package tests diff --git a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala index ad5c61b2b02b..0b1f133006e2 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/InteractiveTestSettings.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package interactive package tests diff --git a/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala index f1ada328081f..728e7dcf7a1d 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/Tester.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala package tools.nsc package interactive diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala index d5da52bc1381..3ed8cd215c94 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/AskCommand.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2009-2013 Typesafe/Scala Solutions and LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc package interactive package tests.core diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala index f5cc0f65bc2f..630f2e3317eb 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/CoreTestDefs.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.tools.nsc package interactive package tests.core diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala index 29e546f9fe61..cb12424fc2bf 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerInstance.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package interactive package tests.core diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala index b5ae5f2d751d..cc24852f15df 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerRequestsWorkingMode.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package interactive package tests.core diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala index 4d5b4e11292c..b95b26a7d9a5 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/PresentationCompilerTestDef.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.interactive.tests.core import scala.reflect.internal.util.Position diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala index 631504cda587..d60d74031605 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/Reporter.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.tools.nsc.interactive.tests.core private[tests] trait Reporter { diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala index 40cfc111a1b7..6d9cb255a8a9 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/SourcesCollector.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.interactive.tests.core import scala.reflect.internal.util.{SourceFile,BatchSourceFile} diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala index 3f9b40277c9c..2e39a68b3355 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestMarker.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.interactive.tests.core case class DuplicateTestMarker(msg: String) extends Exception(msg) diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala index 887c3cf29b65..c17cd43c9618 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestResources.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.interactive.tests.core import scala.tools.nsc.io.Path diff --git a/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala index 4962d80a8b53..e0ddc18535ec 100644 --- a/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala +++ b/src/interactive/scala/tools/nsc/interactive/tests/core/TestSettings.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.tools.nsc.interactive.tests.core import scala.tools.nsc.io.Path diff --git a/src/library/scala/AnyVal.scala b/src/library/scala/AnyVal.scala index e861860196b6..79b2a51ec4f8 100644 --- a/src/library/scala/AnyVal.scala +++ b/src/library/scala/AnyVal.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/AnyValCompanion.scala b/src/library/scala/AnyValCompanion.scala index 302cafe0ecdf..968422915d9b 100644 --- a/src/library/scala/AnyValCompanion.scala +++ b/src/library/scala/AnyValCompanion.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/App.scala b/src/library/scala/App.scala index 663bef28cd40..3298cb0d12ef 100644 --- a/src/library/scala/App.scala +++ b/src/library/scala/App.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/Array.scala b/src/library/scala/Array.scala index d9aa6b2ad6ba..bfc54dbe6967 100644 --- a/src/library/scala/Array.scala +++ b/src/library/scala/Array.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala diff --git a/src/library/scala/Cloneable.scala b/src/library/scala/Cloneable.scala index 2810e3ca9613..a1cd9d7e2788 100644 --- a/src/library/scala/Cloneable.scala +++ b/src/library/scala/Cloneable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/Console.scala b/src/library/scala/Console.scala index 47826467a207..399642fbcdbd 100644 --- a/src/library/scala/Console.scala +++ b/src/library/scala/Console.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/DelayedInit.scala b/src/library/scala/DelayedInit.scala index c1d2f28637bd..66cf41a0a97d 100644 --- a/src/library/scala/DelayedInit.scala +++ b/src/library/scala/DelayedInit.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/Dynamic.scala b/src/library/scala/Dynamic.scala index 56eb4cfcf45b..1fa6403cf028 100644 --- a/src/library/scala/Dynamic.scala +++ b/src/library/scala/Dynamic.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/Enumeration.scala b/src/library/scala/Enumeration.scala index ab3fa18064c3..15360c3ceff3 100644 --- a/src/library/scala/Enumeration.scala +++ b/src/library/scala/Enumeration.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/Equals.scala b/src/library/scala/Equals.scala index e06557ccddb6..db8eb9d50bc4 100644 --- a/src/library/scala/Equals.scala +++ b/src/library/scala/Equals.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/Function.scala b/src/library/scala/Function.scala index f96fab410467..08f38a71ee46 100644 --- a/src/library/scala/Function.scala +++ b/src/library/scala/Function.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/Immutable.scala b/src/library/scala/Immutable.scala index c7e96a46a010..16a04fccbf55 100644 --- a/src/library/scala/Immutable.scala +++ b/src/library/scala/Immutable.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/MatchError.scala b/src/library/scala/MatchError.scala index 5286fa42f4f0..0f39e5a51cc5 100644 --- a/src/library/scala/MatchError.scala +++ b/src/library/scala/MatchError.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala diff --git a/src/library/scala/Mutable.scala b/src/library/scala/Mutable.scala index 43f98ee4df02..4d5ab888882e 100644 --- a/src/library/scala/Mutable.scala +++ b/src/library/scala/Mutable.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/NotImplementedError.scala b/src/library/scala/NotImplementedError.scala index 464a9a656d48..b4448fece11a 100644 --- a/src/library/scala/NotImplementedError.scala +++ b/src/library/scala/NotImplementedError.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/NotNull.scala b/src/library/scala/NotNull.scala index 6a9be79281ad..5b94c015dbf3 100644 --- a/src/library/scala/NotNull.scala +++ b/src/library/scala/NotNull.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala index ba8baf2c56a1..d158f91e3479 100644 --- a/src/library/scala/Option.scala +++ b/src/library/scala/Option.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala index d2458d428d6c..28c48d28c426 100644 --- a/src/library/scala/PartialFunction.scala +++ b/src/library/scala/PartialFunction.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index 1e1271af8de1..4dde2599e727 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/Product.scala b/src/library/scala/Product.scala index 78f6c153200b..3992503f11d9 100644 --- a/src/library/scala/Product.scala +++ b/src/library/scala/Product.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/Proxy.scala b/src/library/scala/Proxy.scala index d77fd9910404..e75ec6761049 100644 --- a/src/library/scala/Proxy.scala +++ b/src/library/scala/Proxy.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/Responder.scala b/src/library/scala/Responder.scala index d6517742f991..e741bcf8ed73 100644 --- a/src/library/scala/Responder.scala +++ b/src/library/scala/Responder.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/SerialVersionUID.scala b/src/library/scala/SerialVersionUID.scala index 77094f0bbff7..05023df34f19 100644 --- a/src/library/scala/SerialVersionUID.scala +++ b/src/library/scala/SerialVersionUID.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -*/ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/Serializable.scala b/src/library/scala/Serializable.scala index 596ee984aaee..99c839329b34 100644 --- a/src/library/scala/Serializable.scala +++ b/src/library/scala/Serializable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/Specializable.scala b/src/library/scala/Specializable.scala index 137598c28ddd..f7afc104d4c1 100644 --- a/src/library/scala/Specializable.scala +++ b/src/library/scala/Specializable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/StringContext.scala b/src/library/scala/StringContext.scala index b5e946c75acc..c592e232995e 100644 --- a/src/library/scala/StringContext.scala +++ b/src/library/scala/StringContext.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala diff --git a/src/library/scala/Symbol.scala b/src/library/scala/Symbol.scala index 306a10f0d827..fc7b3613f906 100644 --- a/src/library/scala/Symbol.scala +++ b/src/library/scala/Symbol.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/UninitializedError.scala b/src/library/scala/UninitializedError.scala index bb0d5a863c34..87d9cee23d81 100644 --- a/src/library/scala/UninitializedError.scala +++ b/src/library/scala/UninitializedError.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/UninitializedFieldError.scala b/src/library/scala/UninitializedFieldError.scala index 0dfba2a187a5..08946df41d4b 100644 --- a/src/library/scala/UninitializedFieldError.scala +++ b/src/library/scala/UninitializedFieldError.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/annotation/Annotation.scala b/src/library/scala/annotation/Annotation.scala index 52c8cc6ef576..e39874f62aba 100644 --- a/src/library/scala/annotation/Annotation.scala +++ b/src/library/scala/annotation/Annotation.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.annotation diff --git a/src/library/scala/annotation/ClassfileAnnotation.scala b/src/library/scala/annotation/ClassfileAnnotation.scala index 1cb13dff5454..0ad112f089d2 100644 --- a/src/library/scala/annotation/ClassfileAnnotation.scala +++ b/src/library/scala/annotation/ClassfileAnnotation.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.annotation diff --git a/src/library/scala/annotation/StaticAnnotation.scala b/src/library/scala/annotation/StaticAnnotation.scala index 2ccbbc66ccde..6a47f28bf2b7 100644 --- a/src/library/scala/annotation/StaticAnnotation.scala +++ b/src/library/scala/annotation/StaticAnnotation.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.annotation diff --git a/src/library/scala/annotation/TypeConstraint.scala b/src/library/scala/annotation/TypeConstraint.scala index 2192a3d879eb..51d7b133594e 100644 --- a/src/library/scala/annotation/TypeConstraint.scala +++ b/src/library/scala/annotation/TypeConstraint.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.annotation diff --git a/src/library/scala/annotation/bridge.scala b/src/library/scala/annotation/bridge.scala index c0c6dba42439..e40ce914c6ad 100644 --- a/src/library/scala/annotation/bridge.scala +++ b/src/library/scala/annotation/bridge.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.annotation diff --git a/src/library/scala/annotation/compileTimeOnly.scala b/src/library/scala/annotation/compileTimeOnly.scala index 942e9cad8c7d..a2eb330621db 100644 --- a/src/library/scala/annotation/compileTimeOnly.scala +++ b/src/library/scala/annotation/compileTimeOnly.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.annotation import scala.annotation.meta._ diff --git a/src/library/scala/annotation/elidable.scala b/src/library/scala/annotation/elidable.scala index dd0d9b511cb0..775e61d483bf 100644 --- a/src/library/scala/annotation/elidable.scala +++ b/src/library/scala/annotation/elidable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.annotation diff --git a/src/library/scala/annotation/implicitAmbiguous.scala b/src/library/scala/annotation/implicitAmbiguous.scala index 44e8d2308591..198d3219bea1 100644 --- a/src/library/scala/annotation/implicitAmbiguous.scala +++ b/src/library/scala/annotation/implicitAmbiguous.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.annotation /** diff --git a/src/library/scala/annotation/implicitNotFound.scala b/src/library/scala/annotation/implicitNotFound.scala index eeedcb014e43..acc2bea24cce 100644 --- a/src/library/scala/annotation/implicitNotFound.scala +++ b/src/library/scala/annotation/implicitNotFound.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.annotation diff --git a/src/library/scala/annotation/meta/beanGetter.scala b/src/library/scala/annotation/meta/beanGetter.scala index ce4207e1352c..3d45ade30e69 100644 --- a/src/library/scala/annotation/meta/beanGetter.scala +++ b/src/library/scala/annotation/meta/beanGetter.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.annotation.meta /** diff --git a/src/library/scala/annotation/meta/beanSetter.scala b/src/library/scala/annotation/meta/beanSetter.scala index ad3093240017..04483bd1759d 100644 --- a/src/library/scala/annotation/meta/beanSetter.scala +++ b/src/library/scala/annotation/meta/beanSetter.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.annotation.meta /** diff --git a/src/library/scala/annotation/meta/companionClass.scala b/src/library/scala/annotation/meta/companionClass.scala index a0be63ed99d1..abff9ccb5d3a 100644 --- a/src/library/scala/annotation/meta/companionClass.scala +++ b/src/library/scala/annotation/meta/companionClass.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.annotation.meta /** diff --git a/src/library/scala/annotation/meta/companionMethod.scala b/src/library/scala/annotation/meta/companionMethod.scala index 74d624002c37..44eecd2cf541 100644 --- a/src/library/scala/annotation/meta/companionMethod.scala +++ b/src/library/scala/annotation/meta/companionMethod.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.annotation.meta /** diff --git a/src/library/scala/annotation/meta/companionObject.scala b/src/library/scala/annotation/meta/companionObject.scala index 882299371c41..d447c87389c4 100644 --- a/src/library/scala/annotation/meta/companionObject.scala +++ b/src/library/scala/annotation/meta/companionObject.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.annotation.meta /** diff --git a/src/library/scala/annotation/meta/field.scala b/src/library/scala/annotation/meta/field.scala index 84e7fc89f6fd..267037e8d9f7 100644 --- a/src/library/scala/annotation/meta/field.scala +++ b/src/library/scala/annotation/meta/field.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.annotation.meta /** diff --git a/src/library/scala/annotation/meta/getter.scala b/src/library/scala/annotation/meta/getter.scala index 3190aef16384..36d8a76763b5 100644 --- a/src/library/scala/annotation/meta/getter.scala +++ b/src/library/scala/annotation/meta/getter.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.annotation.meta /** diff --git a/src/library/scala/annotation/meta/languageFeature.scala b/src/library/scala/annotation/meta/languageFeature.scala index 5b407121851d..6b68f76338cb 100644 --- a/src/library/scala/annotation/meta/languageFeature.scala +++ b/src/library/scala/annotation/meta/languageFeature.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.annotation.meta /** diff --git a/src/library/scala/annotation/meta/package.scala b/src/library/scala/annotation/meta/package.scala index 2d18ae5dd714..7d09a8785517 100644 --- a/src/library/scala/annotation/meta/package.scala +++ b/src/library/scala/annotation/meta/package.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.annotation /** diff --git a/src/library/scala/annotation/meta/param.scala b/src/library/scala/annotation/meta/param.scala index 1b28e8d27f52..5d4ebf5c8221 100644 --- a/src/library/scala/annotation/meta/param.scala +++ b/src/library/scala/annotation/meta/param.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.annotation.meta /** diff --git a/src/library/scala/annotation/meta/setter.scala b/src/library/scala/annotation/meta/setter.scala index 33be4f0ab8c4..fae59b5a48a7 100644 --- a/src/library/scala/annotation/meta/setter.scala +++ b/src/library/scala/annotation/meta/setter.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.annotation.meta /** diff --git a/src/library/scala/annotation/migration.scala b/src/library/scala/annotation/migration.scala index e71be00f32fd..03e61f36aebf 100644 --- a/src/library/scala/annotation/migration.scala +++ b/src/library/scala/annotation/migration.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.annotation diff --git a/src/library/scala/annotation/showAsInfix.scala b/src/library/scala/annotation/showAsInfix.scala index 6c25e08efa5d..b5bf349848e7 100644 --- a/src/library/scala/annotation/showAsInfix.scala +++ b/src/library/scala/annotation/showAsInfix.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.annotation /** diff --git a/src/library/scala/annotation/strictfp.scala b/src/library/scala/annotation/strictfp.scala index 3b67ffacbb1c..fde18cbdb780 100644 --- a/src/library/scala/annotation/strictfp.scala +++ b/src/library/scala/annotation/strictfp.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.annotation diff --git a/src/library/scala/annotation/switch.scala b/src/library/scala/annotation/switch.scala index 00124cf88baf..94df3bfcc132 100644 --- a/src/library/scala/annotation/switch.scala +++ b/src/library/scala/annotation/switch.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.annotation /** An annotation to be applied to a match expression. If present, diff --git a/src/library/scala/annotation/tailrec.scala b/src/library/scala/annotation/tailrec.scala index 03c2b6a166aa..70376fef7b1b 100644 --- a/src/library/scala/annotation/tailrec.scala +++ b/src/library/scala/annotation/tailrec.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.annotation diff --git a/src/library/scala/annotation/unchecked/uncheckedStable.scala b/src/library/scala/annotation/unchecked/uncheckedStable.scala index d1414df06a88..6d59942affab 100644 --- a/src/library/scala/annotation/unchecked/uncheckedStable.scala +++ b/src/library/scala/annotation/unchecked/uncheckedStable.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.annotation.unchecked /** An annotation for values that are assumed to be stable even though their diff --git a/src/library/scala/annotation/unchecked/uncheckedVariance.scala b/src/library/scala/annotation/unchecked/uncheckedVariance.scala index 0cd6aac40fa7..83ff3bb977ea 100644 --- a/src/library/scala/annotation/unchecked/uncheckedVariance.scala +++ b/src/library/scala/annotation/unchecked/uncheckedVariance.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.annotation.unchecked /** An annotation for type arguments for which one wants to suppress variance checking diff --git a/src/library/scala/annotation/unspecialized.scala b/src/library/scala/annotation/unspecialized.scala index 6e77e3a57ec9..83c5ccc88f20 100644 --- a/src/library/scala/annotation/unspecialized.scala +++ b/src/library/scala/annotation/unspecialized.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.annotation diff --git a/src/library/scala/annotation/varargs.scala b/src/library/scala/annotation/varargs.scala index 46fc790226a1..255f35cb6663 100644 --- a/src/library/scala/annotation/varargs.scala +++ b/src/library/scala/annotation/varargs.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.annotation diff --git a/src/library/scala/beans/BeanDescription.scala b/src/library/scala/beans/BeanDescription.scala index 2c58d20c7f9f..01fbfaed7d00 100644 --- a/src/library/scala/beans/BeanDescription.scala +++ b/src/library/scala/beans/BeanDescription.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.beans diff --git a/src/library/scala/beans/BeanDisplayName.scala b/src/library/scala/beans/BeanDisplayName.scala index c9b1b61c9755..49c139414319 100644 --- a/src/library/scala/beans/BeanDisplayName.scala +++ b/src/library/scala/beans/BeanDisplayName.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.beans diff --git a/src/library/scala/beans/BeanInfo.scala b/src/library/scala/beans/BeanInfo.scala index d7f0a1618be7..cf7ba97c9ec6 100644 --- a/src/library/scala/beans/BeanInfo.scala +++ b/src/library/scala/beans/BeanInfo.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.beans diff --git a/src/library/scala/beans/BeanInfoSkip.scala b/src/library/scala/beans/BeanInfoSkip.scala index 251dedb613d1..d23a2960645c 100644 --- a/src/library/scala/beans/BeanInfoSkip.scala +++ b/src/library/scala/beans/BeanInfoSkip.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.beans diff --git a/src/library/scala/beans/BeanProperty.scala b/src/library/scala/beans/BeanProperty.scala index fec469dc7034..b05326f4a0cf 100644 --- a/src/library/scala/beans/BeanProperty.scala +++ b/src/library/scala/beans/BeanProperty.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.beans diff --git a/src/library/scala/beans/BooleanBeanProperty.scala b/src/library/scala/beans/BooleanBeanProperty.scala index 775e1ac362aa..da865a0fd4a8 100644 --- a/src/library/scala/beans/BooleanBeanProperty.scala +++ b/src/library/scala/beans/BooleanBeanProperty.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.beans diff --git a/src/library/scala/beans/ScalaBeanInfo.scala b/src/library/scala/beans/ScalaBeanInfo.scala index 93c27eb634a3..e08761027b4b 100644 --- a/src/library/scala/beans/ScalaBeanInfo.scala +++ b/src/library/scala/beans/ScalaBeanInfo.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.beans diff --git a/src/library/scala/collection/BitSet.scala b/src/library/scala/collection/BitSet.scala index e255e961408e..8d880bcd63a4 100644 --- a/src/library/scala/collection/BitSet.scala +++ b/src/library/scala/collection/BitSet.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/BitSetLike.scala b/src/library/scala/collection/BitSetLike.scala index 3c451ccdc412..6d0fad27b67a 100644 --- a/src/library/scala/collection/BitSetLike.scala +++ b/src/library/scala/collection/BitSetLike.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/BufferedIterator.scala b/src/library/scala/collection/BufferedIterator.scala index 584df7f0edb1..2e058819caec 100644 --- a/src/library/scala/collection/BufferedIterator.scala +++ b/src/library/scala/collection/BufferedIterator.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/CustomParallelizable.scala b/src/library/scala/collection/CustomParallelizable.scala index cbeb28d643f8..54d57603215f 100644 --- a/src/library/scala/collection/CustomParallelizable.scala +++ b/src/library/scala/collection/CustomParallelizable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/DefaultMap.scala b/src/library/scala/collection/DefaultMap.scala index 8afda7cfcfb0..c1b3185c9fb6 100644 --- a/src/library/scala/collection/DefaultMap.scala +++ b/src/library/scala/collection/DefaultMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/GenIterable.scala b/src/library/scala/collection/GenIterable.scala index 6fd4158726d3..a416d7b53b84 100644 --- a/src/library/scala/collection/GenIterable.scala +++ b/src/library/scala/collection/GenIterable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/GenIterableLike.scala b/src/library/scala/collection/GenIterableLike.scala index 1dbb54ddc7c1..ab63ebee5a0d 100644 --- a/src/library/scala/collection/GenIterableLike.scala +++ b/src/library/scala/collection/GenIterableLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/GenMap.scala b/src/library/scala/collection/GenMap.scala index 6bc507ae9319..71772ade331d 100644 --- a/src/library/scala/collection/GenMap.scala +++ b/src/library/scala/collection/GenMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/GenMapLike.scala b/src/library/scala/collection/GenMapLike.scala index f6c2d071b510..eef8a9e73e62 100644 --- a/src/library/scala/collection/GenMapLike.scala +++ b/src/library/scala/collection/GenMapLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/GenSeq.scala b/src/library/scala/collection/GenSeq.scala index 480562cab5f6..8978982417e7 100644 --- a/src/library/scala/collection/GenSeq.scala +++ b/src/library/scala/collection/GenSeq.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/GenSeqLike.scala b/src/library/scala/collection/GenSeqLike.scala index 6828749f4b8c..ab63a153c21d 100644 --- a/src/library/scala/collection/GenSeqLike.scala +++ b/src/library/scala/collection/GenSeqLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/GenSet.scala b/src/library/scala/collection/GenSet.scala index 246786009560..a18ee461b90c 100644 --- a/src/library/scala/collection/GenSet.scala +++ b/src/library/scala/collection/GenSet.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/GenSetLike.scala b/src/library/scala/collection/GenSetLike.scala index c5355e58ecda..34f26810097a 100644 --- a/src/library/scala/collection/GenSetLike.scala +++ b/src/library/scala/collection/GenSetLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/GenTraversable.scala b/src/library/scala/collection/GenTraversable.scala index 8705965992e5..b26b491dc887 100644 --- a/src/library/scala/collection/GenTraversable.scala +++ b/src/library/scala/collection/GenTraversable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/GenTraversableLike.scala b/src/library/scala/collection/GenTraversableLike.scala index 86e62f6a8f9f..fefdb7f06d25 100644 --- a/src/library/scala/collection/GenTraversableLike.scala +++ b/src/library/scala/collection/GenTraversableLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala index 6788b09a7cb7..fb232d3e30ea 100644 --- a/src/library/scala/collection/GenTraversableOnce.scala +++ b/src/library/scala/collection/GenTraversableOnce.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/IndexedSeq.scala b/src/library/scala/collection/IndexedSeq.scala index 1a330261014f..277bf2cd0d5e 100644 --- a/src/library/scala/collection/IndexedSeq.scala +++ b/src/library/scala/collection/IndexedSeq.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/IndexedSeqLike.scala b/src/library/scala/collection/IndexedSeqLike.scala index 5f6a127c7955..5760db89dc16 100644 --- a/src/library/scala/collection/IndexedSeqLike.scala +++ b/src/library/scala/collection/IndexedSeqLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala index 0a9a65516d94..d89f826386b3 100644 --- a/src/library/scala/collection/IndexedSeqOptimized.scala +++ b/src/library/scala/collection/IndexedSeqOptimized.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/Iterable.scala b/src/library/scala/collection/Iterable.scala index afbffd36c694..9f9474e31d49 100644 --- a/src/library/scala/collection/Iterable.scala +++ b/src/library/scala/collection/Iterable.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/IterableLike.scala b/src/library/scala/collection/IterableLike.scala index 07957d99acda..ff35e0228ae3 100644 --- a/src/library/scala/collection/IterableLike.scala +++ b/src/library/scala/collection/IterableLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/IterableProxy.scala b/src/library/scala/collection/IterableProxy.scala index 1977994b040f..4fab88fee13c 100644 --- a/src/library/scala/collection/IterableProxy.scala +++ b/src/library/scala/collection/IterableProxy.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/IterableProxyLike.scala b/src/library/scala/collection/IterableProxyLike.scala index 3e2d26605265..7847455af9c1 100644 --- a/src/library/scala/collection/IterableProxyLike.scala +++ b/src/library/scala/collection/IterableProxyLike.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/IterableView.scala b/src/library/scala/collection/IterableView.scala index b5f424d2ab37..0bae07f3da00 100644 --- a/src/library/scala/collection/IterableView.scala +++ b/src/library/scala/collection/IterableView.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/IterableViewLike.scala b/src/library/scala/collection/IterableViewLike.scala index 306afecb6127..a60ab4cf4903 100644 --- a/src/library/scala/collection/IterableViewLike.scala +++ b/src/library/scala/collection/IterableViewLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index 3aa95568ec79..b80a19f73177 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala index 93994d80bf01..abfcafa5df13 100644 --- a/src/library/scala/collection/JavaConversions.scala +++ b/src/library/scala/collection/JavaConversions.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala index 2337f0ef8424..073066726aef 100644 --- a/src/library/scala/collection/JavaConverters.scala +++ b/src/library/scala/collection/JavaConverters.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/LinearSeq.scala b/src/library/scala/collection/LinearSeq.scala index 5a7bb5891e07..d5e43c41e7b4 100644 --- a/src/library/scala/collection/LinearSeq.scala +++ b/src/library/scala/collection/LinearSeq.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/LinearSeqLike.scala b/src/library/scala/collection/LinearSeqLike.scala index a4dd4afaf0c9..0151cbca0c99 100644 --- a/src/library/scala/collection/LinearSeqLike.scala +++ b/src/library/scala/collection/LinearSeqLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/LinearSeqOptimized.scala b/src/library/scala/collection/LinearSeqOptimized.scala index e545953b2558..62064662c4dd 100644 --- a/src/library/scala/collection/LinearSeqOptimized.scala +++ b/src/library/scala/collection/LinearSeqOptimized.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/Map.scala b/src/library/scala/collection/Map.scala index c9a943f1f724..50d6074b5a74 100644 --- a/src/library/scala/collection/Map.scala +++ b/src/library/scala/collection/Map.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/MapLike.scala b/src/library/scala/collection/MapLike.scala index 863b3fd97a2c..0711ab2a01e0 100644 --- a/src/library/scala/collection/MapLike.scala +++ b/src/library/scala/collection/MapLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/MapProxy.scala b/src/library/scala/collection/MapProxy.scala index 441bb5525b74..43f4fa4bdf7a 100644 --- a/src/library/scala/collection/MapProxy.scala +++ b/src/library/scala/collection/MapProxy.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/MapProxyLike.scala b/src/library/scala/collection/MapProxyLike.scala index 0ff51132b32d..8e39c748dc82 100644 --- a/src/library/scala/collection/MapProxyLike.scala +++ b/src/library/scala/collection/MapProxyLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/Parallel.scala b/src/library/scala/collection/Parallel.scala index 174e3ab75e57..cdfb5d995b97 100644 --- a/src/library/scala/collection/Parallel.scala +++ b/src/library/scala/collection/Parallel.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/Parallelizable.scala b/src/library/scala/collection/Parallelizable.scala index c13155638854..0ad8182404df 100644 --- a/src/library/scala/collection/Parallelizable.scala +++ b/src/library/scala/collection/Parallelizable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/Searching.scala b/src/library/scala/collection/Searching.scala index 25e8b5e253d5..8091f53f3778 100644 --- a/src/library/scala/collection/Searching.scala +++ b/src/library/scala/collection/Searching.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/Seq.scala b/src/library/scala/collection/Seq.scala index 2f4b3e5f8a09..d4dcfc168ede 100644 --- a/src/library/scala/collection/Seq.scala +++ b/src/library/scala/collection/Seq.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/SeqExtractors.scala b/src/library/scala/collection/SeqExtractors.scala index 888b3e20f629..f77a6f16dc29 100644 --- a/src/library/scala/collection/SeqExtractors.scala +++ b/src/library/scala/collection/SeqExtractors.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package collection diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala index dbbf9d42628d..615c73699611 100644 --- a/src/library/scala/collection/SeqLike.scala +++ b/src/library/scala/collection/SeqLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/SeqProxy.scala b/src/library/scala/collection/SeqProxy.scala index d1f8432f1835..3ac78881d582 100644 --- a/src/library/scala/collection/SeqProxy.scala +++ b/src/library/scala/collection/SeqProxy.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/SeqProxyLike.scala b/src/library/scala/collection/SeqProxyLike.scala index 2db0b27e08c5..aed6ed15fd85 100644 --- a/src/library/scala/collection/SeqProxyLike.scala +++ b/src/library/scala/collection/SeqProxyLike.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/SeqView.scala b/src/library/scala/collection/SeqView.scala index 4afc5bffcd75..ccf9c8cf7bda 100644 --- a/src/library/scala/collection/SeqView.scala +++ b/src/library/scala/collection/SeqView.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/SeqViewLike.scala b/src/library/scala/collection/SeqViewLike.scala index b6a12bc1ca20..8b3e5a955c99 100644 --- a/src/library/scala/collection/SeqViewLike.scala +++ b/src/library/scala/collection/SeqViewLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/Set.scala b/src/library/scala/collection/Set.scala index f74c26571abc..38104b9a2008 100644 --- a/src/library/scala/collection/Set.scala +++ b/src/library/scala/collection/Set.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/SetLike.scala b/src/library/scala/collection/SetLike.scala index dca877560e76..4ba1a1dcfdf9 100644 --- a/src/library/scala/collection/SetLike.scala +++ b/src/library/scala/collection/SetLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/SetProxy.scala b/src/library/scala/collection/SetProxy.scala index 8b6e9d007fb8..8e69797d01c2 100644 --- a/src/library/scala/collection/SetProxy.scala +++ b/src/library/scala/collection/SetProxy.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/SetProxyLike.scala b/src/library/scala/collection/SetProxyLike.scala index e191d1fe67ff..c170afc5c179 100644 --- a/src/library/scala/collection/SetProxyLike.scala +++ b/src/library/scala/collection/SetProxyLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/SortedMap.scala b/src/library/scala/collection/SortedMap.scala index b8f50f2725b6..12d22282bb96 100644 --- a/src/library/scala/collection/SortedMap.scala +++ b/src/library/scala/collection/SortedMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/SortedMapLike.scala b/src/library/scala/collection/SortedMapLike.scala index 900d3b8608f1..1703985c890b 100644 --- a/src/library/scala/collection/SortedMapLike.scala +++ b/src/library/scala/collection/SortedMapLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/SortedSet.scala b/src/library/scala/collection/SortedSet.scala index 2618dc5d1ebd..89813171c177 100644 --- a/src/library/scala/collection/SortedSet.scala +++ b/src/library/scala/collection/SortedSet.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/SortedSetLike.scala b/src/library/scala/collection/SortedSetLike.scala index 24e285b6475a..044d881931cb 100644 --- a/src/library/scala/collection/SortedSetLike.scala +++ b/src/library/scala/collection/SortedSetLike.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/Traversable.scala b/src/library/scala/collection/Traversable.scala index 8145eaa20419..4ece859e82c7 100644 --- a/src/library/scala/collection/Traversable.scala +++ b/src/library/scala/collection/Traversable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala index 0bb4c6c9c726..0ec682a3227a 100644 --- a/src/library/scala/collection/TraversableLike.scala +++ b/src/library/scala/collection/TraversableLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/TraversableOnce.scala b/src/library/scala/collection/TraversableOnce.scala index f65eb877866e..ca6066a7fc25 100644 --- a/src/library/scala/collection/TraversableOnce.scala +++ b/src/library/scala/collection/TraversableOnce.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/TraversableProxy.scala b/src/library/scala/collection/TraversableProxy.scala index 1d0fdfcb4495..867dd43d9c08 100644 --- a/src/library/scala/collection/TraversableProxy.scala +++ b/src/library/scala/collection/TraversableProxy.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/TraversableProxyLike.scala b/src/library/scala/collection/TraversableProxyLike.scala index 2a6e3c29bdfb..bcf6eeaddb16 100644 --- a/src/library/scala/collection/TraversableProxyLike.scala +++ b/src/library/scala/collection/TraversableProxyLike.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/TraversableView.scala b/src/library/scala/collection/TraversableView.scala index cffce6ff8e08..a2c881365505 100644 --- a/src/library/scala/collection/TraversableView.scala +++ b/src/library/scala/collection/TraversableView.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/TraversableViewLike.scala b/src/library/scala/collection/TraversableViewLike.scala index 25122d6186ab..9b146a0ecc1b 100644 --- a/src/library/scala/collection/TraversableViewLike.scala +++ b/src/library/scala/collection/TraversableViewLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/concurrent/BasicNode.java b/src/library/scala/collection/concurrent/BasicNode.java index 97b88700368e..c6ec91e4fde8 100644 --- a/src/library/scala/collection/concurrent/BasicNode.java +++ b/src/library/scala/collection/concurrent/BasicNode.java @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.collection.concurrent; diff --git a/src/library/scala/collection/concurrent/CNodeBase.java b/src/library/scala/collection/concurrent/CNodeBase.java index 2fce971b2b84..9d7aced75e2b 100644 --- a/src/library/scala/collection/concurrent/CNodeBase.java +++ b/src/library/scala/collection/concurrent/CNodeBase.java @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.collection.concurrent; diff --git a/src/library/scala/collection/concurrent/Gen.java b/src/library/scala/collection/concurrent/Gen.java index 601988468342..07af2983f32d 100644 --- a/src/library/scala/collection/concurrent/Gen.java +++ b/src/library/scala/collection/concurrent/Gen.java @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.collection.concurrent; diff --git a/src/library/scala/collection/concurrent/INodeBase.java b/src/library/scala/collection/concurrent/INodeBase.java index 2f2d20328791..30fa26973d8b 100644 --- a/src/library/scala/collection/concurrent/INodeBase.java +++ b/src/library/scala/collection/concurrent/INodeBase.java @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.collection.concurrent; diff --git a/src/library/scala/collection/concurrent/MainNode.java b/src/library/scala/collection/concurrent/MainNode.java index adb9b59a3de9..c830a19aefcc 100644 --- a/src/library/scala/collection/concurrent/MainNode.java +++ b/src/library/scala/collection/concurrent/MainNode.java @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.collection.concurrent; diff --git a/src/library/scala/collection/concurrent/Map.scala b/src/library/scala/collection/concurrent/Map.scala index f27dfd57fcc1..d475703d88ba 100644 --- a/src/library/scala/collection/concurrent/Map.scala +++ b/src/library/scala/collection/concurrent/Map.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection.concurrent diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index c1ef1ff3bf36..0e4ad733789c 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/convert/AsJavaConverters.scala b/src/library/scala/collection/convert/AsJavaConverters.scala index c7c1fb9c745c..632361f7a105 100644 --- a/src/library/scala/collection/convert/AsJavaConverters.scala +++ b/src/library/scala/collection/convert/AsJavaConverters.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/convert/AsScalaConverters.scala b/src/library/scala/collection/convert/AsScalaConverters.scala index f9e38797e1f2..8733338ca784 100644 --- a/src/library/scala/collection/convert/AsScalaConverters.scala +++ b/src/library/scala/collection/convert/AsScalaConverters.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala index 83fffa59402b..c2b26670be7d 100644 --- a/src/library/scala/collection/convert/DecorateAsJava.scala +++ b/src/library/scala/collection/convert/DecorateAsJava.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala index f680aa526707..715c925d4344 100644 --- a/src/library/scala/collection/convert/DecorateAsScala.scala +++ b/src/library/scala/collection/convert/DecorateAsScala.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/convert/Decorators.scala b/src/library/scala/collection/convert/Decorators.scala index 3e45a0225430..03502ea598a3 100644 --- a/src/library/scala/collection/convert/Decorators.scala +++ b/src/library/scala/collection/convert/Decorators.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/convert/ImplicitConversions.scala b/src/library/scala/collection/convert/ImplicitConversions.scala index 35e6ce1616a2..e4068fa4da62 100644 --- a/src/library/scala/collection/convert/ImplicitConversions.scala +++ b/src/library/scala/collection/convert/ImplicitConversions.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala index e3a064b79dca..7c51d8aa83e7 100644 --- a/src/library/scala/collection/convert/WrapAsJava.scala +++ b/src/library/scala/collection/convert/WrapAsJava.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala index fbaafde79875..c1756364816d 100644 --- a/src/library/scala/collection/convert/WrapAsScala.scala +++ b/src/library/scala/collection/convert/WrapAsScala.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala index e580d0f7c879..7e8970c9d60f 100644 --- a/src/library/scala/collection/convert/Wrappers.scala +++ b/src/library/scala/collection/convert/Wrappers.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/convert/package.scala b/src/library/scala/collection/convert/package.scala index 810d112cd5a5..9a2c4c995663 100644 --- a/src/library/scala/collection/convert/package.scala +++ b/src/library/scala/collection/convert/package.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/BitOperations.scala b/src/library/scala/collection/generic/BitOperations.scala index 6686dbff2fdd..f796ddbbfa83 100644 --- a/src/library/scala/collection/generic/BitOperations.scala +++ b/src/library/scala/collection/generic/BitOperations.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/generic/BitSetFactory.scala b/src/library/scala/collection/generic/BitSetFactory.scala index e44075f655a8..b41dc86b7bcf 100644 --- a/src/library/scala/collection/generic/BitSetFactory.scala +++ b/src/library/scala/collection/generic/BitSetFactory.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/CanBuildFrom.scala b/src/library/scala/collection/generic/CanBuildFrom.scala index 24e5b2a1ddea..a1803134f51c 100644 --- a/src/library/scala/collection/generic/CanBuildFrom.scala +++ b/src/library/scala/collection/generic/CanBuildFrom.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/CanCombineFrom.scala b/src/library/scala/collection/generic/CanCombineFrom.scala index 7f70b4580aba..ead36ffe7709 100644 --- a/src/library/scala/collection/generic/CanCombineFrom.scala +++ b/src/library/scala/collection/generic/CanCombineFrom.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/ClassTagTraversableFactory.scala b/src/library/scala/collection/generic/ClassTagTraversableFactory.scala index e3db40123dda..37f9ee8ee682 100644 --- a/src/library/scala/collection/generic/ClassTagTraversableFactory.scala +++ b/src/library/scala/collection/generic/ClassTagTraversableFactory.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/generic/Clearable.scala b/src/library/scala/collection/generic/Clearable.scala index e3922f791f6e..cc655d83e0fd 100644 --- a/src/library/scala/collection/generic/Clearable.scala +++ b/src/library/scala/collection/generic/Clearable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/FilterMonadic.scala b/src/library/scala/collection/generic/FilterMonadic.scala index 8aefbdb92662..6ec66fb72190 100644 --- a/src/library/scala/collection/generic/FilterMonadic.scala +++ b/src/library/scala/collection/generic/FilterMonadic.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/GenMapFactory.scala b/src/library/scala/collection/generic/GenMapFactory.scala index 0d27e980aa16..0889436e056f 100644 --- a/src/library/scala/collection/generic/GenMapFactory.scala +++ b/src/library/scala/collection/generic/GenMapFactory.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/GenSeqFactory.scala b/src/library/scala/collection/generic/GenSeqFactory.scala index 6afbb2e2fb4c..37506756e185 100644 --- a/src/library/scala/collection/generic/GenSeqFactory.scala +++ b/src/library/scala/collection/generic/GenSeqFactory.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/generic/GenSetFactory.scala b/src/library/scala/collection/generic/GenSetFactory.scala index d26cc20db2da..89a6efbb09e3 100644 --- a/src/library/scala/collection/generic/GenSetFactory.scala +++ b/src/library/scala/collection/generic/GenSetFactory.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/GenTraversableFactory.scala b/src/library/scala/collection/generic/GenTraversableFactory.scala index 65528bdbb326..a3288ba27d40 100644 --- a/src/library/scala/collection/generic/GenTraversableFactory.scala +++ b/src/library/scala/collection/generic/GenTraversableFactory.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/GenericClassTagCompanion.scala b/src/library/scala/collection/generic/GenericClassTagCompanion.scala index a8ac2bf7387d..2aba79a75a00 100644 --- a/src/library/scala/collection/generic/GenericClassTagCompanion.scala +++ b/src/library/scala/collection/generic/GenericClassTagCompanion.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala b/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala index 090cd729a416..3627fb247184 100644 --- a/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala +++ b/src/library/scala/collection/generic/GenericClassTagTraversableTemplate.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/GenericCompanion.scala b/src/library/scala/collection/generic/GenericCompanion.scala index 67d0a9c7f75a..eb75fb6a3a53 100644 --- a/src/library/scala/collection/generic/GenericCompanion.scala +++ b/src/library/scala/collection/generic/GenericCompanion.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/GenericOrderedCompanion.scala b/src/library/scala/collection/generic/GenericOrderedCompanion.scala index 5b328bff6ca1..312fffebb655 100644 --- a/src/library/scala/collection/generic/GenericOrderedCompanion.scala +++ b/src/library/scala/collection/generic/GenericOrderedCompanion.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala index c1a41ce7c4be..da2a9d7817b1 100644 --- a/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala +++ b/src/library/scala/collection/generic/GenericOrderedTraversableTemplate.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/GenericParCompanion.scala b/src/library/scala/collection/generic/GenericParCompanion.scala index 432b9135f825..21c69465986b 100644 --- a/src/library/scala/collection/generic/GenericParCompanion.scala +++ b/src/library/scala/collection/generic/GenericParCompanion.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/GenericParTemplate.scala b/src/library/scala/collection/generic/GenericParTemplate.scala index 44a778a95370..c53556108e94 100644 --- a/src/library/scala/collection/generic/GenericParTemplate.scala +++ b/src/library/scala/collection/generic/GenericParTemplate.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/GenericSeqCompanion.scala b/src/library/scala/collection/generic/GenericSeqCompanion.scala index fd1e18a0290b..46050229cce8 100644 --- a/src/library/scala/collection/generic/GenericSeqCompanion.scala +++ b/src/library/scala/collection/generic/GenericSeqCompanion.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/GenericSetTemplate.scala b/src/library/scala/collection/generic/GenericSetTemplate.scala index 2cadd14948d8..106a19673c15 100644 --- a/src/library/scala/collection/generic/GenericSetTemplate.scala +++ b/src/library/scala/collection/generic/GenericSetTemplate.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/generic/GenericTraversableTemplate.scala b/src/library/scala/collection/generic/GenericTraversableTemplate.scala index bdd91ba7a41e..283fde39d393 100644 --- a/src/library/scala/collection/generic/GenericTraversableTemplate.scala +++ b/src/library/scala/collection/generic/GenericTraversableTemplate.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/Growable.scala b/src/library/scala/collection/generic/Growable.scala index a3f27c806f62..affe3ace5ecd 100644 --- a/src/library/scala/collection/generic/Growable.scala +++ b/src/library/scala/collection/generic/Growable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/HasNewBuilder.scala b/src/library/scala/collection/generic/HasNewBuilder.scala index aa0ce6698db4..5d788f272d5c 100644 --- a/src/library/scala/collection/generic/HasNewBuilder.scala +++ b/src/library/scala/collection/generic/HasNewBuilder.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package collection package generic diff --git a/src/library/scala/collection/generic/HasNewCombiner.scala b/src/library/scala/collection/generic/HasNewCombiner.scala index 99a0722c3d1b..e5a8c3de1ff4 100644 --- a/src/library/scala/collection/generic/HasNewCombiner.scala +++ b/src/library/scala/collection/generic/HasNewCombiner.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/generic/ImmutableMapFactory.scala b/src/library/scala/collection/generic/ImmutableMapFactory.scala index 87a1f0c6f1bb..8d414802bd1f 100644 --- a/src/library/scala/collection/generic/ImmutableMapFactory.scala +++ b/src/library/scala/collection/generic/ImmutableMapFactory.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/ImmutableSetFactory.scala b/src/library/scala/collection/generic/ImmutableSetFactory.scala index a72caf263392..ce3e8e192f7a 100644 --- a/src/library/scala/collection/generic/ImmutableSetFactory.scala +++ b/src/library/scala/collection/generic/ImmutableSetFactory.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala index 61ab647b7817..06fa481859b4 100644 --- a/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala +++ b/src/library/scala/collection/generic/ImmutableSortedMapFactory.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala index fd41d17b7294..30fa8215af61 100644 --- a/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala +++ b/src/library/scala/collection/generic/ImmutableSortedSetFactory.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/IndexedSeqFactory.scala b/src/library/scala/collection/generic/IndexedSeqFactory.scala index ddc0141aa9df..39409313f55c 100644 --- a/src/library/scala/collection/generic/IndexedSeqFactory.scala +++ b/src/library/scala/collection/generic/IndexedSeqFactory.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/IsSeqLike.scala b/src/library/scala/collection/generic/IsSeqLike.scala index 4c857ad1bb10..917e15e29d3a 100644 --- a/src/library/scala/collection/generic/IsSeqLike.scala +++ b/src/library/scala/collection/generic/IsSeqLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/IsTraversableLike.scala b/src/library/scala/collection/generic/IsTraversableLike.scala index 22cef555cc0c..3a50bb3582ac 100644 --- a/src/library/scala/collection/generic/IsTraversableLike.scala +++ b/src/library/scala/collection/generic/IsTraversableLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/IsTraversableOnce.scala b/src/library/scala/collection/generic/IsTraversableOnce.scala index 3ee586ae631d..01c45ceb7798 100644 --- a/src/library/scala/collection/generic/IsTraversableOnce.scala +++ b/src/library/scala/collection/generic/IsTraversableOnce.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/IterableForwarder.scala b/src/library/scala/collection/generic/IterableForwarder.scala index f97215fbf9d1..7905ff054549 100644 --- a/src/library/scala/collection/generic/IterableForwarder.scala +++ b/src/library/scala/collection/generic/IterableForwarder.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/MapFactory.scala b/src/library/scala/collection/generic/MapFactory.scala index 7c2d660de2f3..ded046302eb2 100644 --- a/src/library/scala/collection/generic/MapFactory.scala +++ b/src/library/scala/collection/generic/MapFactory.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/MutableMapFactory.scala b/src/library/scala/collection/generic/MutableMapFactory.scala index 70d03035949a..e9648f261c0d 100644 --- a/src/library/scala/collection/generic/MutableMapFactory.scala +++ b/src/library/scala/collection/generic/MutableMapFactory.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/MutableSetFactory.scala b/src/library/scala/collection/generic/MutableSetFactory.scala index 63944657fc29..001b1c387545 100644 --- a/src/library/scala/collection/generic/MutableSetFactory.scala +++ b/src/library/scala/collection/generic/MutableSetFactory.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/MutableSortedMapFactory.scala b/src/library/scala/collection/generic/MutableSortedMapFactory.scala index b6fa933ca804..bd1454d7c136 100644 --- a/src/library/scala/collection/generic/MutableSortedMapFactory.scala +++ b/src/library/scala/collection/generic/MutableSortedMapFactory.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package collection package generic diff --git a/src/library/scala/collection/generic/MutableSortedSetFactory.scala b/src/library/scala/collection/generic/MutableSortedSetFactory.scala index 9bb12c231757..ae7fa89fa649 100644 --- a/src/library/scala/collection/generic/MutableSortedSetFactory.scala +++ b/src/library/scala/collection/generic/MutableSortedSetFactory.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/OrderedTraversableFactory.scala b/src/library/scala/collection/generic/OrderedTraversableFactory.scala index 7657aff2aaa1..7ffc3e0529cf 100644 --- a/src/library/scala/collection/generic/OrderedTraversableFactory.scala +++ b/src/library/scala/collection/generic/OrderedTraversableFactory.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/ParFactory.scala b/src/library/scala/collection/generic/ParFactory.scala index 901e9fc239ce..702349388c92 100644 --- a/src/library/scala/collection/generic/ParFactory.scala +++ b/src/library/scala/collection/generic/ParFactory.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/generic/ParMapFactory.scala b/src/library/scala/collection/generic/ParMapFactory.scala index 70797c83e2f8..d7b5368cd4bb 100644 --- a/src/library/scala/collection/generic/ParMapFactory.scala +++ b/src/library/scala/collection/generic/ParMapFactory.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/ParSetFactory.scala b/src/library/scala/collection/generic/ParSetFactory.scala index 1341ddcb3846..b23a132bb4a8 100644 --- a/src/library/scala/collection/generic/ParSetFactory.scala +++ b/src/library/scala/collection/generic/ParSetFactory.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/SeqFactory.scala b/src/library/scala/collection/generic/SeqFactory.scala index 35cce11a79aa..918d2308823b 100644 --- a/src/library/scala/collection/generic/SeqFactory.scala +++ b/src/library/scala/collection/generic/SeqFactory.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/SeqForwarder.scala b/src/library/scala/collection/generic/SeqForwarder.scala index a7d4912bf701..d1511e2fb9f8 100644 --- a/src/library/scala/collection/generic/SeqForwarder.scala +++ b/src/library/scala/collection/generic/SeqForwarder.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/generic/SetFactory.scala b/src/library/scala/collection/generic/SetFactory.scala index 5e50844cc9f5..8b21cf1de3bd 100644 --- a/src/library/scala/collection/generic/SetFactory.scala +++ b/src/library/scala/collection/generic/SetFactory.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/Shrinkable.scala b/src/library/scala/collection/generic/Shrinkable.scala index 682d7d3ed66b..c9083a47c91b 100644 --- a/src/library/scala/collection/generic/Shrinkable.scala +++ b/src/library/scala/collection/generic/Shrinkable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/Signalling.scala b/src/library/scala/collection/generic/Signalling.scala index 021d289c9da6..adda134d2a2c 100644 --- a/src/library/scala/collection/generic/Signalling.scala +++ b/src/library/scala/collection/generic/Signalling.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/Sizing.scala b/src/library/scala/collection/generic/Sizing.scala index 73584ce82e6a..43be8cb83ba5 100644 --- a/src/library/scala/collection/generic/Sizing.scala +++ b/src/library/scala/collection/generic/Sizing.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/generic/SliceInterval.scala b/src/library/scala/collection/generic/SliceInterval.scala index 82acdd13716f..2dd6409b540b 100644 --- a/src/library/scala/collection/generic/SliceInterval.scala +++ b/src/library/scala/collection/generic/SliceInterval.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/Sorted.scala b/src/library/scala/collection/generic/Sorted.scala index b2e63daabaa9..fb428397a68e 100644 --- a/src/library/scala/collection/generic/Sorted.scala +++ b/src/library/scala/collection/generic/Sorted.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/SortedMapFactory.scala b/src/library/scala/collection/generic/SortedMapFactory.scala index afa11e9ab140..9ed4872e39bb 100644 --- a/src/library/scala/collection/generic/SortedMapFactory.scala +++ b/src/library/scala/collection/generic/SortedMapFactory.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/SortedSetFactory.scala b/src/library/scala/collection/generic/SortedSetFactory.scala index c734830e0b2c..205cf9d1ff8d 100644 --- a/src/library/scala/collection/generic/SortedSetFactory.scala +++ b/src/library/scala/collection/generic/SortedSetFactory.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/generic/Subtractable.scala b/src/library/scala/collection/generic/Subtractable.scala index 9365de7949b5..d0e51b5b7c19 100644 --- a/src/library/scala/collection/generic/Subtractable.scala +++ b/src/library/scala/collection/generic/Subtractable.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/TraversableFactory.scala b/src/library/scala/collection/generic/TraversableFactory.scala index c56865e429f3..80d05d46692b 100644 --- a/src/library/scala/collection/generic/TraversableFactory.scala +++ b/src/library/scala/collection/generic/TraversableFactory.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/TraversableForwarder.scala b/src/library/scala/collection/generic/TraversableForwarder.scala index 2bf995750b31..311406a45100 100644 --- a/src/library/scala/collection/generic/TraversableForwarder.scala +++ b/src/library/scala/collection/generic/TraversableForwarder.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/generic/package.scala b/src/library/scala/collection/generic/package.scala index 015c3455db9e..0625db6fed04 100644 --- a/src/library/scala/collection/generic/package.scala +++ b/src/library/scala/collection/generic/package.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package collection diff --git a/src/library/scala/collection/immutable/BitSet.scala b/src/library/scala/collection/immutable/BitSet.scala index 244b1fc15a61..5f491e66c721 100644 --- a/src/library/scala/collection/immutable/BitSet.scala +++ b/src/library/scala/collection/immutable/BitSet.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/DefaultMap.scala b/src/library/scala/collection/immutable/DefaultMap.scala index e9b277b9c412..65d096e03ff0 100644 --- a/src/library/scala/collection/immutable/DefaultMap.scala +++ b/src/library/scala/collection/immutable/DefaultMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 79c4ac2d14b8..37a4c48e8ef9 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index c6ee0e152ebe..bf981fdc483a 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala index 06a44b2bf3fd..c588f1abd30f 100644 --- a/src/library/scala/collection/immutable/IndexedSeq.scala +++ b/src/library/scala/collection/immutable/IndexedSeq.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala index b2039f1be73c..db4032d0c4ad 100644 --- a/src/library/scala/collection/immutable/IntMap.scala +++ b/src/library/scala/collection/immutable/IntMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/Iterable.scala b/src/library/scala/collection/immutable/Iterable.scala index df322396d0c6..ac0dc50dfc4e 100644 --- a/src/library/scala/collection/immutable/Iterable.scala +++ b/src/library/scala/collection/immutable/Iterable.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/LinearSeq.scala b/src/library/scala/collection/immutable/LinearSeq.scala index 2109bd5211ca..954aec7eaf3b 100644 --- a/src/library/scala/collection/immutable/LinearSeq.scala +++ b/src/library/scala/collection/immutable/LinearSeq.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index 0f13e34358e2..7288ad239159 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala index 2e6325c027c4..3c15aa769e3f 100644 --- a/src/library/scala/collection/immutable/ListMap.scala +++ b/src/library/scala/collection/immutable/ListMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/ListSet.scala b/src/library/scala/collection/immutable/ListSet.scala index b63f575a0fbd..759e37ac11f7 100644 --- a/src/library/scala/collection/immutable/ListSet.scala +++ b/src/library/scala/collection/immutable/ListSet.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala index e67f9e69b555..873851fed4a4 100644 --- a/src/library/scala/collection/immutable/LongMap.scala +++ b/src/library/scala/collection/immutable/LongMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala index 01bc62b1eef0..040836350f92 100644 --- a/src/library/scala/collection/immutable/Map.scala +++ b/src/library/scala/collection/immutable/Map.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala index 56c412ed3d5f..29945fe95c77 100644 --- a/src/library/scala/collection/immutable/MapLike.scala +++ b/src/library/scala/collection/immutable/MapLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/MapProxy.scala b/src/library/scala/collection/immutable/MapProxy.scala index 9538dfbea521..75e5859be70c 100644 --- a/src/library/scala/collection/immutable/MapProxy.scala +++ b/src/library/scala/collection/immutable/MapProxy.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala index 36491c9404c1..d29c853f06b5 100644 --- a/src/library/scala/collection/immutable/NumericRange.scala +++ b/src/library/scala/collection/immutable/NumericRange.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala index 01854b17978a..097337cc3534 100644 --- a/src/library/scala/collection/immutable/PagedSeq.scala +++ b/src/library/scala/collection/immutable/PagedSeq.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala index 67d5c8ef7501..20f0ed72cc2f 100644 --- a/src/library/scala/collection/immutable/Queue.scala +++ b/src/library/scala/collection/immutable/Queue.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index eb8a484a81b3..56b796656912 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.immutable diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index cbc8a28ef790..5c6c01cf95c2 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/immutable/Seq.scala b/src/library/scala/collection/immutable/Seq.scala index 38855ca6b060..4f68edec7529 100644 --- a/src/library/scala/collection/immutable/Seq.scala +++ b/src/library/scala/collection/immutable/Seq.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala index 0f16f97cb0b4..ce89591eb018 100644 --- a/src/library/scala/collection/immutable/Set.scala +++ b/src/library/scala/collection/immutable/Set.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/SetProxy.scala b/src/library/scala/collection/immutable/SetProxy.scala index b421b48597e0..c86bfe4df025 100644 --- a/src/library/scala/collection/immutable/SetProxy.scala +++ b/src/library/scala/collection/immutable/SetProxy.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala index 2a954cd63fe9..0071ed3d33a3 100644 --- a/src/library/scala/collection/immutable/SortedMap.scala +++ b/src/library/scala/collection/immutable/SortedMap.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/immutable/SortedSet.scala b/src/library/scala/collection/immutable/SortedSet.scala index 0607e5a557fb..8cc1c0aaeac4 100644 --- a/src/library/scala/collection/immutable/SortedSet.scala +++ b/src/library/scala/collection/immutable/SortedSet.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala index 51a59174697a..956a4b97c95e 100644 --- a/src/library/scala/collection/immutable/Stack.scala +++ b/src/library/scala/collection/immutable/Stack.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index 4900cd9c20d7..301e73a0dece 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/StreamView.scala b/src/library/scala/collection/immutable/StreamView.scala index 127ed76eb59d..843d7084f798 100644 --- a/src/library/scala/collection/immutable/StreamView.scala +++ b/src/library/scala/collection/immutable/StreamView.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package collection package immutable diff --git a/src/library/scala/collection/immutable/StreamViewLike.scala b/src/library/scala/collection/immutable/StreamViewLike.scala index 4d7eaeff2a21..c36035934e15 100644 --- a/src/library/scala/collection/immutable/StreamViewLike.scala +++ b/src/library/scala/collection/immutable/StreamViewLike.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package collection package immutable diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala index ff31ab449b5e..116295826eb8 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/StringOps.scala b/src/library/scala/collection/immutable/StringOps.scala index 77333badf97b..0a7feff309ee 100644 --- a/src/library/scala/collection/immutable/StringOps.scala +++ b/src/library/scala/collection/immutable/StringOps.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/Traversable.scala b/src/library/scala/collection/immutable/Traversable.scala index 114e5c063255..56a54a41e375 100644 --- a/src/library/scala/collection/immutable/Traversable.scala +++ b/src/library/scala/collection/immutable/Traversable.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index be7d705f5db7..a902d4745d34 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index a70599621d19..38cee881482b 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/TrieIterator.scala b/src/library/scala/collection/immutable/TrieIterator.scala index d7335e80f18d..d1998ca4d1a1 100644 --- a/src/library/scala/collection/immutable/TrieIterator.scala +++ b/src/library/scala/collection/immutable/TrieIterator.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala index 1093084b9d49..e68ab7980cf0 100644 --- a/src/library/scala/collection/immutable/Vector.scala +++ b/src/library/scala/collection/immutable/Vector.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/immutable/WrappedString.scala b/src/library/scala/collection/immutable/WrappedString.scala index 8726bd2ed903..effb169c5728 100644 --- a/src/library/scala/collection/immutable/WrappedString.scala +++ b/src/library/scala/collection/immutable/WrappedString.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/AnyRefMap.scala b/src/library/scala/collection/mutable/AnyRefMap.scala index 3550afeda4a6..aaccb053dc05 100644 --- a/src/library/scala/collection/mutable/AnyRefMap.scala +++ b/src/library/scala/collection/mutable/AnyRefMap.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package collection package mutable diff --git a/src/library/scala/collection/mutable/ArrayBuffer.scala b/src/library/scala/collection/mutable/ArrayBuffer.scala index 382da333c216..92f157bfd74f 100644 --- a/src/library/scala/collection/mutable/ArrayBuffer.scala +++ b/src/library/scala/collection/mutable/ArrayBuffer.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/ArrayBuilder.scala b/src/library/scala/collection/mutable/ArrayBuilder.scala index 10c1c94f7054..9e9f05e66e20 100644 --- a/src/library/scala/collection/mutable/ArrayBuilder.scala +++ b/src/library/scala/collection/mutable/ArrayBuilder.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/ArrayLike.scala b/src/library/scala/collection/mutable/ArrayLike.scala index d923065c4b4d..e5ec996eab0d 100644 --- a/src/library/scala/collection/mutable/ArrayLike.scala +++ b/src/library/scala/collection/mutable/ArrayLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/ArrayOps.scala b/src/library/scala/collection/mutable/ArrayOps.scala index 60a08e358200..0d67933db94f 100644 --- a/src/library/scala/collection/mutable/ArrayOps.scala +++ b/src/library/scala/collection/mutable/ArrayOps.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/ArraySeq.scala b/src/library/scala/collection/mutable/ArraySeq.scala index 99afcd8c8164..562401a83d87 100644 --- a/src/library/scala/collection/mutable/ArraySeq.scala +++ b/src/library/scala/collection/mutable/ArraySeq.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/ArrayStack.scala b/src/library/scala/collection/mutable/ArrayStack.scala index 9b52d9898c0a..5679d49e0ae1 100644 --- a/src/library/scala/collection/mutable/ArrayStack.scala +++ b/src/library/scala/collection/mutable/ArrayStack.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/BitSet.scala b/src/library/scala/collection/mutable/BitSet.scala index 93d5ad76e34c..42a00ee3186a 100644 --- a/src/library/scala/collection/mutable/BitSet.scala +++ b/src/library/scala/collection/mutable/BitSet.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/Buffer.scala b/src/library/scala/collection/mutable/Buffer.scala index d2d1b1b907ae..e9a3bfed0817 100644 --- a/src/library/scala/collection/mutable/Buffer.scala +++ b/src/library/scala/collection/mutable/Buffer.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/BufferLike.scala b/src/library/scala/collection/mutable/BufferLike.scala index d96182d12413..09214575b2bc 100644 --- a/src/library/scala/collection/mutable/BufferLike.scala +++ b/src/library/scala/collection/mutable/BufferLike.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/BufferProxy.scala b/src/library/scala/collection/mutable/BufferProxy.scala index 6af0256e2d95..b77e1d294003 100644 --- a/src/library/scala/collection/mutable/BufferProxy.scala +++ b/src/library/scala/collection/mutable/BufferProxy.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/Builder.scala b/src/library/scala/collection/mutable/Builder.scala index 528f78bd98fe..4008d6464a37 100644 --- a/src/library/scala/collection/mutable/Builder.scala +++ b/src/library/scala/collection/mutable/Builder.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/Cloneable.scala b/src/library/scala/collection/mutable/Cloneable.scala index 8b2f3f70de21..2ac9081c45c2 100644 --- a/src/library/scala/collection/mutable/Cloneable.scala +++ b/src/library/scala/collection/mutable/Cloneable.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/DefaultEntry.scala b/src/library/scala/collection/mutable/DefaultEntry.scala index 66db45866c5f..6417b54ba41a 100644 --- a/src/library/scala/collection/mutable/DefaultEntry.scala +++ b/src/library/scala/collection/mutable/DefaultEntry.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/DefaultMapModel.scala b/src/library/scala/collection/mutable/DefaultMapModel.scala index ef6904ea0955..e469455125cd 100644 --- a/src/library/scala/collection/mutable/DefaultMapModel.scala +++ b/src/library/scala/collection/mutable/DefaultMapModel.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/DoubleLinkedList.scala b/src/library/scala/collection/mutable/DoubleLinkedList.scala index 5af84983d7e3..3cdb2f5c9b65 100644 --- a/src/library/scala/collection/mutable/DoubleLinkedList.scala +++ b/src/library/scala/collection/mutable/DoubleLinkedList.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala index 212569804003..6aa80f174f30 100644 --- a/src/library/scala/collection/mutable/DoubleLinkedListLike.scala +++ b/src/library/scala/collection/mutable/DoubleLinkedListLike.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala index a6d5dbd04212..982057aa9b70 100644 --- a/src/library/scala/collection/mutable/FlatHashTable.scala +++ b/src/library/scala/collection/mutable/FlatHashTable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/GrowingBuilder.scala b/src/library/scala/collection/mutable/GrowingBuilder.scala index 3354a1978f92..fcf7f03c5b45 100644 --- a/src/library/scala/collection/mutable/GrowingBuilder.scala +++ b/src/library/scala/collection/mutable/GrowingBuilder.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/HashEntry.scala b/src/library/scala/collection/mutable/HashEntry.scala index 4c0f6a93e8e1..65e7958ab7d5 100644 --- a/src/library/scala/collection/mutable/HashEntry.scala +++ b/src/library/scala/collection/mutable/HashEntry.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package collection package mutable diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala index 396c8b6643f5..372539f57530 100644 --- a/src/library/scala/collection/mutable/HashMap.scala +++ b/src/library/scala/collection/mutable/HashMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala index 41ceeceeca3e..75282195043e 100644 --- a/src/library/scala/collection/mutable/HashSet.scala +++ b/src/library/scala/collection/mutable/HashSet.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala index bb95f476f50a..dc499561e0af 100644 --- a/src/library/scala/collection/mutable/HashTable.scala +++ b/src/library/scala/collection/mutable/HashTable.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/History.scala b/src/library/scala/collection/mutable/History.scala index 776806a0dcab..76bc07898525 100644 --- a/src/library/scala/collection/mutable/History.scala +++ b/src/library/scala/collection/mutable/History.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala index 355d5092738f..c9ee72a9e02c 100644 --- a/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala +++ b/src/library/scala/collection/mutable/ImmutableMapAdaptor.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala index 93131d12c985..c3dce8a68480 100644 --- a/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala +++ b/src/library/scala/collection/mutable/ImmutableSetAdaptor.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/IndexedSeq.scala b/src/library/scala/collection/mutable/IndexedSeq.scala index 3d9630eea70a..a3fbd1bc77d9 100644 --- a/src/library/scala/collection/mutable/IndexedSeq.scala +++ b/src/library/scala/collection/mutable/IndexedSeq.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/IndexedSeqLike.scala b/src/library/scala/collection/mutable/IndexedSeqLike.scala index f902e10a5c8a..4419a391e4a4 100644 --- a/src/library/scala/collection/mutable/IndexedSeqLike.scala +++ b/src/library/scala/collection/mutable/IndexedSeqLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala index 09f0712862d5..7924bd15d32d 100644 --- a/src/library/scala/collection/mutable/IndexedSeqOptimized.scala +++ b/src/library/scala/collection/mutable/IndexedSeqOptimized.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/IndexedSeqView.scala b/src/library/scala/collection/mutable/IndexedSeqView.scala index 91079b937807..2f094680c62b 100644 --- a/src/library/scala/collection/mutable/IndexedSeqView.scala +++ b/src/library/scala/collection/mutable/IndexedSeqView.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/Iterable.scala b/src/library/scala/collection/mutable/Iterable.scala index 92313c9ccd84..ba55be2ace9c 100644 --- a/src/library/scala/collection/mutable/Iterable.scala +++ b/src/library/scala/collection/mutable/Iterable.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package collection package mutable diff --git a/src/library/scala/collection/mutable/LazyBuilder.scala b/src/library/scala/collection/mutable/LazyBuilder.scala index f0a5e6971a88..409696f139ae 100644 --- a/src/library/scala/collection/mutable/LazyBuilder.scala +++ b/src/library/scala/collection/mutable/LazyBuilder.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/LinearSeq.scala b/src/library/scala/collection/mutable/LinearSeq.scala index 77e56b971647..1d48258ecc07 100644 --- a/src/library/scala/collection/mutable/LinearSeq.scala +++ b/src/library/scala/collection/mutable/LinearSeq.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/LinkedEntry.scala b/src/library/scala/collection/mutable/LinkedEntry.scala index 296e7fde181e..6828b51e366e 100644 --- a/src/library/scala/collection/mutable/LinkedEntry.scala +++ b/src/library/scala/collection/mutable/LinkedEntry.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala index a731b1bbdc07..4e216060e845 100644 --- a/src/library/scala/collection/mutable/LinkedHashMap.scala +++ b/src/library/scala/collection/mutable/LinkedHashMap.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala index fb91e1629a1f..b801204ae763 100644 --- a/src/library/scala/collection/mutable/LinkedHashSet.scala +++ b/src/library/scala/collection/mutable/LinkedHashSet.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/LinkedList.scala b/src/library/scala/collection/mutable/LinkedList.scala index 9b815d0bbc93..7d051fc33946 100644 --- a/src/library/scala/collection/mutable/LinkedList.scala +++ b/src/library/scala/collection/mutable/LinkedList.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/LinkedListLike.scala b/src/library/scala/collection/mutable/LinkedListLike.scala index 2caef41dcbb1..3653729237b4 100644 --- a/src/library/scala/collection/mutable/LinkedListLike.scala +++ b/src/library/scala/collection/mutable/LinkedListLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala index 145431db25ed..050020618c95 100644 --- a/src/library/scala/collection/mutable/ListBuffer.scala +++ b/src/library/scala/collection/mutable/ListBuffer.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/ListMap.scala b/src/library/scala/collection/mutable/ListMap.scala index e963af4a8aa2..9857fae2c7f7 100644 --- a/src/library/scala/collection/mutable/ListMap.scala +++ b/src/library/scala/collection/mutable/ListMap.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/LongMap.scala b/src/library/scala/collection/mutable/LongMap.scala index ecbb1952af7b..6a4cb61ff246 100644 --- a/src/library/scala/collection/mutable/LongMap.scala +++ b/src/library/scala/collection/mutable/LongMap.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package collection package mutable diff --git a/src/library/scala/collection/mutable/Map.scala b/src/library/scala/collection/mutable/Map.scala index 460a8b8f77f8..17377559e46e 100644 --- a/src/library/scala/collection/mutable/Map.scala +++ b/src/library/scala/collection/mutable/Map.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/MapBuilder.scala b/src/library/scala/collection/mutable/MapBuilder.scala index cfc3079f41cd..fb289f768fef 100644 --- a/src/library/scala/collection/mutable/MapBuilder.scala +++ b/src/library/scala/collection/mutable/MapBuilder.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/MapLike.scala b/src/library/scala/collection/mutable/MapLike.scala index b00a5c115ec6..b24a98eb8b24 100644 --- a/src/library/scala/collection/mutable/MapLike.scala +++ b/src/library/scala/collection/mutable/MapLike.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/MapProxy.scala b/src/library/scala/collection/mutable/MapProxy.scala index a43cca6e0ec4..d88e07fa672d 100644 --- a/src/library/scala/collection/mutable/MapProxy.scala +++ b/src/library/scala/collection/mutable/MapProxy.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/MultiMap.scala b/src/library/scala/collection/mutable/MultiMap.scala index b2789041bcc5..c4408dad29f9 100644 --- a/src/library/scala/collection/mutable/MultiMap.scala +++ b/src/library/scala/collection/mutable/MultiMap.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/MutableList.scala b/src/library/scala/collection/mutable/MutableList.scala index 6ed9c730967b..8749c808d060 100644 --- a/src/library/scala/collection/mutable/MutableList.scala +++ b/src/library/scala/collection/mutable/MutableList.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/ObservableBuffer.scala b/src/library/scala/collection/mutable/ObservableBuffer.scala index 5bc03c2eff4f..8d9e27006407 100644 --- a/src/library/scala/collection/mutable/ObservableBuffer.scala +++ b/src/library/scala/collection/mutable/ObservableBuffer.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/ObservableMap.scala b/src/library/scala/collection/mutable/ObservableMap.scala index 38f7ed2d76a4..ef490f0a835e 100644 --- a/src/library/scala/collection/mutable/ObservableMap.scala +++ b/src/library/scala/collection/mutable/ObservableMap.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/ObservableSet.scala b/src/library/scala/collection/mutable/ObservableSet.scala index ea23426f327e..6852b1ee3e2c 100644 --- a/src/library/scala/collection/mutable/ObservableSet.scala +++ b/src/library/scala/collection/mutable/ObservableSet.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/OpenHashMap.scala b/src/library/scala/collection/mutable/OpenHashMap.scala index 16e5866c4f01..4d81587499dd 100644 --- a/src/library/scala/collection/mutable/OpenHashMap.scala +++ b/src/library/scala/collection/mutable/OpenHashMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/PriorityQueue.scala b/src/library/scala/collection/mutable/PriorityQueue.scala index 5fe34b753394..c7b6f244c44a 100644 --- a/src/library/scala/collection/mutable/PriorityQueue.scala +++ b/src/library/scala/collection/mutable/PriorityQueue.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/Publisher.scala b/src/library/scala/collection/mutable/Publisher.scala index 883effb8b1e3..93a4d7b3b943 100644 --- a/src/library/scala/collection/mutable/Publisher.scala +++ b/src/library/scala/collection/mutable/Publisher.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/Queue.scala b/src/library/scala/collection/mutable/Queue.scala index 9a3b4215d571..df72aefc8372 100644 --- a/src/library/scala/collection/mutable/Queue.scala +++ b/src/library/scala/collection/mutable/Queue.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/QueueProxy.scala b/src/library/scala/collection/mutable/QueueProxy.scala index d19942e0d1d9..87765b0b0145 100644 --- a/src/library/scala/collection/mutable/QueueProxy.scala +++ b/src/library/scala/collection/mutable/QueueProxy.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/RedBlackTree.scala b/src/library/scala/collection/mutable/RedBlackTree.scala index 953c0435177b..4cf953c08a53 100644 --- a/src/library/scala/collection/mutable/RedBlackTree.scala +++ b/src/library/scala/collection/mutable/RedBlackTree.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.collection.mutable import scala.annotation.tailrec diff --git a/src/library/scala/collection/mutable/ResizableArray.scala b/src/library/scala/collection/mutable/ResizableArray.scala index eb4c2042ed1f..9d181531a0b9 100644 --- a/src/library/scala/collection/mutable/ResizableArray.scala +++ b/src/library/scala/collection/mutable/ResizableArray.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/ReusableBuilder.scala b/src/library/scala/collection/mutable/ReusableBuilder.scala index dee2cd6393db..993e87c209cd 100644 --- a/src/library/scala/collection/mutable/ReusableBuilder.scala +++ b/src/library/scala/collection/mutable/ReusableBuilder.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/RevertibleHistory.scala b/src/library/scala/collection/mutable/RevertibleHistory.scala index a8713ace33fa..41106e1d4d77 100644 --- a/src/library/scala/collection/mutable/RevertibleHistory.scala +++ b/src/library/scala/collection/mutable/RevertibleHistory.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/Seq.scala b/src/library/scala/collection/mutable/Seq.scala index eafde70a2dd2..f50dfb62bd27 100644 --- a/src/library/scala/collection/mutable/Seq.scala +++ b/src/library/scala/collection/mutable/Seq.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/SeqLike.scala b/src/library/scala/collection/mutable/SeqLike.scala index 6987066f2bf2..9a161a15e042 100644 --- a/src/library/scala/collection/mutable/SeqLike.scala +++ b/src/library/scala/collection/mutable/SeqLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/Set.scala b/src/library/scala/collection/mutable/Set.scala index 97574718e8c3..2a4440827ca0 100644 --- a/src/library/scala/collection/mutable/Set.scala +++ b/src/library/scala/collection/mutable/Set.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/SetBuilder.scala b/src/library/scala/collection/mutable/SetBuilder.scala index 5d1e9ffc3adf..51ac55871bda 100644 --- a/src/library/scala/collection/mutable/SetBuilder.scala +++ b/src/library/scala/collection/mutable/SetBuilder.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/SetLike.scala b/src/library/scala/collection/mutable/SetLike.scala index 1fde3c3feced..7a0b2c16ac6f 100644 --- a/src/library/scala/collection/mutable/SetLike.scala +++ b/src/library/scala/collection/mutable/SetLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/SetProxy.scala b/src/library/scala/collection/mutable/SetProxy.scala index ffed1b775e41..d7cecb1976cb 100644 --- a/src/library/scala/collection/mutable/SetProxy.scala +++ b/src/library/scala/collection/mutable/SetProxy.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/SortedMap.scala b/src/library/scala/collection/mutable/SortedMap.scala index c7f21a67f86f..b214b0efd4ba 100644 --- a/src/library/scala/collection/mutable/SortedMap.scala +++ b/src/library/scala/collection/mutable/SortedMap.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package collection package mutable diff --git a/src/library/scala/collection/mutable/SortedSet.scala b/src/library/scala/collection/mutable/SortedSet.scala index 304469916db5..75486e11f7fb 100644 --- a/src/library/scala/collection/mutable/SortedSet.scala +++ b/src/library/scala/collection/mutable/SortedSet.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/Stack.scala b/src/library/scala/collection/mutable/Stack.scala index ad117762155e..874b6960a483 100644 --- a/src/library/scala/collection/mutable/Stack.scala +++ b/src/library/scala/collection/mutable/Stack.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/StackProxy.scala b/src/library/scala/collection/mutable/StackProxy.scala index b8bfa3d3ecb9..81f578eb575b 100644 --- a/src/library/scala/collection/mutable/StackProxy.scala +++ b/src/library/scala/collection/mutable/StackProxy.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/StringBuilder.scala b/src/library/scala/collection/mutable/StringBuilder.scala index 6bfda879555a..fb66d97168bd 100644 --- a/src/library/scala/collection/mutable/StringBuilder.scala +++ b/src/library/scala/collection/mutable/StringBuilder.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/Subscriber.scala b/src/library/scala/collection/mutable/Subscriber.scala index 929f44ab3f36..4f205b7fff25 100644 --- a/src/library/scala/collection/mutable/Subscriber.scala +++ b/src/library/scala/collection/mutable/Subscriber.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/SynchronizedBuffer.scala b/src/library/scala/collection/mutable/SynchronizedBuffer.scala index 7d1984052907..165ac9e72ec9 100644 --- a/src/library/scala/collection/mutable/SynchronizedBuffer.scala +++ b/src/library/scala/collection/mutable/SynchronizedBuffer.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/SynchronizedMap.scala b/src/library/scala/collection/mutable/SynchronizedMap.scala index 0c5f0d969fcb..7b5ebfc9652b 100644 --- a/src/library/scala/collection/mutable/SynchronizedMap.scala +++ b/src/library/scala/collection/mutable/SynchronizedMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/SynchronizedQueue.scala b/src/library/scala/collection/mutable/SynchronizedQueue.scala index f626aa99176b..af16dfa66175 100644 --- a/src/library/scala/collection/mutable/SynchronizedQueue.scala +++ b/src/library/scala/collection/mutable/SynchronizedQueue.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/SynchronizedSet.scala b/src/library/scala/collection/mutable/SynchronizedSet.scala index 399d2112bff1..b73ea7501817 100644 --- a/src/library/scala/collection/mutable/SynchronizedSet.scala +++ b/src/library/scala/collection/mutable/SynchronizedSet.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/SynchronizedStack.scala b/src/library/scala/collection/mutable/SynchronizedStack.scala index 1eec10fb124d..555bab709079 100644 --- a/src/library/scala/collection/mutable/SynchronizedStack.scala +++ b/src/library/scala/collection/mutable/SynchronizedStack.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/Traversable.scala b/src/library/scala/collection/mutable/Traversable.scala index d7ea376d285d..511a60049dfa 100644 --- a/src/library/scala/collection/mutable/Traversable.scala +++ b/src/library/scala/collection/mutable/Traversable.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/TreeMap.scala b/src/library/scala/collection/mutable/TreeMap.scala index ce0db0c40800..5a13be23e3e9 100644 --- a/src/library/scala/collection/mutable/TreeMap.scala +++ b/src/library/scala/collection/mutable/TreeMap.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package collection package mutable diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala index 843bdae45bb9..cbc6fe5208e4 100644 --- a/src/library/scala/collection/mutable/TreeSet.scala +++ b/src/library/scala/collection/mutable/TreeSet.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/Undoable.scala b/src/library/scala/collection/mutable/Undoable.scala index cadc87c08516..aa0c06dabfd2 100644 --- a/src/library/scala/collection/mutable/Undoable.scala +++ b/src/library/scala/collection/mutable/Undoable.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/UnrolledBuffer.scala b/src/library/scala/collection/mutable/UnrolledBuffer.scala index b49d009a177e..e8f2bd98d65e 100644 --- a/src/library/scala/collection/mutable/UnrolledBuffer.scala +++ b/src/library/scala/collection/mutable/UnrolledBuffer.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.mutable diff --git a/src/library/scala/collection/mutable/WeakHashMap.scala b/src/library/scala/collection/mutable/WeakHashMap.scala index 4d1b3397c408..cb7bfa38368e 100644 --- a/src/library/scala/collection/mutable/WeakHashMap.scala +++ b/src/library/scala/collection/mutable/WeakHashMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala index 0bfc1ab5ae1b..26f955f0a855 100644 --- a/src/library/scala/collection/mutable/WrappedArray.scala +++ b/src/library/scala/collection/mutable/WrappedArray.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala index 5bc581145096..2566c7852d4d 100644 --- a/src/library/scala/collection/mutable/WrappedArrayBuilder.scala +++ b/src/library/scala/collection/mutable/WrappedArrayBuilder.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/package.scala b/src/library/scala/collection/package.scala index 6df254c0e0df..ae1b600be5a5 100644 --- a/src/library/scala/collection/package.scala +++ b/src/library/scala/collection/package.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala index abccf5d402c4..49c188b2e632 100644 --- a/src/library/scala/collection/parallel/Combiner.scala +++ b/src/library/scala/collection/parallel/Combiner.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/ParIterable.scala b/src/library/scala/collection/parallel/ParIterable.scala index a5ba8c49adec..fd888c5e573f 100644 --- a/src/library/scala/collection/parallel/ParIterable.scala +++ b/src/library/scala/collection/parallel/ParIterable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index 2e60089df5d6..a246b35caf7f 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/ParMap.scala b/src/library/scala/collection/parallel/ParMap.scala index 70afe5174bef..f0ef2022fd57 100644 --- a/src/library/scala/collection/parallel/ParMap.scala +++ b/src/library/scala/collection/parallel/ParMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala index a3ac38858754..5d176dda4d5f 100644 --- a/src/library/scala/collection/parallel/ParMapLike.scala +++ b/src/library/scala/collection/parallel/ParMapLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/ParSeq.scala b/src/library/scala/collection/parallel/ParSeq.scala index 2c883ba8fe1c..6c0939f9fe40 100644 --- a/src/library/scala/collection/parallel/ParSeq.scala +++ b/src/library/scala/collection/parallel/ParSeq.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala index 60fa1858e70f..b6d104d402ab 100644 --- a/src/library/scala/collection/parallel/ParSeqLike.scala +++ b/src/library/scala/collection/parallel/ParSeqLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/ParSet.scala b/src/library/scala/collection/parallel/ParSet.scala index ba3d23f0e47e..a49561cf1fd2 100644 --- a/src/library/scala/collection/parallel/ParSet.scala +++ b/src/library/scala/collection/parallel/ParSet.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/parallel/ParSetLike.scala b/src/library/scala/collection/parallel/ParSetLike.scala index 4feda5ff07fe..24568bdefb7e 100644 --- a/src/library/scala/collection/parallel/ParSetLike.scala +++ b/src/library/scala/collection/parallel/ParSetLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/PreciseSplitter.scala b/src/library/scala/collection/parallel/PreciseSplitter.scala index 4b22934a29ba..b87389f239ab 100644 --- a/src/library/scala/collection/parallel/PreciseSplitter.scala +++ b/src/library/scala/collection/parallel/PreciseSplitter.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala index 63d63d9ef3d0..d02a9a5974b9 100644 --- a/src/library/scala/collection/parallel/RemainsIterator.scala +++ b/src/library/scala/collection/parallel/RemainsIterator.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/Splitter.scala b/src/library/scala/collection/parallel/Splitter.scala index 8329f15d88a6..28e3e524a3aa 100644 --- a/src/library/scala/collection/parallel/Splitter.scala +++ b/src/library/scala/collection/parallel/Splitter.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/TaskSupport.scala b/src/library/scala/collection/parallel/TaskSupport.scala index 4d633253cebe..90907f176d1b 100644 --- a/src/library/scala/collection/parallel/TaskSupport.scala +++ b/src/library/scala/collection/parallel/TaskSupport.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala index 03cb19a052a9..61482f7a8d3d 100644 --- a/src/library/scala/collection/parallel/Tasks.scala +++ b/src/library/scala/collection/parallel/Tasks.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala index f50718343c70..fdd096af8129 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel.immutable diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala index 44f2b30a399f..013b09527101 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel.immutable diff --git a/src/library/scala/collection/parallel/immutable/ParIterable.scala b/src/library/scala/collection/parallel/immutable/ParIterable.scala index 417622facced..fa1e21523069 100644 --- a/src/library/scala/collection/parallel/immutable/ParIterable.scala +++ b/src/library/scala/collection/parallel/immutable/ParIterable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/parallel/immutable/ParMap.scala b/src/library/scala/collection/parallel/immutable/ParMap.scala index 65bb2e12c545..2537da71bcd4 100644 --- a/src/library/scala/collection/parallel/immutable/ParMap.scala +++ b/src/library/scala/collection/parallel/immutable/ParMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala index 56e587ae00db..3bd0b4961566 100644 --- a/src/library/scala/collection/parallel/immutable/ParRange.scala +++ b/src/library/scala/collection/parallel/immutable/ParRange.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel.immutable diff --git a/src/library/scala/collection/parallel/immutable/ParSeq.scala b/src/library/scala/collection/parallel/immutable/ParSeq.scala index f0502fbbcbdc..31f33950e74d 100644 --- a/src/library/scala/collection/parallel/immutable/ParSeq.scala +++ b/src/library/scala/collection/parallel/immutable/ParSeq.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/parallel/immutable/ParSet.scala b/src/library/scala/collection/parallel/immutable/ParSet.scala index 7837d6f264eb..f509dde5192c 100644 --- a/src/library/scala/collection/parallel/immutable/ParSet.scala +++ b/src/library/scala/collection/parallel/immutable/ParSet.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala index 44f0371fe7ca..c81c88a624f6 100644 --- a/src/library/scala/collection/parallel/immutable/ParVector.scala +++ b/src/library/scala/collection/parallel/immutable/ParVector.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala index 3cafdba5f702..3fdd77068e92 100644 --- a/src/library/scala/collection/parallel/immutable/package.scala +++ b/src/library/scala/collection/parallel/immutable/package.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala index cc25b5b4b253..c0052d54d703 100644 --- a/src/library/scala/collection/parallel/mutable/LazyCombiner.scala +++ b/src/library/scala/collection/parallel/mutable/LazyCombiner.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel.mutable diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala index 6b55da698ef3..68d2f267e3f8 100644 --- a/src/library/scala/collection/parallel/mutable/ParArray.scala +++ b/src/library/scala/collection/parallel/mutable/ParArray.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel.mutable diff --git a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala index 62165ae0d2d4..b108f32eaf87 100644 --- a/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala +++ b/src/library/scala/collection/parallel/mutable/ParFlatHashTable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala index 1d1ca0d1751e..4e699f936f94 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala index d9f79d5873d9..94c0109326be 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel.mutable diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala index 423b891d4871..aceb9e0217b3 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection diff --git a/src/library/scala/collection/parallel/mutable/ParIterable.scala b/src/library/scala/collection/parallel/mutable/ParIterable.scala index 4659149106ea..5cb5cf20e488 100644 --- a/src/library/scala/collection/parallel/mutable/ParIterable.scala +++ b/src/library/scala/collection/parallel/mutable/ParIterable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/parallel/mutable/ParMap.scala b/src/library/scala/collection/parallel/mutable/ParMap.scala index 8110f9dc0a0a..27093089c16a 100644 --- a/src/library/scala/collection/parallel/mutable/ParMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/parallel/mutable/ParMapLike.scala b/src/library/scala/collection/parallel/mutable/ParMapLike.scala index 5d99394a50cd..28f76fc54b2e 100644 --- a/src/library/scala/collection/parallel/mutable/ParMapLike.scala +++ b/src/library/scala/collection/parallel/mutable/ParMapLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/mutable/ParSeq.scala b/src/library/scala/collection/parallel/mutable/ParSeq.scala index 35be2669f863..29d2889bc7f4 100644 --- a/src/library/scala/collection/parallel/mutable/ParSeq.scala +++ b/src/library/scala/collection/parallel/mutable/ParSeq.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package collection.parallel.mutable diff --git a/src/library/scala/collection/parallel/mutable/ParSet.scala b/src/library/scala/collection/parallel/mutable/ParSet.scala index 4e2d3e0e4cd5..bef46205961c 100644 --- a/src/library/scala/collection/parallel/mutable/ParSet.scala +++ b/src/library/scala/collection/parallel/mutable/ParSet.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel.mutable diff --git a/src/library/scala/collection/parallel/mutable/ParSetLike.scala b/src/library/scala/collection/parallel/mutable/ParSetLike.scala index 08aa3b024bcb..9a35a522d1ee 100644 --- a/src/library/scala/collection/parallel/mutable/ParSetLike.scala +++ b/src/library/scala/collection/parallel/mutable/ParSetLike.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala index c72e4ae3aa79..8a15d694fa70 100644 --- a/src/library/scala/collection/parallel/mutable/ParTrieMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParTrieMap.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel.mutable diff --git a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala index 6883457fef73..60138d44735d 100644 --- a/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala +++ b/src/library/scala/collection/parallel/mutable/ResizableParArrayCombiner.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel.mutable diff --git a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala index e71e61f2f122..483c7343f084 100644 --- a/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala +++ b/src/library/scala/collection/parallel/mutable/UnrolledParArrayCombiner.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel.mutable diff --git a/src/library/scala/collection/parallel/mutable/package.scala b/src/library/scala/collection/parallel/mutable/package.scala index 81121d93983e..0094bfd0be74 100644 --- a/src/library/scala/collection/parallel/mutable/package.scala +++ b/src/library/scala/collection/parallel/mutable/package.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection.parallel diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala index eaa87b675af8..d276e451fb7d 100644 --- a/src/library/scala/collection/parallel/package.scala +++ b/src/library/scala/collection/parallel/package.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/script/Location.scala b/src/library/scala/collection/script/Location.scala index 0797b355ec10..afac787f0d23 100644 --- a/src/library/scala/collection/script/Location.scala +++ b/src/library/scala/collection/script/Location.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/script/Message.scala b/src/library/scala/collection/script/Message.scala index 8912084f6ac6..61543d10a10c 100644 --- a/src/library/scala/collection/script/Message.scala +++ b/src/library/scala/collection/script/Message.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/collection/script/Scriptable.scala b/src/library/scala/collection/script/Scriptable.scala index 840f2b980368..6fdf954342e2 100644 --- a/src/library/scala/collection/script/Scriptable.scala +++ b/src/library/scala/collection/script/Scriptable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package collection diff --git a/src/library/scala/compat/Platform.scala b/src/library/scala/compat/Platform.scala index f3745bc1897a..74def3a5255e 100644 --- a/src/library/scala/compat/Platform.scala +++ b/src/library/scala/compat/Platform.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package compat diff --git a/src/library/scala/concurrent/Awaitable.scala b/src/library/scala/concurrent/Awaitable.scala index dff83874ba76..4714b351944b 100644 --- a/src/library/scala/concurrent/Awaitable.scala +++ b/src/library/scala/concurrent/Awaitable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.concurrent diff --git a/src/library/scala/concurrent/BatchingExecutor.scala b/src/library/scala/concurrent/BatchingExecutor.scala index fd31f3470e9f..e4792fb7ac6c 100644 --- a/src/library/scala/concurrent/BatchingExecutor.scala +++ b/src/library/scala/concurrent/BatchingExecutor.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.concurrent diff --git a/src/library/scala/concurrent/BlockContext.scala b/src/library/scala/concurrent/BlockContext.scala index 2b8ed4c7caa9..6e87f9a775c4 100644 --- a/src/library/scala/concurrent/BlockContext.scala +++ b/src/library/scala/concurrent/BlockContext.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.concurrent diff --git a/src/library/scala/concurrent/Channel.scala b/src/library/scala/concurrent/Channel.scala index 8a2e69192f62..fddb77cc0dfc 100644 --- a/src/library/scala/concurrent/Channel.scala +++ b/src/library/scala/concurrent/Channel.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.concurrent diff --git a/src/library/scala/concurrent/DelayedLazyVal.scala b/src/library/scala/concurrent/DelayedLazyVal.scala index 476fa88d44fa..0ac51a1cf8db 100644 --- a/src/library/scala/concurrent/DelayedLazyVal.scala +++ b/src/library/scala/concurrent/DelayedLazyVal.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.concurrent diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala index 5cc9aaf96d08..f53add40f1de 100644 --- a/src/library/scala/concurrent/ExecutionContext.scala +++ b/src/library/scala/concurrent/ExecutionContext.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.concurrent diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index df00a75c4586..19762042f4b0 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.concurrent diff --git a/src/library/scala/concurrent/JavaConversions.scala b/src/library/scala/concurrent/JavaConversions.scala index 91e55d30cbef..13fe4303f4f3 100644 --- a/src/library/scala/concurrent/JavaConversions.scala +++ b/src/library/scala/concurrent/JavaConversions.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.concurrent diff --git a/src/library/scala/concurrent/Lock.scala b/src/library/scala/concurrent/Lock.scala index 06938c7e4b94..89e4feddf685 100644 --- a/src/library/scala/concurrent/Lock.scala +++ b/src/library/scala/concurrent/Lock.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.concurrent diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala index 894b134e8331..a82ac719ca7c 100644 --- a/src/library/scala/concurrent/Promise.scala +++ b/src/library/scala/concurrent/Promise.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.concurrent diff --git a/src/library/scala/concurrent/SyncChannel.scala b/src/library/scala/concurrent/SyncChannel.scala index 6aae1fbe0702..1f18dc602c68 100644 --- a/src/library/scala/concurrent/SyncChannel.scala +++ b/src/library/scala/concurrent/SyncChannel.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.concurrent diff --git a/src/library/scala/concurrent/SyncVar.scala b/src/library/scala/concurrent/SyncVar.scala index e1370471e559..5c548e672def 100644 --- a/src/library/scala/concurrent/SyncVar.scala +++ b/src/library/scala/concurrent/SyncVar.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.concurrent diff --git a/src/library/scala/concurrent/duration/Deadline.scala b/src/library/scala/concurrent/duration/Deadline.scala index a25a4786027f..0f7975071a79 100644 --- a/src/library/scala/concurrent/duration/Deadline.scala +++ b/src/library/scala/concurrent/duration/Deadline.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.concurrent.duration diff --git a/src/library/scala/concurrent/duration/Duration.scala b/src/library/scala/concurrent/duration/Duration.scala index 1654e69950f8..82de8f9f130c 100644 --- a/src/library/scala/concurrent/duration/Duration.scala +++ b/src/library/scala/concurrent/duration/Duration.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.concurrent.duration diff --git a/src/library/scala/concurrent/duration/DurationConversions.scala b/src/library/scala/concurrent/duration/DurationConversions.scala index 74afa0ca1cdd..789723e301ed 100644 --- a/src/library/scala/concurrent/duration/DurationConversions.scala +++ b/src/library/scala/concurrent/duration/DurationConversions.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.concurrent.duration diff --git a/src/library/scala/concurrent/duration/package.scala b/src/library/scala/concurrent/duration/package.scala index d166975445de..7373384f8d44 100644 --- a/src/library/scala/concurrent/duration/package.scala +++ b/src/library/scala/concurrent/duration/package.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.concurrent import scala.language.implicitConversions diff --git a/src/library/scala/concurrent/forkjoin/package.scala b/src/library/scala/concurrent/forkjoin/package.scala index 889890e30bd8..a7fca7431825 100644 --- a/src/library/scala/concurrent/forkjoin/package.scala +++ b/src/library/scala/concurrent/forkjoin/package.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2015, LAMP/EPFL and Typesafe, Inc. ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.concurrent import java.util.{concurrent => juc} diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala index 4c83a9b8032f..4473f122267b 100644 --- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala +++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.concurrent.impl diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala index f5e0df261aeb..c63da62e1e48 100644 --- a/src/library/scala/concurrent/impl/Promise.scala +++ b/src/library/scala/concurrent/impl/Promise.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.concurrent.impl diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala index 0695ee335194..042b1ab636d0 100644 --- a/src/library/scala/concurrent/package.scala +++ b/src/library/scala/concurrent/package.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/deprecated.scala b/src/library/scala/deprecated.scala index a57745dbea78..b35288a22915 100644 --- a/src/library/scala/deprecated.scala +++ b/src/library/scala/deprecated.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala diff --git a/src/library/scala/deprecatedInheritance.scala b/src/library/scala/deprecatedInheritance.scala index 994eac9ed839..4dc2e44f4977 100644 --- a/src/library/scala/deprecatedInheritance.scala +++ b/src/library/scala/deprecatedInheritance.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/deprecatedName.scala b/src/library/scala/deprecatedName.scala index f8c6bd32ad77..33f601890038 100644 --- a/src/library/scala/deprecatedName.scala +++ b/src/library/scala/deprecatedName.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2010-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/deprecatedOverriding.scala b/src/library/scala/deprecatedOverriding.scala index 5be6830b2752..30a5e82dfcca 100644 --- a/src/library/scala/deprecatedOverriding.scala +++ b/src/library/scala/deprecatedOverriding.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/inline.scala b/src/library/scala/inline.scala index 98e5f140525a..6c28c20e7daf 100644 --- a/src/library/scala/inline.scala +++ b/src/library/scala/inline.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/io/AnsiColor.scala b/src/library/scala/io/AnsiColor.scala index df589bc66c0d..161188b4a6c1 100644 --- a/src/library/scala/io/AnsiColor.scala +++ b/src/library/scala/io/AnsiColor.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package io diff --git a/src/library/scala/io/BufferedSource.scala b/src/library/scala/io/BufferedSource.scala index 33b5a1468eca..23fe9115765d 100644 --- a/src/library/scala/io/BufferedSource.scala +++ b/src/library/scala/io/BufferedSource.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.io diff --git a/src/library/scala/io/Codec.scala b/src/library/scala/io/Codec.scala index 0de79a679127..3b7b4e9e1907 100644 --- a/src/library/scala/io/Codec.scala +++ b/src/library/scala/io/Codec.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package io diff --git a/src/library/scala/io/Position.scala b/src/library/scala/io/Position.scala index 0435ca95ad8e..6e9a2ce12ae6 100644 --- a/src/library/scala/io/Position.scala +++ b/src/library/scala/io/Position.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package io diff --git a/src/library/scala/io/Source.scala b/src/library/scala/io/Source.scala index 17260b5b1e44..1cbfb1182dce 100644 --- a/src/library/scala/io/Source.scala +++ b/src/library/scala/io/Source.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package io diff --git a/src/library/scala/io/StdIn.scala b/src/library/scala/io/StdIn.scala index 0f9656436b8f..6324da5c2ede 100644 --- a/src/library/scala/io/StdIn.scala +++ b/src/library/scala/io/StdIn.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package io diff --git a/src/library/scala/language.scala b/src/library/scala/language.scala index 391f1ac90301..b81f75a0dc4e 100644 --- a/src/library/scala/language.scala +++ b/src/library/scala/language.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/languageFeature.scala b/src/library/scala/languageFeature.scala index 51118b43be30..891f0d7d19d1 100644 --- a/src/library/scala/languageFeature.scala +++ b/src/library/scala/languageFeature.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2015, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala index cb6af7553386..5e8c65fdd5eb 100644 --- a/src/library/scala/math/BigDecimal.scala +++ b/src/library/scala/math/BigDecimal.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package math diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index 9bf0dc331821..627b23f31821 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package math diff --git a/src/library/scala/math/Equiv.scala b/src/library/scala/math/Equiv.scala index 49b60653fb3c..a3aa6f984e01 100644 --- a/src/library/scala/math/Equiv.scala +++ b/src/library/scala/math/Equiv.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package math diff --git a/src/library/scala/math/Fractional.scala b/src/library/scala/math/Fractional.scala index b7e0ed5471b8..9b57f1a06c7e 100644 --- a/src/library/scala/math/Fractional.scala +++ b/src/library/scala/math/Fractional.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package math diff --git a/src/library/scala/math/Integral.scala b/src/library/scala/math/Integral.scala index 44009fd4a227..3d7a8135bc4c 100644 --- a/src/library/scala/math/Integral.scala +++ b/src/library/scala/math/Integral.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package math diff --git a/src/library/scala/math/Numeric.scala b/src/library/scala/math/Numeric.scala index 9245798c17d1..937dd2da24da 100644 --- a/src/library/scala/math/Numeric.scala +++ b/src/library/scala/math/Numeric.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package math diff --git a/src/library/scala/math/Ordered.scala b/src/library/scala/math/Ordered.scala index 1f3d10e083bd..7e000f09de9c 100644 --- a/src/library/scala/math/Ordered.scala +++ b/src/library/scala/math/Ordered.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package math diff --git a/src/library/scala/math/Ordering.scala b/src/library/scala/math/Ordering.scala index a0a2ea77adc0..d3ed5b7d3ff7 100644 --- a/src/library/scala/math/Ordering.scala +++ b/src/library/scala/math/Ordering.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package math diff --git a/src/library/scala/math/PartialOrdering.scala b/src/library/scala/math/PartialOrdering.scala index 5c9f0877bf6e..66ea68303c04 100644 --- a/src/library/scala/math/PartialOrdering.scala +++ b/src/library/scala/math/PartialOrdering.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package math diff --git a/src/library/scala/math/PartiallyOrdered.scala b/src/library/scala/math/PartiallyOrdered.scala index 6f09a1d5a737..05d52d42a199 100644 --- a/src/library/scala/math/PartiallyOrdered.scala +++ b/src/library/scala/math/PartiallyOrdered.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package math diff --git a/src/library/scala/math/ScalaNumber.java b/src/library/scala/math/ScalaNumber.java index f03ba7bf081a..b81745574287 100644 --- a/src/library/scala/math/ScalaNumber.java +++ b/src/library/scala/math/ScalaNumber.java @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.math; diff --git a/src/library/scala/math/ScalaNumericConversions.scala b/src/library/scala/math/ScalaNumericConversions.scala index 0006133b13a8..81e6b8f3d5b2 100644 --- a/src/library/scala/math/ScalaNumericConversions.scala +++ b/src/library/scala/math/ScalaNumericConversions.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package math diff --git a/src/library/scala/math/package.scala b/src/library/scala/math/package.scala index 546efef114f1..31d196eda93c 100644 --- a/src/library/scala/math/package.scala +++ b/src/library/scala/math/package.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/native.scala b/src/library/scala/native.scala index 49d3ced805dd..b0676d503423 100644 --- a/src/library/scala/native.scala +++ b/src/library/scala/native.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala diff --git a/src/library/scala/noinline.scala b/src/library/scala/noinline.scala index b4b0b2727bf2..89192fd66f0b 100644 --- a/src/library/scala/noinline.scala +++ b/src/library/scala/noinline.scala @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala index 224112c11cdb..38d12a629ec7 100644 --- a/src/library/scala/package.scala +++ b/src/library/scala/package.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /** * Core Scala types. They are always available without an explicit import. diff --git a/src/library/scala/ref/PhantomReference.scala b/src/library/scala/ref/PhantomReference.scala index 80e77bd9d5a6..15ce0fb63896 100644 --- a/src/library/scala/ref/PhantomReference.scala +++ b/src/library/scala/ref/PhantomReference.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.ref diff --git a/src/library/scala/ref/Reference.scala b/src/library/scala/ref/Reference.scala index 6377dddcd3ca..5da0a62f5e1b 100644 --- a/src/library/scala/ref/Reference.scala +++ b/src/library/scala/ref/Reference.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.ref diff --git a/src/library/scala/ref/ReferenceQueue.scala b/src/library/scala/ref/ReferenceQueue.scala index 89215ef35d43..e3e7befd14e5 100644 --- a/src/library/scala/ref/ReferenceQueue.scala +++ b/src/library/scala/ref/ReferenceQueue.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.ref diff --git a/src/library/scala/ref/ReferenceWrapper.scala b/src/library/scala/ref/ReferenceWrapper.scala index 3da1f2ea7c9c..54c4a43448c4 100644 --- a/src/library/scala/ref/ReferenceWrapper.scala +++ b/src/library/scala/ref/ReferenceWrapper.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.ref diff --git a/src/library/scala/ref/SoftReference.scala b/src/library/scala/ref/SoftReference.scala index 5e60f0078892..32e3def47c26 100644 --- a/src/library/scala/ref/SoftReference.scala +++ b/src/library/scala/ref/SoftReference.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.ref diff --git a/src/library/scala/ref/WeakReference.scala b/src/library/scala/ref/WeakReference.scala index 9dcc0bbe5f97..51ed4701eb8b 100644 --- a/src/library/scala/ref/WeakReference.scala +++ b/src/library/scala/ref/WeakReference.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.ref diff --git a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala index d2ae10747d7b..94c61bbbfbb5 100644 --- a/src/library/scala/reflect/ClassManifestDeprecatedApis.scala +++ b/src/library/scala/reflect/ClassManifestDeprecatedApis.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package reflect diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala index 4194ae0905a4..faa647d76867 100644 --- a/src/library/scala/reflect/ClassTag.scala +++ b/src/library/scala/reflect/ClassTag.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala index 3579f4731026..1e9b0a2a55d3 100644 --- a/src/library/scala/reflect/Manifest.scala +++ b/src/library/scala/reflect/Manifest.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package reflect diff --git a/src/library/scala/reflect/NameTransformer.scala b/src/library/scala/reflect/NameTransformer.scala index bdf5165df5a8..cbe75a9c5313 100644 --- a/src/library/scala/reflect/NameTransformer.scala +++ b/src/library/scala/reflect/NameTransformer.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package reflect diff --git a/src/library/scala/reflect/NoManifest.scala b/src/library/scala/reflect/NoManifest.scala index 2ef946c80cd9..840e39709014 100644 --- a/src/library/scala/reflect/NoManifest.scala +++ b/src/library/scala/reflect/NoManifest.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package reflect diff --git a/src/library/scala/reflect/OptManifest.scala b/src/library/scala/reflect/OptManifest.scala index b69f55483cd9..09a79f725695 100644 --- a/src/library/scala/reflect/OptManifest.scala +++ b/src/library/scala/reflect/OptManifest.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package reflect diff --git a/src/library/scala/reflect/ScalaLongSignature.java b/src/library/scala/reflect/ScalaLongSignature.java index 94cf504aa45d..f749e33bf3fa 100644 --- a/src/library/scala/reflect/ScalaLongSignature.java +++ b/src/library/scala/reflect/ScalaLongSignature.java @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect; import java.lang.annotation.ElementType; diff --git a/src/library/scala/reflect/ScalaSignature.java b/src/library/scala/reflect/ScalaSignature.java index 217ae8e8f72e..99d8c0387b62 100644 --- a/src/library/scala/reflect/ScalaSignature.java +++ b/src/library/scala/reflect/ScalaSignature.java @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect; import java.lang.annotation.ElementType; diff --git a/src/library/scala/reflect/macros/internal/macroImpl.scala b/src/library/scala/reflect/macros/internal/macroImpl.scala index b281fb7d12b3..b57419b16b80 100644 --- a/src/library/scala/reflect/macros/internal/macroImpl.scala +++ b/src/library/scala/reflect/macros/internal/macroImpl.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.reflect.macros package internal diff --git a/src/library/scala/reflect/package.scala b/src/library/scala/reflect/package.scala index 88cdfb0ed495..42b5c3b3dba1 100644 --- a/src/library/scala/reflect/package.scala +++ b/src/library/scala/reflect/package.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala import java.lang.reflect.{ AccessibleObject => jAccessibleObject } diff --git a/src/library/scala/remote.scala b/src/library/scala/remote.scala index 7265a1519454..a8921006713a 100644 --- a/src/library/scala/remote.scala +++ b/src/library/scala/remote.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala index 630966d0d41d..2b3dd8fb1b27 100644 --- a/src/library/scala/runtime/AbstractPartialFunction.scala +++ b/src/library/scala/runtime/AbstractPartialFunction.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/BooleanRef.java b/src/library/scala/runtime/BooleanRef.java index 92e8055351f7..60341a3d7e73 100644 --- a/src/library/scala/runtime/BooleanRef.java +++ b/src/library/scala/runtime/BooleanRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.runtime; diff --git a/src/library/scala/runtime/BoxedUnit.java b/src/library/scala/runtime/BoxedUnit.java index f436b7c2094a..c190763f4db2 100644 --- a/src/library/scala/runtime/BoxedUnit.java +++ b/src/library/scala/runtime/BoxedUnit.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime; diff --git a/src/library/scala/runtime/BoxesRunTime.java b/src/library/scala/runtime/BoxesRunTime.java index 6b3874fc1f6f..002d0f332de0 100644 --- a/src/library/scala/runtime/BoxesRunTime.java +++ b/src/library/scala/runtime/BoxesRunTime.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime; diff --git a/src/library/scala/runtime/ByteRef.java b/src/library/scala/runtime/ByteRef.java index 27d3259db370..dfc91c4d19fd 100644 --- a/src/library/scala/runtime/ByteRef.java +++ b/src/library/scala/runtime/ByteRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime; diff --git a/src/library/scala/runtime/CharRef.java b/src/library/scala/runtime/CharRef.java index 31956f5b5576..a0448b0bba24 100644 --- a/src/library/scala/runtime/CharRef.java +++ b/src/library/scala/runtime/CharRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.runtime; diff --git a/src/library/scala/runtime/DoubleRef.java b/src/library/scala/runtime/DoubleRef.java index 0c7d9156d6b8..1b2d0421cf4b 100644 --- a/src/library/scala/runtime/DoubleRef.java +++ b/src/library/scala/runtime/DoubleRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime; diff --git a/src/library/scala/runtime/FloatRef.java b/src/library/scala/runtime/FloatRef.java index f0e1d5f8f36c..c3d037d5651a 100644 --- a/src/library/scala/runtime/FloatRef.java +++ b/src/library/scala/runtime/FloatRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime; diff --git a/src/library/scala/runtime/IntRef.java b/src/library/scala/runtime/IntRef.java index adcf474aaea4..95c3cccaa774 100644 --- a/src/library/scala/runtime/IntRef.java +++ b/src/library/scala/runtime/IntRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime; diff --git a/src/library/scala/runtime/LambdaDeserialize.java b/src/library/scala/runtime/LambdaDeserialize.java index 4c5198cc483c..f927699f7b0e 100644 --- a/src/library/scala/runtime/LambdaDeserialize.java +++ b/src/library/scala/runtime/LambdaDeserialize.java @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.runtime; diff --git a/src/library/scala/runtime/LambdaDeserializer.scala b/src/library/scala/runtime/LambdaDeserializer.scala index 3c36f30cf8fd..ec283193a786 100644 --- a/src/library/scala/runtime/LambdaDeserializer.scala +++ b/src/library/scala/runtime/LambdaDeserializer.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.runtime import java.lang.invoke._ diff --git a/src/library/scala/runtime/LazyRef.scala b/src/library/scala/runtime/LazyRef.scala index 6057afef7594..60a17b3d1c95 100644 --- a/src/library/scala/runtime/LazyRef.scala +++ b/src/library/scala/runtime/LazyRef.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2016, LAMP/EPFL and Lightbend, Inc ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime diff --git a/src/library/scala/runtime/LongRef.java b/src/library/scala/runtime/LongRef.java index 51426ab8f693..ef35b4dd01ed 100644 --- a/src/library/scala/runtime/LongRef.java +++ b/src/library/scala/runtime/LongRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime; diff --git a/src/library/scala/runtime/MethodCache.scala b/src/library/scala/runtime/MethodCache.scala index a8fdfc10595d..9406efe3c5bb 100644 --- a/src/library/scala/runtime/MethodCache.scala +++ b/src/library/scala/runtime/MethodCache.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/NonLocalReturnControl.scala b/src/library/scala/runtime/NonLocalReturnControl.scala index a926956acf8b..c14fe6b3fb56 100644 --- a/src/library/scala/runtime/NonLocalReturnControl.scala +++ b/src/library/scala/runtime/NonLocalReturnControl.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package runtime diff --git a/src/library/scala/runtime/Nothing$.scala b/src/library/scala/runtime/Nothing$.scala index 4ecc5362231f..314ffc2e6240 100644 --- a/src/library/scala/runtime/Nothing$.scala +++ b/src/library/scala/runtime/Nothing$.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/Null$.scala b/src/library/scala/runtime/Null$.scala index 87ce0a249831..a56f4c2df8b2 100644 --- a/src/library/scala/runtime/Null$.scala +++ b/src/library/scala/runtime/Null$.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/ObjectRef.java b/src/library/scala/runtime/ObjectRef.java index b34f81c9c89c..452985490662 100644 --- a/src/library/scala/runtime/ObjectRef.java +++ b/src/library/scala/runtime/ObjectRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime; diff --git a/src/library/scala/runtime/RichBoolean.scala b/src/library/scala/runtime/RichBoolean.scala index 4f867960a095..ca7fd39cddae 100644 --- a/src/library/scala/runtime/RichBoolean.scala +++ b/src/library/scala/runtime/RichBoolean.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package runtime diff --git a/src/library/scala/runtime/RichByte.scala b/src/library/scala/runtime/RichByte.scala index ce658d2277ee..998b1fbef6fd 100644 --- a/src/library/scala/runtime/RichByte.scala +++ b/src/library/scala/runtime/RichByte.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/RichChar.scala b/src/library/scala/runtime/RichChar.scala index 71ea3a21e1a7..72554741a7a5 100644 --- a/src/library/scala/runtime/RichChar.scala +++ b/src/library/scala/runtime/RichChar.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/RichDouble.scala b/src/library/scala/runtime/RichDouble.scala index 9d7a55d5cd7f..6f99e8442c5c 100644 --- a/src/library/scala/runtime/RichDouble.scala +++ b/src/library/scala/runtime/RichDouble.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/RichException.scala b/src/library/scala/runtime/RichException.scala index 2863fb6d7ce2..0e2168ddf709 100644 --- a/src/library/scala/runtime/RichException.scala +++ b/src/library/scala/runtime/RichException.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package runtime diff --git a/src/library/scala/runtime/RichFloat.scala b/src/library/scala/runtime/RichFloat.scala index 93777f2405fa..dbccd5bd0b78 100644 --- a/src/library/scala/runtime/RichFloat.scala +++ b/src/library/scala/runtime/RichFloat.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/RichInt.scala b/src/library/scala/runtime/RichInt.scala index 37d236dfe90a..4d1ae66976a4 100644 --- a/src/library/scala/runtime/RichInt.scala +++ b/src/library/scala/runtime/RichInt.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/RichLong.scala b/src/library/scala/runtime/RichLong.scala index 233ce231b4c8..1cf00fbaf74e 100644 --- a/src/library/scala/runtime/RichLong.scala +++ b/src/library/scala/runtime/RichLong.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/RichShort.scala b/src/library/scala/runtime/RichShort.scala index b35beff7eb4c..f15698fb2949 100644 --- a/src/library/scala/runtime/RichShort.scala +++ b/src/library/scala/runtime/RichShort.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package runtime diff --git a/src/library/scala/runtime/ScalaNumberProxy.scala b/src/library/scala/runtime/ScalaNumberProxy.scala index 16ad26582323..4f809efca975 100644 --- a/src/library/scala/runtime/ScalaNumberProxy.scala +++ b/src/library/scala/runtime/ScalaNumberProxy.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala index b90d6f43e42a..711c044f858a 100644 --- a/src/library/scala/runtime/ScalaRunTime.scala +++ b/src/library/scala/runtime/ScalaRunTime.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/SeqCharSequence.scala b/src/library/scala/runtime/SeqCharSequence.scala index 7751bf815c8a..a53e0001147d 100644 --- a/src/library/scala/runtime/SeqCharSequence.scala +++ b/src/library/scala/runtime/SeqCharSequence.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/ShortRef.java b/src/library/scala/runtime/ShortRef.java index e5e8de3d8b82..9862d03fdf36 100644 --- a/src/library/scala/runtime/ShortRef.java +++ b/src/library/scala/runtime/ShortRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.runtime; diff --git a/src/library/scala/runtime/Statics.java b/src/library/scala/runtime/Statics.java index 83e0ec6bd7d2..ae62c21d2a77 100644 --- a/src/library/scala/runtime/Statics.java +++ b/src/library/scala/runtime/Statics.java @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.runtime; /** Not for public consumption. Usage by the runtime only. diff --git a/src/library/scala/runtime/StringAdd.scala b/src/library/scala/runtime/StringAdd.scala index 37f077bcadfa..04881769ed63 100644 --- a/src/library/scala/runtime/StringAdd.scala +++ b/src/library/scala/runtime/StringAdd.scala @@ -1,10 +1,14 @@ -/* *\ -** ________ ___ __ ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ |_| ** -** ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/StringFormat.scala b/src/library/scala/runtime/StringFormat.scala index 5376c3f98201..481c6c6b2962 100644 --- a/src/library/scala/runtime/StringFormat.scala +++ b/src/library/scala/runtime/StringFormat.scala @@ -1,10 +1,14 @@ -/* *\ -** ________ ___ __ ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ |_| ** -** ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/StructuralCallSite.java b/src/library/scala/runtime/StructuralCallSite.java index f73b4f08e622..36da98a49d72 100644 --- a/src/library/scala/runtime/StructuralCallSite.java +++ b/src/library/scala/runtime/StructuralCallSite.java @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.runtime; diff --git a/src/library/scala/runtime/SymbolLiteral.java b/src/library/scala/runtime/SymbolLiteral.java index d57204165d8d..3638dca3eda3 100644 --- a/src/library/scala/runtime/SymbolLiteral.java +++ b/src/library/scala/runtime/SymbolLiteral.java @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.runtime; import java.lang.invoke.*; diff --git a/src/library/scala/runtime/TraitSetter.java b/src/library/scala/runtime/TraitSetter.java index d8dd8c6b04ba..967b7033fa99 100644 --- a/src/library/scala/runtime/TraitSetter.java +++ b/src/library/scala/runtime/TraitSetter.java @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.runtime; /** A marker annotation to tag a setter of a mutable variable in a trait diff --git a/src/library/scala/runtime/Tuple2Zipped.scala b/src/library/scala/runtime/Tuple2Zipped.scala index 52dd1da09e82..fee5618e5375 100644 --- a/src/library/scala/runtime/Tuple2Zipped.scala +++ b/src/library/scala/runtime/Tuple2Zipped.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2016, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/Tuple3Zipped.scala b/src/library/scala/runtime/Tuple3Zipped.scala index a4a86f8e55b0..94f094b10d59 100644 --- a/src/library/scala/runtime/Tuple3Zipped.scala +++ b/src/library/scala/runtime/Tuple3Zipped.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package runtime diff --git a/src/library/scala/runtime/VolatileBooleanRef.java b/src/library/scala/runtime/VolatileBooleanRef.java index ef5b69111880..50b49a05b3d6 100644 --- a/src/library/scala/runtime/VolatileBooleanRef.java +++ b/src/library/scala/runtime/VolatileBooleanRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.runtime; diff --git a/src/library/scala/runtime/VolatileByteRef.java b/src/library/scala/runtime/VolatileByteRef.java index d792b0a386a8..016bc890c617 100644 --- a/src/library/scala/runtime/VolatileByteRef.java +++ b/src/library/scala/runtime/VolatileByteRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime; diff --git a/src/library/scala/runtime/VolatileCharRef.java b/src/library/scala/runtime/VolatileCharRef.java index 555b17128321..6e39a80cdda6 100644 --- a/src/library/scala/runtime/VolatileCharRef.java +++ b/src/library/scala/runtime/VolatileCharRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime; diff --git a/src/library/scala/runtime/VolatileDoubleRef.java b/src/library/scala/runtime/VolatileDoubleRef.java index 1932055c6add..b61f0ffc198d 100644 --- a/src/library/scala/runtime/VolatileDoubleRef.java +++ b/src/library/scala/runtime/VolatileDoubleRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime; diff --git a/src/library/scala/runtime/VolatileFloatRef.java b/src/library/scala/runtime/VolatileFloatRef.java index 3a81be114602..e14ed0123cba 100644 --- a/src/library/scala/runtime/VolatileFloatRef.java +++ b/src/library/scala/runtime/VolatileFloatRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.runtime; diff --git a/src/library/scala/runtime/VolatileIntRef.java b/src/library/scala/runtime/VolatileIntRef.java index ae015bc8b122..2553f59a39cb 100644 --- a/src/library/scala/runtime/VolatileIntRef.java +++ b/src/library/scala/runtime/VolatileIntRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime; diff --git a/src/library/scala/runtime/VolatileLongRef.java b/src/library/scala/runtime/VolatileLongRef.java index e596f5aa6964..5e0ebf7f1dd1 100644 --- a/src/library/scala/runtime/VolatileLongRef.java +++ b/src/library/scala/runtime/VolatileLongRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime; diff --git a/src/library/scala/runtime/VolatileObjectRef.java b/src/library/scala/runtime/VolatileObjectRef.java index 6063501ffb0e..77b770ec1314 100644 --- a/src/library/scala/runtime/VolatileObjectRef.java +++ b/src/library/scala/runtime/VolatileObjectRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.runtime; diff --git a/src/library/scala/runtime/VolatileShortRef.java b/src/library/scala/runtime/VolatileShortRef.java index 0a2825941fbf..9d84f9d1314a 100644 --- a/src/library/scala/runtime/VolatileShortRef.java +++ b/src/library/scala/runtime/VolatileShortRef.java @@ -1,12 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.runtime; diff --git a/src/library/scala/runtime/java8/JFunction0$mcB$sp.java b/src/library/scala/runtime/java8/JFunction0$mcB$sp.java index 622dbabcf111..f17aa30006f4 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcB$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcB$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction0$mcC$sp.java b/src/library/scala/runtime/java8/JFunction0$mcC$sp.java index ad9a14ffa8fc..961c6123a715 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcC$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcC$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction0$mcD$sp.java b/src/library/scala/runtime/java8/JFunction0$mcD$sp.java index 291b50db4bd0..ea523556ec3a 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcD$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction0$mcF$sp.java b/src/library/scala/runtime/java8/JFunction0$mcF$sp.java index 73b31dea0f0f..232dd7c2b7d7 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcF$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcF$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction0$mcI$sp.java b/src/library/scala/runtime/java8/JFunction0$mcI$sp.java index f9b2d659ad31..ce7efc254580 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcI$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java b/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java index 73c41976b7a3..d3407ea5b648 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction0$mcS$sp.java b/src/library/scala/runtime/java8/JFunction0$mcS$sp.java index 5fbabb2358e2..2e98a0666127 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcS$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcS$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction0$mcV$sp.java b/src/library/scala/runtime/java8/JFunction0$mcV$sp.java index 735843796ce8..b5c427ac9e5b 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcV$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcV$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java b/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java index 01234c1728a2..f5c8f9162356 100644 --- a/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java +++ b/src/library/scala/runtime/java8/JFunction0$mcZ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java index 07b85eed59a9..dbdc62c1797e 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcDD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java index f09edd2ce25f..e442cba96f5c 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcDF$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java index 3cf40cb74998..a541f949695b 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcDI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java index 4023f30bc052..938eeb8dd608 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcDJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java index d4608958383a..3efccac1f425 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcFD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java index 6c591800cadc..cb01b24028b6 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcFF$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java index 666919591438..325ee40d7f5a 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcFI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java index cd953677aec6..b65ed4897dbc 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcFJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcID$sp.java b/src/library/scala/runtime/java8/JFunction1$mcID$sp.java index 37f686493681..ad1f94e5c4f1 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcID$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcID$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java index 8a7656a286d5..09fd883167fd 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcIF$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcII$sp.java b/src/library/scala/runtime/java8/JFunction1$mcII$sp.java index 792627b4005e..226ab78904ee 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcII$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcII$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java index 01c47a67dac7..3f32f6d67740 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcIJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java index d8d5274ca1db..da09801f57a9 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcJD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java index cc1fad36d04a..4f0a57dd1ded 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcJF$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java index fe941dd61a7e..ec2ad51ab265 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcJI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java index 7034115bad79..8c4a8b198908 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcJJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java index dde9f557226b..d423bad709a0 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcVD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java index 0ffd80621f13..878f0e84a03b 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcVF$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java index 2543d23e3134..e004a9820ab9 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcVI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java index 7564175402df..fa5eaab36029 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcVJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java index ce5bd300297d..aa4af07a7701 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcZD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java index baa691e5480f..8d319747633a 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcZF$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java index bf04b5922b93..41b469ac6073 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcZI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java b/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java index 808eea87b8c7..ce8794cf80be 100644 --- a/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction1$mcZJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java index 80ab5203d954..4a3ae9f43101 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDDD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java index 8e92338b825b..1a55fce1ec79 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDDI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java index 3d4f4a7cded9..44645e1d9589 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDDJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java index bd6652e51ac1..6ada9c9903b2 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDID$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java index d06a246d3385..9030379ae3ec 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDII$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java index cda23c4dcd74..d1e386d7d2c3 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDIJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java index 723efd8451eb..007fc1a5d25e 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDJD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java index c90352ef301b..e599ea8ae6ff 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDJI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java index 33612197878e..59a5369d793d 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcDJJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java index 2b9236b5d1b4..8037e1daf1ff 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFDD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java index 2c564962a716..d44f63ab127f 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFDI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java index a0785f4cd2c6..f9db67bc4ca7 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFDJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java index ba67ddb5931d..b46abeb21b63 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFID$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java index d58284b7522d..c2a7e363fd21 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFII$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java index 4bc6eeb9085f..70333183785d 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFIJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java index f2435e23f7fd..97f08283b0c4 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFJD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java index 1362d00e9408..c2cf343bb1cc 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFJI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java index c9bcf515b736..5c66d8fcd3f0 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcFJJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java index 28693910a57c..b54e0d5dfebe 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIDD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java index 50c775fbd9fc..f618c54d6b80 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIDI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java index 3231aa7a888b..8022caac201b 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIDJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java index 01568b2fd626..345c09d78030 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIID$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java index e0fba76675cc..a6a3fd7760cf 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIII$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java index 7155548e9f01..eb71410a18a5 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIIJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java index f541cfdef405..1e2c3e5ad7d0 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIJD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java index e484efe42778..fe59c998c6cd 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIJI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java index ec3538779cb6..10099ed319b6 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcIJJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java index b13502de5b30..14921383cc07 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJDD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java index 9ec9adda6003..dc998df44210 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJDI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java index 68ef9ead143a..493ada4e5b1c 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJDJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java index 29c9c5e3d300..4f99b940eb2b 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJID$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java index bb23086125bf..dd3d5c2e98e5 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJII$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java index 649fe2432562..d9f4801f2ba8 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJIJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java index 8e6071d44817..cc2e12a96306 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJJD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java index 61366ac26de1..ee5c626fedca 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJJI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java index a44e97318e11..d145a115d44d 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcJJJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java index 8e7cbd7d1bd2..292e0a18211b 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVDD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java index 1dee353d6b32..77c331523d26 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVDI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java index 0b9560868472..0c32c921e8a4 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVDJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java index f0ed7e7e9789..f0a3a7a19d35 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVID$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java index 52d7922cc1aa..f9c715e9a5b4 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVII$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java index ac256bf163d6..e3ef11544333 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVIJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java index 6e2dea3fbfef..f344e8a47e95 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVJD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java index d1cba439e667..8a5329a37249 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVJI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java index 67f848a60e7c..bd1e1be8dfbe 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcVJJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java index b430c5f1343f..2d4f462c9b08 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZDD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java index 01fb8ba003e6..7f06d9cfe768 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZDI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java index a7d28e3cfc71..9e0bc7f7fffa 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZDJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java index e77719bf7568..a2433ad05c5f 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZID$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java index 5f1f83aaf8b2..089c3f6292b5 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZII$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java index 38fabd6f691c..fab7a530c3ca 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZIJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java index 59c82cb01e60..dca526b7fb7a 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZJD$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. 
diff --git a/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java index 3e73b8a794e8..28e0243c88de 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZJI$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java b/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java index 96a14e98a531..864c7139dfe3 100644 --- a/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java +++ b/src/library/scala/runtime/java8/JFunction2$mcZJJ$sp.java @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /* * Copyright (C) 2012-2015 Typesafe Inc. diff --git a/src/library/scala/runtime/package.scala b/src/library/scala/runtime/package.scala index e4472b3ea183..b1b5f473f864 100644 --- a/src/library/scala/runtime/package.scala +++ b/src/library/scala/runtime/package.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package object runtime { } diff --git a/src/library/scala/specialized.scala b/src/library/scala/specialized.scala index cb7793536cab..af9dcd0cc91e 100644 --- a/src/library/scala/specialized.scala +++ b/src/library/scala/specialized.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/sys/BooleanProp.scala b/src/library/scala/sys/BooleanProp.scala index b0008b41fd41..94e4c7c56fd7 100644 --- a/src/library/scala/sys/BooleanProp.scala +++ b/src/library/scala/sys/BooleanProp.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package sys diff --git a/src/library/scala/sys/Prop.scala b/src/library/scala/sys/Prop.scala index bad3f32713ee..35b260951cf6 100644 --- a/src/library/scala/sys/Prop.scala +++ b/src/library/scala/sys/Prop.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package sys diff --git a/src/library/scala/sys/PropImpl.scala b/src/library/scala/sys/PropImpl.scala index 3b451ab1d932..3a73d3df6aa1 100644 --- a/src/library/scala/sys/PropImpl.scala +++ b/src/library/scala/sys/PropImpl.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package sys diff --git a/src/library/scala/sys/ShutdownHookThread.scala b/src/library/scala/sys/ShutdownHookThread.scala index 9de15387f049..aa2d2a50d05e 100644 --- a/src/library/scala/sys/ShutdownHookThread.scala +++ b/src/library/scala/sys/ShutdownHookThread.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package sys diff --git a/src/library/scala/sys/SystemProperties.scala b/src/library/scala/sys/SystemProperties.scala index 8142d01fb812..799921b99151 100644 --- a/src/library/scala/sys/SystemProperties.scala +++ b/src/library/scala/sys/SystemProperties.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package sys diff --git a/src/library/scala/sys/package.scala b/src/library/scala/sys/package.scala index 1d0687b887e6..61453ae74f52 100644 --- a/src/library/scala/sys/package.scala +++ b/src/library/scala/sys/package.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala index b39ae77c62c8..31acd4aa73dc 100644 --- a/src/library/scala/sys/process/BasicIO.scala +++ b/src/library/scala/sys/process/BasicIO.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package sys diff --git a/src/library/scala/sys/process/Process.scala b/src/library/scala/sys/process/Process.scala index 9e0ea6e71a04..485ca97fa0f5 100644 --- a/src/library/scala/sys/process/Process.scala +++ b/src/library/scala/sys/process/Process.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package sys diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala index d0745e5833c7..d598b2b8ca2c 100644 --- a/src/library/scala/sys/process/ProcessBuilder.scala +++ b/src/library/scala/sys/process/ProcessBuilder.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package sys diff --git a/src/library/scala/sys/process/ProcessBuilderImpl.scala b/src/library/scala/sys/process/ProcessBuilderImpl.scala index 0df2e648e0e1..fdaeb2e59e1a 100644 --- a/src/library/scala/sys/process/ProcessBuilderImpl.scala +++ b/src/library/scala/sys/process/ProcessBuilderImpl.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package sys diff --git a/src/library/scala/sys/process/ProcessIO.scala b/src/library/scala/sys/process/ProcessIO.scala index eedf667c88cf..154b4632839e 100644 --- a/src/library/scala/sys/process/ProcessIO.scala +++ b/src/library/scala/sys/process/ProcessIO.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package sys diff --git a/src/library/scala/sys/process/ProcessImpl.scala b/src/library/scala/sys/process/ProcessImpl.scala index a7afecf44007..35b873979dba 100644 --- a/src/library/scala/sys/process/ProcessImpl.scala +++ b/src/library/scala/sys/process/ProcessImpl.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package sys diff --git a/src/library/scala/sys/process/ProcessLogger.scala b/src/library/scala/sys/process/ProcessLogger.scala index 60728940070c..4666d476152c 100644 --- a/src/library/scala/sys/process/ProcessLogger.scala +++ b/src/library/scala/sys/process/ProcessLogger.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package sys diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala index c341786ad87d..07445af4df70 100644 --- a/src/library/scala/sys/process/package.scala +++ b/src/library/scala/sys/process/package.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ // Developer note: // scala -J-Dscala.process.debug diff --git a/src/library/scala/text/Document.scala b/src/library/scala/text/Document.scala index 0c747c99a83d..56b99e5e4d41 100644 --- a/src/library/scala/text/Document.scala +++ b/src/library/scala/text/Document.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.text diff --git a/src/library/scala/throws.scala b/src/library/scala/throws.scala index 5de4b8edd30a..484742cc3b7c 100644 --- a/src/library/scala/throws.scala +++ b/src/library/scala/throws.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/transient.scala b/src/library/scala/transient.scala index ec87439093bc..ed815b16031a 100644 --- a/src/library/scala/transient.scala +++ b/src/library/scala/transient.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala diff --git a/src/library/scala/unchecked.scala b/src/library/scala/unchecked.scala index 9dff6a9ee627..00136bbb4ede 100644 --- a/src/library/scala/unchecked.scala +++ b/src/library/scala/unchecked.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala diff --git a/src/library/scala/util/DynamicVariable.scala b/src/library/scala/util/DynamicVariable.scala index 4b2d1a542a14..ef2e47a33e10 100644 --- a/src/library/scala/util/DynamicVariable.scala +++ b/src/library/scala/util/DynamicVariable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package util diff --git a/src/library/scala/util/Either.scala b/src/library/scala/util/Either.scala index 09d1de71cf1e..8fec3a409660 100644 --- a/src/library/scala/util/Either.scala +++ b/src/library/scala/util/Either.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package util diff --git a/src/library/scala/util/MurmurHash.scala b/src/library/scala/util/MurmurHash.scala index 6cf445b9ac2b..35571ad4bc13 100644 --- a/src/library/scala/util/MurmurHash.scala +++ b/src/library/scala/util/MurmurHash.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package util diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala index 1905974b6237..d70d2d174345 100644 --- a/src/library/scala/util/Properties.scala +++ b/src/library/scala/util/Properties.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2015, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package util diff --git a/src/library/scala/util/Random.scala b/src/library/scala/util/Random.scala index 16d18d7d6df0..116391cf434a 100644 --- a/src/library/scala/util/Random.scala +++ b/src/library/scala/util/Random.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package util diff --git a/src/library/scala/util/Sorting.scala b/src/library/scala/util/Sorting.scala index 7005a892fb00..97811a3c69fd 100644 --- a/src/library/scala/util/Sorting.scala +++ b/src/library/scala/util/Sorting.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2015, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package util diff --git a/src/library/scala/util/Try.scala b/src/library/scala/util/Try.scala index 00e9585c38e2..6ae8eadacb8f 100644 --- a/src/library/scala/util/Try.scala +++ b/src/library/scala/util/Try.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2008-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package util diff --git a/src/library/scala/util/control/Breaks.scala b/src/library/scala/util/control/Breaks.scala index 5524b10afa7f..87deedc25478 100644 --- a/src/library/scala/util/control/Breaks.scala +++ b/src/library/scala/util/control/Breaks.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package util.control diff --git a/src/library/scala/util/control/ControlThrowable.scala b/src/library/scala/util/control/ControlThrowable.scala index 7ed3d95cd3ca..c9d545c34b0f 100644 --- a/src/library/scala/util/control/ControlThrowable.scala +++ b/src/library/scala/util/control/ControlThrowable.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package util.control diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala index 64f491d7f0b8..b63c57973042 100644 --- a/src/library/scala/util/control/Exception.scala +++ b/src/library/scala/util/control/Exception.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package util diff --git a/src/library/scala/util/control/NoStackTrace.scala b/src/library/scala/util/control/NoStackTrace.scala index 3647af4ac388..b3788db4530f 100644 --- a/src/library/scala/util/control/NoStackTrace.scala +++ b/src/library/scala/util/control/NoStackTrace.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package util.control diff --git a/src/library/scala/util/control/NonFatal.scala b/src/library/scala/util/control/NonFatal.scala index 9d3dfea07455..a499229f2bc4 100644 --- a/src/library/scala/util/control/NonFatal.scala +++ b/src/library/scala/util/control/NonFatal.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package util.control diff --git a/src/library/scala/util/control/TailCalls.scala b/src/library/scala/util/control/TailCalls.scala index fe8866ec3fb2..bdc25170fa66 100644 --- a/src/library/scala/util/control/TailCalls.scala +++ b/src/library/scala/util/control/TailCalls.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package util.control diff --git a/src/library/scala/util/hashing/ByteswapHashing.scala b/src/library/scala/util/hashing/ByteswapHashing.scala index 470479725bd4..21ff35fa5163 100644 --- a/src/library/scala/util/hashing/ByteswapHashing.scala +++ b/src/library/scala/util/hashing/ByteswapHashing.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package util.hashing diff --git a/src/library/scala/util/hashing/Hashing.scala b/src/library/scala/util/hashing/Hashing.scala index 2b72c1dbe3de..d995e22d8c57 100644 --- a/src/library/scala/util/hashing/Hashing.scala +++ b/src/library/scala/util/hashing/Hashing.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package util.hashing diff --git a/src/library/scala/util/hashing/MurmurHash3.scala b/src/library/scala/util/hashing/MurmurHash3.scala index fa725903e319..285e9407746b 100644 --- a/src/library/scala/util/hashing/MurmurHash3.scala +++ b/src/library/scala/util/hashing/MurmurHash3.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package util.hashing diff --git a/src/library/scala/util/hashing/package.scala b/src/library/scala/util/hashing/package.scala index 2c8e0154fc31..f8ca83cf5339 100644 --- a/src/library/scala/util/hashing/package.scala +++ b/src/library/scala/util/hashing/package.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package util diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index 8423d3a11968..4614bf5bf658 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ /** * This package is concerned with regular expression (regex) matching against strings, diff --git a/src/library/scala/volatile.scala b/src/library/scala/volatile.scala index c612732329f2..ac3e80ef5f03 100644 --- a/src/library/scala/volatile.scala +++ b/src/library/scala/volatile.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala diff --git a/src/partest-extras/scala/tools/partest/ASMConverters.scala b/src/partest-extras/scala/tools/partest/ASMConverters.scala index 445d3c89c2b3..831fe5fadc59 100644 --- a/src/partest-extras/scala/tools/partest/ASMConverters.scala +++ b/src/partest-extras/scala/tools/partest/ASMConverters.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.partest import scala.collection.JavaConverters._ diff --git a/src/partest-extras/scala/tools/partest/AsmNode.scala b/src/partest-extras/scala/tools/partest/AsmNode.scala index e6a91498d1df..89291ad5b106 100644 --- a/src/partest-extras/scala/tools/partest/AsmNode.scala +++ b/src/partest-extras/scala/tools/partest/AsmNode.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.partest import scala.collection.JavaConverters._ diff --git a/src/partest-extras/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala index 93ac14a98ed6..b016778bf428 100644 --- a/src/partest-extras/scala/tools/partest/BytecodeTest.scala +++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.partest import scala.collection.JavaConverters._ diff --git a/src/partest-extras/scala/tools/partest/IcodeComparison.scala b/src/partest-extras/scala/tools/partest/IcodeComparison.scala index 1430db886e51..07f011e18f3c 100644 --- a/src/partest-extras/scala/tools/partest/IcodeComparison.scala +++ b/src/partest-extras/scala/tools/partest/IcodeComparison.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.partest diff --git a/src/partest-extras/scala/tools/partest/JavapTest.scala b/src/partest-extras/scala/tools/partest/JavapTest.scala index cfca49b3a74d..d2c126138bb1 100644 --- a/src/partest-extras/scala/tools/partest/JavapTest.scala +++ b/src/partest-extras/scala/tools/partest/JavapTest.scala @@ -1,3 +1,14 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.tools.partest diff --git a/src/partest-extras/scala/tools/partest/ParserTest.scala b/src/partest-extras/scala/tools/partest/ParserTest.scala index e4c92e3dc390..2ee796305881 100644 --- a/src/partest-extras/scala/tools/partest/ParserTest.scala +++ b/src/partest-extras/scala/tools/partest/ParserTest.scala @@ -1,5 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.partest diff --git a/src/partest-extras/scala/tools/partest/ReplTest.scala b/src/partest-extras/scala/tools/partest/ReplTest.scala index d039f2ec6ab3..affaef59d726 100644 --- a/src/partest-extras/scala/tools/partest/ReplTest.scala +++ b/src/partest-extras/scala/tools/partest/ReplTest.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.partest diff --git a/src/partest-extras/scala/tools/partest/ScaladocJavaModelTest.scala b/src/partest-extras/scala/tools/partest/ScaladocJavaModelTest.scala index 1008be5b87b0..af457c6dd6b8 100644 --- a/src/partest-extras/scala/tools/partest/ScaladocJavaModelTest.scala +++ b/src/partest-extras/scala/tools/partest/ScaladocJavaModelTest.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.partest import scala.tools.nsc.doc.Universe diff --git a/src/partest-extras/scala/tools/partest/ScaladocModelTest.scala b/src/partest-extras/scala/tools/partest/ScaladocModelTest.scala index 44c1146a1457..c780982fa7f4 100644 --- a/src/partest-extras/scala/tools/partest/ScaladocModelTest.scala +++ b/src/partest-extras/scala/tools/partest/ScaladocModelTest.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Vlad Ureche +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.partest diff --git a/src/partest-extras/scala/tools/partest/ScriptTest.scala b/src/partest-extras/scala/tools/partest/ScriptTest.scala index 3000d751e1cc..3fdc32ac6774 100644 --- a/src/partest-extras/scala/tools/partest/ScriptTest.scala +++ b/src/partest-extras/scala/tools/partest/ScriptTest.scala @@ -1,5 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.partest diff --git a/src/partest-extras/scala/tools/partest/SigTest.scala b/src/partest-extras/scala/tools/partest/SigTest.scala index a516daa629b5..b0a2c5f16f81 100644 --- a/src/partest-extras/scala/tools/partest/SigTest.scala +++ b/src/partest-extras/scala/tools/partest/SigTest.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.partest diff --git a/src/partest-extras/scala/tools/partest/StubErrorMessageTest.scala b/src/partest-extras/scala/tools/partest/StubErrorMessageTest.scala index f713b79e7553..9c74a2c596c2 100644 --- a/src/partest-extras/scala/tools/partest/StubErrorMessageTest.scala +++ b/src/partest-extras/scala/tools/partest/StubErrorMessageTest.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.partest trait StubErrorMessageTest extends StoreReporterDirectTest { diff --git a/src/partest-extras/scala/tools/partest/Util.scala b/src/partest-extras/scala/tools/partest/Util.scala index 511997ea35b1..b4f3d1e7b4da 100644 --- a/src/partest-extras/scala/tools/partest/Util.scala +++ b/src/partest-extras/scala/tools/partest/Util.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.partest import scala.language.experimental.macros diff --git a/src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala b/src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala index 18dd74020825..e6598714128b 100644 --- a/src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala +++ b/src/partest-extras/scala/tools/partest/instrumented/Instrumentation.scala @@ -1,6 +1,13 @@ -/* NEST (New Scala Test) - * Copyright 2007-2013 LAMP/EPFL - * @author Grzegorz Kossakowski +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.partest.instrumented diff --git a/src/partest-extras/scala/tools/partest/instrumented/Profiler.java b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java index 848103f5ccf8..d67e7d3572ac 100644 --- a/src/partest-extras/scala/tools/partest/instrumented/Profiler.java +++ b/src/partest-extras/scala/tools/partest/instrumented/Profiler.java @@ -1,6 +1,13 @@ -/* NEST (New Scala Test) - * Copyright 2007-2013 LAMP/EPFL - * @author Grzegorz Kossakowski +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.partest.instrumented; diff --git a/src/reflect/scala/reflect/api/Annotations.scala b/src/reflect/scala/reflect/api/Annotations.scala index 5122e37dc1c2..fa0a77b47062 100644 --- a/src/reflect/scala/reflect/api/Annotations.scala +++ b/src/reflect/scala/reflect/api/Annotations.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/Constants.scala b/src/reflect/scala/reflect/api/Constants.scala index 776283f67068..81ddc81c15ec 100644 --- a/src/reflect/scala/reflect/api/Constants.scala +++ b/src/reflect/scala/reflect/api/Constants.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/api/Exprs.scala b/src/reflect/scala/reflect/api/Exprs.scala index ad03718898d7..bc3781412bab 100644 --- a/src/reflect/scala/reflect/api/Exprs.scala +++ b/src/reflect/scala/reflect/api/Exprs.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/api/FlagSets.scala b/src/reflect/scala/reflect/api/FlagSets.scala index 14852c0231ce..a571398d9196 100644 --- a/src/reflect/scala/reflect/api/FlagSets.scala +++ b/src/reflect/scala/reflect/api/FlagSets.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/ImplicitTags.scala b/src/reflect/scala/reflect/api/ImplicitTags.scala index aca0692d0d07..073c71e3b89b 100644 --- a/src/reflect/scala/reflect/api/ImplicitTags.scala +++ b/src/reflect/scala/reflect/api/ImplicitTags.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/Internals.scala b/src/reflect/scala/reflect/api/Internals.scala index 10b46d2778a3..248aba274603 100644 --- a/src/reflect/scala/reflect/api/Internals.scala +++ b/src/reflect/scala/reflect/api/Internals.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/JavaUniverse.scala b/src/reflect/scala/reflect/api/JavaUniverse.scala index 88107ea117d2..bb28b87459d0 100644 --- a/src/reflect/scala/reflect/api/JavaUniverse.scala +++ b/src/reflect/scala/reflect/api/JavaUniverse.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/Liftables.scala b/src/reflect/scala/reflect/api/Liftables.scala index c6352905d1c6..5bccb63b4aae 100644 --- a/src/reflect/scala/reflect/api/Liftables.scala +++ b/src/reflect/scala/reflect/api/Liftables.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/Mirror.scala b/src/reflect/scala/reflect/api/Mirror.scala index 96aab48e75eb..98180498e429 100644 --- a/src/reflect/scala/reflect/api/Mirror.scala +++ b/src/reflect/scala/reflect/api/Mirror.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/Mirrors.scala b/src/reflect/scala/reflect/api/Mirrors.scala index adaf829b32f0..379e82706bd7 100644 --- a/src/reflect/scala/reflect/api/Mirrors.scala +++ b/src/reflect/scala/reflect/api/Mirrors.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/Names.scala b/src/reflect/scala/reflect/api/Names.scala index 35009d7f591a..818ff985729a 100644 --- a/src/reflect/scala/reflect/api/Names.scala +++ b/src/reflect/scala/reflect/api/Names.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/Position.scala b/src/reflect/scala/reflect/api/Position.scala index 2e02d4a26f2f..b6cc08146fcb 100644 --- a/src/reflect/scala/reflect/api/Position.scala +++ b/src/reflect/scala/reflect/api/Position.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/Positions.scala b/src/reflect/scala/reflect/api/Positions.scala index 63ad605656cc..c224f644401e 100644 --- a/src/reflect/scala/reflect/api/Positions.scala +++ b/src/reflect/scala/reflect/api/Positions.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/Printers.scala b/src/reflect/scala/reflect/api/Printers.scala index 257dd6c43ea9..37402441248f 100644 --- a/src/reflect/scala/reflect/api/Printers.scala +++ b/src/reflect/scala/reflect/api/Printers.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/Quasiquotes.scala b/src/reflect/scala/reflect/api/Quasiquotes.scala index 554b43afaf04..ac09b441b8f5 100644 --- a/src/reflect/scala/reflect/api/Quasiquotes.scala +++ b/src/reflect/scala/reflect/api/Quasiquotes.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.reflect package api diff --git a/src/reflect/scala/reflect/api/Scopes.scala b/src/reflect/scala/reflect/api/Scopes.scala index c9142fba477f..2bf5b82fa7ac 100644 --- a/src/reflect/scala/reflect/api/Scopes.scala +++ b/src/reflect/scala/reflect/api/Scopes.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/StandardDefinitions.scala b/src/reflect/scala/reflect/api/StandardDefinitions.scala index 50954f5edadb..80756be1a70c 100644 --- a/src/reflect/scala/reflect/api/StandardDefinitions.scala +++ b/src/reflect/scala/reflect/api/StandardDefinitions.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/StandardLiftables.scala b/src/reflect/scala/reflect/api/StandardLiftables.scala index ebf15e4f5727..ee06b512f805 100644 --- a/src/reflect/scala/reflect/api/StandardLiftables.scala +++ b/src/reflect/scala/reflect/api/StandardLiftables.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect package api diff --git a/src/reflect/scala/reflect/api/StandardNames.scala b/src/reflect/scala/reflect/api/StandardNames.scala index 38667ae15300..8c3a7507bc82 100644 --- a/src/reflect/scala/reflect/api/StandardNames.scala +++ b/src/reflect/scala/reflect/api/StandardNames.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler -* Copyright 2005-2013 LAMP/EPFL -* @author Martin Odersky -*/ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/Symbols.scala b/src/reflect/scala/reflect/api/Symbols.scala index f2cea382763c..65be68470eab 100644 --- a/src/reflect/scala/reflect/api/Symbols.scala +++ b/src/reflect/scala/reflect/api/Symbols.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/TreeCreator.scala b/src/reflect/scala/reflect/api/TreeCreator.scala index 000eaa1aa614..056e1c8bcbcd 100644 --- a/src/reflect/scala/reflect/api/TreeCreator.scala +++ b/src/reflect/scala/reflect/api/TreeCreator.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/Trees.scala b/src/reflect/scala/reflect/api/Trees.scala index a2d11cc60e27..0012646aa8fb 100644 --- a/src/reflect/scala/reflect/api/Trees.scala +++ b/src/reflect/scala/reflect/api/Trees.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/TypeCreator.scala b/src/reflect/scala/reflect/api/TypeCreator.scala index cbd55b942865..8718d6a285e5 100644 --- a/src/reflect/scala/reflect/api/TypeCreator.scala +++ b/src/reflect/scala/reflect/api/TypeCreator.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala index cad318dbedcf..cdcd8b6926eb 100644 --- a/src/reflect/scala/reflect/api/TypeTags.scala +++ b/src/reflect/scala/reflect/api/TypeTags.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/api/Types.scala b/src/reflect/scala/reflect/api/Types.scala index 9e05a7f979fc..5f4fb72a0510 100644 --- a/src/reflect/scala/reflect/api/Types.scala +++ b/src/reflect/scala/reflect/api/Types.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/Universe.scala b/src/reflect/scala/reflect/api/Universe.scala index a3d1d291eb70..1ac9815cff9e 100644 --- a/src/reflect/scala/reflect/api/Universe.scala +++ b/src/reflect/scala/reflect/api/Universe.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package api diff --git a/src/reflect/scala/reflect/api/package.scala b/src/reflect/scala/reflect/api/package.scala index a8f409e123f1..9faa876411ea 100644 --- a/src/reflect/scala/reflect/api/package.scala +++ b/src/reflect/scala/reflect/api/package.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala index 9a6caff16064..3076b3be02ed 100644 --- a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala +++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala index 411d6e01382f..db8c265a1cef 100644 --- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala +++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala index d165840aa385..74dc92927ca9 100644 --- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/CapturedVariables.scala b/src/reflect/scala/reflect/internal/CapturedVariables.scala index ef9646b80fa2..aed2acd935f5 100644 --- a/src/reflect/scala/reflect/internal/CapturedVariables.scala +++ b/src/reflect/scala/reflect/internal/CapturedVariables.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/Chars.scala b/src/reflect/scala/reflect/internal/Chars.scala index daee8a49ee03..b1f9203daaa9 100644 --- a/src/reflect/scala/reflect/internal/Chars.scala +++ b/src/reflect/scala/reflect/internal/Chars.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2006-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/ClassfileConstants.scala b/src/reflect/scala/reflect/internal/ClassfileConstants.scala index a4223c1cb500..f9fe73f093ac 100644 --- a/src/reflect/scala/reflect/internal/ClassfileConstants.scala +++ b/src/reflect/scala/reflect/internal/ClassfileConstants.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/Constants.scala b/src/reflect/scala/reflect/internal/Constants.scala index 89ee962d452d..5143c8102e50 100644 --- a/src/reflect/scala/reflect/internal/Constants.scala +++ b/src/reflect/scala/reflect/internal/Constants.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index bf490bb5e2cd..c071332ed2fb 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala diff --git a/src/reflect/scala/reflect/internal/Depth.scala b/src/reflect/scala/reflect/internal/Depth.scala index b9388db21221..b6e4a1ef64aa 100644 --- a/src/reflect/scala/reflect/internal/Depth.scala +++ b/src/reflect/scala/reflect/internal/Depth.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala index f9b9c8bd9f50..776f4e31fa65 100644 --- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala +++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala @@ -1,6 +1,13 @@ -/* NSC -- new scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/FatalError.scala b/src/reflect/scala/reflect/internal/FatalError.scala index 08a9a635afca..759acd116f37 100644 --- a/src/reflect/scala/reflect/internal/FatalError.scala +++ b/src/reflect/scala/reflect/internal/FatalError.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala package reflect.internal case class FatalError(msg: String) extends Exception(msg) diff --git a/src/reflect/scala/reflect/internal/FlagSets.scala b/src/reflect/scala/reflect/internal/FlagSets.scala index b6521634fbac..2593fbeb71b3 100644 --- a/src/reflect/scala/reflect/internal/FlagSets.scala +++ b/src/reflect/scala/reflect/internal/FlagSets.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/Flags.scala b/src/reflect/scala/reflect/internal/Flags.scala index 77b733098d77..ee64912ac093 100644 --- a/src/reflect/scala/reflect/internal/Flags.scala +++ b/src/reflect/scala/reflect/internal/Flags.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala diff --git a/src/reflect/scala/reflect/internal/FreshNames.scala b/src/reflect/scala/reflect/internal/FreshNames.scala index 9c69153769b6..e59c7781b8d0 100644 --- a/src/reflect/scala/reflect/internal/FreshNames.scala +++ b/src/reflect/scala/reflect/internal/FreshNames.scala @@ -1,5 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/HasFlags.scala b/src/reflect/scala/reflect/internal/HasFlags.scala index dfada48c5e5d..b298a6954af1 100644 --- a/src/reflect/scala/reflect/internal/HasFlags.scala +++ b/src/reflect/scala/reflect/internal/HasFlags.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/Importers.scala b/src/reflect/scala/reflect/internal/Importers.scala index c56f2b26b2d9..93c0093b6e8c 100644 --- a/src/reflect/scala/reflect/internal/Importers.scala +++ b/src/reflect/scala/reflect/internal/Importers.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/InfoTransformers.scala b/src/reflect/scala/reflect/internal/InfoTransformers.scala index 3814259e22f9..8023f9f8fb32 100644 --- a/src/reflect/scala/reflect/internal/InfoTransformers.scala +++ b/src/reflect/scala/reflect/internal/InfoTransformers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/Internals.scala b/src/reflect/scala/reflect/internal/Internals.scala index a07441e3ca21..31f97bb0df08 100644 --- a/src/reflect/scala/reflect/internal/Internals.scala +++ b/src/reflect/scala/reflect/internal/Internals.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/JDK9Reflectors.java b/src/reflect/scala/reflect/internal/JDK9Reflectors.java index 6112cbaf062c..e0deddf114a7 100644 --- a/src/reflect/scala/reflect/internal/JDK9Reflectors.java +++ b/src/reflect/scala/reflect/internal/JDK9Reflectors.java @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.internal; import java.io.IOException; diff --git a/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala b/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala index b9cc16793394..687a6ed8cd81 100644 --- a/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala +++ b/src/reflect/scala/reflect/internal/JMethodOrConstructor.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/JavaAccFlags.scala b/src/reflect/scala/reflect/internal/JavaAccFlags.scala index 4cc57c9280e3..8b07833c213c 100644 --- a/src/reflect/scala/reflect/internal/JavaAccFlags.scala +++ b/src/reflect/scala/reflect/internal/JavaAccFlags.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2017 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/Kinds.scala b/src/reflect/scala/reflect/internal/Kinds.scala index 0c8f22b0ca93..f8fb514936c9 100644 --- a/src/reflect/scala/reflect/internal/Kinds.scala +++ b/src/reflect/scala/reflect/internal/Kinds.scala @@ -1,6 +1,13 @@ -/* NSC -- new scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala index 6b1063ccd9ba..befaa49175a1 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/MissingRequirementError.scala b/src/reflect/scala/reflect/internal/MissingRequirementError.scala index 66dbf535d7bb..b31cfc41eed7 100644 --- a/src/reflect/scala/reflect/internal/MissingRequirementError.scala +++ b/src/reflect/scala/reflect/internal/MissingRequirementError.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/Mode.scala b/src/reflect/scala/reflect/internal/Mode.scala index 557ec9c16287..50e80d488429 100644 --- a/src/reflect/scala/reflect/internal/Mode.scala +++ b/src/reflect/scala/reflect/internal/Mode.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index eaffadb6b96c..fc6596a52c3f 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/Phase.scala b/src/reflect/scala/reflect/internal/Phase.scala index aa3ce8387247..7fee98e30507 100644 --- a/src/reflect/scala/reflect/internal/Phase.scala +++ b/src/reflect/scala/reflect/internal/Phase.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/Positions.scala b/src/reflect/scala/reflect/internal/Positions.scala index f7c488c7d365..341fafed83a7 100644 --- a/src/reflect/scala/reflect/internal/Positions.scala +++ b/src/reflect/scala/reflect/internal/Positions.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/Precedence.scala b/src/reflect/scala/reflect/internal/Precedence.scala index 1430838b9de6..8395e4f885f4 100644 --- a/src/reflect/scala/reflect/internal/Precedence.scala +++ b/src/reflect/scala/reflect/internal/Precedence.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/Printers.scala b/src/reflect/scala/reflect/internal/Printers.scala index 15773728fb15..b1b47d4bd4c2 100644 --- a/src/reflect/scala/reflect/internal/Printers.scala +++ b/src/reflect/scala/reflect/internal/Printers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ // todo. we need to unify this prettyprinter with NodePrinters diff --git a/src/reflect/scala/reflect/internal/PrivateWithin.scala b/src/reflect/scala/reflect/internal/PrivateWithin.scala index 8d81963d1322..841baa3b1109 100644 --- a/src/reflect/scala/reflect/internal/PrivateWithin.scala +++ b/src/reflect/scala/reflect/internal/PrivateWithin.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/ReificationSupport.scala b/src/reflect/scala/reflect/internal/ReificationSupport.scala index 28b01eb59906..0fe6038128d1 100644 --- a/src/reflect/scala/reflect/internal/ReificationSupport.scala +++ b/src/reflect/scala/reflect/internal/ReificationSupport.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/Reporting.scala b/src/reflect/scala/reflect/internal/Reporting.scala index 56a627f41724..37a7025f5071 100644 --- a/src/reflect/scala/reflect/internal/Reporting.scala +++ b/src/reflect/scala/reflect/internal/Reporting.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL, Typesafe Inc. - * @author Adriaan Moors +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala diff --git a/src/reflect/scala/reflect/internal/Required.scala b/src/reflect/scala/reflect/internal/Required.scala index 009bc39d4ce2..a22a11eaf453 100644 --- a/src/reflect/scala/reflect/internal/Required.scala +++ b/src/reflect/scala/reflect/internal/Required.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 0c5bf0be4534..4500b0906929 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/StdAttachments.scala b/src/reflect/scala/reflect/internal/StdAttachments.scala index e704632b4991..8384b12e96d8 100644 --- a/src/reflect/scala/reflect/internal/StdAttachments.scala +++ b/src/reflect/scala/reflect/internal/StdAttachments.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/StdCreators.scala b/src/reflect/scala/reflect/internal/StdCreators.scala index a0084dc95cbf..24c50aa5f379 100644 --- a/src/reflect/scala/reflect/internal/StdCreators.scala +++ b/src/reflect/scala/reflect/internal/StdCreators.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index a4bad5789371..da4138fa45b7 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala diff --git a/src/reflect/scala/reflect/internal/SymbolPairs.scala b/src/reflect/scala/reflect/internal/SymbolPairs.scala index 35c0f621deb5..98c0056d3c84 100644 --- a/src/reflect/scala/reflect/internal/SymbolPairs.scala +++ b/src/reflect/scala/reflect/internal/SymbolPairs.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 93ff7dcf7d24..99fd5edd7ac7 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -1,6 +1,13 @@ -/* NSC -- new scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index d4f94a77cc6e..2a59046ab4c9 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1,4 +1,16 @@ - /* NSC -- new Scala compiler +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +/* NSC -- new Scala compiler * Copyright 2005-2013 LAMP/EPFL * @author Martin Odersky */ diff --git a/src/reflect/scala/reflect/internal/TreeGen.scala b/src/reflect/scala/reflect/internal/TreeGen.scala index 6a5d1ca4c4f5..def719082754 100644 --- a/src/reflect/scala/reflect/internal/TreeGen.scala +++ b/src/reflect/scala/reflect/internal/TreeGen.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/TreeInfo.scala b/src/reflect/scala/reflect/internal/TreeInfo.scala index 4e62da7650af..4e4bd2d80adf 100644 --- a/src/reflect/scala/reflect/internal/TreeInfo.scala +++ b/src/reflect/scala/reflect/internal/TreeInfo.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index f94e16a0afb7..ba4cb968fd94 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/TypeDebugging.scala b/src/reflect/scala/reflect/internal/TypeDebugging.scala index 58359e66d92a..56a4a52d16f1 100644 --- a/src/reflect/scala/reflect/internal/TypeDebugging.scala +++ b/src/reflect/scala/reflect/internal/TypeDebugging.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index e7e4840b050c..0eb82db019a5 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/Variance.scala b/src/reflect/scala/reflect/internal/Variance.scala index ecc5d99a4085..fee270d6c50f 100644 --- a/src/reflect/scala/reflect/internal/Variance.scala +++ b/src/reflect/scala/reflect/internal/Variance.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala index 1c9c2ca5c6c5..bbdb8d28a98e 100644 --- a/src/reflect/scala/reflect/internal/Variances.scala +++ b/src/reflect/scala/reflect/internal/Variances.scala @@ -1,6 +1,13 @@ -/* NSC -- new scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala diff --git a/src/reflect/scala/reflect/internal/annotations/package.scala b/src/reflect/scala/reflect/internal/annotations/package.scala index 8a42f1479d96..46cdb8914554 100644 --- a/src/reflect/scala/reflect/internal/annotations/package.scala +++ b/src/reflect/scala/reflect/internal/annotations/package.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.internal package object annotations { diff --git a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala index f1227a4349e4..8932a31fd9cc 100644 --- a/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala +++ b/src/reflect/scala/reflect/internal/annotations/uncheckedBounds.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect package internal package annotations diff --git a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala index 241638e88e31..fdc6a9b233c5 100644 --- a/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala +++ b/src/reflect/scala/reflect/internal/pickling/ByteCodecs.scala @@ -1,10 +1,15 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2007-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect.internal.pickling diff --git a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala index a814256f8e84..3a633ab84bf8 100644 --- a/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala +++ b/src/reflect/scala/reflect/internal/pickling/PickleBuffer.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala index ce0ceec688dc..1d15546c97d9 100644 --- a/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala +++ b/src/reflect/scala/reflect/internal/pickling/PickleFormat.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/pickling/Translations.scala b/src/reflect/scala/reflect/internal/pickling/Translations.scala index d924cb3a0c7c..dcd4bb9298db 100644 --- a/src/reflect/scala/reflect/internal/pickling/Translations.scala +++ b/src/reflect/scala/reflect/internal/pickling/Translations.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 2710bbca34b3..7b82aa3e9f24 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/settings/AbsSettings.scala b/src/reflect/scala/reflect/internal/settings/AbsSettings.scala index 859f703d97b2..03c4dea76bc9 100644 --- a/src/reflect/scala/reflect/internal/settings/AbsSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/AbsSettings.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala index 5ef0de9022ba..068dd680c99e 100644 --- a/src/reflect/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/reflect/scala/reflect/internal/settings/MutableSettings.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ + // $Id$ package scala diff --git a/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala index f8799604075d..7c0d353e4479 100644 --- a/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala +++ b/src/reflect/scala/reflect/internal/tpe/CommonOwners.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala index cbf87fc0c615..85be6f12f34a 100644 --- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala +++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2014 LAMP/EPFL - * @author Jason Zaugg +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.reflect.internal package tpe diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index 814e1640e0b6..f5c89217953c 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index 9a44a6d30041..c481ae38fa00 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala index 2697824fd584..9fd742c2eb03 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index eeb7672950a9..3f4449a0bc50 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala index a062fc820910..454165a9eeda 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeToStrings.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/transform/Erasure.scala b/src/reflect/scala/reflect/internal/transform/Erasure.scala index aab6d72e7493..5a77d1be1d53 100644 --- a/src/reflect/scala/reflect/internal/transform/Erasure.scala +++ b/src/reflect/scala/reflect/internal/transform/Erasure.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/transform/PostErasure.scala b/src/reflect/scala/reflect/internal/transform/PostErasure.scala index dd4f0448182f..724c6d17180f 100644 --- a/src/reflect/scala/reflect/internal/transform/PostErasure.scala +++ b/src/reflect/scala/reflect/internal/transform/PostErasure.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect package internal package transform diff --git a/src/reflect/scala/reflect/internal/transform/Transforms.scala b/src/reflect/scala/reflect/internal/transform/Transforms.scala index 28da7e84fdba..e611a232fcb1 100644 --- a/src/reflect/scala/reflect/internal/transform/Transforms.scala +++ b/src/reflect/scala/reflect/internal/transform/Transforms.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala index aa0b4d4fc71c..b86e74e83aa0 100644 --- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala +++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala index 49ab0cb30e9b..0375bde1639f 100644 --- a/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala +++ b/src/reflect/scala/reflect/internal/util/AbstractFileClassLoader.scala @@ -1,5 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java b/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java index 6001c6fb73bd..415f91f9a8ff 100644 --- a/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java +++ b/src/reflect/scala/reflect/internal/util/AlmostFinalValue.java @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.internal.util; import java.lang.invoke.MethodHandle; diff --git a/src/reflect/scala/reflect/internal/util/BooleanContainer.java b/src/reflect/scala/reflect/internal/util/BooleanContainer.java index 394c25055408..dd1d9cfd826e 100644 --- a/src/reflect/scala/reflect/internal/util/BooleanContainer.java +++ b/src/reflect/scala/reflect/internal/util/BooleanContainer.java @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.internal.util; /** diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index 970a5d300f8f..1ae031595b9a 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala diff --git a/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala b/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala index 83fbf251bad5..bcf13f181d78 100644 --- a/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala +++ b/src/reflect/scala/reflect/internal/util/FreshNameCreator.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.reflect.internal diff --git a/src/reflect/scala/reflect/internal/util/HashSet.scala b/src/reflect/scala/reflect/internal/util/HashSet.scala index b4178e055d97..42c7f9256aed 100644 --- a/src/reflect/scala/reflect/internal/util/HashSet.scala +++ b/src/reflect/scala/reflect/internal/util/HashSet.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/util/JavaClearable.scala b/src/reflect/scala/reflect/internal/util/JavaClearable.scala index 10de913c8f24..2b287ea927bd 100644 --- a/src/reflect/scala/reflect/internal/util/JavaClearable.scala +++ b/src/reflect/scala/reflect/internal/util/JavaClearable.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.internal.util import java.lang.ref.WeakReference diff --git a/src/reflect/scala/reflect/internal/util/Origins.scala b/src/reflect/scala/reflect/internal/util/Origins.scala index 4c425457a784..5f3e49e30180 100644 --- a/src/reflect/scala/reflect/internal/util/Origins.scala +++ b/src/reflect/scala/reflect/internal/util/Origins.scala @@ -1,6 +1,13 @@ -/* NSC -- new scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala index ece34966a44b..27891f58124e 100644 --- a/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala +++ b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2017 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.reflect.internal.util import java.nio.ByteBuffer diff --git a/src/reflect/scala/reflect/internal/util/Position.scala b/src/reflect/scala/reflect/internal/util/Position.scala index 05577cba9b38..168b3ae49d9b 100644 --- a/src/reflect/scala/reflect/internal/util/Position.scala +++ b/src/reflect/scala/reflect/internal/util/Position.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala index c18a54e014aa..9913f158f716 100644 --- a/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala +++ b/src/reflect/scala/reflect/internal/util/ScalaClassLoader.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/util/Set.scala b/src/reflect/scala/reflect/internal/util/Set.scala index 635bfb05e404..4728f7ddc339 100644 --- a/src/reflect/scala/reflect/internal/util/Set.scala +++ b/src/reflect/scala/reflect/internal/util/Set.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala package reflect.internal.util diff --git a/src/reflect/scala/reflect/internal/util/SourceFile.scala b/src/reflect/scala/reflect/internal/util/SourceFile.scala index 18deb7d139ab..f16fe96247ac 100644 --- a/src/reflect/scala/reflect/internal/util/SourceFile.scala +++ b/src/reflect/scala/reflect/internal/util/SourceFile.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2018 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/internal/util/Statistics.scala b/src/reflect/scala/reflect/internal/util/Statistics.scala index e4a3f6f64ffb..f3dc3cc57cae 100644 --- a/src/reflect/scala/reflect/internal/util/Statistics.scala +++ b/src/reflect/scala/reflect/internal/util/Statistics.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect.internal.util diff --git a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java index 77b1a5a0deaa..dc9021471d87 100644 --- a/src/reflect/scala/reflect/internal/util/StatisticsStatics.java +++ b/src/reflect/scala/reflect/internal/util/StatisticsStatics.java @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.internal.util; import scala.reflect.internal.util.AlmostFinalValue; diff --git a/src/reflect/scala/reflect/internal/util/StringOps.scala b/src/reflect/scala/reflect/internal/util/StringOps.scala index 2fee6b0f82e9..c07e59804b9f 100644 --- a/src/reflect/scala/reflect/internal/util/StringOps.scala +++ b/src/reflect/scala/reflect/internal/util/StringOps.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package reflect diff --git a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala index 35858cdc780d..c07e8c002c57 100644 --- a/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala +++ b/src/reflect/scala/reflect/internal/util/StripMarginInterpolator.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/util/TableDef.scala b/src/reflect/scala/reflect/internal/util/TableDef.scala index 8708442c853f..b6e98c6eedee 100644 --- a/src/reflect/scala/reflect/internal/util/TableDef.scala +++ b/src/reflect/scala/reflect/internal/util/TableDef.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect.internal.util diff --git a/src/reflect/scala/reflect/internal/util/ThreeValues.scala b/src/reflect/scala/reflect/internal/util/ThreeValues.scala index 18410510cb74..bbc75dc1e0a4 100644 --- a/src/reflect/scala/reflect/internal/util/ThreeValues.scala +++ b/src/reflect/scala/reflect/internal/util/ThreeValues.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect.internal.util diff --git a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala index e48c35908f68..1e6236b49b38 100644 --- a/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala +++ b/src/reflect/scala/reflect/internal/util/TraceSymbolActivity.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect.internal package util diff --git a/src/reflect/scala/reflect/internal/util/TriState.scala b/src/reflect/scala/reflect/internal/util/TriState.scala index 4074d974d2f0..384fabe6a1ca 100644 --- a/src/reflect/scala/reflect/internal/util/TriState.scala +++ b/src/reflect/scala/reflect/internal/util/TriState.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala index 422a43a365d5..f45c8dcf2a97 100644 --- a/src/reflect/scala/reflect/internal/util/WeakHashSet.scala +++ b/src/reflect/scala/reflect/internal/util/WeakHashSet.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect.internal.util diff --git a/src/reflect/scala/reflect/internal/util/package.scala b/src/reflect/scala/reflect/internal/util/package.scala index 9b5fd3798d70..7dd8899e004a 100644 --- a/src/reflect/scala/reflect/internal/util/package.scala +++ b/src/reflect/scala/reflect/internal/util/package.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package internal diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index 4b627a836119..714f4f4b5274 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -1,9 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ - package scala package reflect package io diff --git a/src/reflect/scala/reflect/io/Directory.scala b/src/reflect/scala/reflect/io/Directory.scala index 2b965e6d6951..24415a3cdacb 100644 --- a/src/reflect/scala/reflect/io/Directory.scala +++ b/src/reflect/scala/reflect/io/Directory.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package reflect diff --git a/src/reflect/scala/reflect/io/File.scala b/src/reflect/scala/reflect/io/File.scala index 206861adb37c..a091b3c45dc5 100644 --- a/src/reflect/scala/reflect/io/File.scala +++ b/src/reflect/scala/reflect/io/File.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package reflect diff --git a/src/reflect/scala/reflect/io/FileOperationException.scala b/src/reflect/scala/reflect/io/FileOperationException.scala index fdfe0234e097..49430c6428cb 100644 --- a/src/reflect/scala/reflect/io/FileOperationException.scala +++ b/src/reflect/scala/reflect/io/FileOperationException.scala @@ -1,11 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2003-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package reflect diff --git a/src/reflect/scala/reflect/io/IOStats.scala b/src/reflect/scala/reflect/io/IOStats.scala index 0e4b9690cabd..fd3b6f6f7917 100644 --- a/src/reflect/scala/reflect/io/IOStats.scala +++ b/src/reflect/scala/reflect/io/IOStats.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package reflect.io diff --git a/src/reflect/scala/reflect/io/NoAbstractFile.scala b/src/reflect/scala/reflect/io/NoAbstractFile.scala index 18eca7698d27..3183a1d53e39 100644 --- a/src/reflect/scala/reflect/io/NoAbstractFile.scala +++ b/src/reflect/scala/reflect/io/NoAbstractFile.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/io/Path.scala b/src/reflect/scala/reflect/io/Path.scala index 14c1ebb2b5db..ea4f4d4a8531 100644 --- a/src/reflect/scala/reflect/io/Path.scala +++ b/src/reflect/scala/reflect/io/Path.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala index 1b8b72fc78d6..75ba6e852023 100644 --- a/src/reflect/scala/reflect/io/PlainFile.scala +++ b/src/reflect/scala/reflect/io/PlainFile.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/io/Streamable.scala b/src/reflect/scala/reflect/io/Streamable.scala index bc4031ca9b8d..beda92614dab 100644 --- a/src/reflect/scala/reflect/io/Streamable.scala +++ b/src/reflect/scala/reflect/io/Streamable.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/io/VirtualDirectory.scala b/src/reflect/scala/reflect/io/VirtualDirectory.scala index aa6ceaa09fab..7cc558b6647f 100644 --- a/src/reflect/scala/reflect/io/VirtualDirectory.scala +++ b/src/reflect/scala/reflect/io/VirtualDirectory.scala @@ -1,5 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala diff --git a/src/reflect/scala/reflect/io/VirtualFile.scala b/src/reflect/scala/reflect/io/VirtualFile.scala index 1cb4f2fe6f5a..41652f1ae9b6 100644 --- a/src/reflect/scala/reflect/io/VirtualFile.scala +++ b/src/reflect/scala/reflect/io/VirtualFile.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index a7f74724491b..ee109799f3d5 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/macros/Aliases.scala b/src/reflect/scala/reflect/macros/Aliases.scala index 64819a860124..b03a7067e1ca 100644 --- a/src/reflect/scala/reflect/macros/Aliases.scala +++ b/src/reflect/scala/reflect/macros/Aliases.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package macros diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala index 09ba1c16bf7a..15dc568b8eef 100644 --- a/src/reflect/scala/reflect/macros/Attachments.scala +++ b/src/reflect/scala/reflect/macros/Attachments.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package macros diff --git a/src/reflect/scala/reflect/macros/Enclosures.scala b/src/reflect/scala/reflect/macros/Enclosures.scala index 798fed2a1536..362600b665d5 100644 --- a/src/reflect/scala/reflect/macros/Enclosures.scala +++ b/src/reflect/scala/reflect/macros/Enclosures.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package reflect package macros diff --git a/src/reflect/scala/reflect/macros/Evals.scala b/src/reflect/scala/reflect/macros/Evals.scala index 9b6223a44015..42350b075d69 100644 --- a/src/reflect/scala/reflect/macros/Evals.scala +++ b/src/reflect/scala/reflect/macros/Evals.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package macros diff --git a/src/reflect/scala/reflect/macros/ExprUtils.scala b/src/reflect/scala/reflect/macros/ExprUtils.scala index 3e2655b722c9..6cd146627867 100644 --- a/src/reflect/scala/reflect/macros/ExprUtils.scala +++ b/src/reflect/scala/reflect/macros/ExprUtils.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package macros diff --git a/src/reflect/scala/reflect/macros/FrontEnds.scala b/src/reflect/scala/reflect/macros/FrontEnds.scala index 8ad41382a805..ab59cf1dca43 100644 --- a/src/reflect/scala/reflect/macros/FrontEnds.scala +++ b/src/reflect/scala/reflect/macros/FrontEnds.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package macros diff --git a/src/reflect/scala/reflect/macros/Infrastructure.scala b/src/reflect/scala/reflect/macros/Infrastructure.scala index 0f2d9ce4cf84..d61e26040a04 100644 --- a/src/reflect/scala/reflect/macros/Infrastructure.scala +++ b/src/reflect/scala/reflect/macros/Infrastructure.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package macros diff --git a/src/reflect/scala/reflect/macros/Internals.scala b/src/reflect/scala/reflect/macros/Internals.scala index 75164344daa6..fae9d3b5ddb9 100644 --- a/src/reflect/scala/reflect/macros/Internals.scala +++ b/src/reflect/scala/reflect/macros/Internals.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package macros diff --git a/src/reflect/scala/reflect/macros/Names.scala b/src/reflect/scala/reflect/macros/Names.scala index 0be3b129e6eb..79a3d90c44a3 100644 --- a/src/reflect/scala/reflect/macros/Names.scala +++ b/src/reflect/scala/reflect/macros/Names.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package macros diff --git a/src/reflect/scala/reflect/macros/Parsers.scala b/src/reflect/scala/reflect/macros/Parsers.scala index 5fc0fd5078da..5a5a10e4e2fd 100644 --- a/src/reflect/scala/reflect/macros/Parsers.scala +++ b/src/reflect/scala/reflect/macros/Parsers.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package macros diff --git a/src/reflect/scala/reflect/macros/Reifiers.scala b/src/reflect/scala/reflect/macros/Reifiers.scala index e35a5c862238..c2cf2e3bdcd2 100644 --- a/src/reflect/scala/reflect/macros/Reifiers.scala +++ b/src/reflect/scala/reflect/macros/Reifiers.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package macros diff --git a/src/reflect/scala/reflect/macros/Typers.scala b/src/reflect/scala/reflect/macros/Typers.scala index 37a075dc9c4a..4e22608f597d 100644 --- a/src/reflect/scala/reflect/macros/Typers.scala +++ b/src/reflect/scala/reflect/macros/Typers.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package macros diff --git a/src/reflect/scala/reflect/macros/Universe.scala b/src/reflect/scala/reflect/macros/Universe.scala index 51a7566bb811..0757f3e8de94 100644 --- a/src/reflect/scala/reflect/macros/Universe.scala +++ b/src/reflect/scala/reflect/macros/Universe.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package macros diff --git a/src/reflect/scala/reflect/macros/blackbox/Context.scala b/src/reflect/scala/reflect/macros/blackbox/Context.scala index 205e3ad1c37f..3a5d10cd9bde 100644 --- a/src/reflect/scala/reflect/macros/blackbox/Context.scala +++ b/src/reflect/scala/reflect/macros/blackbox/Context.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package reflect package macros diff --git a/src/reflect/scala/reflect/macros/package.scala b/src/reflect/scala/reflect/macros/package.scala index 3bb1bdf7e31c..7118643dd641 100644 --- a/src/reflect/scala/reflect/macros/package.scala +++ b/src/reflect/scala/reflect/macros/package.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect diff --git a/src/reflect/scala/reflect/macros/whitebox/Context.scala b/src/reflect/scala/reflect/macros/whitebox/Context.scala index 272991cba969..690e450c7675 100644 --- a/src/reflect/scala/reflect/macros/whitebox/Context.scala +++ b/src/reflect/scala/reflect/macros/whitebox/Context.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package macros diff --git a/src/reflect/scala/reflect/runtime/Gil.scala b/src/reflect/scala/reflect/runtime/Gil.scala index 0edb1e574826..3443fbe722be 100644 --- a/src/reflect/scala/reflect/runtime/Gil.scala +++ b/src/reflect/scala/reflect/runtime/Gil.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect package runtime diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 436d652a62c6..0160578c0119 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package runtime diff --git a/src/reflect/scala/reflect/runtime/JavaUniverse.scala b/src/reflect/scala/reflect/runtime/JavaUniverse.scala index 81c662d2da89..3c8187a74fbc 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverse.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverse.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package reflect package runtime diff --git a/src/reflect/scala/reflect/runtime/ReflectSetup.scala b/src/reflect/scala/reflect/runtime/ReflectSetup.scala index 6a364ff0be82..3cabaa70f57a 100644 --- a/src/reflect/scala/reflect/runtime/ReflectSetup.scala +++ b/src/reflect/scala/reflect/runtime/ReflectSetup.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package runtime diff --git a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala index dd15a09b7e36..3ecfd00a65c8 100644 --- a/src/reflect/scala/reflect/runtime/ReflectionUtils.scala +++ b/src/reflect/scala/reflect/runtime/ReflectionUtils.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/reflect/scala/reflect/runtime/Settings.scala b/src/reflect/scala/reflect/runtime/Settings.scala index 6b129f6ec51a..85f70d88ee8d 100644 --- a/src/reflect/scala/reflect/runtime/Settings.scala +++ b/src/reflect/scala/reflect/runtime/Settings.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package runtime diff --git a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala index e48517699664..cbef3d3a0b54 100644 --- a/src/reflect/scala/reflect/runtime/SymbolLoaders.scala +++ b/src/reflect/scala/reflect/runtime/SymbolLoaders.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package runtime diff --git a/src/reflect/scala/reflect/runtime/SymbolTable.scala b/src/reflect/scala/reflect/runtime/SymbolTable.scala index eee21188982e..0d8a0bfd1a9b 100644 --- a/src/reflect/scala/reflect/runtime/SymbolTable.scala +++ b/src/reflect/scala/reflect/runtime/SymbolTable.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package reflect package runtime diff --git a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala index 1d298f4be9a1..3ce1330008f5 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedOps.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedOps.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package runtime diff --git a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala index aa9aab93d52d..93ee405e04bc 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedSymbols.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package runtime diff --git a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala index 1d02cc7e892b..8e33480d2876 100644 --- a/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala +++ b/src/reflect/scala/reflect/runtime/SynchronizedTypes.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect package runtime diff --git a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala index 586b8a525732..ca99bb48909b 100644 --- a/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala +++ b/src/reflect/scala/reflect/runtime/ThreadLocalStorage.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect package runtime diff --git a/src/reflect/scala/reflect/runtime/TwoWayCache.scala b/src/reflect/scala/reflect/runtime/TwoWayCache.scala index 6c1ca5b571cd..11f617cb9e5d 100644 --- a/src/reflect/scala/reflect/runtime/TwoWayCache.scala +++ b/src/reflect/scala/reflect/runtime/TwoWayCache.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala package reflect package runtime diff --git a/src/reflect/scala/reflect/runtime/TwoWayCaches.scala b/src/reflect/scala/reflect/runtime/TwoWayCaches.scala index 6ce0c0a728ea..9e1e013d39e7 100644 --- a/src/reflect/scala/reflect/runtime/TwoWayCaches.scala +++ b/src/reflect/scala/reflect/runtime/TwoWayCaches.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect package runtime diff --git a/src/reflect/scala/reflect/runtime/package.scala b/src/reflect/scala/reflect/runtime/package.scala index 77eb610a84e7..b4c8149d9d11 100644 --- a/src/reflect/scala/reflect/runtime/package.scala +++ b/src/reflect/scala/reflect/runtime/package.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala package reflect diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala index 3dc6f01c0a69..e7ad1bf9693d 100644 --- a/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala +++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/FileBackedHistory.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2015 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.interpreter.jline diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala index 89e849429de4..27f68bc111fa 100644 --- a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala +++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineDelimiter.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.interpreter.jline diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineHistory.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineHistory.scala index 3bc259252abf..ac8dc2e2e906 100644 --- a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineHistory.scala +++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineHistory.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.interpreter.jline diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala index dc04230d0b29..68c21c69e258 100644 --- a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala +++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala @@ -1,9 +1,14 @@ -/** NSC -- new Scala compiler - * - * Copyright 2005-2015 LAMP/EPFL - * @author Stepan Koltsov - * @author Adriaan Moors - */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.nsc.interpreter.jline diff --git a/src/repl/scala/tools/nsc/Interpreter.scala b/src/repl/scala/tools/nsc/Interpreter.scala index 434f19f21b04..2f7e724eb31f 100644 --- a/src/repl/scala/tools/nsc/Interpreter.scala +++ b/src/repl/scala/tools/nsc/Interpreter.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc import interpreter._ diff --git a/src/repl/scala/tools/nsc/InterpreterLoop.scala b/src/repl/scala/tools/nsc/InterpreterLoop.scala index 1dcc36174eb1..80a777339443 100644 --- a/src/repl/scala/tools/nsc/InterpreterLoop.scala +++ b/src/repl/scala/tools/nsc/InterpreterLoop.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc import interpreter._ diff --git a/src/repl/scala/tools/nsc/MainGenericRunner.scala b/src/repl/scala/tools/nsc/MainGenericRunner.scala index 44a9fc728194..6013d41f194f 100644 --- a/src/repl/scala/tools/nsc/MainGenericRunner.scala +++ b/src/repl/scala/tools/nsc/MainGenericRunner.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2006-2013 LAMP/EPFL - * @author Lex Spoon +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala index 712219533d9b..e154335e7ff9 100644 --- a/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala +++ b/src/repl/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.tools.nsc package interpreter diff --git a/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala index df49e6a2e471..3f4b51d7d19b 100644 --- a/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala +++ b/src/repl/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/CommandLine.scala b/src/repl/scala/tools/nsc/interpreter/CommandLine.scala index 0ab92ab769f0..32aa8ae9275e 100644 --- a/src/repl/scala/tools/nsc/interpreter/CommandLine.scala +++ b/src/repl/scala/tools/nsc/interpreter/CommandLine.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Lex Spoon +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/Completion.scala b/src/repl/scala/tools/nsc/interpreter/Completion.scala index fa937d3067d7..aef8079b7ace 100644 --- a/src/repl/scala/tools/nsc/interpreter/Completion.scala +++ b/src/repl/scala/tools/nsc/interpreter/Completion.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala index 335ffe630d95..ad47c7c2a7ed 100644 --- a/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala +++ b/src/repl/scala/tools/nsc/interpreter/ExprTyper.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/IBindings.java b/src/repl/scala/tools/nsc/interpreter/IBindings.java index b4cee4b9571f..abe0267375c8 100644 --- a/src/repl/scala/tools/nsc/interpreter/IBindings.java +++ b/src/repl/scala/tools/nsc/interpreter/IBindings.java @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Raphael Jolly +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.interpreter; diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index 804915dd7a91..a32e2aa02ee7 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2016 LAMP/EPFL - * @author Alexander Spoon +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala package tools.nsc package interpreter diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index fbc6e137d0c1..764bb4d48543 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2016 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/repl/scala/tools/nsc/interpreter/ISettings.scala b/src/repl/scala/tools/nsc/interpreter/ISettings.scala index 9541d08db15c..aa2a79bc1fd5 100644 --- a/src/repl/scala/tools/nsc/interpreter/ISettings.scala +++ b/src/repl/scala/tools/nsc/interpreter/ISettings.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Alexander Spoon +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/Imports.scala b/src/repl/scala/tools/nsc/interpreter/Imports.scala index 0cda9c4da3cb..38a391f769b7 100644 --- a/src/repl/scala/tools/nsc/interpreter/Imports.scala +++ b/src/repl/scala/tools/nsc/interpreter/Imports.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala index 7af491b390dc..e3f8ae991dbc 100644 --- a/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala +++ b/src/repl/scala/tools/nsc/interpreter/InteractiveReader.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Stepan Koltsov +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala index 034437fe5c24..c91263ea2538 100644 --- a/src/repl/scala/tools/nsc/interpreter/JavapClass.scala +++ b/src/repl/scala/tools/nsc/interpreter/JavapClass.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala package tools.nsc package interpreter diff --git a/src/repl/scala/tools/nsc/interpreter/Logger.scala b/src/repl/scala/tools/nsc/interpreter/Logger.scala index 7407daf8d06a..13be296729f6 100644 --- a/src/repl/scala/tools/nsc/interpreter/Logger.scala +++ b/src/repl/scala/tools/nsc/interpreter/Logger.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala index fb2a1d54fafe..04ee11fbad72 100644 --- a/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala +++ b/src/repl/scala/tools/nsc/interpreter/LoopCommands.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala index f455e71476e1..058bfc756d07 100644 --- a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala +++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/NamedParam.scala b/src/repl/scala/tools/nsc/interpreter/NamedParam.scala index d59b07a45252..984288aa8389 100644 --- a/src/repl/scala/tools/nsc/interpreter/NamedParam.scala +++ b/src/repl/scala/tools/nsc/interpreter/NamedParam.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/Naming.scala b/src/repl/scala/tools/nsc/interpreter/Naming.scala index e09c6f315e78..fc736fd880b3 100644 --- a/src/repl/scala/tools/nsc/interpreter/Naming.scala +++ b/src/repl/scala/tools/nsc/interpreter/Naming.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/repl/scala/tools/nsc/interpreter/Parsed.scala b/src/repl/scala/tools/nsc/interpreter/Parsed.scala index 5e58d3a2c4b4..eb32618c34dd 100644 --- a/src/repl/scala/tools/nsc/interpreter/Parsed.scala +++ b/src/repl/scala/tools/nsc/interpreter/Parsed.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/Pasted.scala b/src/repl/scala/tools/nsc/interpreter/Pasted.scala index 7ab5e5bb4207..0e042078b6b4 100644 --- a/src/repl/scala/tools/nsc/interpreter/Pasted.scala +++ b/src/repl/scala/tools/nsc/interpreter/Pasted.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/Phased.scala b/src/repl/scala/tools/nsc/interpreter/Phased.scala index da77be7a7924..3364a3ffd5a0 100644 --- a/src/repl/scala/tools/nsc/interpreter/Phased.scala +++ b/src/repl/scala/tools/nsc/interpreter/Phased.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/Power.scala b/src/repl/scala/tools/nsc/interpreter/Power.scala index 7a244056706c..b022ab54c8df 100644 --- a/src/repl/scala/tools/nsc/interpreter/Power.scala +++ b/src/repl/scala/tools/nsc/interpreter/Power.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala index d675563bc9ce..4c7f05318c53 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2015 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc.interpreter import scala.reflect.internal.util.RangePosition diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala index 0ae867637421..e941192a9086 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc.interpreter import scala.reflect.internal.util.StringOps diff --git a/src/repl/scala/tools/nsc/interpreter/ReplConfig.scala b/src/repl/scala/tools/nsc/interpreter/ReplConfig.scala index 046d6ecbfb8f..45715fd338cd 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplConfig.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplConfig.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/ReplDir.scala b/src/repl/scala/tools/nsc/interpreter/ReplDir.scala index 5d386b47b7ce..57a3297594ec 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplDir.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplDir.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala index 0bb9eb6a0b01..1273d6ac92fb 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala index 6aed4a04043d..529e15b02ddd 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplProps.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplProps.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala index 963ab83c8400..448cbb942f18 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplReporter.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2002-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala index 87ca05600c1f..4e5c2dd24969 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplStrings.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/ReplVals.scala b/src/repl/scala/tools/nsc/interpreter/ReplVals.scala index 9346b0553f46..6e8f3b902a47 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplVals.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplVals.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/Results.scala b/src/repl/scala/tools/nsc/interpreter/Results.scala index a4e1e25cbb3e..a9d9dd0b1b7c 100644 --- a/src/repl/scala/tools/nsc/interpreter/Results.scala +++ b/src/repl/scala/tools/nsc/interpreter/Results.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/RichClass.scala b/src/repl/scala/tools/nsc/interpreter/RichClass.scala index df900d143684..441b20af9cc2 100644 --- a/src/repl/scala/tools/nsc/interpreter/RichClass.scala +++ b/src/repl/scala/tools/nsc/interpreter/RichClass.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/Scripted.scala b/src/repl/scala/tools/nsc/interpreter/Scripted.scala index 8d87d98e5343..c3ba908d5a37 100644 --- a/src/repl/scala/tools/nsc/interpreter/Scripted.scala +++ b/src/repl/scala/tools/nsc/interpreter/Scripted.scala @@ -1,6 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2016 LAMP/EPFL +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala package tools.nsc package interpreter diff --git a/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala index 6c37d93e783d..5716944b2bbb 100644 --- a/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala +++ b/src/repl/scala/tools/nsc/interpreter/SimpleReader.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2017 LAMP/EPFL - * @author Stepan Koltsov +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala b/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala index ebbb397a0cba..f3d9fa56b8c5 100644 --- a/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala +++ b/src/repl/scala/tools/nsc/interpreter/StdReplTags.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package interpreter diff --git a/src/repl/scala/tools/nsc/interpreter/Tabulators.scala b/src/repl/scala/tools/nsc/interpreter/Tabulators.scala index 75bec168ebcd..f9694f5af262 100644 --- a/src/repl/scala/tools/nsc/interpreter/Tabulators.scala +++ b/src/repl/scala/tools/nsc/interpreter/Tabulators.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.interpreter diff --git a/src/repl/scala/tools/nsc/interpreter/package.scala b/src/repl/scala/tools/nsc/interpreter/package.scala index f03872fa2289..3e63d850b7e7 100644 --- a/src/repl/scala/tools/nsc/interpreter/package.scala +++ b/src/repl/scala/tools/nsc/interpreter/package.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/session/History.scala b/src/repl/scala/tools/nsc/interpreter/session/History.scala index 2028a13dfd0a..d05a08b86318 100644 --- a/src/repl/scala/tools/nsc/interpreter/session/History.scala +++ b/src/repl/scala/tools/nsc/interpreter/session/History.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala index 92bf9d1df4f1..3fa1d88251ec 100644 --- a/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala +++ b/src/repl/scala/tools/nsc/interpreter/session/SimpleHistory.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/repl/scala/tools/nsc/interpreter/session/package.scala b/src/repl/scala/tools/nsc/interpreter/session/package.scala index 06e7f6207b58..52677c1d2c9d 100644 --- a/src/repl/scala/tools/nsc/interpreter/session/package.scala +++ b/src/repl/scala/tools/nsc/interpreter/session/package.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/ant/Scaladoc.scala b/src/scaladoc/scala/tools/ant/Scaladoc.scala index a285b287e74e..238da7d7218d 100644 --- a/src/scaladoc/scala/tools/ant/Scaladoc.scala +++ b/src/scaladoc/scala/tools/ant/Scaladoc.scala @@ -1,10 +1,14 @@ -/* __ *\ -** ________ ___ / / ___ Scala Ant Tasks ** -** / __/ __// _ | / / / _ | (c) 2005-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.ant diff --git a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala index e266f7beea03..9472d0be9cca 100644 --- a/src/scaladoc/scala/tools/nsc/ScalaDoc.scala +++ b/src/scaladoc/scala/tools/nsc/ScalaDoc.scala @@ -1,7 +1,13 @@ -/* scaladoc, a documentation generator for Scala - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky - * @author Geoffrey Washburn +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala index 8c646be9c676..d6e40f455617 100644 --- a/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/DocFactory.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author David Bernard, Manohar Jonnalagedda +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/DocParser.scala b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala index f03b848af618..020c978f42d0 100644 --- a/src/scaladoc/scala/tools/nsc/doc/DocParser.scala +++ b/src/scaladoc/scala/tools/nsc/doc/DocParser.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools diff --git a/src/scaladoc/scala/tools/nsc/doc/Index.scala b/src/scaladoc/scala/tools/nsc/doc/Index.scala index a11ca38a866a..90340c44b1f7 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Index.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Index.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.doc diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala index 1524019b7b82..35dcbb7af93d 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala index 6362227c110a..8600eaf27fa2 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocGlobal.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala index 8b83a5071ec5..5b815fa12404 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Settings.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala index d03e54b9cb62..a73b5b3eac17 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Uncompilable.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Paul Phillips +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/Universe.scala b/src/scaladoc/scala/tools/nsc/doc/Universe.scala index edf5112d7b0a..7e7b674c66ff 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Universe.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Universe.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2013 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.doc diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala index 3239735772c2..22ccccdd4775 100644 --- a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2018 LAMP/EPFL - * @author Manohar Jonnalagedda +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala b/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala index 98282d14a7a1..7703c4711d0f 100644 --- a/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/LinkTo.scala @@ -1,5 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala index d7a370927c0d..e6593911a667 100644 --- a/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/MemberLookupBase.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.tools.nsc package doc package base diff --git a/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala index d60aa1be43ca..ed5c51c6a7b9 100644 --- a/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Body.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2018 LAMP/EPFL - * @author Manohar Jonnalagedda +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala b/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala index 07a50516790f..745fe29b11c6 100644 --- a/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/comment/Comment.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Manohar Jonnalagedda +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala index b4ede6d358fe..aa5ac5843a4a 100644 --- a/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala +++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Generator.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.doc package doclet diff --git a/src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala b/src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala index ee8b7809e5b0..7000be250fd9 100644 --- a/src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala +++ b/src/scaladoc/scala/tools/nsc/doc/doclet/Universer.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package doc package doclet diff --git a/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala index 73a854e99503..2e1d196a0294 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/Doclet.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author David Bernard, Manohar Jonnalagedda +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala index 56c55ad2aab7..71c4123b9f80 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author David Bernard, Manohar Jonnalagedda +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala index aafd95ba1ba6..ef5e0cc27b48 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author David Bernard, Manohar Jonnalagedda +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/scaladoc/scala/tools/nsc/doc/html/Page.scala b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala index f5bcf2494124..fd66211e6a17 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/Page.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/Page.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author David Bernard, Manohar Jonnalagedda +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala index 640fda560ef6..f2c8bf967705 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/SyntaxHigh.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2010-2013 LAMP/EPFL - * @author Stephane Micheloud +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala index 527e6edb4323..767a79a97ae2 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2016 LAMP/EPFL - * @author David Bernard, Manohar Jonnalagedda, Felix Mulder +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala index 28304e76c7a0..7ca2cd2be762 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/IndexScript.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2016 LAMP/EPFL - * @author David Bernard, Manohar Jonnalagedda, Felix Mulder +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc.doc diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala index 5f6cb7e7995d..786e0628f848 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/JSON.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.doc.html.page import JSONFormat.format diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala index 829bba3f3285..e9a1fbc81ad2 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramGenerator.scala @@ -1,7 +1,15 @@ -/** - * @author Damien Obrist - * @author Vlad Ureche +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc package doc package html diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala index 12c609af496a..de0bb6871a28 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DiagramStats.scala @@ -1,6 +1,15 @@ -/** - * @author Vlad Ureche +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
*/ + package scala.tools.nsc.doc package html.page.diagram diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala index 686332e9cbe3..de015d0f5bad 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotDiagramGenerator.scala @@ -1,7 +1,15 @@ -/** - * @author Damien Obrist - * @author Vlad Ureche +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala package tools package nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala index 9287bfbc2b0a..735a54e5b4aa 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/diagram/DotRunner.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package doc package html diff --git a/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala index 66ce2137f29e..9e7b69c9773c 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/CommentFactory.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Manohar Jonnalagedda +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala index 33704be43f82..c7f5bfb990c3 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/Entity.scala @@ -1,7 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Manohar Jonnalagedda - * @author Gilles Dubochet +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala index ad53dc6bfaa0..c648008b9977 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/IndexModelFactory.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Pedro Furlanetto +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala diff --git a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala index 719d2a86db23..fc1b7ac46f29 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/MemberLookup.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package doc package model diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala index 918093f302e6..03376d8e9b73 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala @@ -1,4 +1,14 @@ -/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala index 4a282644b07a..f2c3c7fb8eaa 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryImplicitSupport.scala @@ -1,9 +1,13 @@ -/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL +/* + * Scala (https://www.scala-lang.org) * - * This trait finds implicit conversions for a class in the default scope and creates scaladoc entries for each of them. + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). * - * @author Vlad Ureche - * @author Adriaan Moors + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala index ecc5330f016a..805604bfd58f 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala @@ -1,4 +1,14 @@ -/* NSC -- new Scala compiler -- Copyright 2007-2013 LAMP/EPFL */ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.tools.nsc package doc diff --git a/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala index 5b4ec4a40be4..6fc2efe68513 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeEntity.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Chris James +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala index 05843751f622..82d694780584 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/TreeFactory.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc package doc package model diff --git a/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala b/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala index cf5c1fb3fb00..4973426174b3 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/TypeEntity.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Manohar Jonnalagedda +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala b/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala index f712869a4b77..8f5f090fc40c 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ValueArgument.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Gilles Dubochet +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala b/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala index 22580805aa01..12032d3f060a 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/Visibility.scala @@ -1,6 +1,13 @@ -/* NSC -- new Scala compiler - * Copyright 2007-2013 LAMP/EPFL - * @author Gilles Dubochet +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). 
+ * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ package scala.tools.nsc diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala index fa41bb205022..6116d945700d 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/Diagram.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.doc package model package diagram diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala index 464cacc99a8f..7b00acf13453 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramDirectiveParser.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.doc package model package diagram diff --git a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala index f1545a4f335e..4c7d028af0da 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/diagram/DiagramFactory.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc.doc package model package diagram diff --git a/src/scalap/scala/tools/scalap/Arguments.scala b/src/scalap/scala/tools/scalap/Arguments.scala index de9c30b8af07..74759f0c81dd 100644 --- a/src/scalap/scala/tools/scalap/Arguments.scala +++ b/src/scalap/scala/tools/scalap/Arguments.scala @@ -1,10 +1,14 @@ -/* ___ ____ ___ __ ___ ___ -** / _// __// _ | / / / _ | / _ \ Scala classfile decoder -** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL -** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/ -** -*/ - +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.tools.scalap diff --git a/src/scalap/scala/tools/scalap/ByteArrayReader.scala b/src/scalap/scala/tools/scalap/ByteArrayReader.scala index cf160871ddf3..d913c9072e59 100644 --- a/src/scalap/scala/tools/scalap/ByteArrayReader.scala +++ b/src/scalap/scala/tools/scalap/ByteArrayReader.scala @@ -1,10 +1,14 @@ -/* ___ ____ ___ __ ___ ___ -** / _// __// _ | / / / _ | / _ \ Scala classfile decoder -** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL -** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/ -** -*/ - +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package tools.scalap diff --git a/src/scalap/scala/tools/scalap/Classfile.scala b/src/scalap/scala/tools/scalap/Classfile.scala index 9549097ca633..3a2b5f5ba190 100644 --- a/src/scalap/scala/tools/scalap/Classfile.scala +++ b/src/scalap/scala/tools/scalap/Classfile.scala @@ -1,10 +1,14 @@ -/* ___ ____ ___ __ ___ ___ -** / _// __// _ | / / / _ | / _ \ Scala classfile decoder -** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL -** /____/\___/_/ |_/____/_/ |_/_/ -** -*/ - +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.scalap diff --git a/src/scalap/scala/tools/scalap/Classfiles.scala b/src/scalap/scala/tools/scalap/Classfiles.scala index c228b747c8cc..df3403b46079 100644 --- a/src/scalap/scala/tools/scalap/Classfiles.scala +++ b/src/scalap/scala/tools/scalap/Classfiles.scala @@ -1,10 +1,14 @@ -/* ___ ____ ___ __ ___ ___ -** / _// __// _ | / / / _ | / _ \ Scala classfile decoder -** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL -** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/ -** -*/ - +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.scalap diff --git a/src/scalap/scala/tools/scalap/CodeWriter.scala b/src/scalap/scala/tools/scalap/CodeWriter.scala index 07aba0b63256..78e8737331e4 100644 --- a/src/scalap/scala/tools/scalap/CodeWriter.scala +++ b/src/scalap/scala/tools/scalap/CodeWriter.scala @@ -1,10 +1,14 @@ -/* ___ ____ ___ __ ___ ___ -** / _// __// _ | / / / _ | / _ \ Scala classfile decoder -** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL -** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/ -** -*/ - +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package tools.scalap diff --git a/src/scalap/scala/tools/scalap/Decode.scala b/src/scalap/scala/tools/scalap/Decode.scala index 8e63c7f47ff7..acef4413ae82 100644 --- a/src/scalap/scala/tools/scalap/Decode.scala +++ b/src/scalap/scala/tools/scalap/Decode.scala @@ -1,9 +1,14 @@ -/* ___ ____ ___ __ ___ ___ -** / _// __// _ | / / / _ | / _ \ Scala classfile decoder -** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL -** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/ -** -*/ +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ // $Id$ diff --git a/src/scalap/scala/tools/scalap/JavaWriter.scala b/src/scalap/scala/tools/scalap/JavaWriter.scala index 75e2637d567b..21f1f93bc769 100644 --- a/src/scalap/scala/tools/scalap/JavaWriter.scala +++ b/src/scalap/scala/tools/scalap/JavaWriter.scala @@ -1,10 +1,14 @@ -/* ___ ____ ___ __ ___ ___ -** / _// __// _ | / / / _ | / _ \ Scala classfile decoder -** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL -** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/ -** -*/ - +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.scalap diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala index bf18e0bb5438..42b0fdfb236a 100644 --- a/src/scalap/scala/tools/scalap/Main.scala +++ b/src/scalap/scala/tools/scalap/Main.scala @@ -1,9 +1,14 @@ -/* ___ ____ ___ __ ___ ___ -** / _// __// _ | / / / _ | / _ \ Scala classfile decoder -** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL -** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/ -** -*/ +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala package tools.scalap diff --git a/src/scalap/scala/tools/scalap/MetaParser.scala b/src/scalap/scala/tools/scalap/MetaParser.scala index b9dab0ad01ed..12b3f85a8822 100644 --- a/src/scalap/scala/tools/scalap/MetaParser.scala +++ b/src/scalap/scala/tools/scalap/MetaParser.scala @@ -1,10 +1,14 @@ -/* ___ ____ ___ __ ___ ___ -** / _// __// _ | / / / _ | / _ \ Scala classfile decoder -** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL -** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/ -** -*/ - +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala package tools.scalap diff --git a/src/scalap/scala/tools/scalap/Properties.scala b/src/scalap/scala/tools/scalap/Properties.scala index 8f9a9d86064b..5058d9d5932b 100644 --- a/src/scalap/scala/tools/scalap/Properties.scala +++ b/src/scalap/scala/tools/scalap/Properties.scala @@ -1,10 +1,14 @@ -/* ___ ____ ___ __ ___ ___ -** / _// __// _ | / / / _ | / _ \ Scala classfile decoder -** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL -** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/ -** -*/ - +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.scalap diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala index eed76c377454..bf4d81a05231 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ClassFileParser.scala @@ -1,3 +1,15 @@ +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.scalap package scalax package rules diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala index 050317cb8209..e7b7c78a901f 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Flags.scala @@ -1,3 +1,15 @@ +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.scalap package scalax package rules diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala index 2dd9123fff72..b268bd99c9de 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala @@ -1,10 +1,14 @@ -/* ___ ____ ___ __ ___ ___ -** / _// __// _ | / / / _ | / _ \ Scala classfile decoder -** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL -** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/ -** -*/ - +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ package scala.tools.scalap package scalax diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala index a7bf10673967..e3164f75c0d3 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala @@ -1,10 +1,14 @@ -/* ___ ____ ___ __ ___ ___ -** / _// __// _ | / / / _ | / _ \ Scala classfile decoder -** __\ \/ /__/ __ |/ /__/ __ |/ ___/ (c) 2003-2013, LAMP/EPFL -** /____/\___/_/ |_/____/_/ |_/_/ http://scala-lang.org/ -** -*/ - +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ package scala.tools.scalap package scalax diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala index 0595234addad..8b5616b36923 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/SourceFileAttributeParser.scala @@ -1,3 +1,15 @@ +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.scalap package scalax package rules diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala index 6c38687649bd..b72e73acbe64 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala @@ -1,3 +1,15 @@ +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.scalap package scalax package rules diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala index 22d90325cecc..85bf97543c8e 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Type.scala @@ -1,3 +1,15 @@ +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + package scala.tools.scalap package scalax package rules diff --git a/src/scalap/scala/tools/scalap/scalax/util/StringUtil.scala b/src/scalap/scala/tools/scalap/scalax/util/StringUtil.scala index fa9fe51f37af..08c689b57fbd 100644 --- a/src/scalap/scala/tools/scalap/scalax/util/StringUtil.scala +++ b/src/scalap/scala/tools/scalap/scalax/util/StringUtil.scala @@ -1,3 +1,15 @@ +/* + * Scala classfile decoder (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.scalap package scalax package util From d6d3a07874adb4b2a5281e1bcc08b31d91feae6f Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Wed, 10 Oct 2018 12:25:54 -0700 Subject: [PATCH 1520/2793] [no-merge] Make xml pos consistent with scanner after resume XML parser uses current offset to compute end of token offset, which is wrong. This commit makes sure at least to back up to the CR of a line ending so that the position is contained by the position used by scanner after scanner.resume. nextToken adjusts the position of the line ending. --- .../tools/nsc/ast/parser/MarkupParsers.scala | 12 +++++-- .../scala/tools/nsc/parser/ParserTest.scala | 32 +++++++++++++++++++ 2 files changed, 41 insertions(+), 3 deletions(-) create mode 100644 test/junit/scala/tools/nsc/parser/ParserTest.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala index 46d533b03727..153a3179f1ea 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/MarkupParsers.scala @@ -61,12 +61,18 @@ trait MarkupParsers { else reportSyntaxError(msg) var input : CharArrayReader = _ + def lookahead(): BufferedIterator[Char] = (input.buf drop input.charOffset).iterator.buffered import parser.{ symbXMLBuilder => handle, o2p, r2p } - def curOffset : Int = input.charOffset - 1 + // consistent with scanner.nextToken in CRNL handling, + // but curOffset does not report correct position for last token (compare lastOffset) + def curOffset: Int = { + val res = input.charOffset - 1 + if (res > 0 && input.buf(res) == '\n' && input.buf(res-1) == '\r') res - 1 else res + } var tmppos : Position = NoPosition def ch = input.ch /** this method assign the next character to ch and advances in input */ @@ -350,12 +356,13 @@ trait MarkupParsers { /** Use a lookahead parser to run speculative body, and return the first char afterward. */ private def charComingAfter(body: => Unit): Char = { + val saved = input try { input = input.lookaheadReader body ch } - finally input = parser.in + finally input = saved } /** xLiteral = element { element } @@ -368,7 +375,6 @@ trait MarkupParsers { val ts = new ArrayBuffer[Tree] val start = curOffset - tmppos = o2p(curOffset) // Iuli: added this line, as it seems content_LT uses tmppos when creating trees content_LT(ts) // parse more XML? 
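For illustration only (not part of the applied change; `adjust`, `CrlfOffsetDemo`, and the demo strings are invented): a standalone sketch of the CR/LF adjustment the new `curOffset` performs on `input.buf`, backing up from the LF to the preceding CR so the XML parser's end offset agrees with the position the scanner uses after `resume`.

```
object CrlfOffsetDemo extends App {
  // Back an offset up over a CR+LF pair, mirroring the new curOffset logic.
  def adjust(buf: Array[Char], off: Int): Int =
    if (off > 0 && buf(off) == '\n' && buf(off - 1) == '\r') off - 1 else off

  val crlf = "<a/>\r\n".toCharArray
  assert(adjust(crlf, 5) == 4) // lands on the CR rather than the LF

  val lf = "<a/>\n".toCharArray
  assert(adjust(lf, 4) == 4)   // plain LF endings are left alone
}
```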
diff --git a/test/junit/scala/tools/nsc/parser/ParserTest.scala b/test/junit/scala/tools/nsc/parser/ParserTest.scala new file mode 100644 index 000000000000..e4fed1e7b1b7 --- /dev/null +++ b/test/junit/scala/tools/nsc/parser/ParserTest.scala @@ -0,0 +1,32 @@ +package scala.tools.nsc.parser + +import org.junit.Test +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.testing.BytecodeTesting + +@RunWith(classOf[JUnit4]) +class ParserTest extends BytecodeTesting{ + override def compilerArgs: String = "-Ystop-after:parser -Yvalidate-pos:parser -Yrangepos" + @Test + def crlfRangePositionXml_t10321(): Unit = { + val code = + """ + |object Test { + | Nil.map { _ => + | + | + | } + |} + """.stripMargin + val crlfCode = code.linesIterator.map(_ + "\r\n").mkString + val lfCode = code.linesIterator.map(_ + "\n").mkString + assert(crlfCode != lfCode) + import compiler._, global._ + val run = new Run + run.compileSources(newSourceFile(lfCode) :: Nil) + assert(!reporter.hasErrors) + run.compileSources(newSourceFile(crlfCode) :: Nil) + } +} From 1709ffb860b0eb413bf8ebd24a4444eee557b520 Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 14 Oct 2018 16:37:14 -0700 Subject: [PATCH 1521/2793] [no-merge] Update types when treating parts When string interpolation parts are pre-treated for escapes, also update their constant types, which is relied upon by later transforms. --- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 7 +++---- test/files/run/t11196.scala | 8 ++++++++ 2 files changed, 11 insertions(+), 4 deletions(-) create mode 100644 test/files/run/t11196.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index d817e0612996..caa657ee365f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1599,16 +1599,15 @@ abstract class RefChecks extends Transform { try { val treated = lits.mapConserve { lit => val stringVal = lit.asInstanceOf[Literal].value.stringValue - treeCopy.Literal(lit, Constant(StringContext.processEscapes(stringVal))) + val k = Constant(StringContext.processEscapes(stringVal)) + treeCopy.Literal(lit, k).setType(ConstantType(k)) } Some((treated, args)) } catch { - case _: StringContext.InvalidEscapeException => - None + case _: StringContext.InvalidEscapeException => None } } case _ => None - } } else None } diff --git a/test/files/run/t11196.scala b/test/files/run/t11196.scala new file mode 100644 index 000000000000..ec097fefcf53 --- /dev/null +++ b/test/files/run/t11196.scala @@ -0,0 +1,8 @@ + +object Test extends App { + assert(s"a\tb" == "a\tb") + def f = () => s"a\tb" + assert(f() == "a\tb") + def g(x: => String) = x + assert(g(s"a\tb") == "a\tb") +} From 210c296fbe6cca9ca86ee6a808eda92c06a27a05 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 5 Oct 2018 15:40:11 +0200 Subject: [PATCH 1522/2793] [backport] Factor typedFunction. 
Let's create a fast path for after typer, when we already know which SAM to target, and all parameter types are known Backported from b2edce8 --- .../scala/tools/nsc/typechecker/Typers.scala | 121 ++++++++++-------- 1 file changed, 70 insertions(+), 51 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 2f828154c5d0..7d358ae3c24b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -16,7 +16,7 @@ package typechecker import scala.collection.{immutable, mutable} import scala.reflect.internal.util.{ListOfNil, Statistics, StatisticsStatics} import scala.reflect.internal.TypesStats -import mutable.ListBuffer +import mutable.{ArrayBuffer, ListBuffer} import symtab.Flags._ import Mode._ import scala.reflect.macros.whitebox @@ -2978,18 +2978,20 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val ptNorm = if (samMatchesFunctionBasedOnArity(sam, vparams)) samToFunctionType(pt, sam) else pt - val (argpts, respt) = + + val (argProtos, resProto) = ptNorm baseType FunctionSymbol match { case TypeRef(_, FunctionSymbol, args :+ res) => (args, res) case _ => (vparams map (if (pt == ErrorType) (_ => ErrorType) else (_ => NoType)), WildcardType) } if (!FunctionSymbol.exists) MaxFunctionArityError(fun) - else if (argpts.lengthCompare(numVparams) != 0) WrongNumberOfParametersError(fun, argpts) + else if (argProtos.lengthCompare(numVparams) != 0) WrongNumberOfParametersError(fun, argProtos) else { val paramsMissingType = mutable.ArrayBuffer.empty[ValDef] //.sizeHint(numVparams) probably useless, since initial size is 16 and max fun arity is 22 + // first, try to define param types from expected function's arg types if needed - foreach2(vparams, argpts) { (vparam, argpt) => + foreach2(vparams, argProtos) { (vparam, argpt) => if (vparam.tpt.isEmpty) { if (isFullyDefined(argpt)) vparam.tpt setType argpt else paramsMissingType += vparam @@ -2998,42 +3000,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } - // If we're typing `(a1: T1, ..., aN: TN) => m(a1,..., aN)`, where some Ti are not fully defined, - // type `m` directly (undoing eta-expansion of method m) to determine the argument types. - // This tree is the result from one of: - // - manual eta-expansion with named arguments (x => f(x)); - // - wildcard-style eta expansion (`m(_, _,)`); - // - instantiateToMethodType adapting a tree of method type to a function type using etaExpand. 
- // - // Note that method values are a separate thing (`m _`): they have the idiosyncratic shape - // of `Typed(expr, Function(Nil, EmptyTree))` val ptUnrollingEtaExpansion = - if (paramsMissingType.nonEmpty && pt != ErrorType) fun.body match { - // we can compare arguments and parameters by name because there cannot be a binder between - // the function's valdefs and the Apply's arguments - case Apply(meth, args) if (vparams corresponds args) { case (p, Ident(name)) => p.name == name case _ => false } => - // We're looking for a method (as indicated by FUNmode in the silent typed below), - // so let's make sure our expected type is a MethodType - val methArgs = NoSymbol.newSyntheticValueParams(argpts map { case NoType => WildcardType case tp => tp }) - - val result = silent(_.typed(meth, mode.forFunMode, MethodType(methArgs, respt))) - // we can't have results with undetermined type params - val resultMono = result filter (_ => context.undetparams.isEmpty) - resultMono map { methTyped => - // if context.undetparams is not empty, the method was polymorphic, - // so we need the missing arguments to infer its type. See #871 - val funPt = normalize(methTyped.tpe) baseType FunctionClass(numVparams) - // println(s"typeUnEtaExpanded $meth : ${methTyped.tpe} --> normalized: $funPt") - - // If we are sure this function type provides all the necessary info, so that we won't have - // any undetermined argument types, go ahead an recurse below (`typedFunction(fun, mode, ptUnrollingEtaExpansion)`) - // and rest assured we won't end up right back here (and keep recursing) - if (isFunctionType(funPt) && funPt.typeArgs.iterator.take(numVparams).forall(isFullyDefined)) funPt - else null - } orElse { _ => null } - case _ => null - } else null - + if (paramsMissingType.isEmpty || pt == ErrorType) null + else typedFunctionInferParamTypes(fun, mode, pt, argProtos, resProto) if (ptUnrollingEtaExpansion ne null) typedFunction(fun, mode, ptUnrollingEtaExpansion) else { @@ -3059,23 +3028,73 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper outerTyper.synthesizePartialFunction(p.name, p.pos, paramSynthetic = false, fun.body, mode, pt) case _ => - val vparamSyms = vparams map { vparam => - enterSym(context, vparam) - if (context.retyping) context.scope enter vparam.symbol - vparam.symbol - } - val vparamsTyped = vparams mapConserve typedValDef - val formals = vparamSyms map (_.tpe) - val body1 = typed(fun.body, respt) - val restpe = packedType(body1, fun.symbol).deconst.resultType - val funtpe = phasedAppliedType(FunctionSymbol, formals :+ restpe) - - treeCopy.Function(fun, vparamsTyped, body1) setType funtpe + doTypedFunction(fun, resProto) } } } } + private def typedFunctionInferParamTypes(fun: Function, mode: Mode, pt: Type, argProtos: List[Type], resProto: Type) = { + val vparams = fun.vparams + + // If we're typing `(a1: T1, ..., aN: TN) => m(a1,..., aN)`, where some Ti are not fully defined, + // type `m` directly (undoing eta-expansion of method m) to determine the argument types. + // This tree is the result from one of: + // - manual eta-expansion with named arguments (x => f(x)); + // - wildcard-style eta expansion (`m(_, _,)`); + // - instantiateToMethodType adapting a tree of method type to a function type using etaExpand. 
+ // + // Note that method values are a separate thing (`m _`): they have the idiosyncratic shape + // of `Typed(expr, Function(Nil, EmptyTree))` + fun.body match { + // we can compare arguments and parameters by name because there cannot be a binder between + // the function's valdefs and the Apply's arguments + case Apply(meth, args) if (vparams corresponds args) { case (p, Ident(name)) => p.name == name case _ => false } => + // We're looking for a method (as indicated by FUNmode in the silent typed below), + // so let's make sure our expected type is a MethodType + val methArgs = NoSymbol.newSyntheticValueParams(argProtos map { case NoType => WildcardType case tp => tp }) + + val result = silent(_.typed(meth, mode.forFunMode, MethodType(methArgs, resProto))) + // we can't have results with undetermined type params + val resultMono = result filter (_ => context.undetparams.isEmpty) + resultMono map { methTyped => + val numVparams = vparams.length + // if context.undetparams is not empty, the method was polymorphic, + // so we need the missing arguments to infer its type. See #871 + val funPt = normalize(methTyped.tpe) baseType FunctionClass(numVparams) + // println(s"typeUnEtaExpanded $meth : ${methTyped.tpe} --> normalized: $funPt") + + // If we are sure this function type provides all the necessary info, so that we won't have + // any undetermined argument types, go ahead an recurse below (`typedFunction(fun, mode, ptUnrollingEtaExpansion)`) + // and rest assured we won't end up right back here (and keep recursing) + if (isFunctionType(funPt) && funPt.typeArgs.iterator.take(numVparams).forall(isFullyDefined)) funPt + else null + } orElse { _ => null } + case _ => null + } + } + + private def doTypedFunction(fun: Function, bodyPt: Type) = { + val vparams = fun.vparams + val vparamSyms = vparams map { vparam => + enterSym(context, vparam) + if (context.retyping) context.scope enter vparam.symbol + vparam.symbol + } + val vparamsTyped = vparams mapConserve typedValDef + val bodyTyped = typed(fun.body, bodyPt) + + val funSym = FunctionClass(vparams.length) + val funTp = + if (phase.erasedTypes) funSym.tpe + else { + val resTp = packedType(bodyTyped, fun.symbol).deconst.resultType + appliedType(funSym, vparamSyms.map(_.tpe) :+ resTp) + } + + treeCopy.Function(fun, vparamsTyped, bodyTyped) setType funTp + } + def typedRefinement(templ: Template) { val stats = templ.body namer.enterSyms(stats) From 984502e54ac0a5553a2d4c0907566bcde27e4175 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 12 Oct 2018 11:30:59 +0200 Subject: [PATCH 1523/2793] [backport] Pull out `argsResProtosFromFun`, Drop impossible error check. We always produced `numVparams` argument prototypes, so there was no way we would ever call WrongNumberOfParametersError. 
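For illustration only (not from the original commit message; `SamLike`, `ProtoDemo`, and the values below are invented): both expected-type shapes that the extracted deconstruction handles allow the literal's parameter types to be inferred, whether the target is a built-in `FunctionN` type or a SAM type of matching arity.

```
object ProtoDemo extends App {
  trait SamLike { def apply(i: Int, s: String): Boolean }

  // FunctionN target: argument prototypes (Int, String), result prototype Boolean.
  val f: (Int, String) => Boolean = (i, s) => s.length == i

  // SAM target of the same arity: parameter types are inferred the same way.
  val g: SamLike = (i, s) => s.length == i

  assert(f(3, "abc") && g(3, "abc"))
}
```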
Backported from a9f8c14 --- .../tools/nsc/typechecker/ContextErrors.scala | 5 - .../scala/tools/nsc/typechecker/Typers.scala | 215 +++++++++++------- test/files/neg/names-defaults-neg.check | 5 +- 3 files changed, 130 insertions(+), 95 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 7aa71cfda051..0f35185ebc01 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -438,11 +438,6 @@ trait ContextErrors { setError(fun) } - def WrongNumberOfParametersError(tree: Tree, argpts: List[Type]) = { - issueNormalTypeError(tree, "wrong number of parameters; expected = " + argpts.length) - setError(tree) - } - def MissingParameterTypeError(fun: Tree, vparam: ValDef, pt: Type, withTupleAddendum: Boolean) = { def issue(what: String) = { val addendum: String = fun match { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 9ce10c536dde..f11fd72a6a31 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2946,6 +2946,41 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ => false } + /** + * Deconstruct an expected function-ish type `pt` into `numVparams` argument prototypes and a result prototype. + * + * If the expected type `pt` does not denote a function-ish type with arity `numVparams`, + * still return the expected number of ErrorType/NoType argument protos, and WildcardType for the result. + * + * @param pt + * @param numVparams + * @return (argProtos, resProto) where argProtos.lengthCompare(numVparams) == 0 + */ + private def argsResProtosFromFun(pt: Type, numVparams: Int): (List[Type], Type) = { + val FunctionSymbol = FunctionClass(numVparams) + + // In case of any non-trivial type slack between `pt` and the built-in function types, we go the SAM route, + // as a subclass could have (crazily) implemented the apply method and introduced another abstract method + // to serve as the vehicle. + val ptNorm = pt.typeSymbol match { + case NoSymbol => NoType + case FunctionSymbol | PartialFunctionClass => pt + case _ => + val sam = samOf(pt) + if (sam.exists && sam.info.params.lengthCompare(numVparams) == 0) + wildcardExtrapolation(normalize(pt memberInfo sam)) + else pt // allow type slack (pos/6221) + } + + ptNorm baseType FunctionSymbol match { + case TypeRef(_, _, args :+ res) => (args, res) // if it's a TypeRef, we know its symbol will be FunctionSymbol + case _ => { + val dummyPt = if (pt == ErrorType) ErrorType else NoType + (List.fill(numVparams)(dummyPt), WildcardType) // dummyPt is in CBN position + } + } + } + /** Type check a function literal. * * Based on the expected type pt, potentially synthesize an instance of @@ -2955,81 +2990,54 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper private def typedFunction(fun: Function, mode: Mode, pt: Type): Tree = { val vparams = fun.vparams val numVparams = vparams.length - val FunctionSymbol = - if (numVparams > definitions.MaxFunctionArity) NoSymbol - else FunctionClass(numVparams) - - val ptSym = pt.typeSymbol - - /* The Single Abstract Member of pt, unless pt is the built-in function type of the expected arity, - * as `(a => a): Int => Int` should not (yet) get the sam treatment. 
- */ - val sam = - if (ptSym == NoSymbol || ptSym == FunctionSymbol || ptSym == PartialFunctionClass) NoSymbol - else samOf(pt) - - /* The SAM case comes first so that this works: - * abstract class MyFun extends (Int => Int) - * (a => a): MyFun - * - * Note that the arity of the sam must correspond to the arity of the function. - * TODO: handle vararg sams? - */ - val ptNorm = - if (samMatchesFunctionBasedOnArity(sam, vparams)) samToFunctionType(pt, sam) - else pt - - val (argProtos, resProto) = - ptNorm baseType FunctionSymbol match { - case TypeRef(_, FunctionSymbol, args :+ res) => (args, res) - case _ => (vparams map (if (pt == ErrorType) (_ => ErrorType) else (_ => NoType)), WildcardType) - } - - // After typer, no need for further checks, parameter type inference or PartialFunction synthesis. - if (isPastTyper) doTypedFunction(fun, resProto) - else if (!FunctionSymbol.exists) MaxFunctionArityError(fun) - else if (argProtos.lengthCompare(numVparams) != 0) WrongNumberOfParametersError(fun, argProtos) + if (numVparams > definitions.MaxFunctionArity) MaxFunctionArityError(fun) else { - val paramsMissingType = mutable.ArrayBuffer.empty[ValDef] //.sizeHint(numVparams) probably useless, since initial size is 16 and max fun arity is 22 + val (argProtos, resProto) = argsResProtosFromFun(pt, numVparams) + + // After typer, no need for further checks, parameter type inference or PartialFunction synthesis. + if (isPastTyper) doTypedFunction(fun, resProto) + else { + val paramsMissingType = mutable.ArrayBuffer.empty[ValDef] //.sizeHint(numVparams) probably useless, since initial size is 16 and max fun arity is 22 - // first, try to define param types from expected function's arg types if needed - foreach2(vparams, argProtos) { (vparam, argpt) => - if (vparam.tpt.isEmpty) { + // first, try to define param types from expected function's arg types if needed + foreach2(vparams, argProtos) { (vparam, argpt) => + if (vparam.tpt.isEmpty) { if (isFullyDefined(argpt)) vparam.tpt setType argpt else paramsMissingType += vparam - if (!vparam.tpt.pos.isDefined) vparam.tpt setPos vparam.pos.focus + if (!vparam.tpt.pos.isDefined) vparam.tpt setPos vparam.pos.focus + } } - } - if (paramsMissingType.nonEmpty && pt != ErrorType) { - // If we can resolve the missing parameter type by undoing eta-expansion and recursing, do that -- otherwise, report error and bail - typedFunctionUndoingEtaExpansion(fun, mode, pt, argProtos, resProto) orElse { - // we ran out of things to try, missing parameter types are an irrevocable error - var issuedMissingParameterTypeError = false - paramsMissingType.foreach { vparam => - vparam.tpt setType ErrorType - MissingParameterTypeError(fun, vparam, pt, withTupleAddendum = !issuedMissingParameterTypeError) - issuedMissingParameterTypeError = true - } + if (paramsMissingType.nonEmpty && pt != ErrorType) { + // If we can resolve the missing parameter type by undoing eta-expansion and recursing, do that -- otherwise, report error and bail + typedFunctionUndoingEtaExpansion(fun, mode, pt, argProtos, resProto) orElse { + // we ran out of things to try, missing parameter types are an irrevocable error + var issuedMissingParameterTypeError = false + paramsMissingType.foreach { vparam => + vparam.tpt setType ErrorType + MissingParameterTypeError(fun, vparam, pt, withTupleAddendum = !issuedMissingParameterTypeError) + issuedMissingParameterTypeError = true + } - doTypedFunction(fun, resProto) // TODO: why is it not enough to do setError(fun)? 
(for test case, see neg/t8675b.scala) - } - } else { - fun.body match { - // translate `x => x match { }` : PartialFunction to - // `new PartialFunction { def applyOrElse(x, default) = x match { } def isDefinedAt(x) = ... }` - case Match(sel, cases) if (sel ne EmptyTree) && (pt.typeSymbol == PartialFunctionClass) => - // go to outer context -- must discard the context that was created for the Function since we're discarding the function - // thus, its symbol, which serves as the current context.owner, is not the right owner - // you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner) - val outerTyper = newTyper(context.outer) - val p = vparams.head - if (p.tpt.tpe == null) p.tpt setType outerTyper.typedType(p.tpt).tpe - - outerTyper.synthesizePartialFunction(p.name, p.pos, paramSynthetic = false, fun.body, mode, pt) - - case _ => doTypedFunction(fun, resProto) + doTypedFunction(fun, resProto) // TODO: why is it not enough to do setError(fun)? (for test case, see neg/t8675b.scala) + } + } else { + fun.body match { + // translate `x => x match { }` : PartialFunction to + // `new PartialFunction { def applyOrElse(x, default) = x match { } def isDefinedAt(x) = ... }` + case Match(sel, cases) if (sel ne EmptyTree) && (pt.typeSymbol == PartialFunctionClass) => + // go to outer context -- must discard the context that was created for the Function since we're discarding the function + // thus, its symbol, which serves as the current context.owner, is not the right owner + // you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner) + val outerTyper = newTyper(context.outer) + val p = vparams.head + if (p.tpt.tpe == null) p.tpt setType outerTyper.typedType(p.tpt).tpe + + outerTyper.synthesizePartialFunction(p.name, p.pos, paramSynthetic = false, fun.body, mode, pt) + + case _ => doTypedFunction(fun, resProto) + } } } } @@ -3059,27 +3067,62 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper fun.body match { // we can compare arguments and parameters by name because there cannot be a binder between // the function's valdefs and the Apply's arguments - case Apply(meth, args) if (vparams corresponds args) { case (p, Ident(name)) => p.name == name case _ => false } => - // We're looking for a method (as indicated by FUNmode in the silent typed below), - // so let's make sure our expected type is a MethodType - val methArgs = NoSymbol.newSyntheticValueParams(argProtos map { case NoType => WildcardType case tp => tp }) - - silent(_.typed(meth, mode.forFunMode, MethodType(methArgs, resProto))).fold(EmptyTree: Tree){ methTyped => - if (context.undetparams.isEmpty) { - val numVparams = vparams.length + // If all vparams are constrained by the method application, see if we can derive expected types for them. + // Note that not all method arguments need be references to a function param. + case Apply(meth, args) => + // Map param with missing param type to the argument it's passed as in the eta-expanded method application + // This list specifies a way to compute the expected parameter type for each of our function's arguments in order. 
+ // Either we already know it, and then we have a Type, or we don't, and then it's an index `idx` into + // the arguments passed to `meth`, so we can derive it from its MethodType + // (based on where the function's parameter is applied to `meth`) + val formalsFromApply = + vparams.map { vd => + if (!vd.tpt.isEmpty) Right(vd.tpt.tpe) + else Left(args.indexWhere { + case Ident(name) => name == vd.name + case _ => false // TODO: i think we need to deal with widening conversions too?? + }) + } + + // If some of the vparams without type annotation was not applied to `meth`, + // we're not going to learn enough from typing `meth` to determine them. + if (formalsFromApply.exists{ case Left(-1) => true case _ => false }) EmptyTree + else { + // We're looking for a method (as indicated by FUNmode in the silent typed below), + // so let's make sure our expected type is a MethodType (of the right arity, but we can't easily say more about the argument types) + val methArgs = NoSymbol.newSyntheticValueParams(args map { case _ => WildcardType }) + + silent(_.typed(meth, mode.forFunMode, MethodType(methArgs, resProto))).fold(EmptyTree: Tree) { methTyped => // if context.undetparams is not empty, the method was polymorphic, // so we need the missing arguments to infer its type. See #871 - val funPt = normalize(methTyped.tpe) baseType FunctionClass(numVparams) - // println(s"typeUnEtaExpanded $meth : ${methTyped.tpe} --> normalized: $funPt") - - // If we are sure this function type provides all the necessary info, so that we won't have - // any undetermined argument types, go ahead an recurse below (`typedFunction(fun, mode, ptUnrollingEtaExpansion)`) - // and rest assured we won't end up right back here (and keep recursing). - // Be careful to reuse methTyped -- it may have changed from meth (scala/bug#9745)! - if (isFunctionType(funPt) && funPt.typeArgs.iterator.take(numVparams).forall(isFullyDefined)) - typedFunction(treeCopy.Function(fun, vparams, treeCopy.Apply(fun.body, methTyped, args)), mode, funPt) - else EmptyTree - } else EmptyTree + if (context.undetparams.isEmpty) { + // If we are sure this function type provides all the necessary info, so that we won't have + // any undetermined argument types, recurse below (`typedFunction(fun, mode, ptUnrollingEtaExpansion)`) + // and rest assured we won't end up right back here (and keep recursing). + // + // Be careful to reuse methTyped -- it may have changed from meth (scala/bug#9745)! + // + // TODO: CBN / varargs / implicits? should we use formalTypes? 
+ normalize(methTyped.tpe) match { // we don't know how many of the vparams of our function were actually applied to the method + case TypeRef(_, _, argProtos :+ _) => + val argProtosRecovered = + formalsFromApply.map { + case Left(idx) => + val argPt = if (argProtos.isDefinedAt(idx)) argProtos(idx) else NoType // bounds check should not be needed due to expected type `MethodType(methArgs, resProto)` above + if (isFullyDefined(argPt)) argPt else NoType + case Right(tp) => tp + } + + if (argProtosRecovered contains NoType) EmptyTree // cannot safely recurse + else { + val funPt = functionType(argProtosRecovered, resProto) + // recursion is safe because now all parameter types can be derived from `argProtosRecovered` in the prototype `funPt` passed to typedFunction + typedFunction(treeCopy.Function(fun, vparams, treeCopy.Apply(fun.body, methTyped, args)), mode, funPt) + } + case _ => EmptyTree + } + } else EmptyTree + } } case _ => EmptyTree } diff --git a/test/files/neg/names-defaults-neg.check b/test/files/neg/names-defaults-neg.check index af164d90eaa9..83163abef5e2 100644 --- a/test/files/neg/names-defaults-neg.check +++ b/test/files/neg/names-defaults-neg.check @@ -142,9 +142,6 @@ names-defaults-neg.scala:138: error: not found: value get names-defaults-neg.scala:139: error: parameter 'a' is already specified at parameter position 1 val taf3 = testAnnFun(b = _: String, a = get(8)) ^ -names-defaults-neg.scala:140: error: missing parameter type for expanded function ((x$3: ) => testAnnFun(x$3, ((x$4) => b = x$4))) - val taf4: (Int, String) => Unit = testAnnFun(_, b = _) - ^ names-defaults-neg.scala:140: error: missing parameter type for expanded function ((x$4: ) => b = x$4) val taf4: (Int, String) => Unit = testAnnFun(_, b = _) ^ @@ -188,4 +185,4 @@ names-defaults-neg.scala:184: error: reference to x is ambiguous; it is both a m class u18 { var x: Int = u.f(x = 1) } ^ 6 warnings found -46 errors found +45 errors found From 93cd804a78516fb27576b616b5260112c10a0db6 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 12 Oct 2018 12:17:05 +0200 Subject: [PATCH 1524/2793] [backport] Recover fun param types from (partial) eta-expansion Generalize function parameter type inference to allow any subset (including permutation) of parameters with unknown types to be inferred from the method they are applied to. Before, we required all method arguments to come from the function's vparams, in order. 
Example: ``` scala> def repeat(x: Int, y: String) = y * x repeat: (x: Int, y: String)String scala> val sayAaah = repeat(_, "a") sayAaah: Int => String scala> val thrice = x => repeat(3, x) thrice: String => String scala> val repeatFlip = (x, y) => repeat(y, x) repeatFlip: (String, Int) => String ``` Backported from 967ab56 --- test/files/pos/eta_partial.scala | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 test/files/pos/eta_partial.scala diff --git a/test/files/pos/eta_partial.scala b/test/files/pos/eta_partial.scala new file mode 100644 index 000000000000..31b907a42e55 --- /dev/null +++ b/test/files/pos/eta_partial.scala @@ -0,0 +1,6 @@ +class Test { + def repeat(x: Int, y: String) = y * x + val sayAaah = repeat(_, "a") // partial eta-expansion recovers fun param types from method (place holder syntax) + val thrice = x => repeat(3, x) // partial eta-expansion recovers fun param types from method (explicit version) + val repeatFlip = (x, y) => repeat(y, x) // partial eta-expansion recovers fun param types from method (explicit version, two params) +} From 9614c3a6d6a47bbe23cba8171af32d212fe9c765 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 5 Oct 2018 17:22:33 +0200 Subject: [PATCH 1525/2793] [backport] Do less in typedFunction after typer Do more for typedFunction of eta-expanded method: keep typed method selection (fix scala/bug#9745, follow up for #6007) Test case taken from original PR. Backported from 64d4c24 --- .../scala/tools/nsc/typechecker/Typers.scala | 97 +++++++++++-------- test/files/neg/t9745.check | 19 ++++ test/files/neg/t9745.scala | 20 ++++ test/files/pos/t9745.scala | 14 +++ 4 files changed, 108 insertions(+), 42 deletions(-) create mode 100644 test/files/neg/t9745.check create mode 100644 test/files/neg/t9745.scala create mode 100644 test/files/pos/t9745.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 7d358ae3c24b..9ce10c536dde 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2985,7 +2985,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ => (vparams map (if (pt == ErrorType) (_ => ErrorType) else (_ => NoType)), WildcardType) } - if (!FunctionSymbol.exists) MaxFunctionArityError(fun) + // After typer, no need for further checks, parameter type inference or PartialFunction synthesis. 
+ if (isPastTyper) doTypedFunction(fun, resProto) + else if (!FunctionSymbol.exists) MaxFunctionArityError(fun) else if (argProtos.lengthCompare(numVparams) != 0) WrongNumberOfParametersError(fun, argProtos) else { val paramsMissingType = mutable.ArrayBuffer.empty[ValDef] //.sizeHint(numVparams) probably useless, since initial size is 16 and max fun arity is 22 @@ -3000,20 +3002,20 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } - val ptUnrollingEtaExpansion = - if (paramsMissingType.isEmpty || pt == ErrorType) null - else typedFunctionInferParamTypes(fun, mode, pt, argProtos, resProto) + if (paramsMissingType.nonEmpty && pt != ErrorType) { + // If we can resolve the missing parameter type by undoing eta-expansion and recursing, do that -- otherwise, report error and bail + typedFunctionUndoingEtaExpansion(fun, mode, pt, argProtos, resProto) orElse { + // we ran out of things to try, missing parameter types are an irrevocable error + var issuedMissingParameterTypeError = false + paramsMissingType.foreach { vparam => + vparam.tpt setType ErrorType + MissingParameterTypeError(fun, vparam, pt, withTupleAddendum = !issuedMissingParameterTypeError) + issuedMissingParameterTypeError = true + } - if (ptUnrollingEtaExpansion ne null) typedFunction(fun, mode, ptUnrollingEtaExpansion) - else { - // we ran out of things to try, missing parameter types are an irrevocable error - var issuedMissingParameterTypeError = false - paramsMissingType.foreach { vparam => - vparam.tpt setType ErrorType - MissingParameterTypeError(fun, vparam, pt, withTupleAddendum = !issuedMissingParameterTypeError) - issuedMissingParameterTypeError = true + doTypedFunction(fun, resProto) // TODO: why is it not enough to do setError(fun)? (for test case, see neg/t8675b.scala) } - + } else { fun.body match { // translate `x => x match { }` : PartialFunction to // `new PartialFunction { def applyOrElse(x, default) = x match { } def isDefinedAt(x) = ... }` @@ -3027,25 +3029,33 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper outerTyper.synthesizePartialFunction(p.name, p.pos, paramSynthetic = false, fun.body, mode, pt) - case _ => - doTypedFunction(fun, resProto) + case _ => doTypedFunction(fun, resProto) } } } } - private def typedFunctionInferParamTypes(fun: Function, mode: Mode, pt: Type, argProtos: List[Type], resProto: Type) = { + /** Retry typedFunction when parameter types are missing, and they might be recovered from + * the method selection that was eta-expanded into `fun`. + * + * When typing `(a1: T1, ..., aN: TN) => m(a1,..., aN)`, where some Ti are not fully defined, + * type `m` directly (undoing eta-expansion of method m) to determine the argument types. + * We have to be careful to use the result of typing the method selection, as its tree + * may be rewritten. + * + * This tree is the result from one of: + * - manual eta-expansion with named arguments (x => f(x)); + * - wildcard-style eta expansion (`m(_, _,)`); + * - (I don't think it can result from etaExpand, because we know the argument types there.) 
+ * + * Note that method values are a separate thing (`m _`): they have the idiosyncratic shape + * of `Typed(expr, Function(Nil, EmptyTree))` + * + * @return EmptyTree on failure, or a typed version of `fun` if we are successful + */ + private def typedFunctionUndoingEtaExpansion(fun: Function, mode: Mode, pt: Type, argProtos: List[Type], resProto: Type) = { val vparams = fun.vparams - // If we're typing `(a1: T1, ..., aN: TN) => m(a1,..., aN)`, where some Ti are not fully defined, - // type `m` directly (undoing eta-expansion of method m) to determine the argument types. - // This tree is the result from one of: - // - manual eta-expansion with named arguments (x => f(x)); - // - wildcard-style eta expansion (`m(_, _,)`); - // - instantiateToMethodType adapting a tree of method type to a function type using etaExpand. - // - // Note that method values are a separate thing (`m _`): they have the idiosyncratic shape - // of `Typed(expr, Function(Nil, EmptyTree))` fun.body match { // we can compare arguments and parameters by name because there cannot be a binder between // the function's valdefs and the Apply's arguments @@ -3054,26 +3064,28 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // so let's make sure our expected type is a MethodType val methArgs = NoSymbol.newSyntheticValueParams(argProtos map { case NoType => WildcardType case tp => tp }) - val result = silent(_.typed(meth, mode.forFunMode, MethodType(methArgs, resProto))) - // we can't have results with undetermined type params - val resultMono = result filter (_ => context.undetparams.isEmpty) - resultMono map { methTyped => - val numVparams = vparams.length - // if context.undetparams is not empty, the method was polymorphic, - // so we need the missing arguments to infer its type. See #871 - val funPt = normalize(methTyped.tpe) baseType FunctionClass(numVparams) - // println(s"typeUnEtaExpanded $meth : ${methTyped.tpe} --> normalized: $funPt") - - // If we are sure this function type provides all the necessary info, so that we won't have - // any undetermined argument types, go ahead an recurse below (`typedFunction(fun, mode, ptUnrollingEtaExpansion)`) - // and rest assured we won't end up right back here (and keep recursing) - if (isFunctionType(funPt) && funPt.typeArgs.iterator.take(numVparams).forall(isFullyDefined)) funPt - else null - } orElse { _ => null } - case _ => null + silent(_.typed(meth, mode.forFunMode, MethodType(methArgs, resProto))).fold(EmptyTree: Tree){ methTyped => + if (context.undetparams.isEmpty) { + val numVparams = vparams.length + // if context.undetparams is not empty, the method was polymorphic, + // so we need the missing arguments to infer its type. See #871 + val funPt = normalize(methTyped.tpe) baseType FunctionClass(numVparams) + // println(s"typeUnEtaExpanded $meth : ${methTyped.tpe} --> normalized: $funPt") + + // If we are sure this function type provides all the necessary info, so that we won't have + // any undetermined argument types, go ahead an recurse below (`typedFunction(fun, mode, ptUnrollingEtaExpansion)`) + // and rest assured we won't end up right back here (and keep recursing). + // Be careful to reuse methTyped -- it may have changed from meth (scala/bug#9745)! 
+ if (isFunctionType(funPt) && funPt.typeArgs.iterator.take(numVparams).forall(isFullyDefined)) + typedFunction(treeCopy.Function(fun, vparams, treeCopy.Apply(fun.body, methTyped, args)), mode, funPt) + else EmptyTree + } else EmptyTree + } + case _ => EmptyTree } } + // Assuming the expected number of parameters, which all have type annotations, do the happy path. private def doTypedFunction(fun: Function, bodyPt: Type) = { val vparams = fun.vparams val vparamSyms = vparams map { vparam => @@ -4651,6 +4663,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper return tryTypedApply(fun setType newtpe, args) } } + // TODO: case to recurse into Function? def treesInResult(tree: Tree): List[Tree] = tree :: (tree match { case Block(_, r) => treesInResult(r) case Match(_, cases) => cases diff --git a/test/files/neg/t9745.check b/test/files/neg/t9745.check new file mode 100644 index 000000000000..687cc98d2707 --- /dev/null +++ b/test/files/neg/t9745.check @@ -0,0 +1,19 @@ +t9745.scala:2: error: missing parameter type for expanded function ((x$1: ) => Seq({ + .<$plus$eq: error>(1); + 42 +}).apply(x$1)) + val func = Seq { x += 1; 42 } apply _ + ^ +t9745.scala:8: error: missing parameter type + val g = x => f(y += 1)(x) + ^ +t9745.scala:14: error: missing parameter type + val g = x => f(x += 1)(x) + ^ +t9745.scala:19: error: missing parameter type + val g = (x, y) => f(42)(x, y) + ^ +t9745.scala:19: error: missing parameter type + val g = (x, y) => f(42)(x, y) + ^ +5 errors found diff --git a/test/files/neg/t9745.scala b/test/files/neg/t9745.scala new file mode 100644 index 000000000000..5f0cfc4462f9 --- /dev/null +++ b/test/files/neg/t9745.scala @@ -0,0 +1,20 @@ +class C { + val func = Seq { x += 1; 42 } apply _ +} + +class D { + var i = 0 + def f(n: Unit)(j: Int): Int = ??? + val g = x => f(y += 1)(x) +} + +class E { + var i = 0 + def f(n: Unit)(j: Int): Int = ??? + val g = x => f(x += 1)(x) +} + +class Convo { + def f(i: Int)(z: Any): Int = ??? + val g = (x, y) => f(42)(x, y) +} \ No newline at end of file diff --git a/test/files/pos/t9745.scala b/test/files/pos/t9745.scala new file mode 100644 index 000000000000..6b6443e4eb77 --- /dev/null +++ b/test/files/pos/t9745.scala @@ -0,0 +1,14 @@ +class C { + val func = Seq { var i = 0; i += 1; i } apply _ +} + +class D { + var i = 0 + def f(n: Unit)(j: Int): Int = ??? + val g = x => f(i += 1)(x) +} + +class Convo { + def f(i: Int)(z: Any): Int = ??? 
+ val g = (x: Int, y: Int) => f(42)(x, y) +} \ No newline at end of file From 15ee5cf0a68130efb74bfaa2358e5f7683650e28 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Mon, 20 Aug 2018 21:39:11 -0400 Subject: [PATCH 1526/2793] reduce varargs allocation for appliedType --- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 2 +- .../nsc/transform/ExtensionMethods.scala | 2 +- .../scala/tools/nsc/transform/UnCurry.scala | 2 +- .../tools/nsc/typechecker/Checkable.scala | 4 +-- .../tools/nsc/typechecker/Implicits.scala | 2 +- .../reflect/internal/AnnotationInfos.scala | 2 +- .../reflect/internal/CapturedVariables.scala | 2 +- .../scala/reflect/internal/Definitions.scala | 26 ++++++++++--------- 8 files changed, 22 insertions(+), 20 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index f44bd0b58ffd..9877076c25dc 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -789,7 +789,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { if (needsAnnotation) { val c = Constant(definitions.RemoteExceptionClass.tpe) val arg = Literal(c) setType c.tpe - meth.addAnnotation(appliedType(definitions.ThrowsClass, c.tpe), arg) + meth.addAnnotation(appliedType(definitions.ThrowsClass, c.tpe :: Nil), arg) } } diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index b97e54f10f81..5a73829165be 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -155,7 +155,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { // so must drop their variance. 
val tparamsFromClass = cloneSymbolsAtOwner(clazz.typeParams, extensionMeth) map (_ resetFlag COVARIANT | CONTRAVARIANT) - val thisParamType = appliedType(clazz, tparamsFromClass map (_.tpeHK): _*) + val thisParamType = appliedType(clazz, tparamsFromClass.map(_.tpeHK)) val thisParam = extensionMeth.newValueParameter(nme.SELF, extensionMeth.pos) setInfo thisParamType val resultType = MethodType(List(thisParam), dropNullaryMethod(methodResult)) val selfParamType = singleType(currentOwner.companionModule.thisType, thisParam) diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 8a466ca3305d..192fe7601cf2 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -130,7 +130,7 @@ abstract class UnCurry extends InfoTransform /** The type of a non-local return expression with given argument type */ private def nonLocalReturnExceptionType(argtype: Type) = - appliedType(NonLocalReturnControlClass, argtype) + appliedType(NonLocalReturnControlClass, argtype :: Nil) /** A hashmap from method symbols to non-local return keys */ private val nonLocalReturnKeys = perRunCaches.newMap[Symbol, Symbol]() diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index ce9923ee7f05..3a4a1243d288 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -77,7 +77,7 @@ trait Checkable { def propagateKnownTypes(from: Type, to: Symbol): Type = { def tparams = to.typeParams val tvars = tparams map (p => TypeVar(p)) - val tvarType = appliedType(to, tvars: _*) + val tvarType = appliedType(to, tvars) val bases = from.baseClasses filter (to.baseClasses contains _) bases foreach { bc => @@ -104,7 +104,7 @@ trait Checkable { case (_, tvar) if tvar.instValid => tvar.constr.inst case (tparam, _) => tparam.tpeHK } - appliedType(to, resArgs: _*) + appliedType(to, resArgs) } private def isUnwarnableTypeArgSymbol(sym: Symbol) = ( diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 96c067c38b7f..08fa40241f26 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1329,7 +1329,7 @@ trait Implicits { /* Re-wraps a type in a manifest before calling inferImplicit on the result */ def findManifest(tp: Type, manifestClass: Symbol = if (full) FullManifestClass else PartialManifestClass) = - inferImplicitFor(appliedType(manifestClass, tp), tree, context).tree + inferImplicitFor(appliedType(manifestClass, tp :: Nil), tree, context).tree def findSubManifest(tp: Type) = findManifest(tp, if (full) FullManifestClass else OptManifestClass) def mot(tp0: Type, from: List[Symbol], to: List[Type]): SearchResult = { diff --git a/src/reflect/scala/reflect/internal/AnnotationInfos.scala b/src/reflect/scala/reflect/internal/AnnotationInfos.scala index 411d6e01382f..35fb8e69fa28 100644 --- a/src/reflect/scala/reflect/internal/AnnotationInfos.scala +++ b/src/reflect/scala/reflect/internal/AnnotationInfos.scala @@ -36,7 +36,7 @@ trait AnnotationInfos extends api.Annotations { self: SymbolTable => // monomorphic one by introducing existentials, see scala/bug#7009 for details existentialAbstraction(throwableSym.typeParams, throwableSym.tpe) } - this withAnnotation AnnotationInfo(appliedType(ThrowsClass, 
throwableTpe), List(Literal(Constant(throwableTpe))), Nil) + this withAnnotation AnnotationInfo(appliedType(ThrowsClass, throwableTpe :: Nil), List(Literal(Constant(throwableTpe))), Nil) } /** Tests for, get, or remove an annotation */ diff --git a/src/reflect/scala/reflect/internal/CapturedVariables.scala b/src/reflect/scala/reflect/internal/CapturedVariables.scala index ef9646b80fa2..d59ba0f0c2de 100644 --- a/src/reflect/scala/reflect/internal/CapturedVariables.scala +++ b/src/reflect/scala/reflect/internal/CapturedVariables.scala @@ -30,7 +30,7 @@ trait CapturedVariables { self: SymbolTable => def refType(valueRef: Map[Symbol, Symbol], objectRefClass: Symbol) = if (isPrimitiveValueClass(symClass) && symClass != UnitClass) valueRef(symClass).tpe else if (erasedTypes) objectRefClass.tpe - else appliedType(objectRefClass, tpe1) + else appliedType(objectRefClass, tpe1 :: Nil) if (vble.hasAnnotation(VolatileAttr)) refType(volatileRefClass, VolatileObjectRefClass) else refType(refClass, ObjectRefClass) } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index bf490bb5e2cd..92e462d6c816 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -589,10 +589,10 @@ trait Definitions extends api.StandardDefinitions { private val symSet = new SymbolSet(seq.toList) def contains(sym: Symbol): Boolean = symSet.contains(sym) def apply(i: Int) = if (isDefinedAt(i)) seq(i - offset) else NoSymbol - def specificType(args: List[Type], others: Type*): Type = { + def specificType(args: List[Type], others: List[Type] = Nil): Type = { val arity = args.length if (!isDefinedAt(arity)) NoType - else appliedType(apply(arity), args ++ others: _*) + else appliedType(apply(arity), args ::: others) } } // would be created synthetically for the default args. We call all objects in this method from the generated code @@ -610,8 +610,8 @@ trait Definitions extends api.StandardDefinitions { /** Creators for TupleN, ProductN, FunctionN. 
*/ def tupleType(elems: List[Type]) = TupleClass.specificType(elems) - def functionType(formals: List[Type], restpe: Type) = FunctionClass.specificType(formals, restpe) - def abstractFunctionType(formals: List[Type], restpe: Type) = AbstractFunctionClass.specificType(formals, restpe) + def functionType(formals: List[Type], restpe: Type) = FunctionClass.specificType(formals, restpe :: Nil) + def abstractFunctionType(formals: List[Type], restpe: Type) = AbstractFunctionClass.specificType(formals, restpe :: Nil) def wrapArrayMethodName(elemtp: Type): TermName = elemtp.typeSymbol match { case ByteClass => nme.wrapByteArray @@ -912,13 +912,13 @@ trait Definitions extends api.StandardDefinitions { } else NoSymbol } - def arrayType(arg: Type) = appliedType(ArrayClass, arg) - def byNameType(arg: Type) = appliedType(ByNameParamClass, arg) - def iteratorOfType(tp: Type) = appliedType(IteratorClass, tp) - def javaRepeatedType(arg: Type) = appliedType(JavaRepeatedParamClass, arg) - def optionType(tp: Type) = appliedType(OptionClass, tp) - def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg) - def seqType(arg: Type) = appliedType(SeqClass, arg) + def arrayType(arg: Type) = appliedType(ArrayClass, arg :: Nil) + def byNameType(arg: Type) = appliedType(ByNameParamClass, arg :: Nil) + def iteratorOfType(tp: Type) = appliedType(IteratorClass, tp :: Nil) + def javaRepeatedType(arg: Type) = appliedType(JavaRepeatedParamClass, arg :: Nil) + def optionType(tp: Type) = appliedType(OptionClass, tp :: Nil) + def scalaRepeatedType(arg: Type) = appliedType(RepeatedParamClass, arg :: Nil) + def seqType(arg: Type) = appliedType(SeqClass, arg :: Nil) // For name-based pattern matching, derive the "element type" (type argument of Option/Seq) // from the relevant part of the signature of various members (get/head/apply/drop) @@ -955,7 +955,9 @@ trait Definitions extends api.StandardDefinitions { } } - def ClassType(arg: Type) = if (phase.erasedTypes) ClassClass.tpe else appliedType(ClassClass, arg) + def ClassType(arg: Type) = + if (phase.erasedTypes) ClassClass.tpe + else appliedType(ClassClass, arg :: Nil) /** Can we tell by inspecting the symbol that it will never * at any phase have type parameters? 
From 634d52e553551517c81ab8b4ea309cbc2d89c025 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Mon, 20 Aug 2018 21:41:11 -0400 Subject: [PATCH 1527/2793] flatten changeOwner arguments (it was only called with one pair) --- src/compiler/scala/tools/nsc/ast/TreeGen.scala | 2 +- .../scala/tools/nsc/transform/AccessorSynthesis.scala | 2 +- src/compiler/scala/tools/nsc/transform/Constructors.scala | 4 ++-- src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala | 2 +- src/compiler/scala/tools/nsc/transform/Fields.scala | 4 ++-- src/compiler/scala/tools/nsc/transform/UnCurry.scala | 4 ++-- src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 2 +- src/compiler/scala/tools/reflect/ToolBoxFactory.scala | 2 +- src/reflect/scala/reflect/internal/Trees.scala | 3 +++ 9 files changed, 14 insertions(+), 11 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 0ba7dad971d3..6fd08a481f12 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -352,7 +352,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { }) val selfParam = ValDef(selfParamSym) val rhs = orig.rhs.substituteThis(newSym.owner, gen.mkAttributedIdent(selfParamSym)) // scala/scala-dev#186 intentionally leaving Ident($this) is unpositioned - .substituteSymbols(origParams, newSym.info.params.drop(1)).changeOwner(origSym -> newSym) + .substituteSymbols(origParams, newSym.info.params.drop(1)).changeOwner(origSym, newSym) treeCopy.DefDef(orig, orig.mods, orig.name, orig.tparams, (selfParam :: orig.vparamss.head) :: Nil, orig.tpt, rhs).setSymbol(newSym) } diff --git a/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala b/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala index 851482af6e59..c135de373e02 100644 --- a/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala +++ b/src/compiler/scala/tools/nsc/transform/AccessorSynthesis.scala @@ -278,7 +278,7 @@ trait AccessorSynthesis extends Transform with ast.TreeDSL { */ def expandLazyClassMember(lazyVar: global.Symbol, lazyAccessor: global.Symbol, transformedRhs: global.Tree): Tree = { val slowPathSym = slowPathFor(lazyAccessor) - val rhsAtSlowDef = transformedRhs.changeOwner(lazyAccessor -> slowPathSym) + val rhsAtSlowDef = transformedRhs.changeOwner(lazyAccessor, slowPathSym) val isUnit = isUnitGetter(lazyAccessor) val selectVar = if (isUnit) UNIT else Select(thisRef, lazyVar) diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index b3e2e7ae6ba3..8cf0e4c7c2b6 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -250,7 +250,7 @@ abstract class Constructors extends Statics with Transform with TypingTransforme methodSym setInfoAndEnter MethodType(Nil, UnitTpe) // changeOwner needed because the `stats` contained in the DefDef were owned by the template, not long ago. 
- val blk = Block(stats, gen.mkZero(UnitTpe)).changeOwner(impl.symbol -> methodSym) + val blk = Block(stats, gen.mkZero(UnitTpe)).changeOwner(impl.symbol, methodSym) val delayedDD = localTyper typed { DefDef(methodSym, Nil, blk) } delayedDD.asInstanceOf[DefDef] @@ -549,7 +549,7 @@ abstract class Constructors extends Statics with Transform with TypingTransforme // Move tree into constructor, take care of changing owner from `oldOwner` to `newOwner` (the primary constructor symbol) def apply(oldOwner: Symbol, newOwner: Symbol)(tree: Tree) = if (tree eq EmptyTree) tree - else transform(tree.changeOwner(oldOwner -> newOwner)) + else transform(tree.changeOwner(oldOwner, newOwner)) } // Assign `rhs` to class field / trait setter `assignSym` diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index 5a73829165be..f21a28ccc72c 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -229,7 +229,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { .substituteSymbols(origTpeParams, extensionTpeParams) .substituteSymbols(origParams, extensionParams) .substituteThis(origThis, extensionThis) - .changeOwner(origMeth -> extensionMeth) + .changeOwner(origMeth, extensionMeth) new SubstututeRecursion(origMeth, extensionMeth, unit).transform(tree) } val castBody = diff --git a/src/compiler/scala/tools/nsc/transform/Fields.scala b/src/compiler/scala/tools/nsc/transform/Fields.scala index 029b7b951b4d..cf5cf75ba01a 100644 --- a/src/compiler/scala/tools/nsc/transform/Fields.scala +++ b/src/compiler/scala/tools/nsc/transform/Fields.scala @@ -600,7 +600,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor val computerSym = owner.newMethod(lazyName append nme.LAZY_SLOW_SUFFIX, pos, ARTIFACT | PRIVATE) setInfo MethodType(Nil, lazyValType) - val rhsAtComputer = rhs.changeOwner(lazySym -> computerSym) + val rhsAtComputer = rhs.changeOwner(lazySym, computerSym) val computer = mkAccessor(computerSym)(gen.mkSynchronized(Ident(holderSym))( If(initialized, getValue, @@ -690,7 +690,7 @@ abstract class Fields extends InfoTransform with ast.TreeDSL with TypingTransfor } def rhsAtOwner(stat: ValOrDefDef, newOwner: Symbol): Tree = - atOwner(newOwner)(super.transform(stat.rhs.changeOwner(stat.symbol -> newOwner))) + atOwner(newOwner)(super.transform(stat.rhs.changeOwner(stat.symbol, newOwner))) override def transform(stat: Tree): Tree = { val currOwner = currentOwner // often a class, but not necessarily diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 192fe7601cf2..51bb8296c978 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -336,7 +336,7 @@ abstract class UnCurry extends InfoTransform case body => val thunkFun = localTyper.typedPos(body.pos)(Function(Nil, body)).asInstanceOf[Function] log(s"Change owner from $currentOwner to ${thunkFun.symbol} in ${thunkFun.body}") - thunkFun.body.changeOwner((currentOwner, thunkFun.symbol)) + thunkFun.body.changeOwner(currentOwner, thunkFun.symbol) transformFunction(thunkFun) } } @@ -400,7 +400,7 @@ abstract class UnCurry extends InfoTransform debuglog("lifting tree at: " + (tree.pos)) val sym = currentOwner.newMethod(unit.freshTermName("liftedTree"), tree.pos) sym.setInfo(MethodType(List(), tree.tpe)) - 
tree.changeOwner(currentOwner -> sym) + tree.changeOwner(currentOwner, sym) localTyper.typedPos(tree.pos)(Block( List(DefDef(sym, ListOfNil, tree)), Apply(Ident(sym), Nil) diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 0dbb0e860b25..7fc64af4a27b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -181,7 +181,7 @@ trait NamesDefaults { self: Analyzer => blockTyper.context.scope enter sym val vd = atPos(sym.pos)(ValDef(sym, qual) setType NoType) // it stays in Vegas: scala/bug#5720, scala/bug#5727 - qual changeOwner (blockTyper.context.owner -> sym) + qual changeOwner (blockTyper.context.owner, sym) val newQual = atPos(qual.pos.focus)(blockTyper.typedQualifier(Ident(sym.name))) val baseFunTransformed = atPos(baseFun.pos.makeTransparent) { diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 61166f4239b8..ed6d4e6625a1 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -234,7 +234,7 @@ abstract class ToolBoxFactory[U <: JavaUniverse](val u: U) { factorySelf => case _ => NoSymbol } trace("wrapping ")(defOwner(expr) -> meth) - val methdef = DefDef(meth, expr changeOwner (defOwner(expr) -> meth)) + val methdef = DefDef(meth, expr changeOwner (defOwner(expr), meth)) val moduledef = ModuleDef( obj, diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index f94e16a0afb7..4929ca23d75a 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -216,6 +216,9 @@ trait Trees extends api.Trees { } } + def changeOwner(from: Symbol, to: Symbol): Tree = + new ChangeOwnerTraverser(from, to) apply this + def shallowDuplicate: Tree = new ShallowDuplicator(this) transform this def shortClass: String = (getClass.getName split "[.$]").last From f8a9cc541a036fc681791e3218e03cb6363249eb Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Tue, 21 Aug 2018 16:25:04 -0400 Subject: [PATCH 1528/2793] remove debugging assertion --- .../scala/tools/nsc/transform/patmat/MatchOptimization.scala | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala index de41991c90ab..dd1872c67790 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala @@ -89,8 +89,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { // store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used) // replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable val reused = new mutable.HashMap[TreeMaker, ReusedCondTreeMaker] - var okToCall = false - val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)} + val reusedOrOrig = (tm: TreeMaker) => reused.getOrElse(tm, tm) // maybe collapse: replace shared prefix of tree makers by a ReusingCondTreeMaker // once this has been computed, we'll know which tree makers are reused, @@ -128,7 +127,6 @@ trait 
MatchOptimization extends MatchTreeMaking with MatchAnalysis { collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains True-tests, which are dropped above) } - okToCall = true // TODO: remove (debugging) // replace original treemakers that are reused (as determined when computing collapsed), // by ReusedCondTreeMakers From b2c493d525f476d46095413832ad04a713c12bbc Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Wed, 22 Aug 2018 21:07:57 -0400 Subject: [PATCH 1529/2793] reduce allocation of Some objects for cached btype lookup --- .../scala/tools/nsc/backend/jvm/BTypes.scala | 39 +++++++----- .../nsc/backend/jvm/BTypesFromClassfile.scala | 16 ++--- .../nsc/backend/jvm/BTypesFromSymbols.scala | 59 ++++++++----------- .../tools/nsc/backend/jvm/PostProcessor.scala | 4 +- .../nsc/backend/jvm/opt/CallGraphTest.scala | 5 +- .../nsc/backend/jvm/opt/InlineInfoTest.scala | 9 +-- .../nsc/backend/jvm/opt/InlinerTest.scala | 3 +- 7 files changed, 66 insertions(+), 69 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index d2d1139a519a..b35796f6f736 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -6,7 +6,8 @@ package scala.tools.nsc package backend.jvm -import scala.collection.{concurrent, mutable} +import java.{util => ju} +import scala.collection.concurrent import scala.tools.asm import scala.tools.asm.Opcodes import scala.tools.nsc.backend.jvm.BTypes.{InlineInfo, InternalName} @@ -23,7 +24,7 @@ import scala.tools.nsc.backend.jvm.opt._ */ abstract class BTypes { val frontendAccess: PostProcessorFrontendAccess - import frontendAccess.{frontendSynch, recordPerRunCache} + import frontendAccess.{frontendSynch, recordPerRunJavaMapCache} val coreBTypes: CoreBTypes { val bTypes: BTypes.this.type } import coreBTypes._ @@ -35,13 +36,15 @@ abstract class BTypes { * `getCommonSuperClass`. In this method we need to obtain the ClassBType for a given internal * name. The method assumes that every class type that appears in the bytecode exists in the map */ - def cachedClassBType(internalName: InternalName): Option[ClassBType] = + // OPT: not returning Option[ClassBType] because the Some allocation shows up as a hotspot + def cachedClassBType(internalName: InternalName): ClassBType = classBTypeCache.get(internalName) // Concurrent maps because stack map frames are computed when in the class writer, which // might run on multiple classes concurrently. // Note usage should be private to this file, except for tests - val classBTypeCache: concurrent.Map[InternalName, ClassBType] = recordPerRunCache(FlatConcurrentHashMap.empty) + val classBTypeCache: ju.concurrent.ConcurrentHashMap[InternalName, ClassBType] = + recordPerRunJavaMapCache(new ju.concurrent.ConcurrentHashMap[InternalName, ClassBType]) /** * A BType is either a primitive type, a ClassBType, an ArrayBType of one of these, or a MethodType @@ -809,17 +812,23 @@ abstract class BTypes { def unapply(cr:ClassBType) = Some(cr.internalName) def apply(internalName: InternalName, fromSymbol: Boolean)(init: (ClassBType) => Either[NoClassBTypeInfo, ClassInfo]) = { - val newRes = if (fromSymbol) new ClassBTypeFromSymbol(internalName) else new ClassBTypeFromClassfile(internalName) - // synchronized s required to ensure proper initialisation if info. 
- // see comment on def info - newRes.synchronized { - classBTypeCache.putIfAbsent(internalName, newRes) match { - case None => - newRes._info = init(newRes) - newRes.checkInfoConsistency() - newRes - case Some(old) => - old + val cached = classBTypeCache.get(internalName) + if (cached ne null) cached + else { + val newRes = + if (fromSymbol) new ClassBTypeFromSymbol(internalName) + else new ClassBTypeFromClassfile(internalName) + // synchronized is required to ensure proper initialisation of info. + // see comment on def info + newRes.synchronized { + classBTypeCache.putIfAbsent(internalName, newRes) match { + case null => + newRes._info = init(newRes) + newRes.checkInfoConsistency() + newRes + case old => + old + } } } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala index 095e5911313a..cd5f74519df9 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala @@ -46,12 +46,10 @@ abstract class BTypesFromClassfile { * be found in the `byteCodeRepository`, the `info` of the resulting ClassBType is undefined. */ def classBTypeFromParsedClassfile(internalName: InternalName): ClassBType = { - cachedClassBType(internalName).getOrElse{ - ClassBType(internalName, false){ res:ClassBType => - byteCodeRepository.classNode(internalName) match { - case Left(msg) => Left(NoClassBTypeInfoMissingBytecode(msg)) - case Right(c) => computeClassInfoFromClassNode(c, res) - } + ClassBType(internalName, fromSymbol = false) { res: ClassBType => + byteCodeRepository.classNode(internalName) match { + case Left(msg) => Left(NoClassBTypeInfoMissingBytecode(msg)) + case Right(c) => computeClassInfoFromClassNode(c, res) } } } @@ -60,10 +58,8 @@ abstract class BTypesFromClassfile { * Construct the [[ClassBType]] for a parsed classfile. */ def classBTypeFromClassNode(classNode: ClassNode): ClassBType = { - cachedClassBType(classNode.name).getOrElse { - ClassBType(classNode.name, false) { res: ClassBType => - computeClassInfoFromClassNode(classNode, res) - } + ClassBType(classNode.name, fromSymbol = false) { res: ClassBType => + computeClassInfoFromClassNode(classNode, res) } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index c919c81a346c..073da11cffce 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -93,19 +93,12 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { else if (classSym == NullClass) srNullRef else { val internalName = classSym.javaBinaryNameString - cachedClassBType(internalName) match { - case Some(bType) => - if (currentRun.compiles(classSym)) - assert(bType fromSymbol, s"ClassBType for class being compiled was already created from a classfile: ${classSym.fullName}") - bType - case None => - // The new ClassBType is added to the map via its apply, before we set its info. This - // allows initializing cyclic dependencies, see the comment on variable ClassBType._info. - ClassBType(internalName, true) { res:ClassBType => - if (completeSilentlyAndCheckErroneous(classSym)) - Left(NoClassBTypeInfoClassSymbolInfoFailedSI9111(classSym.fullName)) - else computeClassInfo(classSym, res) - } + // The new ClassBType is added to the map via its apply, before we set its info. 
This + // allows initializing cyclic dependencies, see the comment on variable ClassBType._info. + ClassBType(internalName, fromSymbol = true) { res:ClassBType => + if (completeSilentlyAndCheckErroneous(classSym)) + Left(NoClassBTypeInfoClassSymbolInfoFailedSI9111(classSym.fullName)) + else computeClassInfo(classSym, res) } } } @@ -623,33 +616,29 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { def mirrorClassClassBType(moduleClassSym: Symbol): ClassBType = { assert(isTopLevelModuleClass(moduleClassSym), s"not a top-level module class: $moduleClassSym") val internalName = moduleClassSym.javaBinaryNameString.stripSuffix(nme.MODULE_SUFFIX_STRING) - cachedClassBType(internalName).getOrElse { - ClassBType(internalName, true) { c: ClassBType => - val shouldBeLazy = moduleClassSym.isJavaDefined || !currentRun.compiles(moduleClassSym) - val nested = Lazy.withLockOrEager(shouldBeLazy, exitingPickler(memberClassesForInnerClassTable(moduleClassSym)) map classBTypeFromSymbol) - Right(ClassInfo( - superClass = Some(ObjectRef), - interfaces = Nil, - flags = asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL, - nestedClasses = nested, - nestedInfo = Lazy.eagerNone, - inlineInfo = EmptyInlineInfo.copy(isEffectivelyFinal = true))) // no method inline infos needed, scala never invokes methods on the mirror class - } + ClassBType(internalName, fromSymbol = true) { c: ClassBType => + val shouldBeLazy = moduleClassSym.isJavaDefined || !currentRun.compiles(moduleClassSym) + val nested = Lazy.withLockOrEager(shouldBeLazy, exitingPickler(memberClassesForInnerClassTable(moduleClassSym)) map classBTypeFromSymbol) + Right(ClassInfo( + superClass = Some(ObjectRef), + interfaces = Nil, + flags = asm.Opcodes.ACC_SUPER | asm.Opcodes.ACC_PUBLIC | asm.Opcodes.ACC_FINAL, + nestedClasses = nested, + nestedInfo = Lazy.eagerNone, + inlineInfo = EmptyInlineInfo.copy(isEffectivelyFinal = true))) // no method inline infos needed, scala never invokes methods on the mirror class } } def beanInfoClassClassBType(mainClass: Symbol): ClassBType = { val internalName = mainClass.javaBinaryNameString + "BeanInfo" - cachedClassBType(internalName).getOrElse { - ClassBType(internalName, true) { c: ClassBType => - Right(ClassInfo( - superClass = Some(sbScalaBeanInfoRef), - interfaces = Nil, - flags = javaFlags(mainClass), - nestedClasses = Lazy.eagerNil, - nestedInfo = Lazy.eagerNone, - inlineInfo = EmptyInlineInfo)) - } + ClassBType(internalName, fromSymbol = true) { c: ClassBType => + Right(ClassInfo( + superClass = Some(sbScalaBeanInfoRef), + interfaces = Nil, + flags = javaFlags(mainClass), + nestedClasses = Lazy.eagerNil, + nestedInfo = Lazy.eagerNone, + inlineInfo = EmptyInlineInfo)) } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index c3b249ad2b93..95417af6a034 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -148,8 +148,8 @@ abstract class PostProcessor extends PerRunInit { */ override def getCommonSuperClass(inameA: String, inameB: String): String = { // All types that appear in a class node need to have their ClassBType cached, see [[cachedClassBType]]. 
- val a = cachedClassBType(inameA).get - val b = cachedClassBType(inameB).get + val a = cachedClassBType(inameA) + val b = cachedClassBType(inameB) val lub = a.jvmWiseLUB(b).get val lubName = lub.internalName assert(lubName != "scala/Any") diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala index 4af8b317a833..da7dcc68131b 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CallGraphTest.scala @@ -9,6 +9,7 @@ import org.junit.runners.JUnit4 import scala.collection.JavaConverters._ import scala.collection.immutable.IntMap +import scala.reflect.internal.util.JavaClearable import scala.tools.asm.tree._ import scala.tools.nsc.backend.jvm.BackendReporting._ import scala.tools.nsc.reporters.StoreReporter @@ -24,7 +25,7 @@ class CallGraphTest extends BytecodeTesting { compiler.keepPerRunCachesAfterRun(List( - bTypes.classBTypeCache, + JavaClearable.forMap(bTypes.classBTypeCache), postProcessor.byteCodeRepository.compilingClasses, postProcessor.byteCodeRepository.parsedClasses, postProcessor.callGraph.callsites)) @@ -142,7 +143,7 @@ class CallGraphTest extends BytecodeTesting { val m = getAsmMethod(c, "m") val List(fn) = callsInMethod(m) val forNameMeth = byteCodeRepository.methodNode("java/lang/Class", "forName", "(Ljava/lang/String;)Ljava/lang/Class;").get._1 - val classTp = cachedClassBType("java/lang/Class").get + val classTp = cachedClassBType("java/lang/Class") val r = callGraph.callsites(m)(fn) checkCallsite(fn, m, forNameMeth, classTp, safeToInline = false, atInline = false, atNoInline = false) } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala index 1f1eace35073..ab750855aeff 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala @@ -8,6 +8,7 @@ import org.junit.runner.RunWith import org.junit.runners.JUnit4 import scala.collection.JavaConverters._ +import scala.reflect.internal.util.JavaClearable import scala.tools.nsc.backend.jvm.BTypes.MethodInlineInfo import scala.tools.nsc.backend.jvm.BackendReporting._ import scala.tools.testing.BytecodeTesting @@ -20,7 +21,7 @@ class InlineInfoTest extends BytecodeTesting { override def compilerArgs = "-opt:l:inline -opt-inline-from:**" compiler.keepPerRunCachesAfterRun(List( - bTypes.classBTypeCache, + JavaClearable.forMap(bTypes.classBTypeCache), postProcessor.byteCodeRepository.compilingClasses, postProcessor.byteCodeRepository.parsedClasses)) @@ -45,7 +46,7 @@ class InlineInfoTest extends BytecodeTesting { """.stripMargin val classes = compileClasses(code) - val fromSyms = classes.map(c => global.genBCode.bTypes.cachedClassBType(c.name).get.info.get.inlineInfo) + val fromSyms = classes.map(c => global.genBCode.bTypes.cachedClassBType(c.name).info.get.inlineInfo) val fromAttrs = classes.map(c => { assert(c.attrs.asScala.exists(_.isInstanceOf[InlineInfoAttribute]), c.attrs) @@ -64,7 +65,7 @@ class InlineInfoTest extends BytecodeTesting { |} """.stripMargin compileClasses("class C { new A }", javaCode = List((jCode, "A.java"))) - val info = global.genBCode.bTypes.cachedClassBType("A").get.info.get.inlineInfo + val info = global.genBCode.bTypes.cachedClassBType("A").info.get.inlineInfo assertEquals(info.methodInfos, Map( "bar()I" -> MethodInlineInfo(true,false,false), "()V" -> 
MethodInlineInfo(false,false,false), @@ -85,7 +86,7 @@ class InlineInfoTest extends BytecodeTesting { compileClasses("class C { def t: java.nio.file.WatchEvent.Kind[String] = null }", javaCode = List((jCode, "WatchEvent.java"))) // before the fix of scala-dev#402, the companion of the nested class `Kind` (containing the static method) was taken from // the classpath (classfile WatchEvent$Kind.class) instead of the actual companion from the source, so the static method was missing. - val info = global.genBCode.bTypes.cachedClassBType("java/nio/file/WatchEvent$Kind").get.info.get.inlineInfo + val info = global.genBCode.bTypes.cachedClassBType("java/nio/file/WatchEvent$Kind").info.get.inlineInfo assertEquals(info.methodInfos, Map( "HAI()Ljava/lang/String;" -> MethodInlineInfo(true,false,false), "()V" -> MethodInlineInfo(false,false,false))) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index 61fecada673e..0d4408998989 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -8,6 +8,7 @@ import org.junit.runner.RunWith import org.junit.runners.JUnit4 import scala.collection.JavaConverters._ +import scala.reflect.internal.util.JavaClearable import scala.tools.asm.Opcodes._ import scala.tools.asm.tree._ import scala.tools.nsc.backend.jvm.BackendReporting._ @@ -26,7 +27,7 @@ class InlinerTest extends BytecodeTesting { compiler.keepPerRunCachesAfterRun(List( - bTypes.classBTypeCache, + JavaClearable.forMap(bTypes.classBTypeCache), postProcessor.byteCodeRepository.compilingClasses, postProcessor.byteCodeRepository.parsedClasses, postProcessor.callGraph.callsites)) From 38e994b204c581b3f1b894481a2a6cb170cfbd90 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Thu, 23 Aug 2018 18:52:05 -0400 Subject: [PATCH 1530/2793] avoid intermediate zipped list building --- src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala | 2 +- .../scala/tools/nsc/symtab/classfile/ClassfileParser.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index 4885083938e9..b10b9bb68784 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -932,7 +932,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { } def genLoadArguments(args: List[Tree], btpes: List[BType]) { - (args zip btpes) foreach { case (arg, btpe) => genLoad(arg, btpe) } + foreach2(args, btpes) { case (arg, btpe) => genLoad(arg, btpe) } } def genLoadModule(tree: Tree): BType = { diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index f99b85b7cfdd..ffaeb40a4e38 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1094,7 +1094,7 @@ abstract class ClassfileParser { def addParamNames(): Unit = if ((paramNames ne null) && sym.hasRawInfo && sym.isMethod) { val params = sym.rawInfo.params - (paramNames zip params).foreach { + foreach2(paramNames.toList, params) { case (nme.NO_NAME, _) => // param was ACC_SYNTHETIC; ignore case (name, param) => param.resetFlag(SYNTHETIC) From fb2b676cd3ab7bd02a094f8e105dd72145b53a6b Mon Sep 17 
00:00:00 2001 From: Harrison Houghton Date: Tue, 21 Aug 2018 15:22:56 -0400 Subject: [PATCH 1531/2793] faster `sequence`, and fuse `sequence(xs.map(f))` into `traverse(xs)(f)` --- .../tools/nsc/settings/MutableSettings.scala | 14 ++++++------- .../nsc/transform/patmat/MatchAnalysis.scala | 2 +- .../transform/patmat/MatchOptimization.scala | 10 ++++----- .../transform/patmat/PatternExpansion.scala | 3 ++- .../tools/nsc/typechecker/Contexts.scala | 5 ++--- .../nsc/typechecker/TypeDiagnostics.scala | 4 ++-- .../scala/tools/nsc/typechecker/Typers.scala | 5 ++--- .../scala/reflect/internal/Definitions.scala | 2 +- .../reflect/internal/ReificationSupport.scala | 6 +++--- .../reflect/internal/util/Collections.scala | 21 +++++++++++++++---- .../scala/reflect/internal/util/package.scala | 1 + 11 files changed, 43 insertions(+), 30 deletions(-) diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index bddef769be99..60650c48e0dc 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -11,7 +11,7 @@ package settings import io.{ AbstractFile, Jar, Path, PlainFile, VirtualDirectory } import scala.collection.generic.Clearable import scala.io.Source -import scala.reflect.internal.util.StringOps +import scala.reflect.internal.util.{ SomeOfNil, StringOps } import scala.reflect.{ ClassTag, classTag } /** A mutable Settings object. @@ -127,7 +127,7 @@ class MutableSettings(val errorFn: String => Unit) // -Xfoo: clears Clearables def clearIfExists(cmd: String): Option[List[String]] = lookupSetting(cmd) match { - case Some(c: Clearable) => c.clear() ; Some(Nil) + case Some(c: Clearable) => c.clear() ; SomeOfNil case Some(s) => s.errorAndValue(s"Missing argument to $cmd", None) case None => None } @@ -463,10 +463,10 @@ class MutableSettings(val errorFn: String => Unit) case List(x) => if (x.equalsIgnoreCase("true")) { value = true - Some(Nil) + SomeOfNil } else if (x.equalsIgnoreCase("false")) { value = false - Some(Nil) + SomeOfNil } else errorAndValue(s"'$x' is not a valid choice for '$name'", None) case _ => errorAndValue(s"'$name' accepts only one boolean value", None) } @@ -867,8 +867,8 @@ class MutableSettings(val errorFn: String => Unit) override def tryToSetColon(args: List[String]) = args match { case Nil => errorAndValue(usageErrorMessage, None) - case List("help") => sawHelp = true; Some(Nil) - case List(x) if choices contains x => value = x ; Some(Nil) + case List("help") => sawHelp = true; SomeOfNil + case List(x) if choices contains x => value = x ; SomeOfNil case List(x) => errorAndValue("'" + x + "' is not a valid choice for '" + name + "'", None) case xs => errorAndValue("'" + name + "' does not accept multiple arguments.", None) } @@ -933,7 +933,7 @@ class MutableSettings(val errorFn: String => Unit) args match { case Nil => if (default == "") errorAndValue("missing phase", None) else tryToSetColon(splitDefault) - case xs => value = (value ++ xs).distinct.sorted ; Some(Nil) + case xs => value = (value ++ xs).distinct.sorted ; SomeOfNil } } catch { case _: NumberFormatException => None } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index 27fdfe806b94..1c4e7caf1ff8 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -857,7 +857,7 
@@ trait MatchAnalysis extends MatchApproximation { val argLen = (caseFieldAccs.length min ctorParams.length) val examples = (0 until argLen).map(i => fields.get(caseFieldAccs(i)).map(_.toCounterExample(brevity)) getOrElse Some(WildcardExample)).toList - sequence(examples) + sequenceOpt(examples) } cls match { diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala index dd1872c67790..837f5158f971 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchOptimization.scala @@ -414,7 +414,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { // generate if-then-else for 1 case switch (avoids verify error... can't imagine a one-case switch being faster than if-then-else anyway) if (cases.isEmpty || cases.tail.isEmpty) Nil else { - val caseDefs = cases map { case (scrutSym, makers) => + val caseDefs = traverseOpt(cases) { case (scrutSym, makers) => makers match { // default case case GuardAndBodyTreeMakers(guard, body) => @@ -424,15 +424,15 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { Some(CaseDef(pattern, guard, body)) // alternatives case AlternativesTreeMaker(_, altss, pos) :: GuardAndBodyTreeMakers(guard, body) if alternativesSupported => - val switchableAlts = altss map { + // succeed iff they were all switchable + val switchableAlts = traverseOpt(altss) { case SwitchableTreeMaker(pattern) :: Nil => Some(pattern) case _ => None } - // succeed if they were all switchable - sequence(switchableAlts) map { switchableAlts => + switchableAlts map { switchableAlts => def extractConst(t: Tree) = t match { case Literal(const) => const case _ => t @@ -451,7 +451,7 @@ trait MatchOptimization extends MatchTreeMaking with MatchAnalysis { } } - val caseDefsWithGuards = sequence(caseDefs) match { + val caseDefsWithGuards = caseDefs match { case None => return Nil case Some(cds) => cds } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala index e56110cb6bb2..02a28999690a 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala @@ -10,6 +10,7 @@ package transform package patmat import scala.tools.nsc.typechecker.Contexts +import scala.reflect.internal.util /** An 'extractor' can be a case class or an unapply or unapplySeq method. * @@ -157,7 +158,7 @@ trait PatternExpansion { else None } - private def booleanUnapply = if (isBooleanUnapply) Some(Nil) else None + private def booleanUnapply = if (isBooleanUnapply) util.SomeOfNil else None // In terms of the (equivalent -- if we're dealing with an unapply) case class, what are the constructor's parameter types? 
private val equivConstrParamTypes = diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 1fd78e478858..0c19be60929f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -8,7 +8,7 @@ package typechecker import scala.collection.{ immutable, mutable } import scala.annotation.tailrec -import scala.reflect.internal.util.shortClassOfInstance +import scala.reflect.internal.util.{ shortClassOfInstance, SomeOfNil } import scala.tools.nsc.reporters.Reporter /** @@ -938,7 +938,7 @@ trait Contexts { self: Analyzer => // the corresponding package object may contain implicit members. val pre = owner.packageObject.typeOfThis Some(collectImplicits(pre.implicitMembers, pre)) - } else SomeNil + } else SomeOfNil } // @@ -1567,7 +1567,6 @@ trait Contexts { self: Analyzer => private def imp1Explicit = imp1 isExplicitImport name private def imp2Explicit = imp2 isExplicitImport name } - private final val SomeNil = Some(Nil) } object ContextMode { diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 314b856dab28..3c4e88334a11 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -123,10 +123,10 @@ trait TypeDiagnostics { */ final def exampleTuplePattern(names: List[Name]): String = { val arity = names.length - val varPatternNames: Option[List[String]] = sequence(names map { + val varPatternNames: Option[List[String]] = traverseOpt(names) { case name if nme.isVariableName(name) => Some(name.decode) case _ => None - }) + } def parenthesize(a: String) = s"($a)" def genericParams = (Seq("param1") ++ (if (arity > 2) Seq("...") else Nil) ++ Seq(s"param$arity")) parenthesize(varPatternNames.getOrElse(genericParams).mkString(", ")) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 2f828154c5d0..73ee29342207 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3796,9 +3796,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper tryConst(tree, pt) } def trees2ConstArg(trees: List[Tree], pt: Type): Option[ArrayAnnotArg] = { - val args = trees.map(tree2ConstArg(_, pt)) - if (args.exists(_.isEmpty)) None - else Some(ArrayAnnotArg(args.flatten.toArray)) + traverseOpt(trees)(tree2ConstArg(_, pt)) + .map(args => ArrayAnnotArg(args.toArray)) } // begin typedAnnotation diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 92e462d6c816..cf3b33a6eafe 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1350,7 +1350,7 @@ trait Definitions extends api.StandardDefinitions { newPolyMethod(1, owner, name, flags)(tparams => (None, createFn(tparams.head))) } def newT1NoParamsMethod(owner: Symbol, name: TermName, flags: Long)(createFn: Symbol => Type): MethodSymbol = { - newPolyMethod(1, owner, name, flags)(tparams => (Some(Nil), createFn(tparams.head))) + newPolyMethod(1, owner, name, flags)(tparams => (util.SomeOfNil, createFn(tparams.head))) } /** Is symbol a phantom class for which no runtime representation exists? 
*/ diff --git a/src/reflect/scala/reflect/internal/ReificationSupport.scala b/src/reflect/scala/reflect/internal/ReificationSupport.scala index 28b01eb59906..f6c9a7ab04e8 100644 --- a/src/reflect/scala/reflect/internal/ReificationSupport.scala +++ b/src/reflect/scala/reflect/internal/ReificationSupport.scala @@ -433,7 +433,7 @@ trait ReificationSupport { self: SymbolTable => def unapply(tree: Tree): Option[List[Tree]] = tree match { case Literal(Constant(())) => - Some(Nil) + SomeOfNil case Apply(MaybeTypeTreeOriginal(SyntacticTypeApplied(MaybeSelectApply(TupleCompanionRef(sym)), targs)), args) if sym == TupleClass(args.length).companionModule && (targs.isEmpty || targs.length == args.length) => @@ -453,7 +453,7 @@ trait ReificationSupport { self: SymbolTable => def unapply(tree: Tree): Option[List[Tree]] = tree match { case MaybeTypeTreeOriginal(UnitClassRef(_)) => - Some(Nil) + SomeOfNil case MaybeTypeTreeOriginal(AppliedTypeTree(TupleClassRef(sym), args)) if sym == TupleClass(args.length) => Some(args) @@ -507,7 +507,7 @@ trait ReificationSupport { self: SymbolTable => def unapply(tree: Tree): Option[List[Tree]] = tree match { case bl @ self.Block(stats, SyntheticUnit()) => Some(treeInfo.untypecheckedBlockBody(bl)) case bl @ self.Block(stats, expr) => Some(treeInfo.untypecheckedBlockBody(bl) :+ expr) - case SyntheticUnit() => Some(Nil) + case SyntheticUnit() => SomeOfNil case _ if tree.isTerm && tree.nonEmpty => Some(tree :: Nil) case _ => None } diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index 970a5d300f8f..11d10128d1fc 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -288,10 +288,23 @@ trait Collections { true } - final def sequence[A](as: List[Option[A]]): Option[List[A]] = { - if (as.exists (_.isEmpty)) None - else Some(as.flatten) - } + // "Opt" suffix or traverse clashes with the various traversers' traverses + final def sequenceOpt[A](as: List[Option[A]]): Option[List[A]] = traverseOpt(as)(identity) + final def traverseOpt[A, B](as: List[A])(f: A => Option[B]): Option[List[B]] = + if (as eq Nil) SomeOfNil else { + var result: ListBuffer[B] = null + var curr = as + while (curr ne Nil) { + f(curr.head) match { + case Some(b) => + if (result eq null) result = ListBuffer.empty + result += b + case None => return None + } + curr = curr.tail + } + Some(result.toList) + } final def transposeSafe[A](ass: List[List[A]]): Option[List[List[A]]] = try { Some(ass.transpose) diff --git a/src/reflect/scala/reflect/internal/util/package.scala b/src/reflect/scala/reflect/internal/util/package.scala index 9b5fd3798d70..cbffe587f6b8 100644 --- a/src/reflect/scala/reflect/internal/util/package.scala +++ b/src/reflect/scala/reflect/internal/util/package.scala @@ -8,6 +8,7 @@ package object util { // An allocation-avoiding reusable instance of the so-common List(Nil). val ListOfNil: List[List[Nothing]] = Nil :: Nil + val SomeOfNil: Option[List[Nothing]] = Some(Nil) def andFalse(body: Unit): Boolean = false From b79a6237112085763e9c291a1ea77563ed896978 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 25 Jan 2018 09:59:55 +1000 Subject: [PATCH 1532/2793] Fix lookup of default getter in scope By slightly modifying an existing test to force creation of default getters for both `bar` methods _before_ typechecking the application, I was able to show a latent bug in the way the default getter is looked up in scope. 
The bespoke `Context.lookup` method did not respect shadowing, but rather considered the two, same-named default getters as overloaded. Because the overloaded symbol had NoSymbol as its owner, which didn't match the expected owner, neither default was eligible. This commit brings the code more into line with `Context.lookupSymbol` and respects shadowing. (cherry picked from commit 86f2028c0780fa15cb48e15c3eb81f037964114c) --- .../scala/tools/nsc/typechecker/Contexts.scala | 13 ++++++++----- test/files/run/names-defaults-nest.scala | 13 +++++++++++++ 2 files changed, 21 insertions(+), 5 deletions(-) create mode 100644 test/files/run/names-defaults-nest.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 5eae827baa2b..b54e59dbf57a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1223,11 +1223,14 @@ trait Contexts { self: Analyzer => var res: Symbol = NoSymbol var ctx = this while (res == NoSymbol && ctx.outer != ctx) { - val s = ctx.scope lookup name - if (s != NoSymbol && s.owner == expectedOwner) - res = s - else - ctx = ctx.outer + ctx.scope.lookupUnshadowedEntries(name).filter(s => s.sym != NoSymbol && s.sym.owner == expectedOwner).toList match { + case Nil => + ctx = ctx.outer + case found :: Nil => + res = found.sym + case alts => + res = expectedOwner.newOverloaded(NoPrefix, alts.map(_.sym)) + } } res } diff --git a/test/files/run/names-defaults-nest.scala b/test/files/run/names-defaults-nest.scala new file mode 100644 index 000000000000..d98a9ee45b14 --- /dev/null +++ b/test/files/run/names-defaults-nest.scala @@ -0,0 +1,13 @@ +object Test { + def multinest = { + def baz = bar(); + def bar(x: String = "a"): Any = { + def bar(x: String = "b") = x + bar() + x + }; + assert(baz == "ba", baz) + } + def main(args: Array[String]) { + multinest + } +} From dd0b8c6d6f1a740042b7c3bf6fce3b627035c24c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 25 Jan 2018 11:10:27 +1000 Subject: [PATCH 1533/2793] Unify scope lookup for companions and default getters In #5700, I fixed a bug in the companion lookup, which ensured they were defined in the same scope. The same approach applies well to the lookup of default getters. You may ask, we can't just use: ``` context.lookupSymbol(name, _.owner == expectedOwner) ``` That doesn't individually lookup the entry in each enclosing nested scopes, but rather relies on the outer scope delegation in `Scope.lookupEntry` itself. This in turn relies on the way that nested scopes share the `elems` table with the enclosing scope: ``` final def newNestedScope(outer: Scope): Scope = { val nested = newScope nested.elems = outer.elems nested.nestinglevel = outer.nestinglevel + 1 ... } ``` If the outer scope is later mutated, in our case by lazily adding the default getter, the inner scope won't see the new elems. Context.lookupSymbol will jump immediately jump to search of the enclosing prefix. Perhaps a better design would be for the inner scope to retain a reference to the outer one, rather than just to the head of its elems linked list at the time the nested scope was created. 
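To make that sharing problem concrete, here is a standalone toy sketch (not the compiler's actual `Scope`/`Context` API; the class and member names are illustrative only): a nested scope snapshots the outer scope's entry list when it is created, so an entry added to the outer scope afterwards, such as a lazily created default getter, is invisible through that snapshot.

```scala
// Toy model only -- NOT scala.reflect.internal.Scopes.Scope.
final class ToyScope(private var elems: List[String]) {
  def newNestedScope: ToyScope = new ToyScope(elems)      // shares only the current head
  def enter(name: String): Unit = elems = name :: elems   // later additions stay local
  def lookup(name: String): Option[String] = elems.find(_ == name)
}

object ToyScopeDemo {
  def main(args: Array[String]): Unit = {
    val outer  = new ToyScope(Nil)
    val nested = outer.newNestedScope
    outer.enter("foo$default$1")            // default getter entered lazily, after nesting
    println(outer.lookup("foo$default$1"))  // Some(foo$default$1)
    println(nested.lookup("foo$default$1")) // None: nested still sees the old snapshot
  }
}
```

Hence the lookup below walks each enclosing context's scope itself instead of leaning on the outer-scope delegation.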
(cherry picked from commit da14e9c75d4230785fccf98eef69d0e7f5c867fa) --- .../tools/nsc/typechecker/Contexts.scala | 39 +++++-------------- .../tools/nsc/typechecker/NamesDefaults.scala | 5 +-- test/files/run/names-defaults-nest.scala | 3 +- 3 files changed, 13 insertions(+), 34 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index b54e59dbf57a..32a0a4524d3d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1210,32 +1210,14 @@ trait Contexts { self: Analyzer => else finish(EmptyTree, NoSymbol) } - /** - * Find a symbol in this context or one of its outers. - * - * Used to find symbols are owned by methods (or fields), they can't be - * found in some scope. - * - * Examples: companion module of classes owned by a method, default getter - * methods of nested methods. See NamesDefaults.scala - */ - def lookup(name: Name, expectedOwner: Symbol) = { - var res: Symbol = NoSymbol - var ctx = this - while (res == NoSymbol && ctx.outer != ctx) { - ctx.scope.lookupUnshadowedEntries(name).filter(s => s.sym != NoSymbol && s.sym.owner == expectedOwner).toList match { - case Nil => - ctx = ctx.outer - case found :: Nil => - res = found.sym - case alts => - res = expectedOwner.newOverloaded(NoPrefix, alts.map(_.sym)) - } - } - res + final def lookupCompanionInIncompleteOwner(original: Symbol): Symbol = { + // Must have both a class and module symbol, so that `{ class C; def C }` or `{ type T; object T }` are not companions. + def isCompanion(sym: Symbol): Boolean = + (original.isModule && sym.isClass || sym.isModule && original.isClass) && sym.isCoDefinedWith(original) + lookupSibling(original, original.name.companionName).filter(isCompanion) } - final def lookupCompanionInIncompleteOwner(original: Symbol): Symbol = { + final def lookupSibling(original: Symbol, name: Name): Symbol = { /* Search scopes in current and enclosing contexts for the definition of `symbol` */ def lookupScopeEntry(symbol: Symbol): ScopeEntry = { var res: ScopeEntry = null @@ -1250,15 +1232,12 @@ trait Contexts { self: Analyzer => res } - // 1) Must be owned by the same Scope, to ensure that in - // `{ class C; { ...; object C } }`, the class is not seen as a companion of the object. - // 2) Must be a class and module symbol, so that `{ class C; def C }` or `{ type T; object T }` are not companions. + // Must be owned by the same Scope, to ensure that in + // `{ class C; { ...; object C } }`, the class is not seen as a companion of the object. lookupScopeEntry(original) match { case null => NoSymbol case entry => - def isCompanion(sym: Symbol): Boolean = - (original.isModule && sym.isClass || sym.isModule && original.isClass) && sym.isCoDefinedWith(original) - entry.owner.lookupNameInSameScopeAs(original, original.name.companionName).filter(isCompanion) + entry.owner.lookupNameInSameScopeAs(original, name) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 0dbb0e860b25..6a78a6906d3d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -479,9 +479,8 @@ trait NamesDefaults { self: Analyzer => if (param.owner.owner.isClass) { param.owner.owner.info.member(defGetterName) } else { - // the owner of the method is another method. find the default - // getter in the context. 
- context.lookup(defGetterName, param.owner.owner) + // the owner of the method is another method. find the default getter in the context. + context.lookupSibling(param.owner, defGetterName) } } } else NoSymbol diff --git a/test/files/run/names-defaults-nest.scala b/test/files/run/names-defaults-nest.scala index d98a9ee45b14..2849bdfc5072 100644 --- a/test/files/run/names-defaults-nest.scala +++ b/test/files/run/names-defaults-nest.scala @@ -1,10 +1,11 @@ object Test { def multinest = { - def baz = bar(); + def baz = {bar()} def bar(x: String = "a"): Any = { def bar(x: String = "b") = x bar() + x }; + bar$default$1(0) assert(baz == "ba", baz) } def main(args: Array[String]) { From a7856fa51b0bc5c09cf797c2ac910e0c0dccb848 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 29 Jan 2018 23:05:54 +1000 Subject: [PATCH 1534/2793] scalap should not print class type param annots in decls The annotation removed in this diff was actually from `R`! (cherry picked from commit 623589a81be219e11f71f3a62f3d00673b0fda60) --- .../tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala | 2 +- .../scala/tools/scalap/scalax/rules/scalasig/Symbol.scala | 2 +- test/files/scalap/typeAnnotations.check | 1 - 3 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala index a7bf10673967..fff15eee1b38 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala @@ -61,7 +61,7 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { case a: AliasSymbol => indent printAlias(level, a) - case t: TypeSymbol if !t.isParam && !t.name.matches("_\\$\\d+")=> + case t: TypeSymbol if !t.name.matches("_\\$\\d+")=> indent printTypeSymbol(level, t) case s => diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala index 6c38687649bd..2c3913c1f354 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala @@ -27,7 +27,7 @@ abstract class ScalaSigSymbol extends Symbol { def entry: ScalaSig#Entry def index = entry.index - lazy val children: Seq[Symbol] = applyScalaSigRule(ScalaSigParsers.symbols) filter (_.parent == Some(this)) + lazy val children: Seq[Symbol] = applyScalaSigRule(ScalaSigParsers.symbols) filter (sym => sym.parent == Some(this) && !sym.isParam) lazy val attributes: Seq[AttributeInfo] = applyScalaSigRule(ScalaSigParsers.attributes) filter (_.symbol == this) } diff --git a/test/files/scalap/typeAnnotations.check b/test/files/scalap/typeAnnotations.check index cba69f8e41d7..575816c3658c 100644 --- a/test/files/scalap/typeAnnotations.check +++ b/test/files/scalap/typeAnnotations.check @@ -1,6 +1,5 @@ abstract class TypeAnnotations[@scala.specialized R] extends scala.AnyRef { def this() = { /* compiled code */ } - @scala.specialized val x: scala.Int = { /* compiled code */ } @scala.specialized type T From e3722fa5b039a33e321634868fc3725b86255dc3 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 23 Jan 2018 14:59:08 +1000 Subject: [PATCH 1535/2793] Refactor pickle phase Small refactoring designed to make the subsequent commit more reviewable. 
Tightens up the definition of "companion"-s so that we no longer add a type alias in a package object into the pickle of a same-named module. (cherry picked from commit b41e6516321da2cda0441bec1c7d3d66ae2dab42) --- .../tools/nsc/symtab/classfile/Pickler.scala | 34 +++++++++++-------- 1 file changed, 19 insertions(+), 15 deletions(-) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 7fc9ec14f98e..9c8b81b2d903 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -33,26 +33,30 @@ abstract class Pickler extends SubComponent { def newPhase(prev: Phase): StdPhase = new PicklePhase(prev) class PicklePhase(prev: Phase) extends StdPhase(prev) { - def apply(unit: CompilationUnit) { - def pickle(tree: Tree) { - def add(sym: Symbol, pickle: Pickle) = { - if (currentRun.compiles(sym) && !currentRun.symData.contains(sym)) { - debuglog("pickling " + sym) - pickle putSymbol sym - currentRun.symData(sym) = pickle - } - } - + def apply(unit: CompilationUnit): Unit = { + def pickle(tree: Tree): Unit = { tree match { case PackageDef(_, stats) => stats foreach pickle case ClassDef(_, _, _, _) | ModuleDef(_, _, _) => val sym = tree.symbol - val pickle = new Pickle(sym) - add(sym, pickle) - add(sym.companionSymbol, pickle) - pickle.writeArray() - currentRun registerPickle sym + def shouldPickle(sym: Symbol) = currentRun.compiles(sym) && !currentRun.symData.contains(sym) + if (shouldPickle(sym)) { + val pickle = new Pickle(sym) + def pickleSym(sym: Symbol) = { + pickle.putSymbol(sym) + currentRun.symData(sym) = pickle + } + + val companion = sym.companionSymbol.filter(_.owner == sym.owner) // exclude companionship between package- and package object-owned symbols. + val syms = sym :: (if (shouldPickle(companion)) companion :: Nil else Nil) + syms.foreach { sym => + pickle.putSymbol(sym) + currentRun.symData(sym) = pickle + } + pickle.writeArray() + currentRun registerPickle sym + } case _ => } } From 1cff32676765ab217a0010896464ebbb916f0247 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 31 Jan 2018 12:16:40 +1000 Subject: [PATCH 1536/2793] Preserve order of decls through pickle/unpickle The pickle format does not explicitly enccode the order of decls. Instead, symbols are entered into an index in the order that they are found by the pickler, either as a definition or as a reference. During unpickling, symbols are read and entered into the owner's decls in that order. This is a cause of unstable compiler output: a class that mixes in members from some trait will have a different order if it is compiled jointly with / separately from that trait. This commit modifies the pickler with an initial pass that reserves index entries for all declarations in the declaration order. The pickle format and the unpickler are unchanged. 
(cherry picked from commit 8cc7e56d86c621a4c63a276f2390849196451888) --- .../tools/nsc/symtab/classfile/Pickler.scala | 16 ++++++-- .../nsc/symtab/classfile/PicklerTest.scala | 40 +++++++++++++++++++ 2 files changed, 52 insertions(+), 4 deletions(-) create mode 100644 test/junit/scala/tools/nsc/symtab/classfile/PicklerTest.scala diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 9c8b81b2d903..adda9368ff3f 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -43,13 +43,15 @@ abstract class Pickler extends SubComponent { def shouldPickle(sym: Symbol) = currentRun.compiles(sym) && !currentRun.symData.contains(sym) if (shouldPickle(sym)) { val pickle = new Pickle(sym) - def pickleSym(sym: Symbol) = { - pickle.putSymbol(sym) - currentRun.symData(sym) = pickle + def reserveDeclEntries(sym: Symbol): Unit = { + pickle.reserveEntry(sym) + if (sym.isClass) sym.info.decls.foreach(reserveDeclEntries) + else if (sym.isModule) reserveDeclEntries(sym.moduleClass) } val companion = sym.companionSymbol.filter(_.owner == sym.owner) // exclude companionship between package- and package object-owned symbols. val syms = sym :: (if (shouldPickle(companion)) companion :: Nil else Nil) + syms.foreach(reserveDeclEntries) syms.foreach { sym => pickle.putSymbol(sym) currentRun.symData(sym) = pickle @@ -125,6 +127,11 @@ abstract class Pickler extends SubComponent { private def isExternalSymbol(sym: Symbol): Boolean = (sym != NoSymbol) && !isLocalToPickle(sym) // Phase 1 methods: Populate entries/index ------------------------------------ + private val reserved = mutable.BitSet() + final def reserveEntry(sym: Symbol): Unit = { + reserved(ep) = true + putEntry(sym) + } /** Store entry e in index at next available position unless * it is already there. @@ -132,7 +139,8 @@ abstract class Pickler extends SubComponent { * @return true iff entry is new. 
*/ private def putEntry(entry: AnyRef): Boolean = index.get(entry) match { - case Some(_) => false + case Some(i) => + reserved.remove(i) case None => if (ep == entries.length) { val entries1 = new Array[AnyRef](ep * 2) diff --git a/test/junit/scala/tools/nsc/symtab/classfile/PicklerTest.scala b/test/junit/scala/tools/nsc/symtab/classfile/PicklerTest.scala new file mode 100644 index 000000000000..d994727b1f9a --- /dev/null +++ b/test/junit/scala/tools/nsc/symtab/classfile/PicklerTest.scala @@ -0,0 +1,40 @@ +package scala.tools.nsc.symtab.classfile + +import org.junit.{Assert, Test} +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.reflect.io.VirtualDirectory +import scala.tools.nsc.Global +import scala.tools.nsc.classpath.{AggregateClassPath, VirtualDirectoryClassPath} +import scala.tools.testing.BytecodeTesting + +@RunWith(classOf[JUnit4]) +class PicklerTest extends BytecodeTesting { + @Test + def pickleUnpicklePreserveDeclOrder(): Unit = { + assertStableDecls("package p1; trait C { def x: T; def y: Int; class T }", "p1.C") + assertStableDecls("package p1; class D; object D { def x: T = null; def y: Int = 0; class T }", "p1.D") + } + + def assertStableDecls(source: String, name: String): Unit = { + val compiler1 = BytecodeTesting.newCompiler(extraArgs = compilerArgs) + val r = new compiler1.global.Run + r.compileSources(compiler1.global.newSourceFile(source) :: Nil) + val compiler2 = BytecodeTesting.newCompiler(extraArgs = compilerArgs) + val out = compiler1.global.settings.outputDirs.getSingleOutput.get.asInstanceOf[VirtualDirectory] + def showDecls(global: Global): Seq[String] = global.exitingPickler { + val classSym = global.rootMirror.getClassIfDefined(name) + val moduleSym = global.rootMirror.getModuleIfDefined(name).moduleClass + val syms = List(classSym, moduleSym).filter(sym => sym.exists) + Assert.assertTrue(syms.nonEmpty) + syms.flatMap(sym => sym.name.toString :: sym.info.decls.toList.map(decl => global.definitions.fullyInitializeSymbol(decl).defString)) + } + val decls1 = showDecls(compiler1.global) + compiler2.global.classPath + compiler2.global.platform.currentClassPath = Some(AggregateClassPath(new VirtualDirectoryClassPath(out) :: compiler2.global.platform.currentClassPath.get :: Nil)) + new compiler2.global.Run + val decls2 = showDecls(compiler2.global) + Assert.assertEquals(decls1, decls2) + } +} From fc1e2f1f946f4d0c85eed4b29e6162e821ecd51a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 18 Jan 2018 17:09:56 +1000 Subject: [PATCH 1537/2793] Refactor default getter synthesis - Factor out differences between constructors and regular methods into an virtual call to a helper class - Tease apart symbol creation/entry from synthesis of the default getter tree to prepare for a subsequent commit that will perform the first part eagerly. - Add a test to show the unstable order of the default getter symbols in the owner's scope. 
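As background for what these getters are, a hand-written illustration (only the `<method>$default$<n>` naming convention is taken from the compiler; the bodies below are written out by hand, not compiler output):
```
object Example {
  def f(a: Int = 0)(b: Int = a + 1): Int = a + b
  // is conceptually accompanied by the synthesized getters
  //   def f$default$1: Int = 0
  //   def f$default$2(a: Int): Int = a + 1
}

// Defaults of a class constructor get their getters in the companion object,
// roughly: object Widget { def <init>$default$1: Int = 1 }
class Widget(val size: Int = 1)
```
Which scope those getter symbols end up in, and when they are entered there, is what the helper class introduced below abstracts over.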
(cherry picked from commit f44e1bf728e8cf6c950af6f0aacd1a2c03bbd1d3) --- .../scala/tools/nsc/typechecker/Namers.scala | 176 +++++++++++------- .../tools/nsc/typechecker/NamerTest.scala | 23 +++ 2 files changed, 128 insertions(+), 71 deletions(-) create mode 100644 test/junit/scala/tools/nsc/typechecker/NamerTest.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 806025c026c8..d3980c3996ee 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -757,7 +757,7 @@ trait Namers extends MethodSynthesis { } else completerOf(tree) sym setInfo completer - } + } def enterClassDef(tree: ClassDef) { val ClassDef(mods, _, _, impl) = tree @@ -1442,6 +1442,7 @@ trait Namers extends MethodSynthesis { // in methods with multiple default parameters def rtparams = rtparams0.map(_.duplicate) def rvparamss = rvparamss0.map(_.map(_.duplicate)) + val search = DefaultGetterNamerSearch(context, meth, initCompanionModule = true) val methOwner = meth.owner val isConstr = meth.isConstructor val overrides = overridden != NoSymbol && !overridden.isOverloaded @@ -1457,9 +1458,6 @@ trait Namers extends MethodSynthesis { "" + meth.fullName + ", "+ overridden.fullName ) - // cache the namer used for entering the default getter symbols - var ownerNamer: Option[Namer] = None - var moduleNamer: Option[(ClassDef, Namer)] = None var posCounter = 1 // For each value parameter, create the getter method if it has a @@ -1499,80 +1497,59 @@ trait Namers extends MethodSynthesis { val oflag = if (baseHasDefault) OVERRIDE else 0 val name = nme.defaultGetterName(meth.name, posCounter) - var defTparams = rtparams val defVparamss = mmap(rvparamss.take(previous.length)){ rvp => copyValDef(rvp)(mods = rvp.mods &~ DEFAULTPARAM, rhs = EmptyTree) } - - val parentNamer = if (isConstr) { - val (cdef, nmr) = moduleNamer.getOrElse { - val module = companionSymbolOf(methOwner, context) - module.initialize // call type completer (typedTemplate), adds the - // module's templateNamer to classAndNamerOfModule - module.attachments.get[ConstructorDefaultsAttachment] match { - // by martin: the null case can happen in IDE; this is really an ugly hack on top of an ugly hack but it seems to work - case Some(cda) => - if (cda.companionModuleClassNamer == null) { - devWarning(s"scala/bug#6576 The companion module namer for $meth was unexpectedly null") - return - } - val p = (cda.classWithDefault, cda.companionModuleClassNamer) - moduleNamer = Some(p) - p - case _ => - return // fix #3649 (prevent crash in erroneous source code) - } - } - val ClassDef(_, _, rtparams, _) = resetAttrs(deriveClassDef(cdef)(_ => Template(Nil, noSelfType, Nil)).duplicate) - defTparams = rtparams.map(rt => copyTypeDef(rt)(mods = rt.mods &~ (COVARIANT | CONTRAVARIANT))) - nmr - } - else ownerNamer getOrElse { - val ctx = context.nextEnclosing(c => c.scope.toList.contains(meth)) - assert(ctx != NoContext, meth) - val nmr = newNamer(ctx) - ownerNamer = Some(nmr) - nmr - } - - val defTpt = - // don't mess with tpt's of case copy default getters, because assigning something other than TypeTree() - // will break the carefully orchestrated naming/typing logic that involves copyMethodCompleter and caseClassCopyMeth - if (meth.isCaseCopy) TypeTree() - else { - // If the parameter type mentions any type parameter of the method, let the compiler infer the - // return type of the default getter => allow "def foo[T](x: T = 1)" to compile. 
- // This is better than always using Wildcard for inferring the result type, for example in - // def f(i: Int, m: Int => Int = identity _) = m(i) - // if we use Wildcard as expected, we get "Nothing => Nothing", and the default is not usable. - // TODO: this is a very brittle approach; I sincerely hope that Denys's research into hygiene - // will open the doors to a much better way of doing this kind of stuff - val tparamNames = defTparams map { case TypeDef(_, name, _, _) => name } - val eraseAllMentionsOfTparams = new TypeTreeSubstituter(tparamNames contains _) - eraseAllMentionsOfTparams(rvparam.tpt match { - // default getter for by-name params - case AppliedTypeTree(_, List(arg)) if sym.hasFlag(BYNAMEPARAM) => arg - case t => t + val defaultGetterSym = search.createAndEnter { owner: Symbol => + methOwner.resetFlag(INTERFACE) // there's a concrete member now + val default = owner.newMethodSymbol(name, vparam.pos, paramFlagsToDefaultGetter(meth.flags)) + default.setPrivateWithin(meth.privateWithin) + if (meth.name == nme.apply && meth.hasAllFlags(CASE | SYNTHETIC)) { + val att = meth.attachments.get[CaseApplyDefaultGetters].getOrElse({ + val a = new CaseApplyDefaultGetters() + meth.updateAttachment(a) + a }) + att.defaultGetters += default } - val defRhs = rvparam.rhs - - val defaultTree = atPos(vparam.pos.focus) { - DefDef(Modifiers(paramFlagsToDefaultGetter(meth.flags), ddef.mods.privateWithin) | oflag, name, defTparams, defVparamss, defTpt, defRhs) + if (default.owner.isTerm) + saveDefaultGetter(meth, default) + default } - if (!isConstr) - methOwner.resetFlag(INTERFACE) // there's a concrete member now - val default = parentNamer.enterSyntheticSym(defaultTree) - if (meth.name == nme.apply && meth.hasAllFlags(CASE | SYNTHETIC)) { - val att = meth.attachments.get[CaseApplyDefaultGetters].getOrElse({ - val a = new CaseApplyDefaultGetters() - meth.updateAttachment(a) - a - }) - att.defaultGetters += default + if (defaultGetterSym == NoSymbol) return + + search.addGetter(rtparams) { + (parentNamer: Namer, defTparams: List[TypeDef]) => + val defTpt = + // don't mess with tpt's of case copy default getters, because assigning something other than TypeTree() + // will break the carefully orchestrated naming/typing logic that involves copyMethodCompleter and caseClassCopyMeth + if (meth.isCaseCopy) TypeTree() + else { + // If the parameter type mentions any type parameter of the method, let the compiler infer the + // return type of the default getter => allow "def foo[T](x: T = 1)" to compile. + // This is better than always using Wildcard for inferring the result type, for example in + // def f(i: Int, m: Int => Int = identity _) = m(i) + // if we use Wildcard as expected, we get "Nothing => Nothing", and the default is not usable. 
+ // TODO: this is a very brittle approach; I sincerely hope that Denys's research into hygiene + // will open the doors to a much better way of doing this kind of stuff + val tparamNames = defTparams map { case TypeDef(_, name, _, _) => name } + val eraseAllMentionsOfTparams = new TypeTreeSubstituter(tparamNames contains _) + eraseAllMentionsOfTparams(rvparam.tpt match { + // default getter for by-name params + case AppliedTypeTree(_, List(arg)) if sym.hasFlag(BYNAMEPARAM) => arg + case t => t + }) + } + val defRhs = rvparam.rhs + + val defaultTree = atPos(vparam.pos.focus) { + DefDef(Modifiers(paramFlagsToDefaultGetter(meth.flags), ddef.mods.privateWithin) | oflag, name, defTparams, defVparamss, defTpt, defRhs) + } + assert(defaultGetterSym != NoSymbol, (parentNamer.owner, name)) + defaultTree.setSymbol(defaultGetterSym) + defaultGetterSym.setInfo(parentNamer.completerOf(defaultTree)) + defaultTree } - if (default.owner.isTerm) - saveDefaultGetter(meth, default) } else if (baseHasDefault) { // the parameter does not have a default itself, but the @@ -1587,6 +1564,63 @@ trait Namers extends MethodSynthesis { } } + private object DefaultGetterNamerSearch { + def apply(c: Context, meth: Symbol, initCompanionModule: Boolean) = if (meth.isConstructor) new DefaultGetterInCompanion(c, meth, initCompanionModule) + else new DefaultMethodInOwningScope(c, meth) + } + private abstract class DefaultGetterNamerSearch { + def addGetter(rtparams0: List[TypeDef])(create: (Namer, List[TypeDef]) => Tree) + + def createAndEnter(f: Symbol => Symbol): Symbol + } + private class DefaultGetterInCompanion(c: Context, meth: Symbol, initCompanionModule: Boolean) extends DefaultGetterNamerSearch { + private val module = companionSymbolOf(meth.owner, context) + if (initCompanionModule) module.initialize + private val cda: Option[ConstructorDefaultsAttachment] = module.attachments.get[ConstructorDefaultsAttachment] + private val moduleNamer = cda.flatMap(x => Option(x.companionModuleClassNamer)) + + def createAndEnter(f: Symbol => Symbol): Symbol = { + val default = f(module.moduleClass) + moduleNamer match { + case Some(namer) => + namer.enterInScope(default) + case None => + // ignore error to fix #3649 (prevent crash in erroneous source code) + NoSymbol + } + } + def addGetter(rtparams0: List[TypeDef])(create: (Namer, List[TypeDef]) => Tree): Unit = { + cda match { + case Some(attachment) => + moduleNamer match { + case Some(namer) => + val cdef = attachment.classWithDefault + val ClassDef(_, _, rtparams, _) = resetAttrs(deriveClassDef(cdef)(_ => Template(Nil, noSelfType, Nil)).duplicate) + val defTparams = rtparams.map(rt => copyTypeDef(rt)(mods = rt.mods &~ (COVARIANT | CONTRAVARIANT))) + val tree = create(namer, defTparams) + namer.enterSyntheticSym(tree) + case None => + } + case None => + } + + } + } + private class DefaultMethodInOwningScope(c: Context, meth: Symbol) extends DefaultGetterNamerSearch { + private lazy val ownerNamer: Namer = { + val ctx = context.nextEnclosing(c => c.scope.toList.contains(meth)) // TODO use lookup rather than toList.contains + assert(ctx != NoContext, meth) + newNamer(ctx) + } + def createAndEnter(f: Symbol => Symbol): Symbol = { + ownerNamer.enterInScope(f(ownerNamer.context.owner)) + } + def addGetter(rtparams0: List[TypeDef])(create: (Namer, List[TypeDef]) => Tree): Unit = { + val tree = create(ownerNamer, rtparams0) + ownerNamer.enterSyntheticSym(tree) + } + } + private def valDefSig(vdef: ValDef) = { val ValDef(_, _, tpt, rhs) = vdef val result = diff --git 
a/test/junit/scala/tools/nsc/typechecker/NamerTest.scala b/test/junit/scala/tools/nsc/typechecker/NamerTest.scala new file mode 100644 index 000000000000..2b0bdfc47e33 --- /dev/null +++ b/test/junit/scala/tools/nsc/typechecker/NamerTest.scala @@ -0,0 +1,23 @@ +package scala.tools.nsc.typechecker + +import org.junit.{Assert, Test} +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.testing.BytecodeTesting + +@RunWith(classOf[JUnit4]) +class NamerTest extends BytecodeTesting { + + import compiler.global._ + + override def compilerArgs: String = "-Ystop-after:typer" + + @Test + def defaultMethodsInDeclarationOrder(): Unit = { + compiler.compileClasses("package p1; class Test { C.b(); C.a() }; object C { def a(x: Int = 0) = 0; def b(x: Int = 0) = 0 }") + val methods = compiler.global.rootMirror.getRequiredModule("p1.C").info.decls.toList.map(_.name.toString).filter(_.matches("""(a|b).*""")) + def getterName(s: String) = nme.defaultGetterName(TermName(s), 1).toString + Assert.assertEquals(List("a", "b", getterName("b"), getterName("a")), methods) // order depends on order of lazy type completion :( + } +} From 9c08ebfef4c25a04f706564efbf0362b80813e4e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 30 Jan 2018 12:20:43 +1000 Subject: [PATCH 1538/2793] Eagerly enter default getters into scope This stabilizes the order they appear in the owners scope. Previously, their order was goverened by the order that the methods bearing default parameters were type completed. Make macro annotations compatible with these changes (cherry picked from commit 87be453c234b53ef4550ce6eb932952f09b4bf7a) --- .../scala/tools/nsc/typechecker/Namers.scala | 110 ++++++++++++------ .../tools/nsc/typechecker/NamesDefaults.scala | 4 +- .../tools/nsc/typechecker/NamerTest.scala | 2 +- 3 files changed, 78 insertions(+), 38 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index d3980c3996ee..5281d7d43417 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -742,21 +742,25 @@ trait Namers extends MethodSynthesis { def enterTypeDef(tree: TypeDef) = assignAndEnterFinishedSymbol(tree) - def enterDefDef(tree: DefDef): Unit = tree match { - case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => - assignAndEnterFinishedSymbol(tree) - case DefDef(mods, name, _, _, _, _) => - val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE | ARTIFACT else 0 - val sym = enterInScope(assignMemberSymbol(tree)) setFlag bridgeFlag + def enterDefDef(tree: DefDef): Unit = { + tree match { + case DefDef(_, nme.CONSTRUCTOR, _, _, _, _) => + assignAndEnterFinishedSymbol(tree) + case DefDef(mods, name, _, _, _, _) => + val bridgeFlag = if (mods hasAnnotationNamed tpnme.bridgeAnnot) BRIDGE | ARTIFACT else 0 + val sym = enterInScope(assignMemberSymbol(tree)) setFlag bridgeFlag - val completer = - if (sym hasFlag SYNTHETIC) { - if (name == nme.copy) copyMethodCompleter(tree) - else if (sym hasFlag CASE) applyUnapplyMethodCompleter(tree, context) - else completerOf(tree) - } else completerOf(tree) + val completer = + if (sym hasFlag SYNTHETIC) { + if (name == nme.copy) copyMethodCompleter(tree) + else if (sym hasFlag CASE) applyUnapplyMethodCompleter(tree, context) + else completerOf(tree) + } else completerOf(tree) - sym setInfo completer + sym setInfo completer + } + if (mexists(tree.vparamss)(_.mods.hasDefault)) + enterDefaultGetters(tree.symbol, 
tree, tree.vparamss, tree.tparams) } def enterClassDef(tree: ClassDef) { @@ -1176,6 +1180,12 @@ trait Namers extends MethodSynthesis { val module = clazz.sourceModule for (cda <- module.attachments.get[ConstructorDefaultsAttachment]) { debuglog(s"Storing the template namer in the ConstructorDefaultsAttachment of ${module.debugLocationString}.") + if (cda.defaults.nonEmpty) { + for (sym <- cda.defaults) { + decls.enter(sym) + } + cda.defaults.clear() + } cda.companionModuleClassNamer = templateNamer } val classTp = ClassInfoType(parents, decls, clazz) @@ -1428,6 +1438,42 @@ trait Namers extends MethodSynthesis { pluginsTypeSig(methSig, typer, ddef, resTpGiven) } + /** + * For every default argument, insert a method symbol computing that default + */ + def enterDefaultGetters(meth: Symbol, ddef: DefDef, vparamss: List[List[ValDef]], tparams: List[TypeDef]) { + val methOwner = meth.owner + val search = DefaultGetterNamerSearch(context, meth, initCompanionModule = false) + var posCounter = 1 + + mforeach(vparamss){(vparam) => + // true if the corresponding parameter of the base class has a default argument + if (vparam.mods.hasDefault) { + val name = nme.defaultGetterName(meth.name, posCounter) + + search.createAndEnter { owner: Symbol => + methOwner.resetFlag(INTERFACE) // there's a concrete member now + val default = owner.newMethodSymbol(name, vparam.pos, paramFlagsToDefaultGetter(meth.flags)) + default.setPrivateWithin(meth.privateWithin) + default.referenced = meth + default.setInfo(ErrorType) + if (meth.name == nme.apply && meth.hasAllFlags(CASE | SYNTHETIC)) { + val att = meth.attachments.get[CaseApplyDefaultGetters].getOrElse({ + val a = new CaseApplyDefaultGetters() + meth.updateAttachment(a) + a + }) + att.defaultGetters += default + } + if (default.owner.isTerm) + saveDefaultGetter(meth, default) + default + } + } + posCounter += 1 + } + } + /** * For every default argument, insert a method computing that default * @@ -1500,24 +1546,6 @@ trait Namers extends MethodSynthesis { val defVparamss = mmap(rvparamss.take(previous.length)){ rvp => copyValDef(rvp)(mods = rvp.mods &~ DEFAULTPARAM, rhs = EmptyTree) } - val defaultGetterSym = search.createAndEnter { owner: Symbol => - methOwner.resetFlag(INTERFACE) // there's a concrete member now - val default = owner.newMethodSymbol(name, vparam.pos, paramFlagsToDefaultGetter(meth.flags)) - default.setPrivateWithin(meth.privateWithin) - if (meth.name == nme.apply && meth.hasAllFlags(CASE | SYNTHETIC)) { - val att = meth.attachments.get[CaseApplyDefaultGetters].getOrElse({ - val a = new CaseApplyDefaultGetters() - meth.updateAttachment(a) - a - }) - att.defaultGetters += default - } - if (default.owner.isTerm) - saveDefaultGetter(meth, default) - default - } - if (defaultGetterSym == NoSymbol) return - search.addGetter(rtparams) { (parentNamer: Namer, defTparams: List[TypeDef]) => val defTpt = @@ -1545,6 +1573,11 @@ trait Namers extends MethodSynthesis { val defaultTree = atPos(vparam.pos.focus) { DefDef(Modifiers(paramFlagsToDefaultGetter(meth.flags), ddef.mods.privateWithin) | oflag, name, defTparams, defVparamss, defTpt, defRhs) } + def referencesThis(sym: Symbol) = sym match { + case term: TermSymbol => term.referenced == meth + case _ => false + } + val defaultGetterSym = parentNamer.context.scope.lookup(name).filter(referencesThis) assert(defaultGetterSym != NoSymbol, (parentNamer.owner, name)) defaultTree.setSymbol(defaultGetterSym) defaultGetterSym.setInfo(parentNamer.completerOf(defaultTree)) @@ -1571,7 +1604,7 @@ trait Namers extends 
MethodSynthesis { private abstract class DefaultGetterNamerSearch { def addGetter(rtparams0: List[TypeDef])(create: (Namer, List[TypeDef]) => Tree) - def createAndEnter(f: Symbol => Symbol): Symbol + def createAndEnter(f: Symbol => Symbol): Unit } private class DefaultGetterInCompanion(c: Context, meth: Symbol, initCompanionModule: Boolean) extends DefaultGetterNamerSearch { private val module = companionSymbolOf(meth.owner, context) @@ -1579,14 +1612,19 @@ trait Namers extends MethodSynthesis { private val cda: Option[ConstructorDefaultsAttachment] = module.attachments.get[ConstructorDefaultsAttachment] private val moduleNamer = cda.flatMap(x => Option(x.companionModuleClassNamer)) - def createAndEnter(f: Symbol => Symbol): Symbol = { + def createAndEnter(f: Symbol => Symbol): Unit = { val default = f(module.moduleClass) moduleNamer match { case Some(namer) => namer.enterInScope(default) case None => - // ignore error to fix #3649 (prevent crash in erroneous source code) - NoSymbol + cda match { + case Some(attachment) => + // defer entry until the companion module body it type completed + attachment.defaults += default + case None => + // ignore error to fix #3649 (prevent crash in erroneous source code) + } } } def addGetter(rtparams0: List[TypeDef])(create: (Namer, List[TypeDef]) => Tree): Unit = { @@ -1612,7 +1650,7 @@ trait Namers extends MethodSynthesis { assert(ctx != NoContext, meth) newNamer(ctx) } - def createAndEnter(f: Symbol => Symbol): Symbol = { + def createAndEnter(f: Symbol => Symbol): Unit = { ownerNamer.enterInScope(f(ownerNamer.context.owner)) } def addGetter(rtparams0: List[TypeDef])(create: (Namer, List[TypeDef]) => Tree): Unit = { diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 6a78a6906d3d..e9792868b631 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -27,7 +27,9 @@ trait NamesDefaults { self: Analyzer => // we need the ClassDef. To create and enter the symbols into the companion // object, we need the templateNamer of that module class. These two are stored // as an attachment in the companion module symbol - class ConstructorDefaultsAttachment(val classWithDefault: ClassDef, var companionModuleClassNamer: Namer) + class ConstructorDefaultsAttachment(val classWithDefault: ClassDef, var companionModuleClassNamer: Namer) { + var defaults = mutable.ListBuffer[Symbol]() + } // Attached to the synthetic companion `apply` method symbol generated for case classes, holds // the set contains all default getters for that method. 
If the synthetic `apply` is unlinked in diff --git a/test/junit/scala/tools/nsc/typechecker/NamerTest.scala b/test/junit/scala/tools/nsc/typechecker/NamerTest.scala index 2b0bdfc47e33..9fa0c330c22f 100644 --- a/test/junit/scala/tools/nsc/typechecker/NamerTest.scala +++ b/test/junit/scala/tools/nsc/typechecker/NamerTest.scala @@ -18,6 +18,6 @@ class NamerTest extends BytecodeTesting { compiler.compileClasses("package p1; class Test { C.b(); C.a() }; object C { def a(x: Int = 0) = 0; def b(x: Int = 0) = 0 }") val methods = compiler.global.rootMirror.getRequiredModule("p1.C").info.decls.toList.map(_.name.toString).filter(_.matches("""(a|b).*""")) def getterName(s: String) = nme.defaultGetterName(TermName(s), 1).toString - Assert.assertEquals(List("a", "b", getterName("b"), getterName("a")), methods) // order depends on order of lazy type completion :( + Assert.assertEquals(List("a", getterName("a"), "b", getterName("b")), methods) // order depends on order of lazy type completion :( } } From 259a1cbf2d8154284ef7eff3df979d36c4cafa42 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 30 May 2018 20:03:32 +1000 Subject: [PATCH 1539/2793] ConstructorDefault.defaults need not be a var (cherry picked from commit 2c26cf2fa7da363cdffffa6c5827d739779a1217) --- src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index e9792868b631..c370e7d5e7be 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -28,7 +28,7 @@ trait NamesDefaults { self: Analyzer => // object, we need the templateNamer of that module class. 
These two are stored // as an attachment in the companion module symbol class ConstructorDefaultsAttachment(val classWithDefault: ClassDef, var companionModuleClassNamer: Namer) { - var defaults = mutable.ListBuffer[Symbol]() + val defaults = mutable.ListBuffer[Symbol]() } // Attached to the synthetic companion `apply` method symbol generated for case classes, holds From 99e8cda4840d74ea18e6ccb7ec9aa77e2650a0da Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 30 May 2018 20:27:14 +1000 Subject: [PATCH 1540/2793] Update comment in test (cherry picked from commit befc337d57422f2caab1f76b691aad14558a0c80) --- test/junit/scala/tools/nsc/typechecker/NamerTest.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/junit/scala/tools/nsc/typechecker/NamerTest.scala b/test/junit/scala/tools/nsc/typechecker/NamerTest.scala index 9fa0c330c22f..9e18807ceb6e 100644 --- a/test/junit/scala/tools/nsc/typechecker/NamerTest.scala +++ b/test/junit/scala/tools/nsc/typechecker/NamerTest.scala @@ -18,6 +18,6 @@ class NamerTest extends BytecodeTesting { compiler.compileClasses("package p1; class Test { C.b(); C.a() }; object C { def a(x: Int = 0) = 0; def b(x: Int = 0) = 0 }") val methods = compiler.global.rootMirror.getRequiredModule("p1.C").info.decls.toList.map(_.name.toString).filter(_.matches("""(a|b).*""")) def getterName(s: String) = nme.defaultGetterName(TermName(s), 1).toString - Assert.assertEquals(List("a", getterName("a"), "b", getterName("b")), methods) // order depends on order of lazy type completion :( + Assert.assertEquals(List("a", getterName("a"), "b", getterName("b")), methods) // order no longer depends on order of lazy type completion :) } } From f50ec3c866263448d803139e119b33afb04ec2bc Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 11 Jan 2018 12:33:51 +1000 Subject: [PATCH 1541/2793] Stable names for lambda lifted method and fresh names Fresh names are created using a FreshNameCreator, which appends an increasing number to the given prefix. ``` scala> val fresh = new scala.reflect.internal.util.FreshNameCreator() fresh: scala.reflect.internal.util.FreshNameCreator = scala.reflect.internal.util.FreshNameCreator@42b84286 scala> List("foo$", "bar$", "foo$").map(fresh.newName(_)) res1: List[String] = List(foo$1, bar$1, foo$2) ``` Each compilation unit had its own fresh name creator, which is used in the regular compiler. Macros and quasiquotes make use of a global creator (at least, as of #3401). Both of these are too broadly scoped to help achieve deterministic fresh names: if sources are recompiled in a different order or separately recompiled, the fresh name counters can be different. Methods in a given compilation unit are not necessarily typechecked in a linear fashion; they might be typechecked ahead of time to provide an inferred type to a caller. This commit: - Changes all known fresh name creations within the typer phase (in which out-of-order typechecking is a factor) to use a fineer grained fresh name creator. How fine grained? A fresh name generated as some position `p` shares the fresh name generator scoped at the closest method or class that encloses that the outermost enclosing tree at the same position. This definition is designed to give a shared fresh name creator for all fresh names generated in `macro1(macro2())`, even if the fresh names are requiested from with a Typer in the macro enclosed by a synthetic method. - Changes macro fresh names to use the same fresh naming scheme as the regular typechecker. 
An opt-out compiler option allows the old behaviour, but I'm interested to find real-world cases where the new scheme actually causes a problem In addition, a small change is made to lambda lift to lift local methods in the order that they are encountered during traversal, rather than sorting them based on `Symbol.isLess` (which include `Symbol.id`, an order-of-typechecking dependent value). (cherry picked from commit 69d60cb54d787a90c74de092cc5173e12a1087fb) --- .../scala/reflect/macros/contexts/Names.scala | 2 +- .../reflect/macros/contexts/Parsers.scala | 5 +- .../reflect/reify/utils/Extractors.scala | 2 +- .../reflect/reify/utils/SymbolTables.scala | 2 +- src/compiler/scala/tools/nsc/Global.scala | 9 +- .../scala/tools/nsc/ast/TreeGen.scala | 13 +- .../tools/nsc/settings/ScalaSettings.scala | 1 + .../scala/tools/nsc/transform/CleanUp.scala | 2 +- .../scala/tools/nsc/transform/Erasure.scala | 6 +- .../tools/nsc/transform/LambdaLift.scala | 7 +- .../tools/nsc/typechecker/Contexts.scala | 12 + .../tools/nsc/typechecker/EtaExpansion.scala | 2 +- .../scala/tools/nsc/typechecker/Macros.scala | 7 +- .../tools/nsc/typechecker/NamesDefaults.scala | 4 +- .../tools/nsc/typechecker/PatternTypers.scala | 2 +- .../nsc/typechecker/SyntheticMethods.scala | 4 +- .../scala/tools/nsc/typechecker/Typers.scala | 17 +- test/files/jvm/innerClassAttribute/Test.scala | 40 +-- test/files/jvm/javaReflection.check | 10 +- test/files/neg/t1909-object.check | 2 +- test/files/neg/t5189b.check | 4 +- test/files/neg/t6666.check | 6 +- test/files/neg/t6666b.check | 4 +- test/files/neg/t6666c.check | 6 +- test/files/neg/t6675b.check | 4 +- test/files/run/delambdafy_t6028.check | 30 +-- test/files/run/t4171.check | 2 +- test/files/run/t6028.check | 34 +-- test/files/run/t9375.check | 20 +- .../scala/tools/nsc/DeterminismTest.scala | 228 ++++++++++++++++++ .../backend/jvm/opt/ScalaInlineInfoTest.scala | 4 +- 31 files changed, 376 insertions(+), 115 deletions(-) create mode 100644 test/junit/scala/tools/nsc/DeterminismTest.scala diff --git a/src/compiler/scala/reflect/macros/contexts/Names.scala b/src/compiler/scala/reflect/macros/contexts/Names.scala index 8af8888a56a7..c95f1b8c8988 100644 --- a/src/compiler/scala/reflect/macros/contexts/Names.scala +++ b/src/compiler/scala/reflect/macros/contexts/Names.scala @@ -6,7 +6,7 @@ trait Names { import global._ - def freshNameCreator = globalFreshNameCreator + def freshNameCreator = self.callsiteTyper.fresh def fresh(): String = freshName() diff --git a/src/compiler/scala/reflect/macros/contexts/Parsers.scala b/src/compiler/scala/reflect/macros/contexts/Parsers.scala index cc3f01e53b4a..36d87ed0b178 100644 --- a/src/compiler/scala/reflect/macros/contexts/Parsers.scala +++ b/src/compiler/scala/reflect/macros/contexts/Parsers.scala @@ -1,6 +1,7 @@ package scala.reflect.macros package contexts +import scala.reflect.internal.util.FreshNameCreator import scala.tools.nsc.reporters.StoreReporter trait Parsers { @@ -12,7 +13,9 @@ trait Parsers { val oldReporter = global.reporter try { global.reporter = sreporter - val parser = newUnitParser(new CompilationUnit(newSourceFile(code, ""))) + val parser = newUnitParser(new CompilationUnit(newSourceFile(code, "")) { + override implicit val fresh: FreshNameCreator = currentFreshNameCreator + }) val tree = gen.mkTreeOrBlock(parser.parseStatsOrPackages()) sreporter.infos.foreach { case sreporter.Info(pos, msg, sreporter.ERROR) => throw ParseException(pos, msg) diff --git a/src/compiler/scala/reflect/reify/utils/Extractors.scala 
b/src/compiler/scala/reflect/reify/utils/Extractors.scala index 4ec4de28c450..1f78e8153691 100644 --- a/src/compiler/scala/reflect/reify/utils/Extractors.scala +++ b/src/compiler/scala/reflect/reify/utils/Extractors.scala @@ -72,7 +72,7 @@ trait Extractors { } val tpec = ClassDef( Modifiers(FINAL), - newTypeName(global.currentUnit.fresh.newName(flavor.toString)), + newTypeName(currentFreshNameCreator.newName(flavor.toString)), List(), Template(List(Ident(reifierBase)), noSelfType, diff --git a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala index 5800e88fe1fa..b1e580304f96 100644 --- a/src/compiler/scala/reflect/reify/utils/SymbolTables.scala +++ b/src/compiler/scala/reflect/reify/utils/SymbolTables.scala @@ -77,7 +77,7 @@ trait SymbolTables { var name = name0.toString name = name.replace(".type", "$type") name = name.replace(" ", "$") - val fresh = typer.context.unit.fresh + val fresh = typer.fresh newTermName(fresh.newName(name)) } val bindingAttachment = reification.attachments.get[ReifyBindingAttachment].get diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index d43564e44e3a..ab0efb570c57 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -10,12 +10,13 @@ package nsc import java.io.{File, FileNotFoundException, IOException} import java.net.URL import java.nio.charset.{Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException} + import scala.collection.{immutable, mutable} import io.{AbstractFile, Path, SourceReader} import reporters.Reporter import util.{ClassPath, returning} import scala.reflect.ClassTag -import scala.reflect.internal.util.{BatchSourceFile, NoSourceFile, ScalaClassLoader, ScriptSourceFile, SourceFile, StatisticsStatics} +import scala.reflect.internal.util.{BatchSourceFile, FreshNameCreator, NoSourceFile, ScalaClassLoader, ScriptSourceFile, SourceFile, StatisticsStatics} import scala.reflect.internal.pickling.PickleBuffer import symtab.{Flags, SymbolTable, SymbolTrackers} import symtab.classfile.Pickler @@ -26,7 +27,7 @@ import typechecker._ import transform.patmat.PatternMatching import transform._ import backend.{JavaPlatform, ScalaPrimitives} -import backend.jvm.{GenBCode, BackendStats} +import backend.jvm.{BackendStats, GenBCode} import scala.concurrent.Future import scala.language.postfixOps import scala.tools.nsc.ast.{TreeGen => AstTreeGen} @@ -984,7 +985,9 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def currentRun: Run = curRun def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile - def currentFreshNameCreator = currentUnit.fresh + def currentFreshNameCreator = if (curFreshNameCreator == null) currentUnit.fresh else curFreshNameCreator + private[this] var curFreshNameCreator: FreshNameCreator = null + private[scala] def currentFreshNameCreator_=(fresh: FreshNameCreator): Unit = curFreshNameCreator = fresh def isGlobalInitialized = ( definitions.isDefinitionsInitialized diff --git a/src/compiler/scala/tools/nsc/ast/TreeGen.scala b/src/compiler/scala/tools/nsc/ast/TreeGen.scala index 0ba7dad971d3..22ac241fd4a1 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeGen.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeGen.scala @@ -9,6 +9,7 @@ package ast import scala.collection.mutable.ListBuffer import 
symtab.Flags._ import scala.language.postfixOps +import scala.reflect.internal.util.FreshNameCreator /** XXX to resolve: TreeGen only assumes global is a SymbolTable, but * TreeDSL at the moment expects a Global. Can we get by with SymbolTable? @@ -196,20 +197,24 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { /** Used in situations where you need to access value of an expression several times */ - def evalOnce(expr: Tree, owner: Symbol, unit: CompilationUnit)(within: (() => Tree) => Tree): Tree = { + def evalOnce(expr: Tree, owner: Symbol, unit: CompilationUnit)(within: (() => Tree) => Tree): Tree = evalOnce(expr, owner, unit.fresh)(within) + def evalOnce(expr: Tree, owner: Symbol, fresh: FreshNameCreator)(within: (() => Tree) => Tree): Tree = { var used = false if (treeInfo.isExprSafeToInline(expr)) { within(() => if (used) expr.duplicate else { used = true; expr }) } else { - val (valDef, identFn) = mkPackedValDef(expr, owner, unit.freshTermName("ev$")) + val (valDef, identFn) = mkPackedValDef(expr, owner, freshTermName("ev$")(fresh)) val containing = within(identFn) ensureNonOverlapping(containing, List(expr)) Block(List(valDef), containing) setPos (containing.pos union expr.pos) } } - def evalOnceAll(exprs: List[Tree], owner: Symbol, unit: CompilationUnit)(within: (List[() => Tree]) => Tree): Tree = { + def evalOnceAll(exprs: List[Tree], owner: Symbol, unit: CompilationUnit)(within: (List[() => Tree]) => Tree): Tree = + evalOnceAll(exprs, owner, unit.fresh)(within) + + def evalOnceAll(exprs: List[Tree], owner: Symbol, fresh: FreshNameCreator)(within: (List[() => Tree]) => Tree): Tree = { val vdefs = new ListBuffer[ValDef] val exprs1 = new ListBuffer[() => Tree] val used = new Array[Boolean](exprs.length) @@ -222,7 +227,7 @@ abstract class TreeGen extends scala.reflect.internal.TreeGen with TreeDSL { } } else { - val (valDef, identFn) = mkPackedValDef(expr, owner, unit.freshTermName("ev$")) + val (valDef, identFn) = mkPackedValDef(expr, owner, freshTermName("ev$")(fresh)) vdefs += valDef exprs1 += identFn } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index a0fbedc03924..b6e629b1cec0 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -225,6 +225,7 @@ trait ScalaSettings extends AbsScalaSettings val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.") val Ymacroexpand = ChoiceSetting ("-Ymacro-expand", "policy", "Control expansion of macros, useful for scaladoc and presentation compiler.", List(MacroExpand.Normal, MacroExpand.None, MacroExpand.Discard), MacroExpand.Normal) val Ymacronoexpand = BooleanSetting ("-Ymacro-no-expand", "Don't expand macros. 
Might be useful for scaladoc and presentation compiler, but will crash anything which uses macros and gets past typer.") withDeprecationMessage(s"Use ${Ymacroexpand.name}:${MacroExpand.None}") withPostSetHook(_ => Ymacroexpand.value = MacroExpand.None) + val YmacroFresh = BooleanSetting ("-Ymacro-global-fresh-names", "Should fresh names in macros be unique across all compilation units") val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup") val Yreplclassbased = BooleanSetting ("-Yrepl-class-based", "Use classes to wrap REPL snippets instead of objects") val Yreploutdir = StringSetting ("-Yrepl-outdir", "path", "Write repl-generated classfiles to given output directory (use \"\" to generate a temporary dir)" , "") diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 81dc15db4c95..45242c127809 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -193,7 +193,7 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { val runDefinitions = currentRun.runDefinitions import runDefinitions._ - gen.evalOnce(qual, currentOwner, unit) { qual1 => + gen.evalOnce(qual, currentOwner, localTyper.fresh) { qual1 => /* Some info about the type of the method being called. */ val methSym = ad.symbol val boxedResType = toBoxedType(resType) // Int -> Integer diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 33d869919083..55f7a698fa4d 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -681,7 +681,7 @@ abstract class Erasure extends InfoTransform else { val untyped = // util.trace("new asinstanceof test") { - gen.evalOnce(qual1, context.owner, context.unit) { qual => + gen.evalOnce(qual1, context.owner, fresh) { qual => If(Apply(Select(qual(), nme.eq), List(Literal(Constant(null)) setType NullTpe)), Literal(Constant(null)) setType targ.tpe, unbox(qual(), targ.tpe)) @@ -1015,7 +1015,7 @@ abstract class Erasure extends InfoTransform Apply(Select(qual, cmpOp), List(gen.mkAttributedQualifier(targ.tpe))) } case RefinedType(parents, decls) if (parents.length >= 2) => - gen.evalOnce(qual, currentOwner, unit) { q => + gen.evalOnce(qual, currentOwner, localTyper.fresh) { q => // Optimization: don't generate isInstanceOf tests if the static type // conforms, because it always succeeds. (Or at least it had better.) 
// At this writing the pattern matcher generates some instance tests @@ -1062,7 +1062,7 @@ abstract class Erasure extends InfoTransform global.typer.typedPos(tree.pos) { if (level == 1) isArrayTest(qual) - else gen.evalOnce(qual, currentOwner, unit) { qual1 => + else gen.evalOnce(qual, currentOwner, localTyper.fresh) { qual1 => gen.mkAnd( gen.mkMethodCall( qual1(), diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala index cf3b4b649605..ce046a98e9ac 100644 --- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala +++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala @@ -9,7 +9,7 @@ package transform import symtab._ import Flags._ import scala.collection.mutable -import scala.collection.mutable.{ LinkedHashMap, LinkedHashSet, TreeSet } +import scala.collection.mutable.{ LinkedHashMap, LinkedHashSet } abstract class LambdaLift extends InfoTransform { import global._ @@ -50,7 +50,7 @@ abstract class LambdaLift extends InfoTransform { class LambdaLifter(unit: CompilationUnit) extends explicitOuter.OuterPathTransformer(unit) { - private type SymSet = TreeSet[Symbol] + private type SymSet = LinkedHashSet[Symbol] /** A map storing free variables of functions and classes */ private val free = new LinkedHashMap[Symbol, SymSet] @@ -64,8 +64,7 @@ abstract class LambdaLift extends InfoTransform { /** Symbols that are called from an inner class. */ private val calledFromInner = new LinkedHashSet[Symbol] - private val ord = Ordering.fromLessThan[Symbol](_ isLess _) - private def newSymSet = TreeSet.empty[Symbol](ord) + private def newSymSet: LinkedHashSet[Symbol] = new LinkedHashSet[Symbol] private def symSet(f: LinkedHashMap[Symbol, SymSet], sym: Symbol): SymSet = f.getOrElseUpdate(sym, newSymSet) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 32a0a4524d3d..428e25f539f2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -635,6 +635,18 @@ trait Contexts { self: Analyzer => def nextEnclosing(p: Context => Boolean): Context = if (p(this)) this else outer.nextEnclosing(p) + final def outermostContextAtCurrentPos: Context = { + var pos = tree.pos + var encl = this + while (pos == NoPosition && encl != NoContext) { + encl = encl.outer + pos = encl.tree.pos + } + while (encl.outer.tree.pos == pos && encl != NoContext) + encl = encl.outer + encl + } + def enclosingContextChain: List[Context] = this :: outer.enclosingContextChain private def treeTruncated = tree.toString.replaceAll("\\s+", " ").lines.mkString("\\n").take(70) diff --git a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala index fe0d6a24f5cb..a08f6f2e1a5c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala +++ b/src/compiler/scala/tools/nsc/typechecker/EtaExpansion.scala @@ -42,7 +42,7 @@ trait EtaExpansion { self: Analyzer => var cnt = 0 // for NoPosition def freshName() = { cnt += 1 - unit.freshTermName("eta$" + (cnt - 1) + "$") + freshTermName("eta$" + (cnt - 1) + "$")(typer.fresh) } val defs = new ListBuffer[Tree] diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 4cb9c2ca39d7..5a234849ba53 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -754,7 
+754,12 @@ trait Macros extends MacroRuntimes with Traces with Helpers { /** Expands a term macro used in apply role as `M(2)(3)` in `val x = M(2)(3)`. * @see DefMacroExpander */ - def macroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = pluginsMacroExpand(typer, expandee, mode, pt) + def macroExpand(typer: Typer, expandee: Tree, mode: Mode, pt: Type): Tree = { + // By default, use the current typer's fresh name creator in macros. The compiler option + // allows people to opt in to the old behaviour of Scala 2.12, which used a global fresh creator. + if (!settings.YmacroFresh.value) currentFreshNameCreator = typer.fresh + pluginsMacroExpand(typer, expandee, mode, pt) + } /** Default implementation of `macroExpand`. * Can be overridden by analyzer plugins (see AnalyzerPlugins.pluginsMacroExpand for more details) diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index c370e7d5e7be..5a2953790520 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -179,7 +179,7 @@ trait NamesDefaults { self: Analyzer => // never used for constructor calls, they always have a stable qualifier def blockWithQualifier(qual: Tree, selected: Name) = { - val sym = blockTyper.context.owner.newValue(unit.freshTermName(nme.QUAL_PREFIX), newFlags = ARTIFACT) setInfo uncheckedBounds(qual.tpe) setPos (qual.pos.makeTransparent) + val sym = blockTyper.context.owner.newValue(freshTermName(nme.QUAL_PREFIX)(typer.fresh), newFlags = ARTIFACT) setInfo uncheckedBounds(qual.tpe) setPos (qual.pos.makeTransparent) blockTyper.context.scope enter sym val vd = atPos(sym.pos)(ValDef(sym, qual) setType NoType) // it stays in Vegas: scala/bug#5720, scala/bug#5727 @@ -307,7 +307,7 @@ trait NamesDefaults { self: Analyzer => arg.tpe } ).widen // have to widen or types inferred from literal defaults will be singletons - val s = context.owner.newValue(unit.freshTermName(nme.NAMEDARG_PREFIX), arg.pos, newFlags = ARTIFACT) setInfo { + val s = context.owner.newValue(freshTermName(nme.NAMEDARG_PREFIX)(typer.fresh), arg.pos, newFlags = ARTIFACT) setInfo { val tp = if (byName) functionType(Nil, argTpe) else argTpe uncheckedBounds(tp) } diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index 100480a6d29f..e1d6035f5ca8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -191,7 +191,7 @@ trait PatternTypers { else TypeBounds.lower(tpSym.tpeHK) ) // origin must be the type param so we can deskolemize - val skolem = context.owner.newGADTSkolem(unit.freshTypeName("?" + tpSym.name), tpSym, bounds) + val skolem = context.owner.newGADTSkolem(freshTypeName("?" 
+ tpSym.name), tpSym, bounds) skolemBuffer += skolem logResult(s"Created gadt skolem $skolem: ${skolem.tpe_*} to stand in for $tpSym")(skolem.tpe_*) case tp1 => tp1 diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 6b4ea13ddf91..05caee1973fb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -177,7 +177,7 @@ trait SyntheticMethods extends ast.TreeDSL { rt != NothingTpe && rt != NullTpe && rt != UnitTpe } - val otherName = context.unit.freshTermName(clazz.name + "$") + val otherName = freshTermName(clazz.name + "$")(freshNameCreatorFor(context)) val otherSym = eqmeth.newValue(otherName, eqmeth.pos, SYNTHETIC) setInfo clazz.tpe val pairwise = accessors collect { case acc if usefulEquality(acc) => @@ -390,7 +390,7 @@ trait SyntheticMethods extends ast.TreeDSL { val i = original.owner.caseFieldAccessors.indexOf(original) def freshAccessorName = { devWarning(s"Unable to find $original among case accessors of ${original.owner}: ${original.owner.caseFieldAccessors}") - context.unit.freshTermName(original.name + "$") + freshTermName(original.name + "$")(freshNameCreatorFor(context)) } def nameSuffixedByParamIndex = original.name.append(nme.CASE_ACCESSOR + "$" + i).toTermName val newName = if (i < 0) freshAccessorName else nameSuffixedByParamIndex diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 2f828154c5d0..c21db31e0d83 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -14,7 +14,7 @@ package tools.nsc package typechecker import scala.collection.{immutable, mutable} -import scala.reflect.internal.util.{ListOfNil, Statistics, StatisticsStatics} +import scala.reflect.internal.util.{FreshNameCreator, ListOfNil, Statistics, StatisticsStatics} import scala.reflect.internal.TypesStats import mutable.ListBuffer import symtab.Flags._ @@ -181,11 +181,16 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } + private final val typerFreshNameCreators = perRunCaches.newAnyRefMap[Symbol, FreshNameCreator]() + def freshNameCreatorFor(context: Context) = typerFreshNameCreators.getOrElseUpdate(context.outermostContextAtCurrentPos.enclClassOrMethod.owner, new FreshNameCreator) + abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with Tag with PatternTyper with TyperContextErrors { private def unit = context.unit import typeDebug.ptTree import TyperErrorGen._ + implicit def fresh: FreshNameCreator = freshNameCreatorFor(context) + private def transformed: mutable.Map[Tree, Tree] = unit.transformed val infer = new Inferencer { @@ -3458,7 +3463,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val args1 = typedArgs(args, forArgMode(fun, mode)) val pts = args1.map(_.tpe.deconst) val clone = fun.symbol.cloneSymbol.withoutAnnotations - val cloneParams = pts map (pt => clone.newValueParameter(currentUnit.freshTermName()).setInfo(pt)) + val cloneParams = pts map (pt => clone.newValueParameter(freshTermName()).setInfo(pt)) val resultType = if (isFullyDefined(pt)) pt else ObjectTpe clone.modifyInfo(mt => copyMethodType(mt, cloneParams, resultType)) val fun1 = fun.setSymbol(clone).setType(clone.info) @@ -4455,14 +4460,14 @@ trait Typers extends Adaptations with Tags with 
TypersTracking with PatternTyper val cases = tree.cases if (selector == EmptyTree) { if (pt.typeSymbol == PartialFunctionClass) - synthesizePartialFunction(newTermName(context.unit.fresh.newName("x")), tree.pos, paramSynthetic = true, tree, mode, pt) + synthesizePartialFunction(newTermName(fresh.newName("x")), tree.pos, paramSynthetic = true, tree, mode, pt) else { val arity = functionArityFromType(pt) match { case -1 => 1 case arity => arity } // scala/bug#8429: consider sam and function type equally in determining function arity val params = for (i <- List.range(0, arity)) yield atPos(tree.pos.focusStart) { ValDef(Modifiers(PARAM | SYNTHETIC), - unit.freshTermName("x" + i + "$"), TypeTree(), EmptyTree) + freshTermName("x" + i + "$"), TypeTree(), EmptyTree) } val ids = for (p <- params) yield Ident(p.name) val selector1 = atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) } @@ -4807,7 +4812,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper ) setPos tree.pos def mkUpdate(table: Tree, indices: List[Tree], argss: List[List[Tree]]) = - gen.evalOnceAll(table :: indices, context.owner, context.unit) { + gen.evalOnceAll(table :: indices, context.owner, fresh) { case tab :: is => def mkCall(name: Name, extraArgs: Tree*) = ( Apply( @@ -4828,7 +4833,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper mkAssign(qual) case Select(qualqual, vname) => - gen.evalOnce(qualqual, context.owner, context.unit) { qq => + gen.evalOnce(qualqual, context.owner, fresh) { qq => val qq1 = qq() mkAssign(Select(qq1, qual.symbol) setPos qual.pos) } diff --git a/test/files/jvm/innerClassAttribute/Test.scala b/test/files/jvm/innerClassAttribute/Test.scala index 288c6ee30f32..6814f326024d 100644 --- a/test/files/jvm/innerClassAttribute/Test.scala +++ b/test/files/jvm/innerClassAttribute/Test.scala @@ -6,7 +6,7 @@ import scala.collection.JavaConverters._ object Test extends BytecodeTest { // Helpful for debugging the test: - // println(new java.io.File(classpath.asURLs.head.toURI).list().sorted.mkString("\n")) + //println(new java.io.File(classpath.asURLs.head.toURI).list().sorted.mkString("\n")) def assertSame(a: Any, b: Any) = { assert(a == b, s"\na: $a\nb: $b") @@ -114,11 +114,11 @@ object Test extends BytecodeTest { def testA5() = { val List(b1) = innerClassNodes("A5") - assertLocal(b1, "A5$B$2$", "B$2$") - val List(b2) = innerClassNodes("A5$B$2$") - assertLocal(b2, "A5$B$2$", "B$2$") + assertLocal(b1, "A5$B$1$", "B$1$") + val List(b2) = innerClassNodes("A5$B$1$") + assertLocal(b2, "A5$B$1$", "B$1$") assertEnclosingMethod( - "A5$B$2$", + "A5$B$1$", "A5", "f", "()Ljava/lang/Object;") } @@ -221,7 +221,7 @@ object Test extends BytecodeTest { assertAnonymous(anon1, "A18$$anon$5") assertAnonymous(anon2, "A18$$anon$6") - assertLocal(a, "A18$A$2", "A$2") + assertLocal(a, "A18$A$1", "A$1") assertLocal(b, "A18$B$4", "B$4") assertEnclosingMethod( @@ -232,7 +232,7 @@ object Test extends BytecodeTest { "A18", "g$1", "()V") assertEnclosingMethod( - "A18$A$2", + "A18$A$1", "A18", "g$1", "()V") assertEnclosingMethod( "A18$B$4", @@ -293,8 +293,8 @@ object Test extends BytecodeTest { assertMember(defsApi, "A24Base", "DefinitionsApi", flags = publicAbstractInterface) } - def testSI_9105() { - assertEnclosingMethod ("SI_9105$A$3" , "SI_9105", null , null) + def testSI_9105(): Unit = { + assertEnclosingMethod ("SI_9105$A$2" , "SI_9105", null , null) assertEnclosingMethod ("SI_9105$B$5" , "SI_9105", "m$1", "()Ljava/lang/Object;") 
assertEnclosingMethod ("SI_9105$C$1" , "SI_9105", null , null) assertEnclosingMethod ("SI_9105$D$1" , "SI_9105", "met", "()Lscala/Function1;") @@ -302,7 +302,7 @@ object Test extends BytecodeTest { assertEnclosingMethod ("SI_9105$F$1" , "SI_9105", "met", "()Lscala/Function1;") assertNoEnclosingMethod("SI_9105") - assertLocal(innerClassNodes("SI_9105$A$3").head, "SI_9105$A$3", "A$3") + assertLocal(innerClassNodes("SI_9105$A$2").head, "SI_9105$A$2", "A$2") assertLocal(innerClassNodes("SI_9105$B$5").head, "SI_9105$B$5", "B$5") assertLocal(innerClassNodes("SI_9105$C$1").head, "SI_9105$C$1", "C$1") assertLocal(innerClassNodes("SI_9105$D$1").head, "SI_9105$D$1", "D$1") @@ -416,7 +416,7 @@ object Test extends BytecodeTest { def testAnonymousClassesMayBeNestedInSpecialized() { assertEnclosingMethod("AnonymousClassesMayBeNestedInSpecialized$C$$anon$17", "AnonymousClassesMayBeNestedInSpecialized$C", "foo", "(Ljava/lang/Object;)LAnonymousClassesMayBeNestedInSpecialized$A;") - assertEnclosingMethod("AnonymousClassesMayBeNestedInSpecialized$C$mcI$sp$$anon$21", "AnonymousClassesMayBeNestedInSpecialized$C$mcI$sp", "foo$mcI$sp", "(I)LAnonymousClassesMayBeNestedInSpecialized$A;") + assertEnclosingMethod("AnonymousClassesMayBeNestedInSpecialized$C$mcI$sp$$anon$18", "AnonymousClassesMayBeNestedInSpecialized$C$mcI$sp", "foo$mcI$sp", "(I)LAnonymousClassesMayBeNestedInSpecialized$A;") } def testNestedInValueClass() { @@ -445,17 +445,17 @@ object Test extends BytecodeTest { } def testLocalAndAnonymousInLazyInitializer(): Unit = { - assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$C$$anon$18", "LocalAndAnonymousInLazyInitializer$C", null, null) - assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$C$AA$4", "LocalAndAnonymousInLazyInitializer$C", null, null) - assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$C$AA$5$", "LocalAndAnonymousInLazyInitializer$C", null, null) + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$C$$anon$19", "LocalAndAnonymousInLazyInitializer$C", null, null) + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$C$AA$1", "LocalAndAnonymousInLazyInitializer$C", null, null) + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$C$AA$2$", "LocalAndAnonymousInLazyInitializer$C", null, null) - assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$O$$anon$19", "LocalAndAnonymousInLazyInitializer$O$", null, null) - assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$O$AA$6", "LocalAndAnonymousInLazyInitializer$O$", null, null) - assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$O$AA$7$", "LocalAndAnonymousInLazyInitializer$O$", null, null) + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$O$$anon$20", "LocalAndAnonymousInLazyInitializer$O$", null, null) + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$O$AA$4", "LocalAndAnonymousInLazyInitializer$O$", null, null) + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$O$AA$5$", "LocalAndAnonymousInLazyInitializer$O$", null, null) - assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$T$$anon$20", "LocalAndAnonymousInLazyInitializer$T", null, null) - assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$T$AA$8", "LocalAndAnonymousInLazyInitializer$T", null, null) - assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$T$AA$9$", "LocalAndAnonymousInLazyInitializer$T", null, null) + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$T$$anon$21", "LocalAndAnonymousInLazyInitializer$T", null, null) + 
assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$T$AA$7", "LocalAndAnonymousInLazyInitializer$T", null, null) + assertEnclosingMethod("LocalAndAnonymousInLazyInitializer$T$AA$8$", "LocalAndAnonymousInLazyInitializer$T", null, null) } def show(): Unit = { diff --git a/test/files/jvm/javaReflection.check b/test/files/jvm/javaReflection.check index f3924940e9ef..aa11e860ef83 100644 --- a/test/files/jvm/javaReflection.check +++ b/test/files/jvm/javaReflection.check @@ -66,7 +66,7 @@ A$F$1 / null (canon) / F$1 (simple) - declared cls: List() - enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth) - properties : true (local) / false (member) -A$G$2$ / null (canon) / G$2$ (simple) +A$G$1$ / null (canon) / G$1$ (simple) - declared cls: List() - enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth) - properties : true (local) / false (member) @@ -78,7 +78,7 @@ A$I$1 / null (canon) / I$1 (simple) - declared cls: List() - enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth) - properties : true (local) / false (member) -A$J$2$ / null (canon) / J$2$ (simple) +A$J$1$ / null (canon) / J$1$ (simple) - declared cls: List() - enclosing : null (declaring cls) / class A (cls) / null (constr) / public java.lang.Object A.f() (meth) - properties : true (local) / false (member) @@ -90,7 +90,7 @@ A$L$1 / null (canon) / L$1 (simple) - declared cls: List() - enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth) - properties : true (local) / false (member) -A$M$2$ / null (canon) / M$2$ (simple) +A$M$1$ / null (canon) / M$1$ (simple) - declared cls: List() - enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth) - properties : true (local) / false (member) @@ -102,7 +102,7 @@ A$O$1 / null (canon) / O$1 (simple) - declared cls: List() - enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth) - properties : true (local) / false (member) -A$P$2$ / null (canon) / P$2$ (simple) +A$P$1$ / null (canon) / P$1$ (simple) - declared cls: List() - enclosing : null (declaring cls) / class A (cls) / null (constr) / null (meth) - properties : true (local) / false (member) @@ -114,7 +114,7 @@ A$R$1 / null (canon) / R$1 (simple) - declared cls: List() - enclosing : null (declaring cls) / class A (cls) / public A(int) (constr) / null (meth) - properties : true (local) / false (member) -A$S$2$ / null (canon) / S$2$ (simple) +A$S$1$ / null (canon) / S$1$ (simple) - declared cls: List() - enclosing : null (declaring cls) / class A (cls) / public A(int) (constr) / null (meth) - properties : true (local) / false (member) diff --git a/test/files/neg/t1909-object.check b/test/files/neg/t1909-object.check index c2546106c116..530177057542 100644 --- a/test/files/neg/t1909-object.check +++ b/test/files/neg/t1909-object.check @@ -1,4 +1,4 @@ -t1909-object.scala:4: warning: !!! scala/bug#1909 Unable to STATICally lift object InnerTrouble$1, which is defined in the self- or super-constructor call of class Kaboom. A VerifyError is likely. +t1909-object.scala:4: warning: !!! scala/bug#1909 Unable to STATICally lift object InnerTrouble$2, which is defined in the self- or super-constructor call of class Kaboom. A VerifyError is likely. object InnerTrouble ^ error: No warnings can be incurred under -Xfatal-warnings. 
diff --git a/test/files/neg/t5189b.check b/test/files/neg/t5189b.check index 46996e96d064..200eeb7d0a0c 100644 --- a/test/files/neg/t5189b.check +++ b/test/files/neg/t5189b.check @@ -1,7 +1,7 @@ t5189b.scala:38: error: type mismatch; - found : TestNeg.Wrapped[?T7] where type ?T7 <: T (this is a GADT skolem) + found : TestNeg.Wrapped[?T1] where type ?T1 <: T (this is a GADT skolem) required: TestNeg.Wrapped[T] -Note: ?T7 <: T, but class Wrapped is invariant in type W. +Note: ?T1 <: T, but class Wrapped is invariant in type W. You may wish to define W as +W instead. (SLS 4.5) case Wrapper/*[_ <: T ]*/(wrapped) => wrapped // : Wrapped[_ <: T], which is a subtype of Wrapped[T] if and only if Wrapped is covariant in its type parameter ^ diff --git a/test/files/neg/t6666.check b/test/files/neg/t6666.check index bae948fe56c2..5bfdcfb262aa 100644 --- a/test/files/neg/t6666.check +++ b/test/files/neg/t6666.check @@ -16,7 +16,7 @@ t6666.scala:54: error: Implementation restriction: access of lazy value x$7 in c t6666.scala:58: error: Implementation restriction: access of method x$8 in class C3 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C3 F.hof(() => x) ^ -t6666.scala:62: error: Implementation restriction: access of method x$9 in class C4 from object Nested$4, would require illegal premature access to the unconstructed `this` of class C4 +t6666.scala:62: error: Implementation restriction: access of method x$9 in class C4 from object Nested$1, would require illegal premature access to the unconstructed `this` of class C4 object Nested { def xx = x} ^ t6666.scala:76: error: Implementation restriction: access of method x$11 in class C11 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C11 @@ -25,13 +25,13 @@ t6666.scala:76: error: Implementation restriction: access of method x$11 in clas t6666.scala:95: error: Implementation restriction: access of method x$12 in class C13 from <$anon: Function0>, would require illegal premature access to the unconstructed `this` of class C13 F.hof(() => x) ^ -t6666.scala:104: error: Implementation restriction: access of method x$13 in class C14 from object Nested$5, would require illegal premature access to the unconstructed `this` of class C14 +t6666.scala:104: error: Implementation restriction: access of method x$13 in class C14 from object Nested$3, would require illegal premature access to the unconstructed `this` of class C14 object Nested { def xx = x} ^ t6666.scala:112: error: Implementation restriction: access of method foo$1 in class COuter from class CInner$1, would require illegal premature access to the unconstructed `this` of class COuter class CInner extends C({foo}) ^ -t6666.scala:118: error: Implementation restriction: access of method x$14 in class CEarly from object Nested$6, would require illegal premature access to the unconstructed `this` of class CEarly +t6666.scala:118: error: Implementation restriction: access of method x$14 in class CEarly from object Nested$5, would require illegal premature access to the unconstructed `this` of class CEarly object Nested { def xx = x} ^ 12 errors found diff --git a/test/files/neg/t6666b.check b/test/files/neg/t6666b.check index c3ffc7cfa9a2..21f3947c0fae 100644 --- a/test/files/neg/t6666b.check +++ b/test/files/neg/t6666b.check @@ -1,7 +1,7 @@ -t6666b.scala:11: error: Implementation restriction: access of method x$1 in class C5 from object Nested$3, would require illegal premature access to the 
unconstructed `this` of class C5 +t6666b.scala:11: error: Implementation restriction: access of method x$1 in class C5 from object Nested$1, would require illegal premature access to the unconstructed `this` of class C5 object Nested { def xx = x} ^ -t6666b.scala:22: error: Implementation restriction: access of method x$2 in class C15 from object Nested$4, would require illegal premature access to the unconstructed `this` of class C15 +t6666b.scala:22: error: Implementation restriction: access of method x$2 in class C15 from object Nested$3, would require illegal premature access to the unconstructed `this` of class C15 object Nested { def xx = x} ^ two errors found diff --git a/test/files/neg/t6666c.check b/test/files/neg/t6666c.check index 384e52a9fce5..d695fe72b986 100644 --- a/test/files/neg/t6666c.check +++ b/test/files/neg/t6666c.check @@ -1,10 +1,10 @@ -t6666c.scala:2: error: Implementation restriction: access of method x$1 in class D from object X$4, would require illegal premature access to the unconstructed `this` of class D +t6666c.scala:2: error: Implementation restriction: access of method x$1 in class D from object X$1, would require illegal premature access to the unconstructed `this` of class D class D extends C({def x = 0; object X { x }}) ^ -t6666c.scala:5: error: Implementation restriction: access of method x$2 in class D1 from object X$5, would require illegal premature access to the unconstructed `this` of class D1 +t6666c.scala:5: error: Implementation restriction: access of method x$2 in class D1 from object X$3, would require illegal premature access to the unconstructed `this` of class D1 class D1 extends C1({def x = 0; () => {object X { x }}}) ^ -t6666c.scala:8: error: Implementation restriction: access of method x$3 from object X$6, would require illegal premature access to the unconstructed `this` of <$anon: Function0> +t6666c.scala:8: error: Implementation restriction: access of method x$3 from object X$5, would require illegal premature access to the unconstructed `this` of <$anon: Function0> class D2 extends C2({def x = 0; object X { x }}) ^ three errors found diff --git a/test/files/neg/t6675b.check b/test/files/neg/t6675b.check index 331fa8be5d4a..9de595a13bd7 100644 --- a/test/files/neg/t6675b.check +++ b/test/files/neg/t6675b.check @@ -11,7 +11,7 @@ t6675b.scala:24: warning: object LeftOrRight expects 2 patterns to hold (A, A) b ^ t6675b.scala:26: error: constructor cannot be instantiated to expected type; found : (T1, T2, T3) - required: (?A11, ?A12) where type ?A12 <: A (this is a GADT skolem), type ?A11 <: A (this is a GADT skolem) + required: (?A1, ?A2) where type ?A2 <: A (this is a GADT skolem), type ?A1 <: A (this is a GADT skolem) def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case LeftOrRight((a, b, c)) => a } // fail ^ t6675b.scala:30: warning: object NativelyTwo expects 2 patterns to hold ((Int, Int), (Int, Int)) but crushing into 2-tuple to fit single pattern (scala/bug#6675) @@ -30,7 +30,7 @@ t6675b.scala:37: warning: object NativelyTwo expects 2 patterns to hold ((A, A), ^ t6675b.scala:39: error: constructor cannot be instantiated to expected type; found : (T1, T2, T3) - required: ((?A17, ?A18), (?A19, ?A20)) where type ?A20 <: A (this is a GADT skolem), type ?A19 <: A (this is a GADT skolem), type ?A18 <: A (this is a GADT skolem), type ?A17 <: A (this is a GADT skolem) + required: ((?A1, ?A2), (?A3, ?A4)) where type ?A4 <: A (this is a GADT skolem), type ?A3 <: A (this is a GADT skolem), type ?A2 <: A (this is a GADT 
skolem), type ?A1 <: A (this is a GADT skolem) def f4[A](x: A) = (Left(x -> x): Either[(A, A), (A, A)]) match { case NativelyTwo((a, b, c)) => a } // fail ^ 5 warnings found diff --git a/test/files/run/delambdafy_t6028.check b/test/files/run/delambdafy_t6028.check index 86cb1d5e97a8..1f6503233b9a 100644 --- a/test/files/run/delambdafy_t6028.check +++ b/test/files/run/delambdafy_t6028.check @@ -16,12 +16,12 @@ package { }; def bar(barParam: String): Object = { lazy val MethodLocalObject$module: scala.runtime.LazyRef = new scala.runtime.LazyRef(); - T.this.MethodLocalObject$1(barParam, MethodLocalObject$module) + T.this.MethodLocalObject$2(MethodLocalObject$module, barParam) }; def tryy(tryyParam: String): Function0 = { var tryyLocal: runtime.ObjectRef = scala.runtime.ObjectRef.create(""); { - (() => T.this.$anonfun$tryy$1(tryyParam, tryyLocal)) + (() => T.this.$anonfun$tryy$1(tryyLocal, tryyParam)) } }; final private[this] def $anonfun$foo$1(methodParam$1: String, methodLocal$1: String): String = T.this.classParam.+(T.this.field()).+(methodParam$1).+(methodLocal$1); @@ -32,25 +32,25 @@ package { scala.Predef.print(barParam$1); def $outer(): T }; - object MethodLocalObject$2 extends Object with T#MethodLocalTrait$1 { - def ($outer: T, barParam$1: String): T#MethodLocalObject$2.type = { - MethodLocalObject$2.super.(); - MethodLocalObject$2.super./*MethodLocalTrait$1*/$init$(barParam$1); + object MethodLocalObject$1 extends Object with T#MethodLocalTrait$1 { + def ($outer: T, barParam$1: String): T#MethodLocalObject$1.type = { + MethodLocalObject$1.super.(); + MethodLocalObject$1.super./*MethodLocalTrait$1*/$init$(barParam$1); () }; private[this] val $outer: T = _; - def $outer(): T = MethodLocalObject$2.this.$outer; - def $outer(): T = MethodLocalObject$2.this.$outer + def $outer(): T = MethodLocalObject$1.this.$outer; + def $outer(): T = MethodLocalObject$1.this.$outer }; - final private[this] def MethodLocalObject$lzycompute$1(barParam$1: String, MethodLocalObject$module$1: scala.runtime.LazyRef): T#MethodLocalObject$2.type = MethodLocalObject$module$1.synchronized[T#MethodLocalObject$2.type](if (MethodLocalObject$module$1.initialized()) - MethodLocalObject$module$1.value().$asInstanceOf[T#MethodLocalObject$2.type]() + final private[this] def MethodLocalObject$lzycompute$1(MethodLocalObject$module$1: scala.runtime.LazyRef, barParam$1: String): T#MethodLocalObject$1.type = MethodLocalObject$module$1.synchronized[T#MethodLocalObject$1.type](if (MethodLocalObject$module$1.initialized()) + MethodLocalObject$module$1.value().$asInstanceOf[T#MethodLocalObject$1.type]() else - MethodLocalObject$module$1.initialize(new T#MethodLocalObject$2.type(T.this, barParam$1)).$asInstanceOf[T#MethodLocalObject$2.type]()); - final private[this] def MethodLocalObject$1(barParam$1: String, MethodLocalObject$module$1: scala.runtime.LazyRef): T#MethodLocalObject$2.type = if (MethodLocalObject$module$1.initialized()) - MethodLocalObject$module$1.value().$asInstanceOf[T#MethodLocalObject$2.type]() + MethodLocalObject$module$1.initialize(new T#MethodLocalObject$1.type(T.this, barParam$1)).$asInstanceOf[T#MethodLocalObject$1.type]()); + final private[this] def MethodLocalObject$2(MethodLocalObject$module$1: scala.runtime.LazyRef, barParam$1: String): T#MethodLocalObject$1.type = if (MethodLocalObject$module$1.initialized()) + MethodLocalObject$module$1.value().$asInstanceOf[T#MethodLocalObject$1.type]() else - T.this.MethodLocalObject$lzycompute$1(barParam$1, MethodLocalObject$module$1); - final private[this] def 
$anonfun$tryy$1(tryyParam$1: String, tryyLocal$1: runtime.ObjectRef): Unit = try { + T.this.MethodLocalObject$lzycompute$1(MethodLocalObject$module$1, barParam$1); + final private[this] def $anonfun$tryy$1(tryyLocal$1: runtime.ObjectRef, tryyParam$1: String): Unit = try { tryyLocal$1.elem = tryyParam$1 } finally () } diff --git a/test/files/run/t4171.check b/test/files/run/t4171.check index d72391a1c4f3..b26ff294af24 100644 --- a/test/files/run/t4171.check +++ b/test/files/run/t4171.check @@ -1,3 +1,3 @@ 1 5 -class Test$B$1 +class Test$B$2 diff --git a/test/files/run/t6028.check b/test/files/run/t6028.check index 05634fa8eb87..a62dc3c1366e 100644 --- a/test/files/run/t6028.check +++ b/test/files/run/t6028.check @@ -16,12 +16,12 @@ package { }; def bar(barParam: Int): Object = { lazy val MethodLocalObject$module: scala.runtime.LazyRef = new scala.runtime.LazyRef(); - T.this.MethodLocalObject$1(barParam, MethodLocalObject$module) + T.this.MethodLocalObject$2(MethodLocalObject$module, barParam) }; def tryy(tryyParam: Int): Function0 = { var tryyLocal: runtime.IntRef = scala.runtime.IntRef.create(0); { - (new <$anon: Function0>(T.this, tryyParam, tryyLocal): Function0) + (new <$anon: Function0>(T.this, tryyLocal, tryyParam): Function0) } }; @SerialVersionUID(value = 0) final class $anonfun$foo$1 extends scala.runtime.AbstractFunction0$mcI$sp with Serializable { @@ -44,26 +44,26 @@ package { scala.Predef.print(scala.Int.box(barParam$1)); def $outer(): T }; - object MethodLocalObject$2 extends Object with T#MethodLocalTrait$1 { - def ($outer: T, barParam$1: Int): T#MethodLocalObject$2.type = { - MethodLocalObject$2.super.(); - MethodLocalObject$2.super./*MethodLocalTrait$1*/$init$(barParam$1); + object MethodLocalObject$1 extends Object with T#MethodLocalTrait$1 { + def ($outer: T, barParam$1: Int): T#MethodLocalObject$1.type = { + MethodLocalObject$1.super.(); + MethodLocalObject$1.super./*MethodLocalTrait$1*/$init$(barParam$1); () }; private[this] val $outer: T = _; - def $outer(): T = MethodLocalObject$2.this.$outer; - def $outer(): T = MethodLocalObject$2.this.$outer + def $outer(): T = MethodLocalObject$1.this.$outer; + def $outer(): T = MethodLocalObject$1.this.$outer }; - final private[this] def MethodLocalObject$lzycompute$1(barParam$1: Int, MethodLocalObject$module$1: scala.runtime.LazyRef): T#MethodLocalObject$2.type = MethodLocalObject$module$1.synchronized[T#MethodLocalObject$2.type](if (MethodLocalObject$module$1.initialized()) - MethodLocalObject$module$1.value().$asInstanceOf[T#MethodLocalObject$2.type]() + final private[this] def MethodLocalObject$lzycompute$1(MethodLocalObject$module$1: scala.runtime.LazyRef, barParam$1: Int): T#MethodLocalObject$1.type = MethodLocalObject$module$1.synchronized[T#MethodLocalObject$1.type](if (MethodLocalObject$module$1.initialized()) + MethodLocalObject$module$1.value().$asInstanceOf[T#MethodLocalObject$1.type]() else - MethodLocalObject$module$1.initialize(new T#MethodLocalObject$2.type(T.this, barParam$1)).$asInstanceOf[T#MethodLocalObject$2.type]()); - final private[this] def MethodLocalObject$1(barParam$1: Int, MethodLocalObject$module$1: scala.runtime.LazyRef): T#MethodLocalObject$2.type = if (MethodLocalObject$module$1.initialized()) - MethodLocalObject$module$1.value().$asInstanceOf[T#MethodLocalObject$2.type]() + MethodLocalObject$module$1.initialize(new T#MethodLocalObject$1.type(T.this, barParam$1)).$asInstanceOf[T#MethodLocalObject$1.type]()); + final private[this] def MethodLocalObject$2(MethodLocalObject$module$1: 
scala.runtime.LazyRef, barParam$1: Int): T#MethodLocalObject$1.type = if (MethodLocalObject$module$1.initialized()) + MethodLocalObject$module$1.value().$asInstanceOf[T#MethodLocalObject$1.type]() else - T.this.MethodLocalObject$lzycompute$1(barParam$1, MethodLocalObject$module$1); + T.this.MethodLocalObject$lzycompute$1(MethodLocalObject$module$1, barParam$1); @SerialVersionUID(value = 0) final class $anonfun$tryy$1 extends scala.runtime.AbstractFunction0$mcV$sp with Serializable { - def ($outer: T, tryyParam$1: Int, tryyLocal$1: runtime.IntRef): <$anon: Function0> = { + def ($outer: T, tryyLocal$1: runtime.IntRef, tryyParam$1: Int): <$anon: Function0> = { $anonfun$tryy$1.super.(); () }; @@ -77,8 +77,8 @@ package { $anonfun$tryy$1.this.apply(); scala.runtime.BoxedUnit.UNIT }; - private[this] val tryyParam$1: Int = _; - private[this] val tryyLocal$1: runtime.IntRef = _ + private[this] val tryyLocal$1: runtime.IntRef = _; + private[this] val tryyParam$1: Int = _ } } } diff --git a/test/files/run/t9375.check b/test/files/run/t9375.check index 8f43fab025e9..65e700e649ee 100644 --- a/test/files/run/t9375.check +++ b/test/files/run/t9375.check @@ -1,10 +1,10 @@ konstruktor: class A - konstruktor: class A$O$12$ + konstruktor: class A$O$3$ konstruktor: class A$$anon$1 konstruktor: class A$A konstruktor: class A$C konstruktor: class C - konstruktor: class T$O$15$ + konstruktor: class T$O$9$ konstruktor: class T$$anon$2 konstruktor: class T$A konstruktor: class T$C @@ -20,8 +20,8 @@ now initializing nested objects konstruktor: class A$A$Op$ konstruktor: class A$T$O$ konstruktor: class A$T$Op$ - konstruktor: class A$O$11$ - konstruktor: class A$O$13$ + konstruktor: class A$O$1$ + konstruktor: class A$O$5$ konstruktor: class A$$anon$1$O$ konstruktor: class A$$anon$1$Op$ konstruktor: class T$O$ @@ -32,8 +32,8 @@ now initializing nested objects konstruktor: class T$A$Op$ konstruktor: class T$T$O$ konstruktor: class T$T$Op$ - konstruktor: class T$O$14$ - konstruktor: class T$O$16$ + konstruktor: class T$O$7$ + konstruktor: class T$O$11$ konstruktor: class T$$anon$2$O$ konstruktor: class T$$anon$2$Op$ no object konstruktors called when serializing / deserializing objects (starting at the outer or the object itself) @@ -46,7 +46,7 @@ accessing modules triggers initialization deserializing creates a new object graph, including new scala 'object' instances, no matter where serialization starts init static module M and field v konstruktor: class M$ - konstruktor: class M$O$18$ + konstruktor: class M$O$15$ serDeser does not initialize nested static modules init M.O konstruktor: class M$O$ @@ -54,7 +54,7 @@ serDeser nested static module objects declared in field decls are not static modules, so they deserialize to new instances init lazy val M.w objects declared in lazy val are not static modules either - konstruktor: class M$O$19$ + konstruktor: class M$O$17$ object declared in a function: new instance created on each invocation - konstruktor: class M$O$20$ - konstruktor: class M$O$20$ + konstruktor: class M$O$19$ + konstruktor: class M$O$19$ diff --git a/test/junit/scala/tools/nsc/DeterminismTest.scala b/test/junit/scala/tools/nsc/DeterminismTest.scala new file mode 100644 index 000000000000..2d329d4d68e7 --- /dev/null +++ b/test/junit/scala/tools/nsc/DeterminismTest.scala @@ -0,0 +1,228 @@ +package scala.tools.nsc + +import java.nio.file.attribute.BasicFileAttributes +import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor} +import java.util + +import org.junit.Test + +import 
scala.collection.JavaConverters.asScalaIteratorConverter +import scala.language.implicitConversions +import scala.reflect.internal.util.{BatchSourceFile, SourceFile} +import scala.reflect.io.PlainNioFile +import scala.tools.nsc.reporters.StoreReporter + +class DeterminismTest { + @Test def testLambdaLift(): Unit = { + def code = List[SourceFile]( + source("a.scala", + """ + |package demo + | + |class a { + | def x = { + | def local = "x" + | } + | def y = { + | def local = "y" + | } + |} + | + """.stripMargin), + source("b.scala", + """ + |package demo + | + |class b { + | def test: Unit = { + | new a().y + | } + |} + """.stripMargin) + + ) + test(List(code)) + } + @Test def testTyperFreshName(): Unit = { + def code = List[SourceFile]( + source("a.scala", + """ + |package demo + | + |class a { + | def x = { + | { case x if "".isEmpty => "" }: PartialFunction[Any, Any] + | } + | def y = { + | { case x if "".isEmpty => "" }: PartialFunction[Any, Any] + | } + |} + | + """.stripMargin), + source("b.scala", + """ + |package demo + | + |class b { + | def test: Unit = { + | new a().y + | } + |} + """.stripMargin) + + ) + test(List(code)) + } + + @Test def testReify(): Unit = { + def code = List[SourceFile]( + source("a.scala", + """ + |package demo + | + |import language.experimental.macros + |import scala.reflect.macros.blackbox.Context + | + |class a { + | def x(c: Context) = { + | import c.universe._ + | reify { type T = Option[_]; () }.tree + | } + | def y(c: Context) = { + | import c.universe._ + | reify { type T = Option[_]; () }.tree + | } + |} + | + """.stripMargin), + source("b.scala", + """ + |package demo + | + |class b { + | def test: Unit = { + | new a().y(null) + | } + |} + """.stripMargin) + + ) + test(List(code)) + } + + @Test def testMacroFreshName(): Unit = { + val macroCode = source("macro.scala", + """ + |package demo + | + |import language.experimental.macros + |import scala.reflect.macros.blackbox.Context + | + |object Macro { + | def impl(c: Context): c.Tree = { + | import c.universe._ + | val name = c.freshName("foo") + | Block(ValDef(NoMods, TermName(name), tq"_root_.scala.Int", Literal(Constant(0))) :: Nil, Ident(name)) + | } + | def m: Unit = macro impl + |} + | + """.stripMargin) + def code = List( + source("a.scala", + """ + |package demo + | + |class a { + | def test: Unit = { + | Macro.m + | } + |} + """.stripMargin), + source("b.scala", + """ + |package demo + | + |class b { + | def test: Unit = { + | Macro.m + | } + |} + """.stripMargin) + + ) + test(List(List(macroCode), code)) + } + + def source(name: String, code: String): SourceFile = new BatchSourceFile(name, code) + private def test(groups: List[List[SourceFile]]): Unit = { + val referenceOutput = Files.createTempDirectory("reference") + + def compile(output: Path, files: List[SourceFile]): Unit = { + val g = new Global(new Settings) + g.settings.usejavacp.value = true + g.settings.classpath.value = output.toAbsolutePath.toString + g.settings.outputDirs.setSingleOutput(output.toString) + val storeReporter = new StoreReporter + g.reporter = storeReporter + import g._ + val r = new Run + // println("scalac " + files.mkString(" ")) + r.compileSources(files) + assert(!storeReporter.hasErrors, storeReporter.infos.mkString("\n")) + } + + for (group <- groups.init) { + compile(referenceOutput, group) + } + compile(referenceOutput, groups.last) + + class CopyVisitor(src: Path, dest: Path) extends SimpleFileVisitor[Path] { + override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = { + 
Files.createDirectories(dest.resolve(src.relativize(dir))) + super.preVisitDirectory(dir, attrs) + } + override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = { + Files.copy(file, dest.resolve(src.relativize(file))) + super.visitFile(file, attrs) + } + } + for (permutation <- permutationsWithSubsets(groups.last)) { + val recompileOutput = Files.createTempDirectory("recompileOutput") + copyRecursive(referenceOutput, recompileOutput) + compile(recompileOutput, permutation) + assert(diff(referenceOutput, recompileOutput), s"Difference detected between recompiling $permutation Run:\njardiff -r $referenceOutput $recompileOutput\n") + deleteRecursive(recompileOutput) + } + deleteRecursive(referenceOutput) + + } + def permutationsWithSubsets[A](as: List[A]): List[List[A]] = + as.permutations.toList.flatMap(_.inits.filter(_.nonEmpty)).distinct + + private def diff(dir1: Path, dir2: Path): Boolean = { + def allFiles(dir: Path) = Files.walk(dir).iterator().asScala.map(x => (dir.relativize(x), x)).toList.filter(_._2.getFileName.toString.endsWith(".class")).sortBy(_._1.toString) + + val dir1Files = allFiles(dir1) + val dir2Files = allFiles(dir2) + val identical = dir1Files.corresponds(dir2Files) { + case ((rel1, file1), (rel2, file2)) => + rel1 == rel2 && java.util.Arrays.equals(Files.readAllBytes(file1), Files.readAllBytes(file2)) + } + identical + } + private def deleteRecursive(f: Path) = new PlainNioFile(f).delete() + private def copyRecursive(src: Path, dest: Path): Unit = { + class CopyVisitor(src: Path, dest: Path) extends SimpleFileVisitor[Path] { + override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = { + Files.createDirectories(dest.resolve(src.relativize(dir))) + super.preVisitDirectory(dir, attrs) + } + override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = { + Files.copy(file, dest.resolve(src.relativize(file))) + super.visitFile(file, attrs) + } + } + Files.walkFileTree(src, new CopyVisitor(src, dest)) + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala index f551c839d747..d27eb95521e8 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala @@ -105,10 +105,10 @@ class ScalaInlineInfoTest extends BytecodeTesting { ("x4$(LT;)I", MethodInlineInfo(true ,false,false)), ("x5()I", MethodInlineInfo(true, false,false)), ("x5$(LT;)I", MethodInlineInfo(true ,false,false)), - ("L$1(Lscala/runtime/LazyRef;)LT$L$2$;", MethodInlineInfo(true, false,false)), + ("L$2(Lscala/runtime/LazyRef;)LT$L$1$;", MethodInlineInfo(true, false,false)), ("nest$1()I", MethodInlineInfo(true, false,false)), ("$init$(LT;)V", MethodInlineInfo(true,false,false)), - ("L$lzycompute$1(Lscala/runtime/LazyRef;)LT$L$2$;", MethodInlineInfo(true,false,false)) + ("L$lzycompute$1(Lscala/runtime/LazyRef;)LT$L$1$;", MethodInlineInfo(true,false,false)) ), None // warning ) From f5c53edfaa7c522584e7189ccfab2c72845ce678 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 2 Feb 2018 09:49:05 +1000 Subject: [PATCH 1542/2793] Report an error on problematic use of unit.fresh I've used this to flush out the corner cases fixed in the previous commit. 
(cherry picked from commit dfaefa00189a4c6688f4f4dfd8d3c4ec21761ad0) --- .../scala/tools/nsc/CompilationUnits.scala | 17 ++++++++++++++--- src/compiler/scala/tools/nsc/Global.scala | 2 +- .../scala/tools/reflect/ToolBoxFactory.scala | 3 +-- 3 files changed, 16 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index 5c84748b9509..340ef74866f7 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -19,13 +19,24 @@ trait CompilationUnits { global: Global => override def toString() = "NoCompilationUnit" } + /** Creates a `FreshNameCreator` that reports an error if it is used during the typer phase */ + final def warningFreshNameCreator: FreshNameCreator = new FreshNameCreator { + override def newName(prefix: String): String = { + if (global.phase == currentRun.typerPhase) { + Thread.dumpStack() + reporter.error(lastSeenContext.owner.pos, "Typer phase should not use the compilation unit scoped fresh name creator") + } + super.newName(prefix) + } + } + /** One unit of compilation that has been submitted to the compiler. * It typically corresponds to a single file of source code. It includes * error-reporting hooks. */ - class CompilationUnit(val source: SourceFile) extends CompilationUnitContextApi { self => - + class CompilationUnit(val source: SourceFile, freshNameCreator: FreshNameCreator) extends CompilationUnitContextApi { self => + def this(source: SourceFile) = this(source, new FreshNameCreator) /** the fresh name creator */ - implicit val fresh: FreshNameCreator = new FreshNameCreator + implicit val fresh: FreshNameCreator = freshNameCreator def freshTermName(prefix: String = nme.FRESH_TERM_NAME_PREFIX) = global.freshTermName(prefix) def freshTypeName(prefix: String) = global.freshTypeName(prefix) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index ab0efb570c57..4a92a35b5903 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1440,7 +1440,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) reporting.summarizeErrors() } - val units = sources map scripted map (new CompilationUnit(_)) + val units = sources map scripted map (file => new CompilationUnit(file, warningFreshNameCreator)) units match { case Nil => checkDeprecations() // nothing to compile, report deprecated options diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 61166f4239b8..9bb33240fa2f 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -6,9 +6,8 @@ import scala.tools.cmd.CommandLineParser import scala.tools.nsc.reporters._ import scala.tools.nsc.CompilerCommand import scala.tools.nsc.io.{AbstractFile, VirtualDirectory} -import scala.reflect.internal.util.AbstractFileClassLoader +import scala.reflect.internal.util.{AbstractFileClassLoader, FreshNameCreator, NoSourceFile} import scala.reflect.internal.Flags._ -import scala.reflect.internal.util.NoSourceFile import java.lang.{Class => jClass} import scala.compat.Platform.EOL import scala.reflect.NameTransformer From e517c4750837febedd0968688fba7d336339e0af Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 2 Feb 2018 09:49:26 +1000 Subject: [PATCH 1543/2793] Tone error down to a dev warning (cherry picked from commit 
c5cc71f597612498dec852fd1526bff730e40162) --- src/compiler/scala/tools/nsc/CompilationUnits.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index 340ef74866f7..9afbded55e35 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -23,8 +23,7 @@ trait CompilationUnits { global: Global => final def warningFreshNameCreator: FreshNameCreator = new FreshNameCreator { override def newName(prefix: String): String = { if (global.phase == currentRun.typerPhase) { - Thread.dumpStack() - reporter.error(lastSeenContext.owner.pos, "Typer phase should not use the compilation unit scoped fresh name creator") + devWarningDumpStack("Typer phase should not use the compilation unit scoped fresh name creator", 32) } super.newName(prefix) } From 4e0cc9791369fea2db92fe6c663318d24e52104e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 2 Feb 2018 16:50:02 +1000 Subject: [PATCH 1544/2793] Defer parameter alias computation until the end of typechecking Previously, it was done while typechecking super calls, and would fail to see the fact that a yet-to-be-typechecked super constructor itself had a parameter aliased by a grand-parent class. (cherry picked from commit 3ae11c1cdbf5e47b8dd1bea497075bf43fabf0ad) --- .../tools/nsc/typechecker/Analyzer.scala | 1 + .../tools/nsc/typechecker/ContextErrors.scala | 2 +- .../scala/tools/nsc/typechecker/Typers.scala | 75 +++++++++++++------ .../nsc/typechecker/ParamAliasTest.scala | 60 +++++++++++++++ 4 files changed, 116 insertions(+), 22 deletions(-) create mode 100644 test/junit/scala/tools/nsc/typechecker/ParamAliasTest.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index 74b154eb21bc..84e85f05ca79 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -96,6 +96,7 @@ trait Analyzer extends AnyRef applyPhase(units.next()) undoLog.clear() } + finishComputeParamAlias() // defensive measure in case the bookkeeping in deferred macro expansion is buggy clearDelayed() if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.typerNanos, start) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 7aa71cfda051..e196ba7f28fb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -298,7 +298,7 @@ trait ContextErrors { def DeprecatedParamNameError(param: Symbol, name: Name) = issueSymbolTypeError(param, "deprecated parameter name "+ name +" has to be distinct from any other parameter name (deprecated or not).") - // computeParamAliases + // analyzeSuperConsructor def SuperConstrReferenceError(tree: Tree) = NormalTypeError(tree, "super constructor cannot be passed a self reference unless parameter is declared by-name") diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index c21db31e0d83..068cd00e693a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -40,6 +40,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper final val shortenImports = false 
+ // For each class, we collect a mapping from constructor param accessors that are aliases of their superclass + // param accessors. At the end of the typer phase, when this information is available all the way up the superclass + // chain, this is used to determine which are true aliases, ones where the field can be elided from this class. + // And yes, if you were asking, this is yet another binary fragility, as we bake knowledge of the super class into + // this class. + private val superConstructorCalls: mutable.AnyRefMap[Symbol, collection.Map[Symbol, Symbol]] = perRunCaches.newAnyRefMap() + // allows override of the behavior of the resetTyper method w.r.t comments def resetDocComments() = clearDocComments() @@ -48,6 +55,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper resetContexts() resetImplicits() resetDocComments() + superConstructorCalls.clear() } sealed abstract class SilentResult[+T] { @@ -2087,9 +2095,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper treeCopy.ValDef(vdef, typedMods, sym.name, tpt1, checkDead(context, rhs1)) setType NoType } - /** Enter all aliases of local parameter accessors. - */ - def computeParamAliases(clazz: Symbol, vparamss: List[List[ValDef]], rhs: Tree) { + /** Analyze the super constructor call to record information used later to compute parameter aliases */ + def analyzeSuperConsructor(meth: Symbol, vparamss: List[List[ValDef]], rhs: Tree): Unit = { + val clazz = meth.owner debuglog(s"computing param aliases for $clazz:${clazz.primaryConstructor.tpe}:$rhs") val pending = ListBuffer[AbsTypeError]() @@ -2131,27 +2139,23 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!superClazz.isJavaDefined) { val superParamAccessors = superClazz.constrParamAccessors if (sameLength(superParamAccessors, superArgs)) { + val accToSuperAcc = mutable.AnyRefMap[Symbol, Symbol]() for ((superAcc, superArg@Ident(name)) <- superParamAccessors zip superArgs) { if (mexists(vparamss)(_.symbol == superArg.symbol)) { - val alias = ( - superAcc.initialize.alias - orElse (superAcc getterIn superAcc.owner) - filter (alias => superClazz.info.nonPrivateMember(alias.name) == alias) - ) - if (alias.exists && !alias.accessed.isVariable && !isRepeatedParamType(alias.accessed.info)) { - val ownAcc = clazz.info decl name suchThat (_.isParamAccessor) match { - case acc if !acc.isDeferred && acc.hasAccessorFlag => acc.accessed - case acc => acc - } - ownAcc match { - case acc: TermSymbol if !acc.isVariable && !isByNameParamType(acc.info) => - debuglog(s"$acc has alias ${alias.fullLocationString}") - acc setAlias alias - case _ => - } + val ownAcc = clazz.info decl name suchThat (_.isParamAccessor) match { + case acc if !acc.isDeferred && acc.hasAccessorFlag => acc.accessed + case acc => acc + } + ownAcc match { + case acc: TermSymbol if !acc.isVariable && !isByNameParamType(acc.info) => + accToSuperAcc(acc) = superAcc + case _ => } } } + if (!accToSuperAcc.isEmpty) { + superConstructorCalls(clazz) = accToSuperAcc + } } } } @@ -2306,10 +2310,10 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass) && !meth.isJava) { // There are no supercalls for AnyVal or constructors from Java sources, which - // would blow up in computeParamAliases; there's nothing to be computed for them + // would blow up in analyzeSuperConsructor; there's nothing to be computed for them // anyway. 
if (meth.isPrimaryConstructor) - computeParamAliases(meth.owner, vparamss1, rhs1) + analyzeSuperConsructor(meth, vparamss1, rhs1) else checkSelfConstructorArgs(ddef, meth.owner) } @@ -5842,6 +5846,35 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (phase.erasedTypes) None // OPT save the hashmap lookup in erasure type and beyond else transformed remove tree } + + + /** Finish computation of param aliases after typechecking is completed */ + final def finishComputeParamAlias(): Unit = { + val classes = superConstructorCalls.keys.toArray + // superclasses before subclasses to avoid a data race between `superAcc.alias` and `acc.setAlias` below. + scala.util.Sorting.quickSort(classes)(Ordering.fromLessThan((a, b) => b.isLess(a))) + + for (sym <- classes) { + for ((ownAcc, superAcc) <- superConstructorCalls.getOrElse(sym, Nil)) { + // We have a corresponding paramter in the super class. + val superClazz = sym.superClass + val alias = ( + superAcc.initialize.alias // Is the param accessor is an alias for a field further up the class heirarchy? + orElse (superAcc getterIn superAcc.owner) // otherwise, lookup the accessor for the super + filter (alias => superClazz.info.nonPrivateMember(alias.name) == alias) // the accessor must be public + ) + if (alias.exists && !alias.accessed.isVariable && !isRepeatedParamType(alias.accessed.info)) { + ownAcc match { + case acc: TermSymbol if !acc.isVariable && !isByNameParamType(acc.info) => + debuglog(s"$acc has alias ${alias.fullLocationString}") + acc setAlias alias + case _ => + } + } + } + } + superConstructorCalls.clear() + } } trait TypersStats { diff --git a/test/junit/scala/tools/nsc/typechecker/ParamAliasTest.scala b/test/junit/scala/tools/nsc/typechecker/ParamAliasTest.scala new file mode 100644 index 000000000000..9892c09f9686 --- /dev/null +++ b/test/junit/scala/tools/nsc/typechecker/ParamAliasTest.scala @@ -0,0 +1,60 @@ +package scala.tools.nsc.typechecker + +import org.junit.{Assert, Test} +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.reflect.io.VirtualDirectory +import scala.tools.nsc.Global +import scala.tools.nsc.classpath.{AggregateClassPath, VirtualDirectoryClassPath} +import scala.tools.nsc.reporters.StoreReporter +import scala.tools.testing.BytecodeTesting + +@RunWith(classOf[JUnit4]) +class ParamAliasTest extends BytecodeTesting { + + @Test + def checkAliasWorksWhenSubclassesAreTypecheckedFirst(): Unit = { + def test(code: List[String], check: List[(String, String)], expected: List[String]): Unit = { + val compiler1 = BytecodeTesting.newCompiler(extraArgs = compilerArgs) + val r = new compiler1.global.Run + r.compileSources(code.map(compiler1.global.newSourceFile(_))) + Predef.assert(!compiler1.global.reporter.hasErrors, compiler1.global.reporter.asInstanceOf[StoreReporter].infos) + def aliasNames(g: Global) = { + check.map { + case (clsName, paramName) => + val cls = g.rootMirror.getRequiredClass(clsName) + val field = g.exitingPickler(cls.info.decl(g.TermName(paramName)).suchThat(_.isParamAccessor).accessed) + assert(field.exists, (clsName, paramName, cls.info)) + val alias = field.alias + s"${field.fullName} stored in ${alias.fullName}" + } + } + val aliasInfoAfterCompilation = aliasNames(compiler1.global) + val compiler2 = BytecodeTesting.newCompiler(extraArgs = compilerArgs) + val out = compiler1.global.settings.outputDirs.getSingleOutput.get.asInstanceOf[VirtualDirectory] + compiler2.global.platform.classPath + compiler2.global.platform.currentClassPath = 
Some(AggregateClassPath(new VirtualDirectoryClassPath(out) :: compiler2.global.platform.currentClassPath.get :: Nil)) + val r2 = new compiler2.global.Run + val aliasInfoUnpickled = aliasNames(compiler2.global) + Assert.assertEquals(expected.sorted, aliasInfoAfterCompilation.sorted) + Assert.assertEquals(expected.sorted, aliasInfoUnpickled.sorted) + } + + { + val code = List("package p1; class A(val a: Int) extends B(a)", "package p1; class B(b: Int) extends C(b)", "package p1; class C(val c: Int)") + val check = List("p1.A" -> "a") + val expected = List("p1.A.a stored in p1.C.c") + test(code, check, expected) + test(code.reverse, check, expected) + } + + { + val code = List("package p1; class A(val a: Int) extends B(a)", "package p1; class B(val b: Int) extends C(b)", "package p1; class C(val c: Int)") + val check = List("p1.A" -> "a", "p1.B" -> "b") + val expected = List("p1.A.a stored in p1.C.c", "p1.B.b stored in p1.C.c") + test(code, check, expected) + test(code.reverse, check, expected) + } + } +} From f697224d28341a6b4ba64292a5009fb1b9cc2492 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 22 Jan 2018 20:02:47 +1000 Subject: [PATCH 1545/2793] Don't copy the OVERRIDE flag in refinement decls typedRefinement defers the setting of this flag until the end of the compilation unit, which means that inferred types that are derived from the written refinement type can be unstable depending on whether they were computed before or after the flag was set. An alternative fix might be to just remove the setting of OVERRIDE in typedRefinement.unitToCheck. (cherry picked from commit f6ca3dd53e7937a6750266625f3f47e28a0b38a2) --- .../scala/reflect/internal/Types.scala | 2 +- .../scala/tools/nsc/DeterminismTest.scala | 33 +++++++++++++++++++ 2 files changed, 34 insertions(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index e7e4840b050c..ec37df891f4d 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -3618,7 +3618,7 @@ trait Types else refinedType(parents, owner) val syms1 = decls.toList for (sym <- syms1) - result.decls.enter(sym.cloneSymbol(result.typeSymbol)) + result.decls.enter(sym.cloneSymbol(result.typeSymbol).resetFlag(OVERRIDE)) val syms2 = result.decls.toList val resultThis = result.typeSymbol.thisType for (sym <- syms2) diff --git a/test/junit/scala/tools/nsc/DeterminismTest.scala b/test/junit/scala/tools/nsc/DeterminismTest.scala index 2d329d4d68e7..8651f23dcf0f 100644 --- a/test/junit/scala/tools/nsc/DeterminismTest.scala +++ b/test/junit/scala/tools/nsc/DeterminismTest.scala @@ -154,6 +154,39 @@ class DeterminismTest { test(List(List(macroCode), code)) } + + @Test def testRefinementTypeOverride(): Unit = { + def code = List[SourceFile]( + source("a.scala", + """ + |class Global + |trait Analyzer extends StdAttachments { + | val global: Global + |} + |trait Context { + | val universe: Global + |} + | + |trait StdAttachments { + | self: Analyzer => + | + | type UnaffiliatedMacroContext = Context + | type MacroContext = UnaffiliatedMacroContext { val universe: self.global.type } + |} + | + """.stripMargin), + source("b.scala", + """ + |class Macros { + | self: Analyzer => + | def foo = List.apply[MacroContext]() + |} + | + """.stripMargin) + ) + test(List(code)) + } + def source(name: String, code: String): SourceFile = new BatchSourceFile(name, code) private def test(groups: List[List[SourceFile]]): Unit = { val referenceOutput = 
Files.createTempDirectory("reference") From e76420490300e036c7357afcd685ed0e856fd8e8 Mon Sep 17 00:00:00 2001 From: Sujeet Kausallya Gholap Date: Wed, 11 Jul 2018 17:58:33 +0530 Subject: [PATCH 1546/2793] Make quasiquote pattern matching deterministic As per the discussion at https://github.com/scala/scala/commit/7184fe0d3740ac8558067c18bdf449a65a8a26b9#r29651930, all we want to avoid is name collision among the holes in a single pattern. For that, `c.freshName()` itself is sufficient and the randomness is not needed. This is what scala.meta does, and works just as well. This fixes https://github.com/scala/bug/issues/11008 (cherry picked from commit 0336c145e2b5be29a3d983d239251082882a55e7) --- src/compiler/scala/reflect/quasiquotes/Placeholders.scala | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/src/compiler/scala/reflect/quasiquotes/Placeholders.scala b/src/compiler/scala/reflect/quasiquotes/Placeholders.scala index bc4f95427519..cf706a4ace98 100644 --- a/src/compiler/scala/reflect/quasiquotes/Placeholders.scala +++ b/src/compiler/scala/reflect/quasiquotes/Placeholders.scala @@ -1,7 +1,6 @@ package scala.reflect package quasiquotes -import java.util.UUID.randomUUID import scala.collection.mutable /** Emulates hole support (see Holes.scala) in the quasiquote parser (see Parsers.scala). @@ -20,7 +19,6 @@ trait Placeholders { self: Quasiquotes => lazy val posMap = mutable.LinkedHashMap[Position, (Int, Int)]() lazy val code = { val sb = new StringBuilder() - val sessionSuffix = randomUUID().toString.replace("-", "").substring(0, 8) + "$" def appendPart(value: String, pos: Position) = { val start = sb.length @@ -30,7 +28,7 @@ trait Placeholders { self: Quasiquotes => } def appendHole(tree: Tree, rank: Rank) = { - val placeholderName = c.freshName(TermName(nme.QUASIQUOTE_PREFIX + sessionSuffix)) + val placeholderName = c.freshName(TermName(nme.QUASIQUOTE_PREFIX)) sb.append(placeholderName) val holeTree = if (method != nme.unapply) tree From 4cf75d6ada13a42b630c7026057c5141b10abb9f Mon Sep 17 00:00:00 2001 From: psilospore Date: Sat, 6 Oct 2018 19:13:50 -0400 Subject: [PATCH 1547/2793] 10786 setting privateWithin for java protected inner classes and modules loaded by classpath, and created tests. 
--- .../symtab/classfile/ClassfileParser.scala | 10 +++++++ test/files/pos/t10786/Bar_2.scala | 27 +++++++++++++++++++ test/files/pos/t10786/Foo_1.java | 12 +++++++++ 3 files changed, 49 insertions(+) create mode 100644 test/files/pos/t10786/Bar_2.scala create mode 100644 test/files/pos/t10786/Foo_1.java diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index f99b85b7cfdd..b09a9cf54d45 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1150,6 +1150,16 @@ abstract class ClassfileParser { mod.moduleClass setInfo loaders.moduleClassLoader cls.associatedFile = file mod.moduleClass.associatedFile = file + + /** + * need to set privateWithin here because the classfile of a nested protected class is public in bytecode, + * so propagatePackageBoundary will not set it when the symbols are completed + */ + if (jflags.isProtected) { + cls.privateWithin = cls.enclosingPackage + mod.privateWithin = cls.enclosingPackage + } + (cls, mod) } diff --git a/test/files/pos/t10786/Bar_2.scala b/test/files/pos/t10786/Bar_2.scala new file mode 100644 index 000000000000..37f0809ebf07 --- /dev/null +++ b/test/files/pos/t10786/Bar_2.scala @@ -0,0 +1,27 @@ +package pkg { + class C { + class T1 extends Foo_1.StaticClass + class T2 extends Foo_1.ProtectedStaticClass + def test(): Unit = { + val n1 = new Foo_1.StaticClass + n1.x + Foo_1.StaticClass.y + val n2 = new Foo_1.ProtectedStaticClass + n2.x + Foo_1.ProtectedStaticClass.y + } + + class I extends Foo_1 { + class T1 extends Foo_1.StaticClass + class T2 extends Foo_1.ProtectedStaticClass + def test(): Unit = { + val n1 = new Foo_1.StaticClass + n1.x + Foo_1.StaticClass.y + val n2 = new Foo_1.ProtectedStaticClass + n2.x + Foo_1.ProtectedStaticClass.y + } + } + } +} diff --git a/test/files/pos/t10786/Foo_1.java b/test/files/pos/t10786/Foo_1.java new file mode 100644 index 000000000000..bdda66de5b06 --- /dev/null +++ b/test/files/pos/t10786/Foo_1.java @@ -0,0 +1,12 @@ +package pkg; + +public class Foo_1 { + static class StaticClass { + public int x = 1; + public static int y = 1; + } + protected static class ProtectedStaticClass { + public int x = 1; + public static int y = 1; + } +} From 3dcbf474bdb127e28e0c8701f6daec32fb198f79 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 29 Jan 2018 23:05:54 +1000 Subject: [PATCH 1548/2793] [forwardmerge] Rework change to scalap This fixed a regression in lift-json, which programattically uses scalap, and expects that method parameters are children of the method symbol. 
--- .../scala/tools/scalap/scalax/rules/scalasig/Symbol.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala index 2c3913c1f354..cc8f4bb1cb01 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/Symbol.scala @@ -27,7 +27,7 @@ abstract class ScalaSigSymbol extends Symbol { def entry: ScalaSig#Entry def index = entry.index - lazy val children: Seq[Symbol] = applyScalaSigRule(ScalaSigParsers.symbols) filter (sym => sym.parent == Some(this) && !sym.isParam) + lazy val children: Seq[Symbol] = applyScalaSigRule(ScalaSigParsers.symbols) filter (sym => sym.parent == Some(this) && (this match { case _: MethodSymbol => true case _ => !sym.isParam})) lazy val attributes: Seq[AttributeInfo] = applyScalaSigRule(ScalaSigParsers.attributes) filter (_.symbol == this) } From 127c1ec39977783c3a14c708cb9242911c40e105 Mon Sep 17 00:00:00 2001 From: Janek Bogucki Date: Tue, 9 Oct 2018 11:30:45 +0100 Subject: [PATCH 1549/2793] When parsing a Scaladoc table do not consume non-table content Fixes two issues by reducing greediness of Scaladoc table parsing, - Content appearing directly after table ends is not lost in the call to blockEnded("table") - Cell content breaking over more than one line is not parsed By looking further ahead and ensuring the next line of content is a valid table row both these issues are avoided and subsequent parsing is simplified because the rows being parsing have a known structure. This approach preserves the fast failing check on existence of a new table row via the same quick check on the next char as the previous non-regex approach. Scaladoc tables are a subset of GFM tables. One restriction over GFM is the requirement for table rows to start and end with the | character with no leading/trailing whitespace. The test for leading whitespace not being ignored is turned on and confirms the restriction is active. 
--- .../nsc/doc/base/CommentFactoryBase.scala | 55 +++++++----- test/scaladoc/resources/tables.scala | 42 +++++---- test/scaladoc/run/tables-warnings.check | 5 +- test/scaladoc/run/tables-warnings.scala | 26 ++++-- test/scaladoc/run/tables.check | 7 +- test/scaladoc/run/tables.scala | 90 ++++++++++++++----- 6 files changed, 152 insertions(+), 73 deletions(-) diff --git a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala index 3239735772c2..5172869788fb 100644 --- a/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala +++ b/src/scaladoc/scala/tools/nsc/doc/base/CommentFactoryBase.scala @@ -457,7 +457,7 @@ trait CommentFactoryBase { this: MemberLookupBase => hrule() else if (checkList) listBlock - else if (check(TableCellStart)) + else if (checkTableRow) table() else { para() @@ -551,6 +551,18 @@ trait CommentFactoryBase { this: MemberLookupBase => HorizontalRule() } + /** Starts and end with a cell separator matching the minimal row || and all other possible rows */ + private val TableRow = """^\|.*\|$""".r + + /* Checks for a well-formed table row */ + private def checkTableRow = { + check(TableCellStart) && { + val newlineIdx = buffer.indexOf('\n', offset) + newlineIdx != -1 && + TableRow.findFirstIn(buffer.substring(offset, newlineIdx)).isDefined + } + } + /** {{{ * table ::= headerRow '\n' delimiterRow '\n' dataRows '\n' * content ::= inline-content @@ -572,8 +584,6 @@ trait CommentFactoryBase { this: MemberLookupBase => println(s"peek: $tag: '$limitedPeek'") } - def nextIsCellStart = check(TableCellStart) - /* Accumulated state */ var header: Option[Row] = None @@ -603,7 +613,8 @@ trait CommentFactoryBase { this: MemberLookupBase => val escapeChar = "\\" /* Poor man's negative lookbehind */ - def checkInlineEnd = check(TableCellStart) && !check(escapeChar, -1) + def checkInlineEnd = + (check(TableCellStart) && !check(escapeChar, -1)) || check("\n") def decodeEscapedCellMark(text: String) = text.replace(escapeChar + TableCellStart, TableCellStart) @@ -624,8 +635,8 @@ trait CommentFactoryBase { this: MemberLookupBase => def contentNonEmpty(content: Inline) = content != Text("") /** - * @param cellStartMark The char indicating the start or end of a cell - * @param finalizeRow Function to invoke when the row has been fully parsed + * @param cellStartMark The char indicating the start or end of a cell + * @param finalizeRow Function to invoke when the row has been fully parsed */ def parseCells(cellStartMark: String, finalizeRow: () => Unit): Unit = { def jumpCellStartMark() = { @@ -646,7 +657,7 @@ trait CommentFactoryBase { this: MemberLookupBase => // Continue parsing a table row. // - // After reading inline content the follow conditions will be encountered, + // After reading inline content the following conditions will be encountered, // // Case : Next Chars // .................. @@ -667,10 +678,6 @@ trait CommentFactoryBase { this: MemberLookupBase => // State : The cell separator not followed by a newline // Action: Store the current contents, skip the cell separator, continue parsing the row. // - // Case 4. - // State : A newline followed by anything - // Action: Store the current contents, report warning, skip the newline, close the row, stop parsing. 
- // @tailrec def parseCells0( contents: List[Block], finalizeRow: () => Unit, @@ -693,12 +700,12 @@ trait CommentFactoryBase { this: MemberLookupBase => finalizeRow() reportError(pos, "unclosed table row") } else if (isStartMarkNewline) { - // peek("2/1: start-mark-new-line") + // peek("2: start-mark-new-line/before") // Case 2 storeContents() finalizeRow() skipStartMarkNewline() - // peek("2/2: start-mark-new-line") + // peek("2: start-mark-new-line/after") } else if (isStartMark) { // peek("3: start-mark") // Case 3 @@ -708,14 +715,6 @@ trait CommentFactoryBase { this: MemberLookupBase => // TrailingCellsEmpty produces empty content val accContents = if (contentNonEmpty(content)) Paragraph(content) :: Nil else Nil parseCells0(accContents, finalizeRow, startPos, offset) - } else if (isNewline) { - // peek("4: newline") - // Case 4 - /* Fix and continue as there is no option to not return a table at present. */ - reportError(pos, "missing trailing cell marker") - storeContents() - finalizeRow() - skipNewline() } else { // Case π√ⅈ // When the impossible happens leave some clues. @@ -732,7 +731,7 @@ trait CommentFactoryBase { this: MemberLookupBase => parseCells(TableCellStart, finalizeHeaderCells) - while (nextIsCellStart) { + while (checkTableRow) { val initialOffset = offset parseCells(TableCellStart, finalizeCells) @@ -814,6 +813,14 @@ trait CommentFactoryBase { this: MemberLookupBase => defaultColumnOption } } + + if (check("\n", -1)) { + prevChar() + } else { + peek("expected-newline-missing") + sys.error("table parsing left buffer in unexpected state") + } + blockEnded("table") Table(header.get, columnOptions, constrainedDataRows) } @@ -1088,6 +1095,10 @@ trait CommentFactoryBase { this: MemberLookupBase => offset += 1 } + final def prevChar() { + offset -= 1 + } + final def check(chars: String): Boolean = { val poff = offset val ok = jump(chars) diff --git a/test/scaladoc/resources/tables.scala b/test/scaladoc/resources/tables.scala index 820ad2ea5a20..f4119e9f8f69 100644 --- a/test/scaladoc/resources/tables.scala +++ b/test/scaladoc/resources/tables.scala @@ -67,6 +67,15 @@ package scala.test.scaladoc.tables { */ trait TrailingCellsEmpty + /** + * ||Header 1|Header 2| + * |---|---|---| + * |||Fig| + * ||Cherry|| + * |Walnut||| + */ + trait LeadingCellsEmpty + // Headers /** @@ -164,20 +173,8 @@ package scala.test.scaladoc.tables { */ trait CellMarkerEscapeEscapesOnlyMarker - // Known suboptimal behaviour. Candidates for improving later. - /** - * ||Header 1|Header 2| - * |---|---|---| - * |||Fig| - * ||Cherry|| - * |Walnut||| - */ - trait LeadingCellsEmpty - - // Should not lose r2c1 or warn - /** - * |Unstarted| + * |Unstarted Row| * |-| * |r1c1| * r2c1| @@ -191,16 +188,25 @@ package scala.test.scaladoc.tables { * |-| * |Accidental * newline| - * |~FIN~| * */ trait SplitCellContent + /** + * |Split| + * |-| + * |Accidental + * newline| + * |~FIN~| + * + */ + trait SplitInternalCellContent + /** * |Hill Dweller| * |---| * |Ant| - * Ants are cool. + * Ants are cool * |Hive Dweller| * |---| * |Bee| @@ -208,7 +214,11 @@ package scala.test.scaladoc.tables { */ trait MixedContentUnspaced - // Should parse to table with a header, defaulted delimiter and no rows. + // Known suboptimal behaviour. Candidates for improving later. + + // Because table rows must not have leading whitespace this + // should parse to a table with a header, defaulted delimiter and no rows + // while the ignored content is parsed as non-table content. 
/** * |Leading| * |-| diff --git a/test/scaladoc/run/tables-warnings.check b/test/scaladoc/run/tables-warnings.check index 35d4d72ebd3b..f81b5b203070 100644 --- a/test/scaladoc/run/tables-warnings.check +++ b/test/scaladoc/run/tables-warnings.check @@ -1,7 +1,4 @@ -newSource:3: warning: unclosed table row - /** - ^ -newSource:9: warning: missing trailing cell marker +newSource:9: warning: Fixing missing delimiter row /** ^ newSource:19: warning: Fixing invalid column alignment: ::- diff --git a/test/scaladoc/run/tables-warnings.scala b/test/scaladoc/run/tables-warnings.scala index 7a75557417ea..dc46044aaaf7 100644 --- a/test/scaladoc/run/tables-warnings.scala +++ b/test/scaladoc/run/tables-warnings.scala @@ -42,16 +42,20 @@ object Test extends ScaladocModelTest { withComment("PrematureEndOfText") { comment => val header = r("Header") val colOpts = ColumnOptionLeft :: Nil - val row = r("cell") - val rows = row :: Nil - assertTableEquals(Table(header, colOpts, rows), comment.body) + val table = Table(header, colOpts, Nil) + val summary = Paragraph(Chain(List(Summary(Text("|cell"))))) + val body = Body(table :: summary :: Nil) + assertBodiesEquals(body, comment.body) } withComment("MissingTrailingCellMark") { comment => - val header = r("Unterminated") val colOpts = ColumnOptionLeft :: Nil - val rows = r("r1c1") :: r("r2c1") :: r("r3c1") :: Nil - assertTableEquals(Table(header, colOpts, rows), comment.body) + val table1 = Table(r("Unterminated"), colOpts, r("r1c1") :: Nil) + // val rows = r("r1c1") :: r("r2c1") :: r("r3c1") :: Nil + val summary = Paragraph(Chain(List(Summary(Text("|r2c1"))))) + val table2 = Table(r("r3c1"), colOpts, Nil) + val body = Body(table1 :: summary :: table2 :: Nil) + assertBodiesEquals(body, comment.body) } withComment("InvalidColumnOptions") { comment => @@ -89,6 +93,16 @@ object Test extends ScaladocModelTest { assert(expectedTable == actualBlock, s"Expected: $expectedTable, Actual: $actualBlock") } + private def assertBodiesEquals(expectedBody: Body, actualBody: Body): Unit = { + val blocks = expectedBody.blocks zip actualBody.blocks + val blockComparisons = blocks.zipWithIndex.collect { + case ((expectedBlock, actualBlock), idx) if expectedBlock != actualBlock => + s"Block mismatch at index $idx\nExpected block: $expectedBlock\nActual block : $actualBlock" + }.headOption.getOrElse("") + + assert(expectedBody == actualBody, s"$blockComparisons\n\nExpected: $expectedBody, Actual: $actualBody") + } + private def multilineFormat(table: Table): String = { "header : " + table.header + "\n" + "columnOptions: " + table.columnOptions.size + "\n" + diff --git a/test/scaladoc/run/tables.check b/test/scaladoc/run/tables.check index 8bbb25e4d1b7..1bfa5884ad8a 100644 --- a/test/scaladoc/run/tables.check +++ b/test/scaladoc/run/tables.check @@ -4,10 +4,13 @@ newSource:36: warning: Dropping 1 excess table delimiter cells from row. newSource:36: warning: Dropping 1 excess table data cells from row. /** ^ -newSource:179: warning: no additional content on same line after table +newSource:176: warning: Fixing missing delimiter row /** ^ -newSource:179: warning: Fixing missing delimiter row +newSource:195: warning: Fixing missing delimiter row + /** + ^ +newSource:222: warning: Fixing missing delimiter row /** ^ Done. 
diff --git a/test/scaladoc/run/tables.scala b/test/scaladoc/run/tables.scala index 719207af3d8f..27b7fc879ec2 100644 --- a/test/scaladoc/run/tables.scala +++ b/test/scaladoc/run/tables.scala @@ -271,42 +271,80 @@ object Test extends ScaladocModelTest { assertTableEquals(Table(header, colOpts, rows), comment.body) } - /* Deferred Enhancements. - * - * When these improvements are made corresponding test updates to any new or - * changed error messages and parsed content and would be included. - */ - withComment("MissingInitialCellMark") { comment => val colOpts = ColumnOptionLeft :: Nil - val table1 = Table(r("Unstarted"), colOpts, r("r1c1") :: Nil) + val table1 = Table(r("Unstarted Row"), colOpts, r("r1c1") :: Nil) + + val content = Paragraph(Chain(List(Summary(Text("r2c1|"))))) + val table2 = Table(r("r3c1"), colOpts, Nil) - assertTablesEquals(table1 :: table2 :: Nil, comment.body) - } + val body = Body(table1 :: content :: table2 :: Nil) - // TODO: Add assertions for MixedContentUnspaced which is similar to MissingInitialCellMark + assertBodiesEquals(body, comment.body) + } withComment("SplitCellContent") { comment => val header = r("Split") val colOpts = ColumnOptionLeft :: Nil - val rows = r("Accidental\nnewline") :: r("~FIN~") :: Nil - assertTableEquals(Table(header, colOpts, rows), comment.body) + + val table = Table(header, colOpts, Nil) + + val content = Paragraph(Chain(List(Summary(Text("|Accidental\nnewline|"))))) + + val body = Body(table :: content :: Nil) + + assertBodiesEquals(body, comment.body) + } + + withComment("SplitInternalCellContent") { comment => + val colOpts = ColumnOptionLeft :: Nil + + val table1 = Table(r("Split"), colOpts, Nil) + + val content = Paragraph(Chain(List(Summary(Text("|Accidental\nnewline|"))))) + + val table2 = Table(r("~FIN~"), colOpts, Nil) + + val body = Body(table1 :: content :: table2 :: Nil) + + assertBodiesEquals(body, comment.body) } - // TODO: As a later enhancement skip whitespace before table marks to reduce rate of silently incorrect table markdown. + withComment("MixedContentUnspaced") { comment => + val colOpts = ColumnOptionLeft :: Nil + + val table1 = Table(r("Hill Dweller"), colOpts, r("Ant") :: Nil) + + val content1 = Paragraph(Chain(List(Summary(Text("Ants are cool"))))) + + val table2 = Table(r("Hive Dweller"), colOpts, r("Bee") :: Nil) + + val content2 = pt("But bees are better.\n") + + val body = Body(table1 :: content1 :: table2 :: content2 :: Nil) + + assertBodiesEquals(body, comment.body) + } + + /* Deferred Enhancements. + * + * When these improvements are made corresponding test updates to any new or + * changed error messages and parsed content and would be included. + */ + + // As a later enhancement skip whitespace before table marks to reduce rate of silently ignored intended table markdown. 
/* Confirm current suboptimal behaviour */ - // TODO: Restore this test by updating the expected value - if (false) { - withComment("LeadingWhitespaceNotSkipped") { comment => - val colOpts = ColumnOptionLeft :: Nil - val table1 = Table(r("Leading"), colOpts, Nil) - val table2 = Table(r("whitespace before marks"), colOpts, Nil) - val body = Body(table1 :: table2 :: Nil) - assertBodiesEquals(body, comment.body) - } + withComment("LeadingWhitespaceNotSkipped") { comment => + val colOpts = ColumnOptionLeft :: Nil + val table = Table(r("Leading"), colOpts, Nil) + val text = " |-|\n |whitespace before marks|\n |Not Yet Skipped|Maybe TO DO|\n" + val content = Paragraph(Chain(List(Summary(Text(text))))) + + val body = Body(table :: content :: Nil) + assertBodiesEquals(body, comment.body) } } @@ -330,7 +368,13 @@ object Test extends ScaladocModelTest { } private def assertBodiesEquals(expectedBody: Body, actualBody: Body): Unit = { - assert(expectedBody == actualBody, s"Expected: $expectedBody, Actual: $actualBody") + val blocks = expectedBody.blocks zip actualBody.blocks + val blockComparisons = blocks.zipWithIndex.collect { + case ((expectedBlock, actualBlock), idx) if expectedBlock != actualBlock => + s"Block mismatch at index $idx\nExpected block: $expectedBlock\nActual block : $actualBlock" + }.headOption.getOrElse("") + + assert(expectedBody == actualBody, s"$blockComparisons\n\nExpected: $expectedBody, Actual: $actualBody") } private def multilineFormat(table: Table): String = { From 8dab041341a9d834fa049ad47be6761c56219960 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Fri, 19 Oct 2018 16:42:59 +0200 Subject: [PATCH 1550/2793] [backport] Review feedback (cherry picked from commit a38f306a8980955ac504c4397619988ab29cfe56) --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index f11fd72a6a31..aa33de16a98c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3080,13 +3080,13 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!vd.tpt.isEmpty) Right(vd.tpt.tpe) else Left(args.indexWhere { case Ident(name) => name == vd.name - case _ => false // TODO: i think we need to deal with widening conversions too?? + case _ => false // TODO: this does not catch eta-expansion of an overloaded method that involves numeric widening scala/bug#9738 (and maybe similar patterns?) }) } // If some of the vparams without type annotation was not applied to `meth`, // we're not going to learn enough from typing `meth` to determine them. - if (formalsFromApply.exists{ case Left(-1) => true case _ => false }) EmptyTree + if (formalsFromApply.contains(Left(-1))) EmptyTree else { // We're looking for a method (as indicated by FUNmode in the silent typed below), // so let's make sure our expected type is a MethodType (of the right arity, but we can't easily say more about the argument types) From e412a12e310e0d9ea6048e182f333e744edc4a85 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Sat, 20 Oct 2018 23:48:34 -0400 Subject: [PATCH 1551/2793] More thorough treatment of @strictfp. (Just getting my toes wet again.) The JVM accepts `ACC_STRICT` as a modifier flag on non-abstract methods only. 
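(For orientation, a minimal sketch of how `ACC_STRICT` can be observed on compiled output, in the spirit of this patch's `Test_2.scala`; the object name and classpath lookup are illustrative, and it assumes the compiler's bundled ASM, `scala.tools.asm`, is on the classpath.)

```scala
import scala.tools.asm.{ClassReader, Opcodes}
import scala.tools.asm.tree.ClassNode
import scala.collection.JavaConverters._

object AccStrictCheck {
  /** Names of the methods in a compiled class that carry ACC_STRICT. */
  def strictMethods(internalName: String): List[String] = {
    val in = getClass.getClassLoader.getResourceAsStream(internalName + ".class")
    require(in != null, s"class file not found: $internalName")
    val node = new ClassNode()
    try new ClassReader(in).accept(node, 0) finally in.close()
    node.methods.asScala.toList
      .filter(m => (m.access & Opcodes.ACC_STRICT) != 0)
      .map(_.name)
  }
}
```

For a class compiled from the test sources below, e.g. `strictMethods("strictfp/C")` would be expected to report `foo` together with the lifted `bar$N` method.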
Java itself accepts `strictfp` on both methods (in which case it applies to the method) and classes (in which case it applies to all methods within the class, as well as nested and inner classes thereof.) Scala has somewhat more ways of nesting methods and classes than Java, so I've extrapolated the rule to be: methods nested inside of a class/module/method definition marked `@strictfp` are strict. I also fixed the interaction with value classes: when a method body on a value class was moved to the companion, its `@strictfp` attribute wasn't moved along with it. The test case covers nested/inner/local classes and methods, as well as extension methods. I'm leaving specialization to the existing specialization+strictfp tests. Fixes scala/bug#7954. --- .../nsc/backend/jvm/BTypesFromSymbols.scala | 2 +- .../scala/tools/nsc/javac/JavaParsers.scala | 4 +- .../nsc/transform/ExtensionMethods.scala | 4 + .../scala/reflect/internal/Symbols.scala | 3 +- test/files/jvm/strictfp.check | 33 ++++++++ test/files/jvm/strictfp/Test_2.scala | 51 +++++++++++++ test/files/jvm/strictfp/strictfp_1.scala | 76 +++++++++++++++++++ test/files/jvm/t7954.scala | 14 ++++ 8 files changed, 184 insertions(+), 3 deletions(-) create mode 100644 test/files/jvm/strictfp.check create mode 100644 test/files/jvm/strictfp/Test_2.scala create mode 100644 test/files/jvm/strictfp/strictfp_1.scala create mode 100644 test/files/jvm/t7954.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 073da11cffce..0b7f1ca2dfb4 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -667,7 +667,7 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { /** * Return the Java modifiers for the given symbol. 
* Java modifiers for classes: - * - public, abstract, final, strictfp (not used) + * - public, abstract, final * for interfaces: * - the same as for classes, without 'final' * for fields: diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index 3ef75679eeda..d3cb9406ed4a 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -385,7 +385,9 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { case VOLATILE => addAnnot(VolatileAttr) in.nextToken() - case SYNCHRONIZED | STRICTFP => + case STRICTFP => + addAnnot(ScalaStrictFPAttr) + case SYNCHRONIZED => in.nextToken() case _ => val privateWithin: TypeName = diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index f21a28ccc72c..6a9196b8e78f 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -210,6 +210,10 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { companion.moduleClass.newMethod(extensionName, tree.pos.focus, origMeth.flags & ~OVERRIDE & ~PROTECTED & ~PRIVATE & ~LOCAL | FINAL) setAnnotations origMeth.annotations ) + defineOriginalOwner(extensionMeth, origMeth.owner) + // @strictfp on class means strictfp on all methods, but `setAnnotations` won't copy it + if (origMeth.isStrictFP && !extensionMeth.hasAnnotation(ScalaStrictFPAttr)) + extensionMeth.addAnnotation(ScalaStrictFPAttr) origMeth.removeAnnotation(TailrecClass) // it's on the extension method, now. companion.info.decls.enter(extensionMeth) } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index d4f94a77cc6e..f8b2c15af1ae 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -893,7 +893,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => ) } - def isStrictFP = hasAnnotation(ScalaStrictFPAttr) || (enclClass hasAnnotation ScalaStrictFPAttr) + def isStrictFP: Boolean = !isDeferred && (hasAnnotation(ScalaStrictFPAttr) || originalOwner.isStrictFP) def isSerializable = info.baseClasses.exists(p => p == SerializableClass || p == JavaSerializableClass) def hasBridgeAnnotation = hasAnnotation(BridgeClass) def isDeprecated = hasAnnotation(DeprecatedAttr) @@ -3578,6 +3578,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def flagMask = AllFlags override def exists = false override def isHigherOrderTypeParameter = false + override def isStrictFP = false override def companionClass = NoSymbol override def companionModule = NoSymbol override def companionSymbol = NoSymbol diff --git a/test/files/jvm/strictfp.check b/test/files/jvm/strictfp.check new file mode 100644 index 000000000000..59d815173f6a --- /dev/null +++ b/test/files/jvm/strictfp.check @@ -0,0 +1,33 @@ +A.foo: true +A.bar$1: true +B.foo: true +B.bar$2: true +C.foo: true +C.bar$3: true +D.foo: true +D.bar$4: true +E.foo: true +E$.foo$extension: true +E$.bar$5: true +F.foo: true +F$.foo$extension: true +F$.bar$6: true +G$I.foo: true +G$I.bar$7: true +G$I$.foo: true +G$I$.bar$8: true +H$I.foo: true +H$I.bar$9: true +H$I$.foo: true +H$I$.bar$10: true +I.foo: false +I$.foo: true +I$.foo$extension: false +I$.bar$11: false +I$.bar$12: true +J.foo: true +J$M$1.foo: true +J$M$1.bar$13: true +K.foo: true +K$M$2.foo: 
true +K$M$2.bar$14: true diff --git a/test/files/jvm/strictfp/Test_2.scala b/test/files/jvm/strictfp/Test_2.scala new file mode 100644 index 000000000000..0a7a06a6b06b --- /dev/null +++ b/test/files/jvm/strictfp/Test_2.scala @@ -0,0 +1,51 @@ +import scala.tools.asm.Opcodes +import scala.tools.asm.tree._ +import scala.tools.partest.BytecodeTest + +import scala.collection.JavaConverters._ + +object Test extends BytecodeTest { + + def check(cls: String, mth: String) = { + val clasz = loadClassNode(s"strictfp/$cls") + //println(clasz.methods.asScala.map(_.name).toList) + val meth = clasz.methods.asScala.find(_.name == mth).get + println(s"$cls.$mth: ${(meth.access & Opcodes.ACC_STRICT) != 0}") + } + + override def show() = { + check("A", "foo") + check("A", "bar$1") + check("B", "foo") + check("B", "bar$2") + check("C", "foo") + check("C", "bar$3") + check("D", "foo") + check("D", "bar$4") + check("E", "foo") + check("E$", "foo$extension") + check("E$", "bar$5") + check("F", "foo") + check("F$", "foo$extension") + check("F$", "bar$6") + check("G$I", "foo") + check("G$I", "bar$7") + check("G$I$", "foo") + check("G$I$", "bar$8") + check("H$I", "foo") + check("H$I", "bar$9") + check("H$I$", "foo") + check("H$I$", "bar$10") + check("I", "foo") + check("I$", "foo") + check("I$", "foo$extension") + check("I$", "bar$11") + check("I$", "bar$12") + check("J", "foo") + check("J$M$1", "foo") + check("J$M$1", "bar$13") + check("K", "foo") + check("K$M$2", "foo") + check("K$M$2", "bar$14") + } +} \ No newline at end of file diff --git a/test/files/jvm/strictfp/strictfp_1.scala b/test/files/jvm/strictfp/strictfp_1.scala new file mode 100644 index 000000000000..45cccdbc0e80 --- /dev/null +++ b/test/files/jvm/strictfp/strictfp_1.scala @@ -0,0 +1,76 @@ +package strictfp + +import annotation.strictfp + +class A { + @strictfp def foo(f: Float) = { def bar = f ; bar } +} + +trait B { + @strictfp def foo(f: Float) = { def bar = f ; bar } +} + +@strictfp class C { + def foo(f: Float) = { def bar = f; bar } +} + +@strictfp trait D { + def foo(f: Float) = { def bar = f; bar } +} + +class E(val f: Float) extends AnyVal { + @strictfp def foo = { def bar = f; bar } +} + +@strictfp class F(val f: Float) extends AnyVal { + def foo = { def bar = f; bar } +} + +@strictfp class G { + class I { def foo(f: Float) = { def bar = f; bar } } + object I { def foo(f: Float) = { def bar = f; bar } } +} + +@strictfp object H { + class I { def foo(f: Float) = { def bar = f; bar } } + object I { def foo(f: Float) = { def bar = f; bar } } +} + +class I(val f: Float) extends AnyVal { + def foo = { // NO + def bar = f // NO + bar + } +} +@strictfp object I { + def foo(f: Float) = { + def bar = f + bar + } +} + +@strictfp class J { + def foo = { + class M { + def foo(f: Float) = { + def bar = f + bar + } + } + new M + } +} + +class K { + @strictfp def foo = { + class M { + def foo(f: Float) = { + def bar = f + bar + } + } + new M + } +} + +// see run/t8574.scala for interaction with @specialized \ No newline at end of file diff --git a/test/files/jvm/t7954.scala b/test/files/jvm/t7954.scala new file mode 100644 index 000000000000..ec664986be19 --- /dev/null +++ b/test/files/jvm/t7954.scala @@ -0,0 +1,14 @@ +object Test extends App { + new A { val foo = 1 } + new B { val foo = 1 } +} + +import annotation._ + +@strictfp trait A { + def foo: Int +} + +@strictfp abstract class B { + def foo: Int +} From af46d2d94e2abff1586769657015b5c19dcf772b Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 17 Oct 2018 16:37:13 +1000 Subject: [PATCH 
1552/2793] Rework -Xprint-args to continue compiler and allow file output ``` $ qscalac -X 2>&1 | grep -i print-args -Xprint-args Print all compiler arguments to the specified location. Use - to echo to the reporter. $ qscalac -cp /tmp -Xprint-args -Xprint:jvm sandbox/test.scala Compiler arguments written to: -Xprint:jvm sandbox/test.scala:1: error: object apache is not a member of package org class Test { println(org.apache.commons.io.IOUtils.EOF) } ^ one error found $ qscalac -cp /tmp -Xprint-args /tmp/compiler.args -Xprint:jvm sandbox/test.scala Compiler arguments written to: /tmp/compiler.args sandbox/test.scala:1: error: object apache is not a member of package org class Test { println(org.apache.commons.io.IOUtils.EOF) } ^ one error found $ cat /tmp/compiler.args -Xprint-args /tmp/compiler.args -Xprint:jvm -classpath /tmp /Users/jz/code/scala/sandbox/test.scala $ qscalac -cp /tmp -Xprint-args - -Xprint:jvm sandbox/test.scala -Xprint:jvm -classpath /tmp /Users/jz/code/scala/sandbox/test.scala sandbox/test.scala:1: error: object apache is not a member of package org class Test { println(org.apache.commons.io.IOUtils.EOF) } ^ one error found ``` --- .../scala/tools/nsc/CompilerCommand.scala | 7 +------ src/compiler/scala/tools/nsc/Global.scala | 16 ++++++++++++++++ .../scala/tools/nsc/settings/ScalaSettings.scala | 4 ++-- 3 files changed, 19 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala index 319fc2cacba8..c8cfcf881c45 100644 --- a/src/compiler/scala/tools/nsc/CompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala @@ -100,13 +100,8 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { if (debug) "\n" + global.phaseFlagDescriptions else "" ) else if (genPhaseGraph.isSetByUser) { - val components = global.phaseNames // global.phaseDescriptors // one initializes + val components = global.phaseNames // global.phaseDescriptors // one initializes s"Phase graph of ${components.size} components output to ${genPhaseGraph.value}*.dot." - } else if (printArgs.value) { - s""" - |${recreateArgs.mkString("\n")} - |${files.mkString("\n")} - """.stripMargin } else allSettings.filter(_.isHelping).map(_.help).mkString("\n\n") } diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index d43564e44e3a..a5858bc47991 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1426,11 +1426,27 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** Caching member symbols that are def-s in Definitions because they might change from Run to Run. */ val runDefinitions: definitions.RunDefinitions = new definitions.RunDefinitions + private def printArgs(sources: List[SourceFile]): Unit = { + if (settings.printArgs.isSetByUser) { + val argsFile = (settings.recreateArgs ::: sources.map(_.file.absolute.toString())).mkString("", "\n", "\n") + settings.printArgs.value match { + case "-" => + reporter.echo(argsFile) + case pathString => + import java.nio.file._ + val path = Paths.get(pathString) + Files.write(path, argsFile.getBytes(Charset.forName("UTF-8"))) + reporter.echo("Compiler arguments written to: " + path) + } + } + } + /** Compile list of source files, * unless there is a problem already, * such as a plugin was passed a bad option. 
*/ def compileSources(sources: List[SourceFile]): Unit = if (!reporter.hasErrors) { + printArgs(sources) def checkDeprecations() = { warnDeprecatedAndConflictingSettings() diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index a0fbedc03924..5f78381a41b2 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -37,7 +37,7 @@ trait ScalaSettings extends AbsScalaSettings protected def futureSettings = List[BooleanSetting]() /** If any of these settings is enabled, the compiler should print a message and exit. */ - def infoSettings = List[Setting](version, help, Xhelp, Yhelp, showPlugins, showPhases, genPhaseGraph, printArgs) + def infoSettings = List[Setting](version, help, Xhelp, Yhelp, showPlugins, showPhases, genPhaseGraph) /** Is an info setting set? Any -option:help? */ def isInfo = infoSettings.exists(_.isSetByUser) || allSettings.exists(_.isHelping) @@ -132,7 +132,7 @@ trait ScalaSettings extends AbsScalaSettings val Xprint = PhasesSetting ("-Xprint", "Print out program after") val Xprintpos = BooleanSetting ("-Xprint-pos", "Print tree positions, as offsets.") val printtypes = BooleanSetting ("-Xprint-types", "Print tree types (debugging option).") - val printArgs = BooleanSetting ("-Xprint-args", "Print all compiler arguments and exit.") + val printArgs = StringSetting ("-Xprint-args", "file", "Print all compiler arguments to the specified location. Use - to echo to the reporter.", "-") val prompt = BooleanSetting ("-Xprompt", "Display a prompt after each error (debugging option).") val resident = BooleanSetting ("-Xresident", "Compiler stays resident: read source filenames from standard input.") val script = StringSetting ("-Xscript", "object", "Treat the source file as a script and wrap it in a main method.", "") From f4f3fb6185d7621f6cf483c43fdb568804208541 Mon Sep 17 00:00:00 2001 From: "Aaron S. Hawley" Date: Fri, 26 Oct 2018 12:24:05 -0400 Subject: [PATCH 1553/2793] Add doc for Option.apply factory --- src/library/scala/Option.scala | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala index d158f91e3479..720ce453fce1 100644 --- a/src/library/scala/Option.scala +++ b/src/library/scala/Option.scala @@ -73,6 +73,19 @@ object Option { * } * }}} * + * Interacting with code that can occasionally return null can be + * safely wrapped in $option to become $none and $some otherwise. {{{ + * val abc = new java.util.HashMap[Int, String] + * abc.put(1, "A") + * bMaybe = Option(abc.get(2)) + * bMaybe match { + * case Some(b) => + * println(s"Found $b") + * case None => + * println("Not found") + * } + * }}} + * * @note Many of the methods in here are duplicative with those * in the Traversable hierarchy, but they are duplicated for a reason: * the implicit conversion tends to leave one with an Iterable in From bd82f0367b35d844d0ca7f105541e81495c9502c Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Sat, 27 Oct 2018 12:57:06 -0400 Subject: [PATCH 1554/2793] Put @implicitAmbiguous annotation on the getter, not the field Because that's where the compiler checks. Also fix a dangling backtick. 
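(A minimal sketch of the scenario this fixes, modelled on the `implicit-ambiguous-val` test added below; the names follow that test and the commented call is only illustrative.)

```scala
import scala.annotation.implicitAmbiguous

sealed trait NotString[T]

object NotString {
  // Written on an `implicit val`, the annotation used to land on the underlying
  // field symbol, while implicit search reads it off the getter, so the custom
  // message was silently dropped. With `@meta.getter` on `implicitAmbiguous`,
  // the annotation is carried by the getter instead.
  @implicitAmbiguous("unexpected string")
  implicit val stringAmb_1: NotString[String] = null
  implicit val stringAmb_2: NotString[String] = null
}

object Demo {
  def meh[T: NotString](t: T) = ()
  // meh("")  // expected to report "unexpected string" instead of the generic ambiguity error
}
```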
Fixes scala/bug#11228 --- .../tools/nsc/typechecker/PatternTypers.scala | 4 ++-- .../scala/annotation/implicitAmbiguous.scala | 1 + test/files/neg/implicit-ambiguous-val.check | 4 ++++ test/files/neg/implicit-ambiguous-val.scala | 17 +++++++++++++++++ test/files/neg/t7850.check | 4 ++-- 5 files changed, 26 insertions(+), 4 deletions(-) create mode 100644 test/files/neg/implicit-ambiguous-val.check create mode 100644 test/files/neg/implicit-ambiguous-val.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index 100480a6d29f..d2bae523b2e4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -95,9 +95,9 @@ trait PatternTypers { else if (isOkay) fun else if (isEmptyType == NoType) - CaseClassConstructorError(fun, s"an unapply result must have a member `def isEmpty: Boolean") + CaseClassConstructorError(fun, s"an unapply result must have a member `def isEmpty: Boolean`") else - CaseClassConstructorError(fun, s"an unapply result must have a member `def isEmpty: Boolean (found: def isEmpty: $isEmptyType)") + CaseClassConstructorError(fun, s"an unapply result must have a member `def isEmpty: Boolean` (found: `def isEmpty: $isEmptyType`)") } def typedArgsForFormals(args: List[Tree], formals: List[Type], mode: Mode): List[Tree] = { diff --git a/src/library/scala/annotation/implicitAmbiguous.scala b/src/library/scala/annotation/implicitAmbiguous.scala index 44e8d2308591..150ecaed7ba0 100644 --- a/src/library/scala/annotation/implicitAmbiguous.scala +++ b/src/library/scala/annotation/implicitAmbiguous.scala @@ -29,4 +29,5 @@ package scala.annotation * @author Brian McKenna * @since 2.12.0 */ +@meta.getter final class implicitAmbiguous(msg: String) extends scala.annotation.StaticAnnotation diff --git a/test/files/neg/implicit-ambiguous-val.check b/test/files/neg/implicit-ambiguous-val.check new file mode 100644 index 000000000000..1e828537d5fa --- /dev/null +++ b/test/files/neg/implicit-ambiguous-val.check @@ -0,0 +1,4 @@ +implicit-ambiguous-val.scala:16: error: unexpected string + meh("") + ^ +one error found diff --git a/test/files/neg/implicit-ambiguous-val.scala b/test/files/neg/implicit-ambiguous-val.scala new file mode 100644 index 000000000000..919a0861d45e --- /dev/null +++ b/test/files/neg/implicit-ambiguous-val.scala @@ -0,0 +1,17 @@ +sealed trait NotString[T] + +object NotString extends NotString0 { + @annotation.implicitAmbiguous("unexpected string") + implicit val stringAmb_1: NotString[String] = null + implicit val stringAmb_2: NotString[String] = null +} +sealed abstract class NotString0 { + implicit def notString[T]: NotString[T] = null +} + +object Test { + def meh[T: NotString](t: T) = () + + meh(12) + meh("") +} \ No newline at end of file diff --git a/test/files/neg/t7850.check b/test/files/neg/t7850.check index 317be2bbceed..60d62f1ce269 100644 --- a/test/files/neg/t7850.check +++ b/test/files/neg/t7850.check @@ -1,7 +1,7 @@ -t7850.scala:11: error: an unapply result must have a member `def isEmpty: Boolean (found: def isEmpty: Casey) +t7850.scala:11: error: an unapply result must have a member `def isEmpty: Boolean` (found: `def isEmpty: Casey`) val Casey(x1) = new Casey(1) ^ -t7850.scala:12: error: an unapply result must have a member `def isEmpty: Boolean +t7850.scala:12: error: an unapply result must have a member `def isEmpty: Boolean` val Dingy(x2) = new Dingy(1) ^ two errors found From 
46f0cfcbee216f776a0f0730542827f9a96775f5 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Tue, 30 Oct 2018 13:04:15 -0400 Subject: [PATCH 1555/2793] fix jvm/strictfp checkfile now that compiler is deterministic --- test/files/jvm/strictfp.check | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/files/jvm/strictfp.check b/test/files/jvm/strictfp.check index 59d815173f6a..5bee9ecdecb0 100644 --- a/test/files/jvm/strictfp.check +++ b/test/files/jvm/strictfp.check @@ -23,8 +23,8 @@ H$I$.bar$10: true I.foo: false I$.foo: true I$.foo$extension: false -I$.bar$11: false -I$.bar$12: true +I$.bar$11: true +I$.bar$12: false J.foo: true J$M$1.foo: true J$M$1.bar$13: true From 856c111449d870ca3754978801c941bd9f65cdb6 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 17 Apr 2018 11:35:53 +0100 Subject: [PATCH 1556/2793] Emit detailed compiler trace under -Yprofile-trace Suitable for viewing directly in chrome://tracing, or post processing with https://github.com/retronym/chrome-trace-to-flamegraph Co-Authored-By: Mike Skells --- project/ScriptCommands.scala | 4 +- src/compiler/scala/tools/nsc/Global.scala | 7 +- .../scala/tools/nsc/profile/Profiler.scala | 251 +++++++++++++---- .../tools/nsc/profile/ThreadPoolFactory.scala | 4 +- .../tools/nsc/settings/ScalaSettings.scala | 4 +- .../tools/nsc/symtab/SymbolLoaders.scala | 46 ++- .../tools/nsc/typechecker/Implicits.scala | 9 + .../scala/tools/nsc/typechecker/Macros.scala | 8 +- .../scala/tools/nsc/typechecker/Typers.scala | 263 ++++++++++-------- .../scala/reflect/internal/SymbolTable.scala | 4 + .../reflect/internal/util/ChromeTrace.scala | 189 +++++++++++++ .../reflect/internal/util/FileUtils.scala | 199 +++++++++++++ .../reflect/internal/util/FileUtilsTest.scala | 89 ++++++ 13 files changed, 874 insertions(+), 203 deletions(-) create mode 100644 src/reflect/scala/reflect/internal/util/ChromeTrace.scala create mode 100644 src/reflect/scala/reflect/internal/util/FileUtils.scala create mode 100644 test/junit/scala/reflect/internal/util/FileUtilsTest.scala diff --git a/project/ScriptCommands.scala b/project/ScriptCommands.scala index a5564242ebf3..4a4003066a70 100644 --- a/project/ScriptCommands.scala +++ b/project/ScriptCommands.scala @@ -109,11 +109,11 @@ object ScriptCommands { Project.setProject(session, newStructure, state) } - private[this] val enableOptimizer = Seq( + val enableOptimizer = Seq( scalacOptions in Compile in ThisBuild ++= Seq("-opt:l:inline", "-opt-inline-from:scala/**") ) - private[this] val noDocs = Seq( + val noDocs = Seq( publishArtifact in (Compile, packageDoc) in ThisBuild := false ) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index a115eac0927b..ad5365fba773 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -446,8 +446,10 @@ class Global(var currentSettings: Settings, reporter0: Reporter) currentRun.informUnitStarting(this, unit) val unit0 = currentUnit currentRun.currentUnit = unit + currentRun.profiler.beforeUnit(phase, unit.source.file) try apply(unit) finally { + currentRun.profiler.afterUnit(phase, unit.source.file) currentRun.currentUnit = unit0 currentRun.advanceUnit() } @@ -1110,6 +1112,9 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def newJavaUnitParser(unit: CompilationUnit): JavaUnitParser = new JavaUnitParser(unit) + override protected[scala] def currentRunProfilerBeforeCompletion(root: Symbol, associatedFile: AbstractFile): Unit = 
currentRun.profiler.beforeCompletion(root, associatedFile) + override protected[scala] def currentRunProfilerAfterCompletion(root: Symbol, associatedFile: AbstractFile): Unit = currentRun.profiler.afterCompletion(root, associatedFile) + /** A Run is a single execution of the compiler on a set of units. */ class Run extends RunContextApi with RunReporting with RunParsing { @@ -1474,7 +1479,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) private final val GlobalPhaseName = "global (synthetic)" protected final val totalCompileTime = statistics.newTimer("#total compile time", GlobalPhaseName) - def compileUnits(units: List[CompilationUnit], fromPhase: Phase): Unit = compileUnitsInternal(units,fromPhase) + def compileUnits(units: List[CompilationUnit], fromPhase: Phase): Unit = compileUnitsInternal(units,fromPhase) private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) { units foreach addUnit reporter.reset() diff --git a/src/compiler/scala/tools/nsc/profile/Profiler.scala b/src/compiler/scala/tools/nsc/profile/Profiler.scala index 87654e8e8baa..68cfab2f16e3 100644 --- a/src/compiler/scala/tools/nsc/profile/Profiler.scala +++ b/src/compiler/scala/tools/nsc/profile/Profiler.scala @@ -14,31 +14,41 @@ package scala.tools.nsc.profile import java.io.{FileWriter, PrintWriter} import java.lang.management.ManagementFactory +import java.nio.file.{Files, Paths} import java.util.ServiceLoader import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicInteger + import javax.management.openmbean.CompositeData import javax.management.{Notification, NotificationEmitter, NotificationListener} -import scala.tools.nsc.{Phase, Settings} +import scala.collection.mutable +import scala.collection.mutable.ArrayBuffer +import scala.reflect.internal.util.ChromeTrace +import scala.reflect.io.{AbstractFile, File} +import scala.tools.nsc.{Global, Phase, Settings} object Profiler { def apply(settings: Settings):Profiler = if (!settings.YprofileEnabled) NoOpProfiler else { - val reporter = if(settings.YprofileDestination.isSetByUser) - new StreamProfileReporter(new PrintWriter(new FileWriter(settings.YprofileDestination.value, true))) - else ConsoleProfileReporter + val reporter = settings.YprofileDestination.value match { + case _ if !settings.YprofileDestination.isSetByUser => NoOpProfileReporter + case "-" => ConsoleProfileReporter + case path => new StreamProfileReporter(new PrintWriter(new FileWriter(path, true))) + } new RealProfiler(reporter, settings) } - private[profile] val emptySnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0) + private[profile] val emptySnap = ProfileSnap(0, "", 0, 0, 0, 0, 0, 0, 0, 0) +} +case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, durationMillis: Long, name:String, action:String, cause:String, threads:Long) { + val endNanos = System.nanoTime() } -case class GcEventData(pool:String, reportTimeNs: Long, gcStartMillis:Long, gcEndMillis:Long, name:String, action:String, cause:String, threads:Long) case class ProfileSnap(threadId: Long, threadName: String, snapTimeNanos : Long, - idleTimeNanos:Long, cpuTimeNanos: Long, userTimeNanos: Long, - allocatedBytes:Long, heapBytes:Long) { + idleTimeNanos:Long, cpuTimeNanos: Long, userTimeNanos: Long, + allocatedBytes:Long, heapBytes:Long, totalClassesLoaded: Long, totalJITCompilationTime: Long) { def updateHeap(heapBytes:Long) = { copy(heapBytes = heapBytes) } @@ -73,13 +83,29 @@ case class ProfileRange(start: ProfileSnap, end:ProfileSnap, 
phase:Phase, purpos def retainedHeapMB = toMegaBytes(end.heapBytes - start.heapBytes) } -sealed trait Profiler { +sealed abstract class Profiler { def finished(): Unit def beforePhase(phase: Phase): ProfileSnap def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit + + def beforeUnit(phase: Phase, file: AbstractFile): Unit + + def afterUnit(phase: Phase, file: AbstractFile): Unit + + def beforeTypedImplDef(sym: Global#Symbol): Unit = () + def afterTypedImplDef(sym: Global#Symbol): Unit = () + + def beforeImplicitSearch(pt: Global#Type): Unit = () + def afterImplicitSearch(pt: Global#Type): Unit = () + + def beforeMacroExpansion(macroSym: Global#Symbol): Unit = () + def afterMacroExpansion(macroSym: Global#Symbol): Unit = () + + def beforeCompletion(root: Global#Symbol, associatedFile: AbstractFile): Unit = () + def afterCompletion(root: Global#Symbol, associatedFile: AbstractFile): Unit = () } private [profile] object NoOpProfiler extends Profiler { @@ -87,6 +113,8 @@ private [profile] object NoOpProfiler extends Profiler { override def afterPhase(phase: Phase, profileBefore: ProfileSnap): Unit = () + override def beforeUnit(phase: Phase, file: AbstractFile): Unit = () + override def afterUnit(phase: Phase, file: AbstractFile): Unit = () override def finished(): Unit = () } private [profile] object RealProfiler { @@ -99,17 +127,55 @@ private [profile] object RealProfiler { val threadMx = ExtendedThreadMxBean.proxy if (threadMx.isThreadCpuTimeSupported) threadMx.setThreadCpuTimeEnabled(true) private val idGen = new AtomicInteger() + lazy val allPlugins = ServiceLoader.load(classOf[ProfilerPlugin]).iterator().asScala.toList + + private[profile] def snapThread( idleTimeNanos:Long): ProfileSnap = { + val current = Thread.currentThread() + val allocatedBytes = threadMx.getThreadAllocatedBytes(Thread.currentThread().getId) + ProfileSnap( + threadId = current.getId, + threadName = current.getName, + snapTimeNanos = System.nanoTime(), + idleTimeNanos = idleTimeNanos, + cpuTimeNanos = threadMx.getCurrentThreadCpuTime, + userTimeNanos = threadMx.getCurrentThreadUserTime, + allocatedBytes = allocatedBytes, + heapBytes = readHeapUsage(), + totalClassesLoaded = classLoaderMx.getTotalLoadedClassCount, + totalJITCompilationTime = compileMx.getTotalCompilationTime + ) + } + private def readHeapUsage() = RealProfiler.memoryMx.getHeapMemoryUsage.getUsed } private [profile] class RealProfiler(reporter : ProfileReporter, val settings: Settings) extends Profiler with NotificationListener { + private val mainThread = Thread.currentThread() + val id = RealProfiler.idGen.incrementAndGet() + object Category { + final val Run = "run" + final val Phase = "phase" + final val File = "file" + final val TypeCheck = "typecheck" + final val Implicit = "implicit" + final val Macro = "macro" + final val Completion = "completion" + } + + private val chromeTrace = { + if (settings.YprofileTrace.isSetByUser) + new ChromeTrace(Paths.get(settings.YprofileTrace.value)) + else null + } + if (chromeTrace != null) + chromeTrace.traceDurationEventStart(Category.Run, "scalac-" + id) + def completeBackground(threadRange: ProfileRange): Unit = { reporter.reportBackground(this, threadRange) } def outDir = settings.outputDirs.getSingleOutput.getOrElse(settings.outputDirs.outputs.head._2.file).toString - val id = RealProfiler.idGen.incrementAndGet() RealProfiler.gcMx foreach { case emitter: NotificationEmitter => emitter.addNotificationListener(this, null, null) case gc => println(s"Cant connect gcListener to ${gc.getClass}") @@ 
-117,25 +183,6 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S val active = RealProfiler.allPlugins map (_.generate(this, settings)) - private val mainThread = Thread.currentThread() - - private[profile] def snapThread( idleTimeNanos:Long): ProfileSnap = { - import RealProfiler._ - val current = Thread.currentThread() - - ProfileSnap( - threadId = current.getId, - threadName = current.getName, - snapTimeNanos = System.nanoTime(), - idleTimeNanos = idleTimeNanos, - cpuTimeNanos = threadMx.getCurrentThreadCpuTime, - userTimeNanos = threadMx.getCurrentThreadUserTime, - allocatedBytes = threadMx.getThreadAllocatedBytes(Thread.currentThread().getId), - heapBytes = readHeapUsage() - ) - } - private def readHeapUsage() = RealProfiler.memoryMx.getHeapMemoryUsage.getUsed - private def doGC: Unit = { System.gc() System.runFinalization() @@ -151,8 +198,19 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S case gc => } reporter.close(this) + if (chromeTrace != null) { + for (gcEvent <- gcEvents) { + val durationNanos = TimeUnit.MILLISECONDS.toNanos(gcEvent.durationMillis) + val startNanos = gcEvent.endNanos - durationNanos + chromeTrace.traceDurationEvent(gcEvent.name, startNanos, durationNanos, GcThreadId) + } + chromeTrace.traceDurationEventEnd(Category.Run, "scalac-" + id) + chromeTrace.close() + } } + private val gcEvents = ArrayBuffer[GcEventData]() + private val GcThreadId = "GC" override def handleNotification(notification: Notification, handback: scala.Any): Unit = { import java.lang.{Long => jLong} @@ -173,13 +231,30 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S val startTime = info.get("startTime").asInstanceOf[jLong].longValue() val endTime = info.get("endTime").asInstanceOf[jLong].longValue() val threads = info.get("GcThreadCount").asInstanceOf[jInt].longValue() - reporter.reportGc(GcEventData("", reportNs, startTime, endTime, name, action, cause, threads)) + val gcEvent = GcEventData("", reportNs, startTime, endTime, duration, name, action, cause, threads) + synchronized { + gcEvents += gcEvent + } + reporter.reportGc(gcEvent) + } + } + + override def beforePhase(phase: Phase): ProfileSnap = { + assert(mainThread eq Thread.currentThread()) + if (chromeTrace != null) chromeTrace.traceDurationEventStart(Category.Phase, phase.name) + if (settings.YprofileRunGcBetweenPhases.containsPhase(phase)) + doGC + if (settings.YprofileExternalTool.containsPhase(phase)) { + println("Profile hook start") + ExternalToolHook.before() } + active foreach {_.beforePhase(phase)} + RealProfiler.snapThread(0) } override def afterPhase(phase: Phase, snapBefore: ProfileSnap): Unit = { assert(mainThread eq Thread.currentThread()) - val initialSnap = snapThread(0) + val initialSnap = RealProfiler.snapThread(0) active foreach {_.afterPhase(phase)} if (settings.YprofileExternalTool.containsPhase(phase)) { println("Profile hook stop") @@ -187,24 +262,85 @@ private [profile] class RealProfiler(reporter : ProfileReporter, val settings: S } val finalSnap = if (settings.YprofileRunGcBetweenPhases.containsPhase(phase)) { doGC - initialSnap.updateHeap(readHeapUsage()) + initialSnap.updateHeap(RealProfiler.readHeapUsage()) } else initialSnap + if (chromeTrace != null) chromeTrace.traceDurationEventEnd(Category.Phase, phase.name) reporter.reportForeground(this, ProfileRange(snapBefore, finalSnap, phase, "", 0, Thread.currentThread)) } - override def beforePhase(phase: Phase): ProfileSnap = { + override def 
beforeUnit(phase: Phase, file: AbstractFile): Unit = { assert(mainThread eq Thread.currentThread()) - if (settings.YprofileRunGcBetweenPhases.containsPhase(phase)) - doGC - if (settings.YprofileExternalTool.containsPhase(phase)) { - println("Profile hook start") - ExternalToolHook.before() + if (chromeTrace != null) chromeTrace.traceDurationEventStart(Category.File, file.name) + } + + private var nextAfterUnitSnap: Long = System.nanoTime() + + override def afterUnit(phase: Phase, file: AbstractFile): Unit = { + assert(mainThread eq Thread.currentThread()) + if (chromeTrace != null) { + val now = System.nanoTime() + chromeTrace.traceDurationEventEnd(Category.File, file.name) + if (now > nextAfterUnitSnap) { + val initialSnap = RealProfiler.snapThread(0) + chromeTrace.traceCounterEvent("allocBytes", "allocBytes", initialSnap.allocatedBytes, processWide = false) + chromeTrace.traceCounterEvent("heapBytes", "heapBytes", initialSnap.heapBytes, processWide = true) + chromeTrace.traceCounterEvent("classesLoaded", "classesLoaded", initialSnap.totalClassesLoaded, processWide = true) + chromeTrace.traceCounterEvent("jitCompilationTime", "jitCompilationTime", initialSnap.totalJITCompilationTime, processWide = true) + chromeTrace.traceCounterEvent("userTime", "userTime", initialSnap.userTimeNanos, processWide = false) + chromeTrace.traceCounterEvent("cpuTime", "cpuTime", initialSnap.cpuTimeNanos, processWide = false) + chromeTrace.traceCounterEvent("idleTime", "idleTime", initialSnap.idleTimeNanos, processWide = false) + nextAfterUnitSnap = System.nanoTime() + 10 * 1000 * 1000 + } } - active foreach {_.beforePhase(phase)} - snapThread(0) } + override def beforeTypedImplDef(sym: Global#Symbol): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventStart(Category.TypeCheck, sym.rawname.toString) + } + override def afterTypedImplDef(sym: Global#Symbol): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventEnd(Category.TypeCheck, sym.rawname.toString) + } + + override def beforeImplicitSearch(pt: Global#Type): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventStart(Category.Implicit, "?[" + pt.typeSymbol.rawname + "]", colour = "yellow") + } + + override def afterImplicitSearch(pt: Global#Type): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventEnd(Category.Implicit, "?[" + pt.typeSymbol.rawname + "]", colour = "yellow") + } + + override def beforeMacroExpansion(macroSym: Global#Symbol): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventStart(Category.Macro, "«" + macroSym.rawname + "»", colour = "olive") + } + + override def afterMacroExpansion(macroSym: Global#Symbol): Unit = { + if (chromeTrace != null) chromeTrace.traceDurationEventEnd(Category.Macro, "«" + macroSym.rawname + "»", colour = "olive") + } + + override def beforeCompletion(root: Global#Symbol, associatedFile: AbstractFile): Unit = { + if (chromeTrace != null) { + chromeTrace.traceDurationEventStart(Category.Completion, "↯", colour = "thread_state_sleeping") + chromeTrace.traceDurationEventStart(Category.File, associatedFile.name) + chromeTrace.traceDurationEventStart(Category.Completion, completionName(root, associatedFile)) + } + } + + override def afterCompletion(root: Global#Symbol, associatedFile: AbstractFile): Unit = { + if (chromeTrace != null) { + chromeTrace.traceDurationEventEnd(Category.Completion, completionName(root, associatedFile)) + chromeTrace.traceDurationEventEnd(Category.File, associatedFile.name) + 
chromeTrace.traceDurationEventEnd(Category.Completion, "↯", colour = "thread_state_sleeping") + } + } + + private def completionName(root: Global#Symbol, associatedFile: AbstractFile): String = { + if (root.hasPackageFlag || root.isTopLevel) root.javaBinaryNameString + else { + val enclosing = root.enclosingTopLevelClass + enclosing.javaBinaryNameString + "::" + root.rawname.toString + } + } } object EventType extends Enumeration { @@ -228,24 +364,23 @@ sealed trait ProfileReporter { } object ConsoleProfileReporter extends ProfileReporter { + private val outWriter = new PrintWriter(Console.out) + private val delegate = new StreamProfileReporter(new PrintWriter(Console.out)) + override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = delegate.reportBackground(profiler, threadRange) + override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = delegate.reportForeground(profiler, threadRange) + override def close(profiler: RealProfiler): Unit = outWriter.flush() + + override def header(profiler: RealProfiler): Unit = delegate.header(profiler) + override def reportGc(data: GcEventData): Unit = delegate.reportGc(data) +} - - override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = - // TODO - ??? - override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = - // TODO - ??? - +object NoOpProfileReporter extends ProfileReporter { + override def reportBackground(profiler: RealProfiler, threadRange: ProfileRange): Unit = () + override def reportForeground(profiler: RealProfiler, threadRange: ProfileRange): Unit = () override def close(profiler: RealProfiler): Unit = () - override def header(profiler: RealProfiler): Unit = { - println(s"Profiler start (${profiler.id}) ${profiler.outDir}") - } - - override def reportGc(data: GcEventData): Unit = { - println(f"Profiler GC reported ${data.gcEndMillis - data.gcStartMillis}ms") - } + override def header(profiler: RealProfiler): Unit = () + override def reportGc(data: GcEventData): Unit = () } class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { @@ -271,10 +406,8 @@ class StreamProfileReporter(out:PrintWriter) extends ProfileReporter { out.println(s"${EventType.GC},$start,${data.reportTimeNs},${data.gcStartMillis}, ${data.gcEndMillis},${data.name},${data.action},${data.cause},${data.threads}") } - override def close(profiler: RealProfiler): Unit = { - out.flush - out.close + out.flush() + out.close() } } - diff --git a/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala b/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala index 822a7317d284..641526a1de48 100644 --- a/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala +++ b/src/compiler/scala/tools/nsc/profile/ThreadPoolFactory.scala @@ -98,9 +98,9 @@ object ThreadPoolFactory { val data = new ThreadProfileData localData.set(data) - val profileStart = profiler.snapThread(0) + val profileStart = RealProfiler.snapThread(0) try worker.run finally { - val snap = profiler.snapThread(data.idleNs) + val snap = RealProfiler.snapThread(data.idleNs) val threadRange = ProfileRange(profileStart, snap, phase, shortId, data.taskCount, Thread.currentThread()) profiler.completeBackground(threadRange) } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 655c3528d18f..5f46d0606710 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ 
b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -415,7 +415,9 @@ trait ScalaSettings extends AbsScalaSettings override def YhotStatisticsEnabled = YhotStatistics.value val YprofileEnabled = BooleanSetting("-Yprofile-enabled", "Enable profiling.") - val YprofileDestination = StringSetting("-Yprofile-destination", "file", "where to send profiling output - specify a file, default is to the console.", ""). + val YprofileDestination = StringSetting("-Yprofile-destination", "file", "Profiling output - specify a file or `-` for console.", ""). + withPostSetHook( _ => YprofileEnabled.value = true ) + val YprofileTrace = StringSetting("-Yprofile-trace", "file", "Capture trace of compilation in Chrome Trace format", "profile.trace"). withPostSetHook( _ => YprofileEnabled.value = true ) val YprofileExternalTool = PhasesSetting("-Yprofile-external-tool", "Enable profiling for a phase using an external tool hook. Generally only useful for a single phase", "typer"). withPostSetHook( _ => YprofileEnabled.value = true ) diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 6444823efced..2ad68f4d6203 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -203,6 +203,7 @@ abstract class SymbolLoaders { protected def doComplete(root: Symbol): Unit def sourcefile: Option[AbstractFile] = None + def associatedFile(self: Symbol): AbstractFile = NoAbstractFile /** * Description of the resource (ClassPath, AbstractFile) @@ -221,23 +222,29 @@ abstract class SymbolLoaders { } override def complete(root: Symbol) { + val assocFile = associatedFile(root) + currentRunProfilerBeforeCompletion(root, assocFile) try { - val start = java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) - val currentphase = phase - doComplete(root) - phase = currentphase - informTime("loaded " + description, start) - ok = true - setSource(root) - setSource(root.companionSymbol) // module -> class, class -> module - } - catch { - case ex @ (_: IOException | _: MissingRequirementError) => - ok = false - signalError(root, ex) + try { + val start = java.util.concurrent.TimeUnit.NANOSECONDS.toMillis(System.nanoTime()) + val currentphase = phase + doComplete(root) + phase = currentphase + informTime("loaded " + description, start) + ok = true + setSource(root) + setSource(root.companionSymbol) // module -> class, class -> module + } + catch { + case ex@(_: IOException | _: MissingRequirementError) => + ok = false + signalError(root, ex) + } + initRoot(root) + if (!root.isPackageClass) initRoot(root.companionSymbol) + } finally { + currentRunProfilerAfterCompletion(root, assocFile) } - initRoot(root) - if (!root.isPackageClass) initRoot(root.companionSymbol) } override def load(root: Symbol) { complete(root) } @@ -336,18 +343,27 @@ abstract class SymbolLoaders { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.stopTimer(statistics.classReadNanos, start) } override def sourcefile: Option[AbstractFile] = classfileParser.srcfile + override def associatedFile(self: Symbol): AbstractFile = classfile } class SourcefileLoader(val srcfile: AbstractFile) extends SymbolLoader with FlagAssigningCompleter { protected def description = "source file "+ srcfile.toString override def fromSource = true override def sourcefile = Some(srcfile) + override def associatedFile(self: Symbol): AbstractFile = srcfile protected def doComplete(root: Symbol): Unit = 
compileLate(srcfile) } object moduleClassLoader extends SymbolLoader with FlagAssigningCompleter { protected def description = "module class loader" protected def doComplete(root: Symbol) { root.sourceModule.initialize } + override def associatedFile(self: Symbol): AbstractFile = { + val sourceModule = self.sourceModule + sourceModule.rawInfo match { + case loader: SymbolLoader => loader.associatedFile(sourceModule) + case _ => super.associatedFile(self) + } + } } /** used from classfile parser to avoid cycles */ diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 891691370527..2cc7fa729899 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -87,6 +87,15 @@ trait Implicits { * @return A search result */ def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean, pos: Position): SearchResult = { + currentRun.profiler.beforeImplicitSearch(pt) + try { + inferImplicit1(tree, pt, reportAmbiguous, isView, context, saveAmbiguousDivergent, pos) + } finally { + currentRun.profiler.afterImplicitSearch(pt) + } + } + + private def inferImplicit1(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean, pos: Position): SearchResult = { // Note that the isInvalidConversionTarget seems to make a lot more sense right here, before all the // work is performed, than at the point where it presently exists. val shouldPrint = printTypings && !context.undetparams.isEmpty diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index e837e0eb8270..10382720089b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -770,7 +770,13 @@ trait Macros extends MacroRuntimes with Traces with Helpers { // By default, use the current typer's fresh name creator in macros. The compiler option // allows people to opt in to the old behaviour of Scala 2.12, which used a global fresh creator. if (!settings.YmacroFresh.value) currentFreshNameCreator = typer.fresh - pluginsMacroExpand(typer, expandee, mode, pt) + val macroSym = expandee.symbol + currentRun.profiler.beforeMacroExpansion(macroSym) + try { + pluginsMacroExpand(typer, expandee, mode, pt) + } finally { + currentRun.profiler.afterMacroExpansion(macroSym) + } } /** Default implementation of `macroExpand`. 
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index acac49cff07d..a285d00866f5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1834,38 +1834,43 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedClassDef(cdef: ClassDef): Tree = { val clazz = cdef.symbol - val typedMods = typedModifiers(cdef.mods) - assert(clazz != NoSymbol, cdef) - reenterTypeParams(cdef.tparams) - val tparams1 = cdef.tparams mapConserve (typedTypeDef) - val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, typedParentTypes(cdef.impl)) - val impl2 = finishMethodSynthesis(impl1, clazz, context) - if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass) - checkEphemeral(clazz, impl2.body) - - if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) { - if (!clazz.owner.isPackageClass) - context.error(clazz.pos, "inner classes cannot be classfile annotations") - // Ignore @SerialVersionUID, because it is special-cased and handled completely differently. - // It only extends ClassfileAnnotationClass instead of StaticAnnotation to get the enforcement - // of constant argument values "for free". Related to scala/bug#7041. - else if (clazz != SerialVersionUIDAttr) restrictionWarning(cdef.pos, unit, - """|subclassing Classfile does not - |make your annotation visible at runtime. If that is what - |you want, you must write the annotation class in Java.""".stripMargin) - } - - warnTypeParameterShadow(tparams1, clazz) - - if (!isPastTyper) { - for (ann <- clazz.getAnnotation(DeprecatedAttr)) { - val m = companionSymbolOf(clazz, context) - if (m != NoSymbol) - m.moduleClass.addAnnotation(AnnotationInfo(ann.atp, ann.args, List())) - } - } - treeCopy.ClassDef(cdef, typedMods, cdef.name, tparams1, impl2) - .setType(NoType) + currentRun.profiler.beforeTypedImplDef(clazz) + try { + val typedMods = typedModifiers(cdef.mods) + assert(clazz != NoSymbol, cdef) + reenterTypeParams(cdef.tparams) + val tparams1 = cdef.tparams mapConserve (typedTypeDef) + val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)).typedTemplate(cdef.impl, typedParentTypes(cdef.impl)) + val impl2 = finishMethodSynthesis(impl1, clazz, context) + if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass) + checkEphemeral(clazz, impl2.body) + + if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) { + if (!clazz.owner.isPackageClass) + context.error(clazz.pos, "inner classes cannot be classfile annotations") + // Ignore @SerialVersionUID, because it is special-cased and handled completely differently. + // It only extends ClassfileAnnotationClass instead of StaticAnnotation to get the enforcement + // of constant argument values "for free". Related to scala/bug#7041. + else if (clazz != SerialVersionUIDAttr) restrictionWarning(cdef.pos, unit, + """|subclassing Classfile does not + |make your annotation visible at runtime. 
If that is what + |you want, you must write the annotation class in Java.""".stripMargin) + } + + warnTypeParameterShadow(tparams1, clazz) + + if (!isPastTyper) { + for (ann <- clazz.getAnnotation(DeprecatedAttr)) { + val m = companionSymbolOf(clazz, context) + if (m != NoSymbol) + m.moduleClass.addAnnotation(AnnotationInfo(ann.atp, ann.args, List())) + } + } + treeCopy.ClassDef(cdef, typedMods, cdef.name, tparams1, impl2) + .setType(NoType) + } finally { + currentRun.profiler.afterTypedImplDef(clazz) + } } def typedModuleDef(mdef: ModuleDef): Tree = { @@ -1875,31 +1880,37 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (linkedClass != NoSymbol) linkedClass.info.decl(nme.CONSTRUCTOR).alternatives foreach (_.initialize) - val clazz = mdef.symbol.moduleClass - val typedMods = typedModifiers(mdef.mods) - assert(clazz != NoSymbol, mdef) - val noSerializable = ( - (linkedClass eq NoSymbol) - || linkedClass.isErroneous - || !linkedClass.isSerializable - || clazz.isSerializable - ) - val impl1 = newTyper(context.make(mdef.impl, clazz, newScope)).typedTemplate(mdef.impl, { - typedParentTypes(mdef.impl) ++ ( - if (noSerializable) Nil - else { - clazz.makeSerializable() - TypeTree(SerializableTpe).setPos(clazz.pos.focus) :: Nil - } - ) - }) + val clazz = mdef.symbol.moduleClass + currentRun.profiler.beforeTypedImplDef(clazz) + try { - val impl2 = finishMethodSynthesis(impl1, clazz, context) + val typedMods = typedModifiers(mdef.mods) + assert(clazz != NoSymbol, mdef) + val noSerializable = ( + (linkedClass eq NoSymbol) + || linkedClass.isErroneous + || !linkedClass.isSerializable + || clazz.isSerializable + ) + val impl1 = newTyper(context.make(mdef.impl, clazz, newScope)).typedTemplate(mdef.impl, { + typedParentTypes(mdef.impl) ++ ( + if (noSerializable) Nil + else { + clazz.makeSerializable() + TypeTree(SerializableTpe).setPos(clazz.pos.focus) :: Nil + } + ) + }) - if (settings.isScala211 && mdef.symbol == PredefModule) - ensurePredefParentsAreInSameSourceFile(impl2) + val impl2 = finishMethodSynthesis(impl1, clazz, context) - treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType + if (settings.isScala211 && mdef.symbol == PredefModule) + ensurePredefParentsAreInSameSourceFile(impl2) + + treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType + } finally { + currentRun.profiler.afterTypedImplDef(clazz) + } } private def ensurePredefParentsAreInSameSourceFile(template: Template) = { @@ -2047,13 +2058,18 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def typedValDef(vdef: ValDef): ValDef = { val sym = vdef.symbol - val valDefTyper = { - val maybeConstrCtx = - if ((sym.isParameter || sym.isEarlyInitialized) && sym.owner.isConstructor) context.makeConstructorContext - else context - newTyper(maybeConstrCtx.makeNewScope(vdef, sym)) + currentRun.profiler.beforeTypedImplDef(sym) + try { + val valDefTyper = { + val maybeConstrCtx = + if ((sym.isParameter || sym.isEarlyInitialized) && sym.owner.isConstructor) context.makeConstructorContext + else context + newTyper(maybeConstrCtx.makeNewScope(vdef, sym)) + } + valDefTyper.typedValDefImpl(vdef) + } finally { + currentRun.profiler.afterTypedImplDef(sym) } - valDefTyper.typedValDefImpl(vdef) } // use typedValDef instead. 
this version is called after creating a new context for the ValDef @@ -2268,89 +2284,92 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } def typedDefDef(ddef: DefDef): DefDef = { - // an accessor's type completer may mutate a type inside `ddef` (`== context.unit.synthetics(ddef.symbol)`) - // concretely: it sets the setter's parameter type or the getter's return type (when derived from a valdef with empty tpt) val meth = ddef.symbol.initialize + currentRun.profiler.beforeTypedImplDef(meth) + try { - reenterTypeParams(ddef.tparams) - reenterValueParams(ddef.vparamss) + reenterTypeParams(ddef.tparams) + reenterValueParams(ddef.vparamss) - // for `val` and `var` parameter, look at `target` meta-annotation - if (!isPastTyper && meth.isPrimaryConstructor) { - for (vparams <- ddef.vparamss; vd <- vparams) { - if (vd.mods.isParamAccessor) { - vd.symbol setAnnotations (vd.symbol.annotations filter AnnotationInfo.mkFilter(ParamTargetClass, defaultRetention = true)) + // for `val` and `var` parameter, look at `target` meta-annotation + if (!isPastTyper && meth.isPrimaryConstructor) { + for (vparams <- ddef.vparamss; vd <- vparams) { + if (vd.mods.isParamAccessor) { + vd.symbol setAnnotations (vd.symbol.annotations filter AnnotationInfo.mkFilter(ParamTargetClass, defaultRetention = true)) + } } } - } - val tparams1 = ddef.tparams mapConserve typedTypeDef - val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef) + val tparams1 = ddef.tparams mapConserve typedTypeDef + val vparamss1 = ddef.vparamss mapConserve (_ mapConserve typedValDef) - warnTypeParameterShadow(tparams1, meth) + warnTypeParameterShadow(tparams1, meth) - meth.annotations.map(_.completeInfo()) + meth.annotations.map(_.completeInfo()) - for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1) - if (isRepeatedParamType(vparam1.symbol.tpe)) - StarParamNotLastError(vparam1) + for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1) + if (isRepeatedParamType(vparam1.symbol.tpe)) + StarParamNotLastError(vparam1) - val tpt1 = checkNoEscaping.privates(this, meth, typedType(ddef.tpt)) - checkNonCyclic(ddef, tpt1) - ddef.tpt.setType(tpt1.tpe) - val typedMods = typedModifiers(ddef.mods) - var rhs1 = - if (ddef.name == nme.CONSTRUCTOR && !ddef.symbol.hasStaticFlag) { // need this to make it possible to generate static ctors - if (!meth.isPrimaryConstructor && + val tpt1 = checkNoEscaping.privates(this, meth, typedType(ddef.tpt)) + checkNonCyclic(ddef, tpt1) + ddef.tpt.setType(tpt1.tpe) + val typedMods = typedModifiers(ddef.mods) + var rhs1 = + if (ddef.name == nme.CONSTRUCTOR && !ddef.symbol.hasStaticFlag) { // need this to make it possible to generate static ctors + if (!meth.isPrimaryConstructor && (!meth.owner.isClass || - meth.owner.isModuleClass || - meth.owner.isAnonOrRefinementClass)) - InvalidConstructorDefError(ddef) - typed(ddef.rhs) - } else if (meth.isMacro) { - // typechecking macro bodies is sort of unconventional - // that's why we employ our custom typing scheme orchestrated outside of the typer - transformedOr(ddef.rhs, typedMacroBody(this, ddef)) - } else { - transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe) - } + meth.owner.isModuleClass || + meth.owner.isAnonOrRefinementClass)) + InvalidConstructorDefError(ddef) + typed(ddef.rhs) + } else if (meth.isMacro) { + // typechecking macro bodies is sort of unconventional + // that's why we employ our custom typing scheme orchestrated outside of the typer + transformedOr(ddef.rhs, typedMacroBody(this, ddef)) + } else { + 
transformedOrTyped(ddef.rhs, EXPRmode, tpt1.tpe) + } - if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass) && !meth.isJava) { - // There are no supercalls for AnyVal or constructors from Java sources, which + if (meth.isClassConstructor && !isPastTyper && !meth.owner.isSubClass(AnyValClass) && !meth.isJava) { + // There are no supercalls for AnyVal or constructors from Java sources, which // would blow up in analyzeSuperConsructor; there's nothing to be computed for them - // anyway. - if (meth.isPrimaryConstructor) + // anyway. + if (meth.isPrimaryConstructor) analyzeSuperConsructor(meth, vparamss1, rhs1) - else - checkSelfConstructorArgs(ddef, meth.owner) - } + else + checkSelfConstructorArgs(ddef, meth.owner) + } - if (tpt1.tpe.typeSymbol != NothingClass && !context.returnsSeen && rhs1.tpe.typeSymbol != NothingClass) - rhs1 = checkDead(context, rhs1) + if (tpt1.tpe.typeSymbol != NothingClass && !context.returnsSeen && rhs1.tpe.typeSymbol != NothingClass) + rhs1 = checkDead(context, rhs1) - if (!isPastTyper && meth.owner.isClass && + if (!isPastTyper && meth.owner.isClass && meth.paramss.exists(ps => ps.exists(_.hasDefault) && isRepeatedParamType(ps.last.tpe))) - StarWithDefaultError(meth) - - if (!isPastTyper) { - val allParams = meth.paramss.flatten - for (p <- allParams) { - for (n <- p.deprecatedParamName) { - if (allParams.exists(p1 => p != p1 && (p1.name == n || p1.deprecatedParamName.exists(_ == n)))) - DeprecatedParamNameError(p, n) + StarWithDefaultError(meth) + + if (!isPastTyper) { + val allParams = meth.paramss.flatten + for (p <- allParams) { + for (n <- p.deprecatedParamName) { + if (allParams.exists(p1 => p != p1 && (p1.name == n || p1.deprecatedParamName.exists(_ == n)))) + DeprecatedParamNameError(p, n) + } } - } - if (meth.isStructuralRefinementMember) - checkMethodStructuralCompatible(ddef) + if (meth.isStructuralRefinementMember) + checkMethodStructuralCompatible(ddef) - if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match { - case List(param) :: _ if !param.isImplicit => - checkFeature(ddef.pos, currentRun.runDefinitions.ImplicitConversionsFeature, meth.toString) - case _ => + if (meth.isImplicit && !meth.isSynthetic) meth.info.paramss match { + case List(param) :: _ if !param.isImplicit => + checkFeature(ddef.pos, currentRun.runDefinitions.ImplicitConversionsFeature, meth.toString) + case _ => + } } - } - treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType + treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType + } finally { + currentRun.profiler.afterTypedImplDef(meth) + } } def typedTypeDef(tdef: TypeDef): TypeDef = diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 99fd5edd7ac7..6b24d90bd489 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -21,6 +21,7 @@ import java.util.concurrent.TimeUnit import scala.collection.mutable.ArrayBuffer import scala.reflect.internal.{TreeGen => InternalTreeGen} +import scala.reflect.io.AbstractFile abstract class SymbolTable extends macros.Universe with Collections @@ -493,6 +494,9 @@ abstract class SymbolTable extends macros.Universe * Adds the `sm` String interpolator to a [[scala.StringContext]]. 
*/ implicit val StringContextStripMarginOps: StringContext => StringContextStripMarginOps = util.StringContextStripMarginOps + + protected[scala] def currentRunProfilerBeforeCompletion(root: Symbol, associatedFile: AbstractFile): Unit = () + protected[scala] def currentRunProfilerAfterCompletion(root: Symbol, associatedFile: AbstractFile): Unit = () } trait SymbolTableStats { diff --git a/src/reflect/scala/reflect/internal/util/ChromeTrace.scala b/src/reflect/scala/reflect/internal/util/ChromeTrace.scala new file mode 100644 index 000000000000..69da5d5982c9 --- /dev/null +++ b/src/reflect/scala/reflect/internal/util/ChromeTrace.scala @@ -0,0 +1,189 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.reflect.internal.util + +import java.io.Closeable +import java.lang.management.ManagementFactory +import java.nio.file.{Files, Path} +import java.util +import java.util.concurrent.TimeUnit + +import scala.collection.mutable + +object ChromeTrace { + + private object EventType { + final val Start = "B" + final val Instant = "I" + final val End = "E" + final val Complete = "X" + + final val Counter = "C" + + final val AsyncStart = "b" + final val AsyncInstant = "n" + final val AsyncEnd = "e" + } + +} + +/** Allows writing a subset of of https://docs.google.com/document/d/1CvAClvFfyA5R-PhYUmn5OOQtYMH4h6I0nSsKchNAySU/preview# + * for use in Chrome's about://tracing or the tooling in https://www.google.com.au/search?q=catapult+tracing&oq=catapult+tracing+&aqs=chrome..69i57.3974j0j4&sourceid=chrome&ie=UTF-8 */ +final class ChromeTrace(f: Path) extends Closeable { + import ChromeTrace.EventType + private val traceWriter = FileUtils.newAsyncBufferedWriter(f) + private val context = mutable.ArrayStack[JsonContext](TopContext) + private val tidCache = new ThreadLocal[String]() { + override def initialValue(): String = Thread.currentThread().getId.formatted("%05d") + } + objStart() + fld("traceEvents") + context.push(ValueContext) + arrStart() + traceWriter.newLine() + + private val pid = ManagementFactory.getRuntimeMXBean().getName().replaceAll("@.*", "") + + override def close(): Unit = { + arrEnd() + objEnd() + context.pop() + tidCache.remove() + traceWriter.close() + } + + def traceDurationEvent(name: String, startNanos: Long, durationNanos: Long, tid: String = this.tid(), pidSuffix: String = ""): Unit = { + val durationMicros = nanosToMicros(durationNanos) + val startMicros = nanosToMicros(startNanos) + objStart() + str("cat", "scalac") + str("name", name) + str("ph", EventType.Complete) + str("tid", tid) + writePid(pidSuffix) + lng("ts", startMicros) + lng("dur", durationMicros) + objEnd() + traceWriter.newLine() + } + + private def writePid(pidSuffix: String) = { + if (pidSuffix == "") + str("pid", pid) + else + str2("pid", pid, "-", pidSuffix) + } + + def traceCounterEvent(name: String, counterName: String, count: Long, processWide: Boolean): Unit = { + objStart() + str("cat", "scalac") + str("name", name) + str("ph", EventType.Counter) + str("tid", tid()) + writePid(pidSuffix = if (processWide) "" else tid()) + lng("ts", microTime()) + fld("args") + objStart() + lng(counterName, count) + objEnd() + objEnd() + traceWriter.newLine() + } + + def traceDurationEventStart(cat: String, name: String, colour: String = "", pidSuffix: String = 
tid()): Unit = traceDurationEventStartEnd(EventType.Start, cat, name, colour, pidSuffix) + def traceDurationEventEnd(cat: String, name: String, colour: String = "", pidSuffix: String = tid()): Unit = traceDurationEventStartEnd(EventType.End, cat, name, colour, pidSuffix) + + private def traceDurationEventStartEnd(eventType: String, cat: String, name: String, colour: String, pidSuffix: String = ""): Unit = { + objStart() + str("cat", cat) + str("name", name) + str("ph", eventType) + writePid(pidSuffix) + str("tid", tid()) + lng("ts", microTime()) + if (colour != "") { + str("cname", colour) + } + objEnd() + traceWriter.newLine() + } + + private def tid(): String = tidCache.get() + + private def nanosToMicros(t: Long): Long = TimeUnit.NANOSECONDS.toMicros(t) + + private def microTime(): Long = nanosToMicros(System.nanoTime()) + + sealed abstract class JsonContext + case class ArrayContext(var first: Boolean) extends JsonContext + case class ObjectContext(var first: Boolean) extends JsonContext + case object ValueContext extends JsonContext + case object TopContext extends JsonContext + + private def str(name: String, value: String): Unit = { + fld(name) + traceWriter.write("\"") + traceWriter.write(value) // This assumes no escaping is needed + traceWriter.write("\"") + } + private def str2(name: String, value: String, valueContinued1: String, valueContinued2: String): Unit = { + fld(name) + traceWriter.write("\"") + traceWriter.write(value) // This assumes no escaping is needed + traceWriter.write(valueContinued1) // This assumes no escaping is needed + traceWriter.write(valueContinued2) // This assumes no escaping is needed + traceWriter.write("\"") + } + private def lng(name: String, value: Long): Unit = { + fld(name) + traceWriter.write(String.valueOf(value)) + traceWriter.write("") + } + private def objStart(): Unit = { + context.top match { + case ac @ ArrayContext(first) => + if (first) ac.first = false + else traceWriter.write(",") + case _ => + } + context.push(ObjectContext(true)) + traceWriter.write("{") + } + private def objEnd(): Unit = { + traceWriter.write("}") + context.pop() + } + private def arrStart(): Unit = { + traceWriter.write("[") + context.push(ArrayContext(true)) + } + private def arrEnd(): Unit = { + traceWriter.write("]") + context.pop() + } + + private def fld(name: String) = { + val topContext = context.top + topContext match { + case oc @ ObjectContext(first) => + if (first) oc.first = false + else traceWriter.write(",") + case context => + throw new IllegalStateException("Wrong context: " + context) + } + traceWriter.write("\"") + traceWriter.write(name) + traceWriter.write("\"") + traceWriter.write(":") + } +} diff --git a/src/reflect/scala/reflect/internal/util/FileUtils.scala b/src/reflect/scala/reflect/internal/util/FileUtils.scala new file mode 100644 index 000000000000..ef5955775648 --- /dev/null +++ b/src/reflect/scala/reflect/internal/util/FileUtils.scala @@ -0,0 +1,199 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.reflect.internal.util + +import java.io.{BufferedWriter, IOException, OutputStreamWriter, Writer} +import java.nio.CharBuffer +import java.nio.charset.{Charset, CharsetEncoder, StandardCharsets} +import java.nio.file.{Files, OpenOption, Path} +import java.util.concurrent.LinkedBlockingQueue +import java.util.concurrent.atomic.AtomicBoolean + + +import scala.concurrent.duration.Duration +import scala.concurrent.{Await, Promise} +import scala.util.{Failure, Success} + +object FileUtils { + def newAsyncBufferedWriter(path: Path, charset: Charset = StandardCharsets.UTF_8, options: Array[OpenOption] = NO_OPTIONS, threadsafe: Boolean = false): LineWriter = { + val encoder: CharsetEncoder = charset.newEncoder + val writer = new OutputStreamWriter(Files.newOutputStream(path, options: _*), encoder) + newAsyncBufferedWriter(new BufferedWriter(writer), threadsafe) + } + def newAsyncBufferedWriter(underlying: Writer, threadsafe: Boolean): LineWriter = { + val async = new AsyncBufferedWriter(underlying) + if (threadsafe) new ThreadsafeWriter(async) else async + } + private val NO_OPTIONS = new Array[OpenOption](0) + + sealed abstract class LineWriter extends Writer { + def newLine(): Unit + } + private class ThreadsafeWriter(val underlying: AsyncBufferedWriter) extends LineWriter { + lock = underlying + override def write(c: Int): Unit = + lock.synchronized (underlying.write(c)) + + override def write(cbuf: Array[Char]): Unit = + lock.synchronized (underlying.write(cbuf)) + + override def write(cbuf: Array[Char], off: Int, len: Int): Unit = + lock.synchronized (underlying.write(cbuf, off, len)) + + override def write(str: String): Unit = + lock.synchronized (underlying.write(str)) + + override def write(str: String, off: Int, len: Int): Unit = + lock.synchronized (underlying.write(str, off, len)) + + override def flush(): Unit = + lock.synchronized (underlying.flush()) + + override def close(): Unit = + lock.synchronized (underlying.close()) + + override def newLine(): Unit = + lock.synchronized (underlying.newLine()) + + } + + private object AsyncBufferedWriter { + private val Close = CharBuffer.allocate(0) + private val Flush = CharBuffer.allocate(0) + } + private class AsyncBufferedWriter(val underlying: Writer, bufferSize : Int = 4096) extends LineWriter { + private var current: CharBuffer = allocate + override def write(c: Int): Unit = super.write(c) + private def flushAsync(): Unit = { + background.ensureProcessed(current) + current = allocate + } +// allocate or reuse a CharArray which is guaranteed to have a backing array + private def allocate: CharBuffer = { + val reused = background.reuseBuffer + if (reused eq null) CharBuffer.allocate(bufferSize) + else { + //we don't care about race conditions + background.reuseBuffer = null + reused.clear() + reused + } + } + + override def write(cbuf: Array[Char], initialOffset: Int, initialLength: Int): Unit = { + var offset = initialOffset + var length = initialLength + while (length > 0) { + val capacity = current.remaining() + if (length <= capacity) { + current.put(cbuf, offset, length) + length = 0 + } else { + current.put(cbuf, offset, capacity) + flushAsync() + length -= capacity + offset += capacity + } + } + } + + override def write(s: String, initialOffset: Int, initialLength: Int): Unit = { + var offset = initialOffset + var length = initialLength + while (length > 0) { + val capacity = current.remaining() + if (length <= capacity) { + current.put(s, offset, offset + length) + length = 0 + } else { + current.put(s, 
offset, offset + capacity) + flushAsync() + length -= capacity + offset += capacity + } + } + } + + def newLine(): Unit = write(scala.util.Properties.lineSeparator) + + /** slightly breaks the flush contract in that the flush is not complete when the method returns */ + override def flush(): Unit = { + flushAsync() + } + + override def close(): Unit = { + background.ensureProcessed(current) + background.ensureProcessed(AsyncBufferedWriter.Close) + current = null + Await.result(background.asyncStatus.future, Duration.Inf) + underlying.close() + } + private object background extends Runnable{ + + import scala.concurrent.ExecutionContext.Implicits.global + + private val pending = new LinkedBlockingQueue[CharBuffer] + //a failure detected will case an Failure, Success indicates a close + val asyncStatus = Promise[Unit]() + private val scheduled = new AtomicBoolean + @volatile var reuseBuffer: CharBuffer = _ + + def ensureProcessed(buffer: CharBuffer): Unit = { + if (asyncStatus.isCompleted) { + asyncStatus.future.value.get match { + case Success(()) => throw new IllegalStateException("closed") + case Failure(t) => throw new IOException("async failure", t) + } + } + + //order is essential - add to the queue before the CAS + pending.add(buffer) + if (scheduled.compareAndSet(false, true)) { + global.execute(background) + } + } + + def run(): Unit = { + try { + while (!pending.isEmpty) { + val next = pending.poll() + if (next eq AsyncBufferedWriter.Flush) { + underlying.flush() + } else if (next eq AsyncBufferedWriter.Close) { + underlying.flush() + underlying.close() + asyncStatus.trySuccess(()) + } else { + val array = next.array() + next.flip() + underlying.write(array, next.arrayOffset() + next.position(), next.limit()) + reuseBuffer = next + } + } + } catch { + case t: Throwable => + asyncStatus.tryFailure(t) + throw t + } + finally scheduled.set(false) + + //we are not scheduled any more + //as a last check ensure that we didnt race with an addition to the queue + //order is essential - queue is checked before CAS + if ((!pending.isEmpty) && scheduled.compareAndSet(false, true)) { + global.execute(background) + } + } + } + } +} diff --git a/test/junit/scala/reflect/internal/util/FileUtilsTest.scala b/test/junit/scala/reflect/internal/util/FileUtilsTest.scala new file mode 100644 index 000000000000..21eba42985b9 --- /dev/null +++ b/test/junit/scala/reflect/internal/util/FileUtilsTest.scala @@ -0,0 +1,89 @@ +package scala.reflect.internal.util + +import java.io._ + +import org.junit.Assert._ +import org.junit._ + +class FileUtilsTest { + + @Test def writeIsSame(): Unit = { + val fileTest = File.createTempFile("FileUtilsTest", "t1") + val fileExpected = File.createTempFile("FileUtilsTest", "t2") + + val sTest = FileUtils.newAsyncBufferedWriter(new FileWriter(fileTest), false) + val sExpected = new BufferedWriter(new FileWriter(fileExpected)) + + def writeBoth(s:String, asChars: Boolean) = { + if (asChars) { + sTest.write(s.toCharArray) + sExpected.write(s.toCharArray) + } else { + sTest.write(s) + sExpected.write(s) + } + } + + for (i <- 1 to 2000) { + writeBoth(s"line $i text;", true) + writeBoth(s"line $i chars", false) + sTest.newLine + sExpected.newLine + } + sTest.close() + sExpected.close() + + assertEquals(fileExpected.length(),fileTest.length()) + + val expIn = new BufferedReader(new FileReader(fileExpected)) + val testIn = new BufferedReader(new FileReader(fileTest)) + + var exp = expIn.readLine() + while (exp ne null) { + val actual = testIn.readLine() + assertEquals(exp, actual) + 
exp = expIn.readLine() + } + expIn.close() + testIn.close() + fileTest.delete() + fileExpected.delete() + } + + @Test def showPerformance: Unit = { + //warmup + for (i <- 1 to 1000) { + writeIsSame() + } + + val fileTest = File.createTempFile("FileUtilsTest", "t1") + val fileExpected = File.createTempFile("FileUtilsTest", "t2") + + for (i <- 1 to 10) { + val sTest = FileUtils.newAsyncBufferedWriter(fileTest.toPath) + val sExpected = new BufferedWriter(new FileWriter(fileExpected)) + + val t1 = System.nanoTime() + List.tabulate(10000) {i => + sTest.write(s"line $i text;") + sTest.newLine + } + val t2 = System.nanoTime() + sTest.close() + val t3 = System.nanoTime() + List.tabulate(10000) {i => + sExpected.write(s"line $i text;") + sExpected.newLine + } + val t4 = System.nanoTime() + sExpected.close() + + println(s"async took ${t2 - t1} ns") + println(s"buffered took ${t4 - t3} ns") + + fileTest.delete() + fileExpected.delete() + } + } + +} From d6de6d4705eddde2cad89c1dba297c1f0471d668 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Mon, 15 Oct 2018 16:59:05 +0200 Subject: [PATCH 1557/2793] TypeMap more conservative mapping over TypeVar This is one way to fix scala/bug#10911, but only incidentally. Regardless, we should avoid allocating a new TypeVar if not needed. Adds a test originally provided by NirvanaNrv in #7057, who diagnosed the problem and proposed a fix (though a bit too ambitious for 2.12). --- src/reflect/scala/reflect/internal/tpe/TypeMaps.scala | 7 ++++++- test/files/pos/t10911.scala | 11 +++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t10911.scala diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index 3f4449a0bc50..cfdc85b985fd 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -182,7 +182,12 @@ private[internal] trait TypeMaps { else AntiPolyType(pre1, args1) case tv@TypeVar(_, constr) => if (constr.instValid) this(constr.inst) - else tv.applyArgs(mapOverArgs(tv.typeArgs, tv.params)) //@M !args.isEmpty implies !typeParams.isEmpty + else { + val args = tv.typeArgs + val args1 = mapOverArgs(args, tv.params) //@M !args.isEmpty implies !typeParams.isEmpty + if (args1 eq args) tv + else tv.applyArgs(args1) + } case AnnotatedType(annots, atp) => val annots1 = mapOverAnnotations(annots) val atp1 = this(atp) diff --git a/test/files/pos/t10911.scala b/test/files/pos/t10911.scala new file mode 100644 index 000000000000..72f4bedfaf43 --- /dev/null +++ b/test/files/pos/t10911.scala @@ -0,0 +1,11 @@ +object Test { + trait Super[X] + trait Template[T] { + type Repr + trait Sub extends Super[Repr] + } + + // create a compound type that has a type variable in the decls of one of its parents + implicit def reprTSub[T, Rpr[X]]: (Template[T]{type Repr = Rpr[T]})#Sub = ??? + implicitly[Super[Any]] // bug is not really related to implicit search, but is hard to trigger without +} \ No newline at end of file From 5c90826c421d347521f3b8982592ec0689b1ffdd Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 30 Oct 2018 15:36:32 +0100 Subject: [PATCH 1558/2793] Ensure termination of BTS of RefinedType containing TypeVar Fix scala/bug#10911 by really making sure the refined type's parents no longer contain `TypeVar`s before recursing. 
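The shape of the fix is: rewrite the offending nodes to stable placeholders, run the computation on the rewritten value, and undo the rewrite afterwards through a map captured during the first pass. A toy sketch of that rewrite/compute/undo pattern, using a made-up `Ty` ADT purely for illustration (it is not the compiler's `Type` hierarchy; the real change maps each `TypeVar` to a type ref built from its origin's type constructor):

```scala
import scala.collection.mutable

// Illustration only: a tiny "type" ADT with a fresh-identity Var standing in for TypeVar.
sealed trait Ty
case class Ref(name: String)           extends Ty // stable, comparable placeholder
case class Var(id: Int)                extends Ty // identity-sensitive node we must not recurse over
case class Compound(parents: List[Ty]) extends Ty

def computeWithoutVars(tp: Compound)(compute: Compound => List[Ty]): List[Ty] = {
  val varFor = mutable.Map.empty[Ty, Var]
  // 1. rewrite: replace every Var with a stable Ref, remembering how to undo it
  val rewritten = Compound(tp.parents.map {
    case v: Var =>
      val stable = Ref(s"v${v.id}")
      varFor(stable) = v
      stable
    case other => other
  })
  // 2. compute on the var-free value (now safe to recurse over or cache)
  val results = compute(rewritten)
  // 3. undo: map the placeholders in the result back to the original vars
  val undo = varFor.toMap
  results.map(t => undo.getOrElse(t, t))
}
```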
Before, we missed `AppliedTypeVar`s because they result in a new instance each time they are applied to new args (or, in the case of the bug, before the parent commit, when they were mapped over). --- .../scala/reflect/internal/Types.scala | 37 ++++++++++--------- 1 file changed, 19 insertions(+), 18 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index a4d8b5028dda..2fc28412029b 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1474,32 +1474,33 @@ trait Types if (period != currentPeriod) { tpe.baseTypeSeqPeriod = currentPeriod if (!isValidForBaseClasses(period)) { + // If the BTS contains TypeVars, replace those with typerefs to the original type params before taking BTS, + // after BTS, map them back. + // TODO: rework BTS to deal with TypeVars in the same way on the fly if (tpe.parents exists typeContainsTypeVar) { - // rename type vars to fresh type params, take base type sequence of - // resulting type, and rename back all the entries in that sequence - var tvs = Set[TypeVar]() - for (p <- tpe.parents) - for (t <- p) t match { - case tv: TypeVar => tvs += tv - case _ => - } - val varToParamMap: Map[Type, Symbol] = - mapFrom[TypeVar, Type, Symbol](tvs.toList)(_.origin.typeSymbol.cloneSymbol) - val paramToVarMap = varToParamMap map (_.swap) + val tvarFor = mutable.Map.empty[Type, TypeVar] + // After this TypeMap, it's safe to recurse (`tpe.parents exists typeContainsTypeVar` above is `false`) val varToParam = new TypeMap { - def apply(tp: Type) = varToParamMap get tp match { - case Some(sym) => sym.tpe_* + def apply(tp: Type) = tp match { + case tv: TypeVar => // Applying a type constructor variable to arguments results in a new instance of AppliedTypeVar each time + val toOrigin = appliedType(tv.origin.typeSymbol.typeConstructor, tv.typeArgs.mapConserve(this)) + tvarFor(toOrigin) = tv + toOrigin case _ => mapOver(tp) } } + // computes tvarFor + val tpWithoutTypeVars = copyRefinedType(tpe.asInstanceOf[RefinedType], tpe.parents map varToParam, varToParam mapOver tpe.decls) + val paramToVar = new TypeMap { - def apply(tp: Type) = tp match { - case TypeRef(_, tsym, _) if paramToVarMap.isDefinedAt(tsym) => paramToVarMap(tsym) - case _ => mapOver(tp) + val paramToVarMap = tvarFor.toMap // capture the map so we can undo the rewrite when the BTS is queried later + def apply(tp: Type): Type = tp match { + case tr: TypeRef => paramToVarMap.getOrElse(tr, mapOver(tp)) + case _ => mapOver(tp) } } - val bts = copyRefinedType(tpe.asInstanceOf[RefinedType], tpe.parents map varToParam, varToParam mapOver tpe.decls).baseTypeSeq - tpe.baseTypeSeqCache = bts lateMap paramToVar + + tpe.baseTypeSeqCache = tpWithoutTypeVars.baseTypeSeq lateMap paramToVar } else { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(compoundBaseTypeSeqCount) val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, baseTypeSeqNanos) else null From 697701cb38aa0e8851df944d162409d238b58e2a Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Thu, 30 Aug 2018 12:03:32 -0400 Subject: [PATCH 1559/2793] Don't evaluate devWarning conditions unless under -Xdev The shape of `devWarningIf` is such that the inliner produces bytecode similar to ```scala if (global.isDeveloper) { if (condition) { devWarning(() => msg) } } ``` so that the closure elimination and the condition evaluation are both guarded by the `isDeveloper` check. 
(This does mean that the bytecode for the condition gets emitted into the containing method.) Several of these `devWarning` conditions were based on list traversals or other non-constant-time conditions, so this should speed that up a bit. --- .../nsc/symtab/classfile/ClassfileParser.scala | 16 ++++++++-------- .../scala/tools/nsc/transform/UnCurry.scala | 5 +++-- .../nsc/transform/patmat/PatternMatching.scala | 7 ++++--- .../scala/tools/nsc/typechecker/Checkable.scala | 7 ++++--- .../scala/reflect/internal/BaseTypeSeqs.scala | 5 +++-- .../scala/reflect/internal/SymbolTable.scala | 4 ++++ src/reflect/scala/reflect/internal/Types.scala | 5 +++-- .../scala/reflect/internal/tpe/TypeMaps.scala | 6 +++--- 8 files changed, 32 insertions(+), 23 deletions(-) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 81f8dfe44543..935a100effe8 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1107,12 +1107,11 @@ abstract class ClassfileParser { param.resetFlag(SYNTHETIC) param.name = name } - if (isDeveloper && !sameLength(paramNames.toList, params)) { + devWarningIf(!sameLength(paramNames.toList, params)) { // there's not anything we can do, but it's slightly worrisome - devWarning( - sm"""MethodParameters length mismatch while parsing $sym: - | rawInfo.params: ${sym.rawInfo.params} - | MethodParameters: ${paramNames.toList}""") + sm"""MethodParameters length mismatch while parsing $sym: + | rawInfo.params: ${sym.rawInfo.params} + | MethodParameters: ${paramNames.toList}""" } } @@ -1260,9 +1259,10 @@ abstract class ClassfileParser { def entries = inners.values def add(entry: InnerClassEntry): Unit = { - inners get entry.externalName foreach (existing => - devWarning(s"Overwriting inner class entry! Was $existing, now $entry") - ) + devWarningIf(inners contains entry.externalName) { + val existing = inners(entry.externalName) + s"Overwriting inner class entry! 
Was $existing, now $entry" + } inners(entry.externalName) = entry } def innerSymbol(externalName: Name): Symbol = this getEntry externalName match { diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 00d5a90a73b1..b1893487893b 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -616,8 +616,9 @@ abstract class UnCurry extends InfoTransform flatdd case tree: Try => - if (tree.catches exists (cd => !treeInfo.isCatchCase(cd))) - devWarning("VPM BUG - illegal try/catch " + tree.catches) + devWarningIf(tree.catches exists (!treeInfo.isCatchCase(_))) { + "VPM BUG - illegal try/catch " + tree.catches + } tree case Apply(Apply(fn, args), args1) => diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala index 01c742a3e6e0..74c7aa21a84e 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternMatching.scala @@ -217,7 +217,6 @@ trait Interface extends ast.TreeDSL { class Substitution(val from: List[Symbol], val to: List[Tree]) { import global.{Transformer, Ident, NoType, TypeTree, SingleType} - private val toIdents = to.forall(_.isInstanceOf[Ident]) private def typedStable(t: Tree) = typer.typed(t.shallowDuplicate, Mode.MonoQualifierModes | Mode.TYPEPATmode) lazy val toTypes: List[Type] = to map (tree => typedStable(tree).tpe) @@ -233,7 +232,9 @@ trait Interface extends ast.TreeDSL { tp match { case SingleType(_, sym) => if (from contains sym) { - if (!toIdents) global.devWarning(s"Unexpected substitution of non-Ident into TypeTree, subst= $this") + global.devWarningIf(to.exists(!_.isInstanceOf[Ident])) { + s"Unexpected substitution of non-Ident into TypeTree, subst= $this" + } result = true } case _ => @@ -277,7 +278,7 @@ trait Interface extends ast.TreeDSL { } } if (containsSym) { - if (toIdents) + if (to.forall(_.isInstanceOf[Ident])) tree.duplicate.substituteSymbols(from, to.map(_.symbol)) // scala/bug#7459 catches `case t => new t.Foo` else substIdentsForTrees.transform(tree) diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index bf90a267c730..3a3485e20ad7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -90,10 +90,11 @@ trait Checkable { bases foreach { bc => val tps1 = (from baseType bc).typeArgs val tps2 = (tvarType baseType bc).typeArgs - if (tps1.size != tps2.size) - devWarning(s"Unequally sized type arg lists in propagateKnownTypes($from, $to): ($tps1, $tps2)") + devWarningIf(!sameLength(tps1, tps2)) { + s"Unequally sized type arg lists in propagateKnownTypes($from, $to): ($tps1, $tps2)" + } - (tps1, tps2).zipped foreach (_ =:= _) + foreach2(tps1, tps2)(_ =:= _) // Alternate, variance respecting formulation causes // neg/unchecked3.scala to fail (abstract types). TODO - // figure it out. 
It seems there is more work to do if I diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala index 74dc92927ca9..288f4e4ca1f7 100644 --- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala @@ -78,8 +78,9 @@ trait BaseTypeSeqs { throw CyclicInheritance } else { def computeLazyType(rtp: RefinedType): Type = { - if (!isIntersectionTypeForLazyBaseType(rtp)) - devWarning("unexpected RefinedType in base type seq, lazy BTS elements should be created via intersectionTypeForLazyBaseType: " + rtp) + devWarningIf(!isIntersectionTypeForLazyBaseType(rtp)) { + "unexpected RefinedType in base type seq, lazy BTS elements should be created via intersectionTypeForLazyBaseType: " + rtp + } val variants = rtp.parents // can't assert decls.isEmpty; see t0764 //if (!decls.isEmpty) abort("computing closure of "+this+":"+this.isInstanceOf[RefinedType]+"/"+closureCache(j)) diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 99fd5edd7ac7..d3a3c7063d11 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -87,6 +87,10 @@ abstract class SymbolTable extends macros.Universe /** Override with final implementation for inlining. */ def debuglog(msg: => String): Unit = if (settings.debug) log(msg) + + /** dev-warns if dev-warning is enabled and `cond` is true; no-op otherwise */ + @inline final def devWarningIf(cond: => Boolean)(msg: => String): Unit = + if (isDeveloper && cond) devWarning(msg) def devWarning(msg: => String): Unit = if (isDeveloper) Console.err.println(msg) def throwableAsString(t: Throwable): String = "" + t def throwableAsString(t: Throwable, maxFrames: Int): String = t.getStackTrace take maxFrames mkString "\n at " diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index a4d8b5028dda..8a50f182d023 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1763,8 +1763,9 @@ trait Types tp match { case tr @ TypeRef(_, sym, args) if args.nonEmpty => val tparams = tr.initializedTypeParams - if (settings.debug && !sameLength(tparams, args)) - devWarning(s"Mismatched zip in computeRefs(): ${sym.info.typeParams}, $args") + devWarningIf(!sameLength(tparams, args)) { + s"Mismatched zip in computeRefs(): ${sym.info.typeParams}, $args" + } foreach2(tparams, args) { (tparam1, arg) => if (arg contains tparam) { diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index 3f4449a0bc50..12c8537cea47 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -591,9 +591,9 @@ private[internal] trait TypeMaps { // @M! 
don't just replace the whole thing, might be followed by type application val result = appliedType(targ, lhsArgs mapConserve this) def msg = s"Created $result, though could not find ${own_s(lhsSym)} among tparams of ${own_s(rhsSym)}" - if (!rhsSym.typeParams.contains(lhsSym)) - devWarning(s"Inconsistent tparam/owner views: had to fall back on names\n$msg\n$explain") - + devWarningIf(!rhsSym.typeParams.contains(lhsSym)) { + s"Inconsistent tparam/owner views: had to fall back on names\n$msg\n$explain" + } result } } From 0e36653901c553faa0efdda198c1ef67777e37b9 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 5 Sep 2018 16:30:41 +1000 Subject: [PATCH 1560/2793] Simplify implementation of isDeveloper --- src/compiler/scala/tools/nsc/Global.scala | 2 -- .../mima-filters/2.12.0.backwards.excludes | 2 ++ .../scala/reflect/internal/Required.scala | 28 ------------------- .../scala/reflect/internal/SymbolTable.scala | 12 ++++++-- 4 files changed, 12 insertions(+), 32 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index a115eac0927b..82f3e600ab8b 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -285,8 +285,6 @@ class Global(var currentSettings: Settings, reporter0: Reporter) body } - override def isDeveloper = settings.developer || super.isDeveloper - /** This is for WARNINGS which should reach the ears of scala developers * whenever they occur, but are not useful for normal users. They should * be precise, explanatory, and infrequent. Please don't use this as a diff --git a/src/reflect/mima-filters/2.12.0.backwards.excludes b/src/reflect/mima-filters/2.12.0.backwards.excludes index 6064fc88b800..ffa7f91a7eb8 100644 --- a/src/reflect/mima-filters/2.12.0.backwards.excludes +++ b/src/reflect/mima-filters/2.12.0.backwards.excludes @@ -12,3 +12,5 @@ ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.runtime.Sync ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.IOStats") ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.IOStats$") + +ProblemFilters.exclude[MissingTypesProblem]("scala.reflect.runtime.JavaUniverse") diff --git a/src/reflect/scala/reflect/internal/Required.scala b/src/reflect/scala/reflect/internal/Required.scala index a22a11eaf453..e69de29bb2d1 100644 --- a/src/reflect/scala/reflect/internal/Required.scala +++ b/src/reflect/scala/reflect/internal/Required.scala @@ -1,28 +0,0 @@ -/* - * Scala (https://www.scala-lang.org) - * - * Copyright EPFL and Lightbend, Inc. - * - * Licensed under Apache License 2.0 - * (http://www.apache.org/licenses/LICENSE-2.0). - * - * See the NOTICE file distributed with this work for - * additional information regarding copyright ownership. 
- */ - -package scala -package reflect -package internal - -import settings.MutableSettings - -trait Required { self: SymbolTable => - def picklerPhase: Phase - - def erasurePhase: Phase - - def settings: MutableSettings - - @deprecated("Interactive is implemented with a custom Global; this flag is ignored", "2.11.0") def forInteractive = false - @deprecated("Scaladoc is implemented with a custom Global; this flag is ignored", "2.11.0") def forScaladoc = false -} diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index d3a3c7063d11..47a3c32fff54 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -20,6 +20,7 @@ import util._ import java.util.concurrent.TimeUnit import scala.collection.mutable.ArrayBuffer +import scala.reflect.internal.settings.MutableSettings import scala.reflect.internal.{TreeGen => InternalTreeGen} abstract class SymbolTable extends macros.Universe @@ -46,7 +47,6 @@ abstract class SymbolTable extends macros.Universe with Positions with TypeDebugging with Importers - with Required with CapturedVariables with StdAttachments with StdCreators @@ -80,7 +80,15 @@ abstract class SymbolTable extends macros.Universe def shouldLogAtThisPhase = false def isPastTyper = false - def isDeveloper: Boolean = settings.debug + final def isDeveloper: Boolean = settings.debug.value || settings.developer.value + def picklerPhase: Phase + + def erasurePhase: Phase + + def settings: MutableSettings + + @deprecated("Interactive is implemented with a custom Global; this flag is ignored", "2.11.0") def forInteractive = false + @deprecated("Scaladoc is implemented with a custom Global; this flag is ignored", "2.11.0") def forScaladoc = false @deprecated("use devWarning if this is really a warning; otherwise use log", "2.11.0") def debugwarn(msg: => String): Unit = devWarning(msg) From 8d94ac63a7bcbfaf43a3be2434df48cdbd7e93ba Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 1 Nov 2018 19:34:36 +1000 Subject: [PATCH 1561/2793] Make detection of polymorphic signature methods work with -release 8 --- src/reflect/scala/reflect/internal/Definitions.scala | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 05aebaf3ca1e..84f54bc0e1e8 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1587,10 +1587,18 @@ trait Definitions extends api.StandardDefinitions { lazy val PartialManifestClass = getTypeMember(ReflectPackage, tpnme.ClassManifest) lazy val ManifestSymbols = Set[Symbol](PartialManifestClass, FullManifestClass, OptManifestClass) private lazy val PolymorphicSignatureClass = MethodHandleClass.companionModule.info.decl(TypeName("PolymorphicSignature")) + private val PolymorphicSignatureName = TypeName("java.lang.invoke.MethodHandle$PolymorphicSignature") def isPolymorphicSignature(sym: Symbol) = sym != null && sym.isJavaDefined && { val owner = sym.safeOwner - (owner == MethodHandleClass || owner == VarHandleClass) && sym.hasAnnotation(PolymorphicSignatureClass) + (owner == MethodHandleClass || owner == VarHandleClass) && { + if (PolymorphicSignatureClass eq NoSymbol) { + // Hack to find the annotation under `scalac -release 8` on JDK 9+, in which the lookup of `PolymorphicSignatureClass` above fails + // We fall back to looking for a stub symbol 
with the expected flattened name. + sym.annotations.exists(_.atp.typeSymbolDirect.name == PolymorphicSignatureName) + } + else sym.hasAnnotation(PolymorphicSignatureClass) + } } lazy val Scala_Java8_CompatPackage = rootMirror.getPackageIfDefined("scala.runtime.java8") From 8afd2565210e3f2b121492d4e8c3c982d2a5cc06 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 1 Nov 2018 12:24:08 +0100 Subject: [PATCH 1562/2793] [nomerge] No static forwarders for bridges implementing abstract methods In 2.12.7, #7035 added the `bridge` flag to static forwarders that are generated for bridge methods. (2.13 geneartes no forwarders for bridges, but we wanted to stay binary compatible in 2.12.) Unfortunately the change caused even more bridges to be generated, namely for bridge methods that implement an abstract member. Now we exclude them again, which brings the binary interface back to the state of 2.12.6. Fixes scala/bug#11207 --- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 5 +++- .../tools/nsc/backend/jvm/BytecodeTest.scala | 24 +++++++++++++------ 2 files changed, 21 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 48541b661cdf..f7ce5a1cca29 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -896,7 +896,10 @@ abstract class BCodeHelpers extends BCodeIdiomatic { debuglog(s"Potentially conflicting names for forwarders: $conflictingNames") for (m <- moduleClass.info.membersBasedOnFlags(BCodeHelpers.ExcludedForwarderFlags, symtab.Flags.METHOD)) { - if (m.isType || m.isDeferred || (m.owner eq definitions.ObjectClass) || m.isConstructor) + // Fix for scala/bug#11207, see https://github.com/scala/scala/pull/7035/files#r226274350. This makes sure that 2.12.8 generates + // the same forwarder methods as in 2.12.6 (but includes bridge flags). In 2.13 we don't generate any forwarders for bridges. 
+ val bridgeImplementingAbstract = m.isBridge && m.nextOverriddenSymbol.isDeferred + if (m.isType || m.isDeferred || bridgeImplementingAbstract || (m.owner eq definitions.ObjectClass) || m.isConstructor) debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass': ${m.isType} || ${m.isDeferred} || ${m.owner eq definitions.ObjectClass} || ${m.isConstructor}") else if (conflictingNames(m.name)) log(s"No forwarder for $m due to conflict with ${linkedClass.info.member(m.name)}") diff --git a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala index dd433db1dc7a..879283de9db5 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala @@ -16,18 +16,28 @@ class BytecodeTest extends BytecodeTesting { import compiler._ @Test - def bridgeFlag(): Unit = { + def staticForwardersBridgeFlag(): Unit = { val code = - """ A { def f: Object = null } - |object B extends A { override def f: String = "b" } + """ A { + | def f: Object = null + | def g: Object + |} + |object B extends A { + | override def f: String = "b" // "bridge" forwarder + | def g: String = "b" // no "bridge" forwarder, as the overridden method is abstract, scala/bug#11207 + |} + |case class K(x: Int, s: String) """.stripMargin - for (base <- List("trait", "class")) { - val List(a, bMirror, bModule) = compileClasses(base + code) + for (base <- List("trait", "abstract class")) { + val List(a, bMirror, bModule, kClass, kModule) = compileClasses(base + code) assertEquals("B", bMirror.name) - assertEquals(List("f()Ljava/lang/Object;0x49", "f()Ljava/lang/String;0x9"), + assertEquals(List("f()Ljava/lang/Object;0x49", "f()Ljava/lang/String;0x9", "g()Ljava/lang/String;0x9"), bMirror.methods.asScala - .filter(_.name == "f") + .filter(m => m.name == "f" || m.name == "g") .map(m => m.name + m.desc + "0x" + Integer.toHexString(m.access)).toList.sorted) + assertEquals("K", kClass.name) + val List(app) = kClass.methods.asScala.filter(_.name == "apply").toList + assertEquals("apply(ILjava/lang/String;)LK;0x9", app.name + app.desc + "0x" + Integer.toHexString(app.access)) } } From ff094cd40a1f5aa7663c9bce86726bfdbb0ef312 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Mon, 5 Nov 2018 16:13:39 +0100 Subject: [PATCH 1563/2793] Upgrade jekyll for spec --- Gemfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Gemfile b/Gemfile index f91279b3e693..6fe508207eef 100644 --- a/Gemfile +++ b/Gemfile @@ -1,7 +1,7 @@ # To build the spec on Travis CI source "https://rubygems.org" -gem "jekyll", "3.3.0" +gem "jekyll", "3.6.3" gem "rouge" # gem 's3_website' gem "redcarpet", "3.3.2" From eda5d59b3f86ad8a72d65827c39b62d36b97959d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 29 Jan 2018 23:05:54 +1000 Subject: [PATCH 1564/2793] Fix non-termination with java strictfp Also test that the Java parser doesn't force entry of new symbols when it parses modifiers that it translates into symbol annotations. 
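The hang is the classic modifier-loop mistake: the `STRICTFP` case added the annotation but never advanced the input, so the parser kept seeing the same token (the one-line `in.nextToken()` addition in the diff below is the fix). A self-contained illustration of why every matched case in such a loop has to consume its token; the toy tokenizer here is an assumption for demonstration only, not the compiler's `JavaScanners` API:

```scala
// Toy modifier loop: drop the marked line and parseModifiers(List("strictfp", "class")) never returns.
def parseModifiers(tokens: List[String]): (List[String], List[String]) = {
  var rest = tokens
  val mods = List.newBuilder[String]
  var done = false
  while (!done) rest match {
    case "strictfp" :: tail =>
      mods += "strictfp"
      rest = tail // the fix: advance past the token we just handled
    case _ =>
      done = true
  }
  (mods.result(), rest)
}
```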
Regressed in #7356 --- .../scala/tools/nsc/javac/JavaParsers.scala | 1 + test/files/jvm/strictfp/StrictFpJava.java | 5 +++++ test/files/presentation/parse-invariants.check | 7 +++++++ .../presentation/parse-invariants/Test.scala | 17 +++++++++++------ .../presentation/parse-invariants/src/a/A.java | 16 ++++++++++++++++ 5 files changed, 40 insertions(+), 6 deletions(-) create mode 100644 test/files/jvm/strictfp/StrictFpJava.java create mode 100644 test/files/presentation/parse-invariants/src/a/A.java diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index 08468cb505b9..d87fa7e8da81 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -395,6 +395,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { in.nextToken() case STRICTFP => addAnnot(ScalaStrictFPAttr) + in.nextToken() case SYNCHRONIZED => in.nextToken() case _ => diff --git a/test/files/jvm/strictfp/StrictFpJava.java b/test/files/jvm/strictfp/StrictFpJava.java new file mode 100644 index 000000000000..89e4e94ae528 --- /dev/null +++ b/test/files/jvm/strictfp/StrictFpJava.java @@ -0,0 +1,5 @@ +strictfp class StrictFpJava {} + +class StrictFpJavaMethod { + strictfp void test() {} +} diff --git a/test/files/presentation/parse-invariants.check b/test/files/presentation/parse-invariants.check index 32e9c846ab5c..961bc6df7938 100644 --- a/test/files/presentation/parse-invariants.check +++ b/test/files/presentation/parse-invariants.check @@ -1,3 +1,10 @@ +parseTree +NoNewSymbolsEntered OK +Unique OK +Unattributed OK +NeverModify OK +AlwaysParseTree OK +parseTree NoNewSymbolsEntered OK Unique OK Unattributed OK diff --git a/test/files/presentation/parse-invariants/Test.scala b/test/files/presentation/parse-invariants/Test.scala index 128896ccaae0..29b51a3f3fbe 100644 --- a/test/files/presentation/parse-invariants/Test.scala +++ b/test/files/presentation/parse-invariants/Test.scala @@ -5,12 +5,16 @@ import scala.tools.nsc.interactive.Response object Test extends InteractiveTest { override def execute(): Unit = { - val sf = sourceFiles.find(_.file.name == "A.scala").head - noNewSymbols(sf) - uniqueParseTree(sf) - unattributedParseTree(sf) - neverModifyParseTree(sf) - shouldAlwaysReturnParseTree(sf) + def test(fileName: String): Unit = { + val sf = sourceFiles.find(_.file.name == fileName).head + noNewSymbols(sf) + uniqueParseTree(sf) + unattributedParseTree(sf) + neverModifyParseTree(sf) + shouldAlwaysReturnParseTree(sf) + } + test("A.scala") + test("A.java") } /** @@ -19,6 +23,7 @@ object Test extends InteractiveTest { private def noNewSymbols(sf: SourceFile) { def nextId() = compiler.NoSymbol.newTermSymbol(compiler.TermName("dummy"), compiler.NoPosition, compiler.NoFlags).id val id = nextId() + println("parseTree") val tree = compiler.parseTree(sf) val id2 = nextId() if (id2 == id + 1) { diff --git a/test/files/presentation/parse-invariants/src/a/A.java b/test/files/presentation/parse-invariants/src/a/A.java new file mode 100644 index 000000000000..a04478149103 --- /dev/null +++ b/test/files/presentation/parse-invariants/src/a/A.java @@ -0,0 +1,16 @@ +package syntax; + +class A { + transient volatile int x; + strictfp void test() { + } + + native void nativeMethod() + + synchronized void syncMethod() {} + + void thrower() throws Throwable {} + +} + +strictfp class B {} \ No newline at end of file From f33d4e159ee034387195683f31448a48286b8c5f Mon Sep 17 00:00:00 2001 From: Jason 
Zaugg Date: Thu, 1 Nov 2018 10:05:02 +1000 Subject: [PATCH 1565/2793] Fix crasher regression with implicit classes and default params Since the changes to make the compiler output deterministic, default getter symbols must be entered eagerly before the trees are created. This happens in `enterDefDef`, but that method is bypassed when entering the synthetic symbol for an implicit class factory method. This commit enters the default getter symbols in this case, as well, avoiding a later crash. --- .../scala/tools/nsc/typechecker/MethodSynthesis.scala | 7 +++++++ .../implicit-class-implicit-param-with-default.check | 5 +++++ .../implicit-class-implicit-param-with-default.scala | 11 +++++++++++ 3 files changed, 23 insertions(+) create mode 100644 test/files/run/implicit-class-implicit-param-with-default.check create mode 100644 test/files/run/implicit-class-implicit-param-with-default.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 20535e89f413..898fce90cef3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -231,6 +231,13 @@ trait MethodSynthesis { val methDef = factoryMeth(classDef.mods & AccessFlags | METHOD | IMPLICIT | SYNTHETIC, classDef.name.toTermName, classDef) val methSym = enterInScope(assignMemberSymbol(methDef)) context.unit.synthetics(methSym) = methDef + + treeInfo.firstConstructor(classDef.impl.body) match { + case primaryConstructor: DefDef => + if (mexists(primaryConstructor.vparamss)(_.mods.hasDefault)) + enterDefaultGetters(methSym, primaryConstructor, primaryConstructor.vparamss, primaryConstructor.tparams) + case _ => + } methSym setInfo implicitFactoryMethodCompleter(methDef, classDef.symbol) } diff --git a/test/files/run/implicit-class-implicit-param-with-default.check b/test/files/run/implicit-class-implicit-param-with-default.check new file mode 100644 index 000000000000..f0ab6fd76b86 --- /dev/null +++ b/test/files/run/implicit-class-implicit-param-with-default.check @@ -0,0 +1,5 @@ +default +default +default +explicit +explicit diff --git a/test/files/run/implicit-class-implicit-param-with-default.scala b/test/files/run/implicit-class-implicit-param-with-default.scala new file mode 100644 index 000000000000..9c8919f529e8 --- /dev/null +++ b/test/files/run/implicit-class-implicit-param-with-default.scala @@ -0,0 +1,11 @@ +object Test { + implicit class C(self: String)(implicit val foo: String = "default") + + def main(args: Array[String]) { + println("".foo) + println(C("").foo) + println(new C("").foo) + println(C("")("explicit").foo) + println(new C("")("explicit").foo) + } +} From c1e6ed6110a27a2e45dc72f1b28b3413721026b6 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Fri, 9 Nov 2018 21:34:08 +0000 Subject: [PATCH 1566/2793] Finish re-licensing to Apache License 2.0 --- doc/License.rtf | 17 ++++++++++------- doc/README | 4 ++-- src/manual/scala/man1/Command.scala | 2 +- 3 files changed, 13 insertions(+), 10 deletions(-) diff --git a/doc/License.rtf b/doc/License.rtf index 30e6912281d5..7099e57296ec 100644 --- a/doc/License.rtf +++ b/doc/License.rtf @@ -4,7 +4,7 @@ \margl1440\margr1440\vieww25140\viewh18960\viewkind0 \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural -\f0\fs26 \cf0 Scala is licensed under the {\field{\*\fldinst{HYPERLINK "http://opensource.org/licenses/BSD-3-Clause"}}{\fldrslt BSD 3-Clause 
License}}.\ +\f0\fs26 \cf0 Scala is licensed under the {\field{\*\fldinst{HYPERLINK "https://www.apache.org/licenses/LICENSE-2.0"}}{\fldrslt Apache License Version 2.0}}.\ \ \fs48 Scala License @@ -14,12 +14,15 @@ Copyright (c) 2011-2018 Lightbend, Inc.\ All rights reserved.\ \ -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:\ - \'95 Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.\ - \'95 Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.\ - \'95 Neither the name of the EPFL nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.\ -\ -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \'93AS IS\'94 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\ +Licensed under the Apache License, Version 2.0 (the "License");\ +you may not use this file except in compliance with the License.\ +You may obtain a copy of the License at\ + http://www.apache.org/licenses/LICENSE-2.0\ + Unless required by applicable law or agreed to in writing, software\ +distributed under the License is distributed on an "AS IS" BASIS,\ +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\ +See the License for the specific language governing permissions and\ +limitations under the License.\ \fs52 \ diff --git a/doc/README b/doc/README index 81295ce5c7bd..3361044f73d4 100644 --- a/doc/README +++ b/doc/README @@ -30,7 +30,7 @@ environment variable. Licenses -------- -Scala is licensed under the standard 3-clause BSD license, +Scala is licensed under the Apache License 2.0, included in the distribution as the file `doc/LICENSE.md`. The licenses of the software included in the Scala distribution can -be found in the `doc/licenses` directory. \ No newline at end of file +be found in the `doc/licenses` directory. diff --git a/src/manual/scala/man1/Command.scala b/src/manual/scala/man1/Command.scala index 4f061d334691..bc622393d5b3 100644 --- a/src/manual/scala/man1/Command.scala +++ b/src/manual/scala/man1/Command.scala @@ -45,7 +45,7 @@ trait Command { def copyright = Section("COPYRIGHT", - "This is open-source software, available to you under a BSD-like license. " & + "This is open-source software, available to you under the Apache License 2.0. " & "See accompanying \"copyright\" or \"LICENSE\" file for copying conditions. 
" & "There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A " & "PARTICULAR PURPOSE.") From b3ab8605c64823849c212f2cf2f1be5a014cbfb6 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Mon, 12 Nov 2018 10:29:25 +0100 Subject: [PATCH 1567/2793] License.rtf parses I copy/pasted the html-rendered License.md into License.rtf using TextEdit on Mac. --- doc/License.rtf | 114 ++++++++++++++++++++++++++++++------------------ 1 file changed, 71 insertions(+), 43 deletions(-) diff --git a/doc/License.rtf b/doc/License.rtf index 7099e57296ec..3d0f81fa68ee 100644 --- a/doc/License.rtf +++ b/doc/License.rtf @@ -1,59 +1,87 @@ -{\rtf1\ansi\ansicpg1252\cocoartf1187\cocoasubrtf400 -{\fonttbl\f0\fswiss\fcharset0 Helvetica;} -{\colortbl;\red255\green255\blue255;} -\margl1440\margr1440\vieww25140\viewh18960\viewkind0 -\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural +{\rtf1\ansi\ansicpg1252\cocoartf1671 +{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;} +{\colortbl;\red255\green255\blue255;\red27\green31\blue34;\red10\green77\blue204;\red0\green0\blue0; +\red21\green23\blue26;} +{\*\expandedcolortbl;;\cssrgb\c14118\c16078\c18039;\cssrgb\c1176\c40000\c83922;\csgray\c0\c0; +\cssrgb\c10588\c12157\c13725\c4706;} +{\*\listtable{\list\listtemplateid1\listhybrid{\listlevel\levelnfc23\levelnfcn23\leveljc0\leveljcn0\levelfollow0\levelstartat1\levelspace360\levelindent0{\*\levelmarker \{disc\}}{\leveltext\leveltemplateid1\'01\uc0\u8226 ;}{\levelnumbers;}\fi-360\li720\lin720 }{\listname ;}\listid1} +{\list\listtemplateid2\listhybrid{\listlevel\levelnfc23\levelnfcn23\leveljc0\leveljcn0\levelfollow0\levelstartat1\levelspace360\levelindent0{\*\levelmarker \{disc\}}{\leveltext\leveltemplateid101\'01\uc0\u8226 ;}{\levelnumbers;}\fi-360\li720\lin720 }{\listname ;}\listid2} +{\list\listtemplateid3\listhybrid{\listlevel\levelnfc23\levelnfcn23\leveljc0\leveljcn0\levelfollow0\levelstartat1\levelspace360\levelindent0{\*\levelmarker \{disc\}}{\leveltext\leveltemplateid201\'01\uc0\u8226 ;}{\levelnumbers;}\fi-360\li720\lin720 }{\listname ;}\listid3} +{\list\listtemplateid4\listhybrid{\listlevel\levelnfc23\levelnfcn23\leveljc0\leveljcn0\levelfollow0\levelstartat1\levelspace360\levelindent0{\*\levelmarker \{disc\}}{\leveltext\leveltemplateid301\'01\uc0\u8226 ;}{\levelnumbers;}\fi-360\li720\lin720 }{\listname ;}\listid4}} +{\*\listoverridetable{\listoverride\listid1\listoverridecount0\ls1}{\listoverride\listid2\listoverridecount0\ls2}{\listoverride\listid3\listoverridecount0\ls3}{\listoverride\listid4\listoverridecount0\ls4}} +\paperw11900\paperh16840\margl1440\margr1440\vieww17360\viewh22480\viewkind0 +\deftab720 +\pard\pardeftab720\sl360\sa320\partightenfactor0 -\f0\fs26 \cf0 Scala is licensed under the {\field{\*\fldinst{HYPERLINK "https://www.apache.org/licenses/LICENSE-2.0"}}{\fldrslt Apache License Version 2.0}}.\ -\ +\f0\fs28 \cf2 \expnd0\expndtw0\kerning0 +Scala is licensed under the\'a0{\field{\*\fldinst{HYPERLINK "https://www.apache.org/licenses/LICENSE-2.0"}}{\fldrslt \cf3 Apache License Version 2.0}}.\ +\pard\pardeftab720\sl360\partightenfactor0 -\fs48 Scala License -\fs40 \ +\f1\b \cf3 \ +\pard\pardeftab720\sl440\sa320\partightenfactor0 -\fs26 Copyright (c) 2002-2018 EPFL\ +\fs48 \cf2 Scala License\ +\pard\pardeftab720\sl360\sa320\partightenfactor0 + +\f0\b0\fs28 \cf2 Copyright (c) 2002-2018 EPFL\ Copyright (c) 2011-2018 Lightbend, Inc.\ All rights reserved.\ -\ -Licensed under the Apache License, Version 2.0 (the "License");\ -you 
may not use this file except in compliance with the License.\ -You may obtain a copy of the License at\ - http://www.apache.org/licenses/LICENSE-2.0\ - Unless required by applicable law or agreed to in writing, software\ -distributed under the License is distributed on an "AS IS" BASIS,\ -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\ -See the License for the specific language governing permissions and\ -limitations under the License.\ +\pard\pardeftab720\sl360\sa320\partightenfactor0 +\cf2 \cb4 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at {\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/LICENSE-2.0"}}{\fldrslt http://www.apache.org/licenses/LICENSE-2.0}}.\ +Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.\ +\pard\pardeftab720\sl480\partightenfactor0 -\fs52 \ +\f1\b \cf3 \cb1 \ +\pard\pardeftab720\sl600\sa320\partightenfactor0 -\fs48 Other Licenses -\fs52 \ +\fs48 \cf2 Other Licenses\ +\pard\pardeftab720\sl360\sa320\partightenfactor0 -\fs26 This software includes projects with the following licenses, which are also included in the -\fs24 licenses/ -\fs26 directory:\ +\f0\b0\fs28 \cf2 This software includes projects with the following licenses, which are also included in the\'a0\cb5 licenses/\cb1 \'a0directory:\ +\pard\pardeftab720\sl300\partightenfactor0 -\fs30 \ -{\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/LICENSE-2.0.html"}}{\fldrslt Apache License}}\ +\f1\b \cf3 \ +\pard\pardeftab720\sl360\sa320\partightenfactor0 +{\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/LICENSE-2.0.html"}}{\fldrslt \cf3 Apache License}}\cf2 \ +\pard\pardeftab720\sl360\sa320\partightenfactor0 -\fs26 This license is used by the following third-party libraries:\ - \'95 jansi\ +\f0\b0 \cf2 This license is used by the following third-party libraries:\ +\pard\tx220\tx720\pardeftab720\li720\fi-720\sl360\partightenfactor0 +\ls1\ilvl0\cf2 \kerning1\expnd0\expndtw0 {\listtext \uc0\u8226 }\expnd0\expndtw0\kerning0 +jansi\ +\pard\pardeftab720\sl300\partightenfactor0 -\fs30 \ -{\field{\*\fldinst{HYPERLINK "http://www.opensource.org/licenses/bsd-license.php"}}{\fldrslt BSD License}}\ +\f1\b \cf3 \ +\pard\pardeftab720\sl360\sa320\partightenfactor0 +{\field{\*\fldinst{HYPERLINK "http://www.opensource.org/licenses/bsd-license.php"}}{\fldrslt \cf3 BSD License}}\cf2 \ +\pard\pardeftab720\sl360\sa320\partightenfactor0 -\fs26 This license is used by the following third-party libraries:\ - \'95 jline\ +\f0\b0 \cf2 This license is used by the following third-party libraries:\ +\pard\tx220\tx720\pardeftab720\li720\fi-720\sl360\partightenfactor0 +\ls2\ilvl0\cf2 \kerning1\expnd0\expndtw0 {\listtext \uc0\u8226 }\expnd0\expndtw0\kerning0 +jline\ +\pard\pardeftab720\sl300\partightenfactor0 -\fs30 \ -{\field{\*\fldinst{HYPERLINK "http://opensource.org/licenses/BSD-3-Clause"}}{\fldrslt BSD 3-Clause License}}\ +\f1\b \cf3 \ +\pard\pardeftab720\sl360\sa320\partightenfactor0 +{\field{\*\fldinst{HYPERLINK "http://opensource.org/licenses/BSD-3-Clause"}}{\fldrslt \cf3 BSD 3-Clause License}}\cf2 \ +\pard\pardeftab720\sl360\sa320\partightenfactor0 -\fs26 This license is used by the following third-party 
libraries:\ - \'95 asm\ +\f0\b0 \cf2 This license is used by the following third-party libraries:\ +\pard\tx220\tx720\pardeftab720\li720\fi-720\sl360\partightenfactor0 +\ls3\ilvl0\cf2 \kerning1\expnd0\expndtw0 {\listtext \uc0\u8226 }\expnd0\expndtw0\kerning0 +asm\ +\pard\pardeftab720\sl300\partightenfactor0 -\fs30 \ -{\field{\*\fldinst{HYPERLINK "http://www.opensource.org/licenses/MIT"}}{\fldrslt MIT License}}\ +\f1\b \cf3 \ +\pard\pardeftab720\sl360\sa320\partightenfactor0 +{\field{\*\fldinst{HYPERLINK "http://www.opensource.org/licenses/MIT"}}{\fldrslt \cf3 MIT License}}\cf2 \ +\pard\pardeftab720\sl360\sa320\partightenfactor0 -\fs26 This license is used by the following third-party libraries:\ - \'95 jquery\ - \'95 tools tooltip\ +\f0\b0 \cf2 This license is used by the following third-party libraries:\ +\pard\tx220\tx720\pardeftab720\li720\fi-720\sl360\partightenfactor0 +\ls4\ilvl0\cf2 \kerning1\expnd0\expndtw0 {\listtext \uc0\u8226 }\expnd0\expndtw0\kerning0 +jquery\ +\ls4\ilvl0\kerning1\expnd0\expndtw0 {\listtext \uc0\u8226 }\expnd0\expndtw0\kerning0 +tools tooltip\ +} From 5f83efe5070fa30b78867e23c5cb1058af349c26 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 13 Nov 2018 13:47:32 +0100 Subject: [PATCH 1568/2793] Extractor type may depend on (implicit) arguments Fix scala/bug#11162 See also scala/bug#6130 --- .../transform/patmat/PatternExpansion.scala | 16 ++++++++----- test/files/pos/t11162.scala | 23 +++++++++++++++++++ 2 files changed, 33 insertions(+), 6 deletions(-) create mode 100644 test/files/pos/t11162.scala diff --git a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala index cf484c7c8485..7b4501c2bf05 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/PatternExpansion.scala @@ -134,12 +134,16 @@ trait PatternExpansion { private def caseCtorParamTypes: Option[List[Type]] = if (isUnapply || isUnapplySeq) None else Some(fun.tpe.paramTypes) - // bug#6130 can't really say what the result type is without referring to the binder we're extracting, - // as an unapply's result type could depend on its argument, e.g. crazy stuff like `def unapply(x: T): Option[(x.T, x.U)]` - // NOTE: we skip a potential implicit method type here -- could this be another avenue of craziness where the result type depends on the input? - private def unapplyResultType(extractedBinder: Symbol = unapplySelector): Type = - if (extractedBinder == NoSymbol) fun.tpe.finalResultType - else fun.tpe.resultType(List(SingleType(NoPrefix, extractedBinder))).finalResultType + // scala/bug#6130 scala/bug#11162 unapply's result type may refer to the binder we're extracting, + // as well as implicit args. Example: `def unapply(x: T)(implicit ops: Foo): Option[(x.T, ops.U)]`. + // Existentially abstract over any unknown values to approximate the type. 
+ private def unapplyResultType(extractedBinder: Symbol = unapplySelector): Type = { + val appliedToExtractedBinder = + if (extractedBinder != NoSymbol) fun.tpe.resultType(List(SingleType(NoPrefix, extractedBinder))) + else fun.tpe + + packSymbols(appliedToExtractedBinder.paramss.flatten, appliedToExtractedBinder.finalResultType) + } private def resultOfGetInMonad(arg: Symbol = unapplySelector) = elementTypeFromGet(unapplyResultType(arg)) diff --git a/test/files/pos/t11162.scala b/test/files/pos/t11162.scala new file mode 100644 index 000000000000..bedb4879abcc --- /dev/null +++ b/test/files/pos/t11162.scala @@ -0,0 +1,23 @@ +class Ops[X] { + type T +} + +object Meh { + // unapply result type depends on an implicit arg + def unapply[X](i: Int)(implicit ops: Ops[X]): Option[ops.T] = None +} + +class Test { + def foo[X](implicit oops: Ops[X]): Unit = { + /* error: error during expansion of this match (this is a scalac bug). + The underlying error was: type mismatch; + found : oops.T + required: ops.T + */ + def bar() = 1 match { + case Meh(z) => z + } + + bar() + } +} From ca4b94f6a9d4aad4dc670f8378022fdc6ebbc514 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 14 Nov 2018 14:51:04 +1000 Subject: [PATCH 1569/2793] Make access to current run during Run. safe for Global subclasses SBT's EvalGlobal overrides `currentRun` in a way that makes the initial null value visible, even after the part of `Run.` that assigns `Global.curRun`. --- src/compiler/scala/tools/nsc/Global.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index cbda492b0ed1..93fd46d01887 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1110,8 +1110,10 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def newJavaUnitParser(unit: CompilationUnit): JavaUnitParser = new JavaUnitParser(unit) - override protected[scala] def currentRunProfilerBeforeCompletion(root: Symbol, associatedFile: AbstractFile): Unit = currentRun.profiler.beforeCompletion(root, associatedFile) - override protected[scala] def currentRunProfilerAfterCompletion(root: Symbol, associatedFile: AbstractFile): Unit = currentRun.profiler.afterCompletion(root, associatedFile) + override protected[scala] def currentRunProfilerBeforeCompletion(root: Symbol, associatedFile: AbstractFile): Unit = + curRun.profiler.beforeCompletion(root, associatedFile) + override protected[scala] def currentRunProfilerAfterCompletion(root: Symbol, associatedFile: AbstractFile): Unit = + curRun.profiler.afterCompletion(root, associatedFile) /** A Run is a single execution of the compiler on a set of units. */ From 5e4d34aec806774f46a212d76f84837d02a9dc06 Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Fri, 5 Oct 2018 09:07:11 +0200 Subject: [PATCH 1570/2793] [backport] Fix potential bugs in SpecializeTypes Using `contains` with unrelated types which always returns false. 
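A minimal sketch of the pitfall being fixed, using made-up `Tpe`/`Sym` case classes rather than the compiler's actual `Type`/`Symbol`: `Seq#contains` accepts `Any`, so asking a list of *types* whether it contains a *symbol* compiles cleanly but can never be true.

```scala
final case class Tpe(name: String)
final case class Sym(name: String)

object ContainsPitfall {
  def main(args: Array[String]): Unit = {
    val concretes: List[Tpe] = List(Tpe("AnyRef"), Tpe("Int"))
    println(concretes.contains(Sym("AnyRef"))) // always false: a Sym never equals a Tpe
    println(concretes.contains(Tpe("AnyRef"))) // true: compare like with like
  }
}
```

This is why the change below compares `concretes` against `AnyRefTpe` (a type) rather than `AnyRefClass` (a symbol).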
(cherry picked from commit 447bfb8e00be1cdbc8e819a470281e23784f9232) --- .../tools/nsc/transform/SpecializeTypes.scala | 27 ++++++++----------- 1 file changed, 11 insertions(+), 16 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index bddaf1e8bdb3..10d733d04378 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -322,7 +322,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def isSpecializedIn(sym: Symbol, site: Type) = specializedTypeVars(sym) exists { tvar => val concretes = concreteTypes(tvar) - (concretes contains AnyRefClass) || (concretes contains site.memberType(tvar)) + (concretes contains AnyRefTpe) || (concretes contains site.memberType(tvar)) } @@ -416,7 +416,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { else specializedOn(sym).map(s => specializesClass(s).tpe).sorted - if (isBoundedGeneric(sym.tpe) && (types contains AnyRefClass)) + if (isBoundedGeneric(sym.tpe) && (types contains AnyRefTpe)) reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefTpe + ".") types @@ -987,23 +987,18 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { specMember } - if (sym.isMethod) { - if (hasUnspecializableAnnotation(sym)) { - List() - } else { - val stvars = specializedTypeVars(sym) - if (stvars.nonEmpty) - debuglog("specialized %s on %s".format(sym.fullLocationString, stvars.map(_.name).mkString(", "))) + if (!sym.isMethod || sym.isConstructor || hasUnspecializableAnnotation(sym)) { + Nil + } else { + val stvars = specializedTypeVars(sym) + if (stvars.nonEmpty) + debuglog("specialized %s on %s".format(sym.fullLocationString, stvars.map(_.name).mkString(", "))) - val tps1 = if (sym.isConstructor) tps filter (sym.info.paramTypes contains _) else tps - val tps2 = tps1 filter stvars - if (!sym.isDeferred) - addConcreteSpecMethod(sym) + if (!sym.isDeferred) + addConcreteSpecMethod(sym) - specializeOn(tps2) - } + specializeOn(tps filter stvars) } - else Nil } /** Return the specialized overload of `m`, in the given environment. */ From 954c5d32d71a43b141be546877b01183a994a1b2 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 14 Nov 2018 16:41:09 +1000 Subject: [PATCH 1571/2793] Stabilize order of annotations in the class file Regressed in #6846, which added support for encoding repeated annotations. Test failure before replacing `groupBy` with `LinkedHashMap`: ``` $ sbt junit/testOnly scala.tools.nsc.DeterminismTest ... 
java.lang.AssertionError: assertion failed: Difference detected between recompiling List(b.scala, Annot1.java) Run: jardiff -r /var/folders/tz/p8vd07wn7wxck3b9v54grlzw0000gp/T/reference814657788418452571 /var/folders/tz/p8vd07wn7wxck3b9v54grlzw0000gp/T/recompileOutput4882243280168823330 $ jardiff -r /var/folders/tz/p8vd07wn7wxck3b9v54grlzw0000gp/T/reference814657788418452571 /var/folders/tz/p8vd07wn7wxck3b9v54grlzw0000gp/T/recompileOutput4882243280168823330 diff --git a/Test.class.asm b/Test.class.asm index 98bfd80..a056f9a 100644 --- a/Test.class.asm +++ b/Test.class.asm @@ -4,10 +4,10 @@ // compiled from: b.scala - @LAnnot2;(value=java.lang.Object.class) - @LAnnot1;(value="foo") + @LAnnot2;(value=java.lang.Object.class) + @Lscala/reflect/ScalaSignature;(bytes="\u0006\u0001u1AAA\u0002\u0001\r!)Q\u0002\u0001C\u0001\u001d\u0009!A+Z:u\u0015\u0005!\u0011a\u0002\u001ff[B$\u0018PP\u0002\u0001'\u0009\u0001q\u0001\u0005\u0002\u0009\u00175\u0009\u0011BC\u0001\u000b\u0003\u0015\u00198-\u00197b\u0013\u0009a\u0011B\u0001\u0004B]f\u0014VMZ\u0001\u0007y%t\u0017\u000e\u001e \u0015\u0003=\u0001\"\u0001\u0005\u0001\u000e\u0003\rAC\u0001\u0001\n\u0016-A\u0011\u0001cE\u0005\u0003)\r\u0011a!\u00118o_R\u0014\u0014!\u0002 groupRepeatableAnnotations(x._1, x._2.toList)).toList } // assumes non-empty `anns` diff --git a/test/junit/scala/tools/nsc/DeterminismTest.scala b/test/junit/scala/tools/nsc/DeterminismTest.scala index 8651f23dcf0f..fabd2eb9e87f 100644 --- a/test/junit/scala/tools/nsc/DeterminismTest.scala +++ b/test/junit/scala/tools/nsc/DeterminismTest.scala @@ -1,12 +1,16 @@ package scala.tools.nsc +import java.io.{File, OutputStreamWriter} +import java.nio.charset.Charset import java.nio.file.attribute.BasicFileAttributes import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor} import java.util +import javax.tools.ToolProvider import org.junit.Test -import scala.collection.JavaConverters.asScalaIteratorConverter +import scala.collection.JavaConverters.{asScalaIteratorConverter, seqAsJavaListConverter} +import scala.collection.immutable import scala.language.implicitConversions import scala.reflect.internal.util.{BatchSourceFile, SourceFile} import scala.reflect.io.PlainNioFile @@ -187,6 +191,78 @@ class DeterminismTest { test(List(code)) } + @Test def testAnnotations1(): Unit = { + def code = List[SourceFile]( + source("a.scala", + """ + |class Annot1(s: String) extends scala.annotation.StaticAnnotation + |class Annot2(s: Class[_]) extends scala.annotation.StaticAnnotation + | + """.stripMargin), + source("b.scala", + """ + |@Annot1("foo") + |@Annot2(classOf[AnyRef]) + |class Test + """.stripMargin) + ) + test(List(code)) + } + + @Test def testAnnotationsJava(): Unit = { + def code = List[SourceFile]( + source("Annot1.java", + """ + |import java.lang.annotation.*; + |@Retention(RetentionPolicy.RUNTIME) + |@Target(ElementType.TYPE) + |@Inherited + |@interface Annot1 { String value() default ""; } + | + |@Retention(RetentionPolicy.RUNTIME) + |@Target(ElementType.TYPE) + |@Inherited + |@interface Annot2 { Class value(); } + | + """.stripMargin), + source("b.scala", + """ + |@Annot1("foo") @Annot2(classOf[AnyRef]) class Test + """.stripMargin) + ) + test(List(code)) + } + + @Test def testAnnotationsJavaRepeatable(): Unit = { + val javaAnnots = source("Annot1.java", + """ + |import java.lang.annotation.*; + |@Repeatable(Annot1.Container.class) + |@Retention(RetentionPolicy.RUNTIME) + |@Target(ElementType.TYPE) + |@interface Annot1 { String value() default ""; + | + | 
@Retention(RetentionPolicy.RUNTIME) + | @Target(ElementType.TYPE) + | public static @interface Container { + | Annot1[] value(); + | } + |} + | + |@Retention(RetentionPolicy.RUNTIME) + |@Target(ElementType.TYPE) + |@Inherited + |@interface Annot2 { Class value(); } + """.stripMargin) + def code = + List(source("dummy.scala", ""), source("b.scala", + """ + |@Annot1("foo") @Annot2(classOf[String]) @Annot1("bar") class Test + """.stripMargin) + ) + test(List(javaAnnots) :: code :: Nil) + } + def source(name: String, code: String): SourceFile = new BatchSourceFile(name, code) private def test(groups: List[List[SourceFile]]): Unit = { val referenceOutput = Files.createTempDirectory("reference") @@ -202,7 +278,22 @@ class DeterminismTest { val r = new Run // println("scalac " + files.mkString(" ")) r.compileSources(files) - assert(!storeReporter.hasErrors, storeReporter.infos.mkString("\n")) + Predef.assert(!storeReporter.hasErrors, storeReporter.infos.mkString("\n")) + files.filter(_.file.name.endsWith(".java")) match { + case Nil => + case javaSources => + def tempFileFor(s: SourceFile): Path = { + val f = output.resolve(s.file.name) + Files.write(f, new String(s.content).getBytes(Charset.defaultCharset())) + } + val options = List("-d", output.toString) + val javac = ToolProvider.getSystemJavaCompiler + val fileMan = javac.getStandardFileManager(null, null, null) + val javaFileObjects = fileMan.getJavaFileObjects(javaSources.map(s => tempFileFor(s).toAbsolutePath.toString): _*) + val task = javac.getTask(new OutputStreamWriter(System.out), fileMan, null, options.asJava, Nil.asJava, javaFileObjects) + val result = task.call() + Predef.assert(result) + } } for (group <- groups.init) { From 75ceb799682acf616516b79d6910abadef7951da Mon Sep 17 00:00:00 2001 From: Som Snytt Date: Sun, 25 Nov 2018 15:37:10 -0800 Subject: [PATCH 1572/2793] [no-merge] Iterator.flatMap clears reference Clear the reference to the previous iterator before producing the next, so that any references held by the old iterator become collectable. --- src/library/scala/collection/Iterator.scala | 2 +- test/files/run/t11272.javaopts | 1 + test/files/run/t11272.scala | 12 ++++++++ .../junit/scala/collection/IteratorTest.scala | 28 +++++++++++++++++++ 4 files changed, 42 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t11272.javaopts create mode 100644 test/files/run/t11272.scala diff --git a/src/library/scala/collection/Iterator.scala b/src/library/scala/collection/Iterator.scala index b80a19f73177..e2a1d6da56cb 100644 --- a/src/library/scala/collection/Iterator.scala +++ b/src/library/scala/collection/Iterator.scala @@ -481,7 +481,7 @@ trait Iterator[+A] extends TraversableOnce[A] { */ def flatMap[B](f: A => GenTraversableOnce[B]): Iterator[B] = new AbstractIterator[B] { private var cur: Iterator[B] = empty - private def nextCur() { cur = f(self.next()).toIterator } + private def nextCur(): Unit = { cur = null ; cur = f(self.next()).toIterator } def hasNext: Boolean = { // Equivalent to cur.hasNext || self.hasNext && { nextCur(); hasNext } // but slightly shorter bytecode (better JVM inlining!) 
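A standalone sketch of why clearing `cur` first matters (an illustration under simplified assumptions, not the actual `Iterator.flatMap` source): while `f(self.next())` materialises the next chunk, the exhausted chunk stays strongly reachable unless the reference is dropped beforehand, so peak memory covers two chunks instead of one.

```scala
object FlatMapRetention {
  def chunked[A, B](outer: Iterator[A])(f: A => Iterator[B]): Iterator[B] =
    new Iterator[B] {
      private[this] var cur: Iterator[B] = Iterator.empty
      // Drop the old chunk before building the next one, so at most one
      // chunk is reachable at any time.
      private def nextCur(): Unit = { cur = null; cur = f(outer.next()) }
      def hasNext: Boolean = cur.hasNext || (outer.hasNext && { nextCur(); hasNext })
      def next(): B = if (hasNext) cur.next() else Iterator.empty.next()
    }

  def main(args: Array[String]): Unit =
    println(chunked(Iterator(1, 2, 3))(n => Iterator.fill(n)(n)).toList) // List(1, 2, 2, 3, 3, 3)
}
```

The test below exercises exactly this: two 128 MB elements under a 196 MB heap only fit if the first becomes collectable before the second is allocated.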
diff --git a/test/files/run/t11272.javaopts b/test/files/run/t11272.javaopts new file mode 100644 index 000000000000..88ac6a3f37f4 --- /dev/null +++ b/test/files/run/t11272.javaopts @@ -0,0 +1 @@ +-Xmx196m diff --git a/test/files/run/t11272.scala b/test/files/run/t11272.scala new file mode 100644 index 000000000000..24a14daef4ca --- /dev/null +++ b/test/files/run/t11272.scala @@ -0,0 +1,12 @@ + +object Test { + def main(args: Array[String]): Unit = { + test() + } + def test() = { + val iter = Iterator(128*1024*1024, 128*1024*1024).flatMap(new Array[Byte](_)) + while (iter.hasNext) { + iter.next() + } + } +} diff --git a/test/junit/scala/collection/IteratorTest.scala b/test/junit/scala/collection/IteratorTest.scala index 6a427bbdc078..5e8ca1e53a59 100644 --- a/test/junit/scala/collection/IteratorTest.scala +++ b/test/junit/scala/collection/IteratorTest.scala @@ -357,4 +357,32 @@ class IteratorTest { assertTrue(hi.hasNext) // no longer delegated assertTrue(hi.hasNext) } + @Test def `flatMap is memory efficient in previous element`(): Unit = { + import java.lang.ref._ + // Array.iterator holds onto array reference; by contrast, iterating over List walks tail. + // Avoid reaching seq1 through test class. + val seq1 = new WeakReference(Array("first", "second")) + val seq2 = List("third") + val it0: Iterator[Int] = Iterator(1, 2) + lazy val it: Iterator[String] = it0.flatMap { + case 1 => seq1.get + case _ => check() ; seq2 + } + def check() = assertNotReachable(seq1.get, it)(()) + def checkHasElement() = assertNotReachable(seq1.get.apply(1), it)(()) + assert(it.hasNext) + assertEquals("first", it.next()) + + // verify that we're in the middle of seq1 + assertThrows[AssertionError](checkHasElement()) + assertThrows[AssertionError](check()) + assert(it.hasNext) + assertEquals("second", it.next()) + + assert(it.hasNext) + assertNotReachable(seq1.get, it) { + assertEquals("third", it.next()) + } + assert(!it.hasNext) + } } From 3edeaac047c78ab4f28bff100aa408ba775bd629 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 27 Nov 2018 09:52:23 +0100 Subject: [PATCH 1573/2793] [backport] Don't emit forwarder in mirror class for bridge methods In 2.12.6 and before, the Scala compiler emits static forwarders for bridge methods in top-level modules. These forwarders are emitted by mistake, the filter to exclude bridges did not work as expected. These bridge forwarders make the Java compiler on JDK 11 report ambiguity errors when using static forwarders (scala/bug#11061). PR #7035 fixed this for 2.12.7 by adding the `ACC_BRIDGE` flag to static forwarders for bridges. We decided to keep these bridges for binary compatibility. However, the new flag causes the eclipse Java compiler (and apparently also IntelliJ) to report ambiguity errors when using static forwarders (scala/bug#11271). In 2.13.x the Scala compiler no longer emits static forwarders for bridges (PR #6531). This PR brings the same behavior to 2.12.8. This change breaks binary compatibility. However, in the examples we tested, the Java compiler emits references to the non-bridge methods, so compiled code continues to work if a library is replaced by a new version that doesn't have forwarders for bridges: ``` $> cat T.scala class A[T] { def get: T = ??? 
} object T extends A[String] { override def get: String = "hi" } $> ~/scala/scala-2.12.7/bin/scalac T.scala ``` Generates two forwarders in `T.class` ``` // access flags 0x49 public static bridge get()Ljava/lang/Object; // access flags 0x9 public static get()Ljava/lang/String; ``` ``` $> javac -version javac 1.8.0_181 $> cat Test.java public class Test { public static void main(String[] args) { System.out.println(T.get()); } } $> javac Test.java ``` Generates in Test.class ``` INVOKESTATIC T.get ()Ljava/lang/String; ``` --- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 46 ++++++------------- src/library/scala/runtime/SymbolLiteral.java | 2 +- .../tools/nsc/backend/jvm/BytecodeTest.scala | 10 ++-- 3 files changed, 20 insertions(+), 38 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index f7ce5a1cca29..a6c8eb7f5229 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -806,7 +806,6 @@ abstract class BCodeHelpers extends BCodeIdiomatic { */ private def addForwarder( isRemoteClass: Boolean, - isBridge: Boolean, jclass: asm.ClassVisitor, moduleClass: Symbol, m: Symbol): Unit = { @@ -834,7 +833,6 @@ abstract class BCodeHelpers extends BCodeIdiomatic { */ // TODO: evaluate the other flags we might be dropping on the floor here. val flags = GenBCode.PublicStatic | - (if (isBridge) asm.Opcodes.ACC_BRIDGE else 0) | (if (m.isVarargsMethod) asm.Opcodes.ACC_VARARGS else 0) | (if (m.isDeprecated) asm.Opcodes.ACC_DEPRECATED else 0) @@ -887,32 +885,23 @@ abstract class BCodeHelpers extends BCodeIdiomatic { */ def addForwarders(isRemoteClass: Boolean, jclass: asm.ClassVisitor, jclassName: String, moduleClass: Symbol) { assert(moduleClass.isModuleClass, moduleClass) - debuglog(s"Dumping mirror class for object: $moduleClass") - val linkedClass = moduleClass.companionClass + val linkedClass = moduleClass.companionClass lazy val conflictingNames: Set[Name] = { (linkedClass.info.members collect { case sym if sym.name.isTermName => sym.name }).toSet } - debuglog(s"Potentially conflicting names for forwarders: $conflictingNames") - - for (m <- moduleClass.info.membersBasedOnFlags(BCodeHelpers.ExcludedForwarderFlags, symtab.Flags.METHOD)) { - // Fix for scala/bug#11207, see https://github.com/scala/scala/pull/7035/files#r226274350. This makes sure that 2.12.8 generates - // the same forwarder methods as in 2.12.6 (but includes bridge flags). In 2.13 we don't generate any forwarders for bridges. - val bridgeImplementingAbstract = m.isBridge && m.nextOverriddenSymbol.isDeferred - if (m.isType || m.isDeferred || bridgeImplementingAbstract || (m.owner eq definitions.ObjectClass) || m.isConstructor) - debuglog(s"No forwarder for '$m' from $jclassName to '$moduleClass': ${m.isType} || ${m.isDeferred} || ${m.owner eq definitions.ObjectClass} || ${m.isConstructor}") - else if (conflictingNames(m.name)) - log(s"No forwarder for $m due to conflict with ${linkedClass.info.member(m.name)}") - else if (m.hasAccessBoundary) - log(s"No forwarder for non-public member $m") - else { - log(s"Adding static forwarder for '$m' from $jclassName to '$moduleClass'") - addForwarder(isRemoteClass, - isBridge = m.isBridge, - jclass, - moduleClass, - m) - } + + // Before erasure * to exclude bridge methods. 
Excluding them by flag doesn't work, because then + // the method from the base class that the bridge overrides is included (scala/bug#10812). + // * Using `exitingUncurry` (not `enteringErasure`) because erasure enters bridges in traversal, + // not in the InfoTransform, so it actually modifies the type from the previous phase. + // Uncurry adds java varargs, which need to be included in the mirror class. + val members = exitingUncurry(moduleClass.info.membersBasedOnFlags(BCodeHelpers.ExcludedForwarderFlags, symtab.Flags.METHOD)) + for (m <- members) { + val excl = m.isDeferred || m.isConstructor || m.hasAccessBoundary || + { val o = m.owner; (o eq ObjectClass) || (o eq AnyRefClass) || (o eq AnyClass) } || + conflictingNames(m.name) + if (!excl) addForwarder(isRemoteClass, jclass, moduleClass, m) } } @@ -1184,14 +1173,9 @@ abstract class BCodeHelpers extends BCodeIdiomatic { } object BCodeHelpers { - val ExcludedForwarderFlags = { + val ExcludedForwarderFlags: Long = { import scala.tools.nsc.symtab.Flags._ - // Should include DEFERRED but this breaks findMember. - // Note that BRIDGE is *not* excluded. Trying to exclude bridges by flag doesn't work, findMembers - // will then include the member from the parent (which the bridge overrides / implements). - // This caused scala/bug#11061 and scala/bug#10812. In 2.13, they are fixed by not emitting - // forwarders for bridges. But in 2.12 that's not binary compatible, so instead we continue to - // emit forwarders for bridges, but mark them with ACC_BRIDGE. + // Don't include DEFERRED but filter afterwards, see comment on `findMembers` SPECIALIZED | LIFTED | PROTECTED | STATIC | EXPANDEDNAME | PRIVATE | MACRO } diff --git a/src/library/scala/runtime/SymbolLiteral.java b/src/library/scala/runtime/SymbolLiteral.java index 3638dca3eda3..560fef53333c 100644 --- a/src/library/scala/runtime/SymbolLiteral.java +++ b/src/library/scala/runtime/SymbolLiteral.java @@ -22,7 +22,7 @@ public static CallSite bootstrap(MethodHandles.Lookup lookup, String invokedName MethodType invokedType, String value) throws Throwable { ClassLoader classLoader = lookup.lookupClass().getClassLoader(); - MethodType type = MethodType.fromMethodDescriptorString("(Ljava/lang/Object;)Ljava/lang/Object;", classLoader); + MethodType type = MethodType.fromMethodDescriptorString("(Ljava/lang/String;)Lscala/Symbol;", classLoader); Class symbolClass = Class.forName("scala.Symbol", false, classLoader); MethodHandle factoryMethod = lookup.findStatic(symbolClass, "apply", type); Object symbolValue = factoryMethod.invokeWithArguments(value); diff --git a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala index 879283de9db5..1b1eedeceb09 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala @@ -23,15 +23,15 @@ class BytecodeTest extends BytecodeTesting { | def g: Object |} |object B extends A { - | override def f: String = "b" // "bridge" forwarder - | def g: String = "b" // no "bridge" forwarder, as the overridden method is abstract, scala/bug#11207 + | override def f: String = "b" + | def g: String = "b" |} |case class K(x: Int, s: String) """.stripMargin for (base <- List("trait", "abstract class")) { val List(a, bMirror, bModule, kClass, kModule) = compileClasses(base + code) assertEquals("B", bMirror.name) - assertEquals(List("f()Ljava/lang/Object;0x49", "f()Ljava/lang/String;0x9", "g()Ljava/lang/String;0x9"), + 
assertEquals(List("f()Ljava/lang/String;0x9", "g()Ljava/lang/String;0x9"), bMirror.methods.asScala .filter(m => m.name == "f" || m.name == "g") .map(m => m.name + m.desc + "0x" + Integer.toHexString(m.access)).toList.sorted) @@ -42,7 +42,7 @@ class BytecodeTest extends BytecodeTesting { } @Test - def varArg(): Unit = { + def staticForwardersVarargFlag(): Unit = { val code = """ A { @annotation.varargs def f(i: Int*): Object = null } |object B extends A { @annotation.varargs override def f(i: Int*): String = "b" } @@ -51,9 +51,7 @@ class BytecodeTest extends BytecodeTesting { val List(a, bMirror, bModule) = compileClasses(base + code) assertEquals("B", bMirror.name) assertEquals(List( - "f(Lscala/collection/Seq;)Ljava/lang/Object;0x49", "f(Lscala/collection/Seq;)Ljava/lang/String;0x9", - "f([I)Ljava/lang/Object;0xc9", "f([I)Ljava/lang/String;0x89"), bMirror.methods.asScala .filter(_.name == "f") From f98135015ce043e7ebfd70cedeb7a520ff93b58c Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Tue, 27 Nov 2018 13:33:18 -0500 Subject: [PATCH 1574/2793] Don't compactify Java inner class names. The Java compiler won't, and neither should we. Includes a virtual-directory-backed compiler agglomeration so that the test can possibly be run and pass on Windows. Fixes scala/bug#11277 --- .../tools/nsc/backend/JavaPlatform.scala | 2 +- .../scala/reflect/internal/Definitions.scala | 2 +- .../scala/reflect/internal/StdNames.scala | 6 +- .../scala/reflect/internal/Symbols.scala | 4 +- .../reflect/runtime/JavaUniverseForce.scala | 2 +- .../interpreter/PresentationCompilation.scala | 2 +- test/files/run/t6240-universe-code-gen.scala | 2 +- .../reflect/internal/LongNamesTest.scala | 43 +++++++ .../testing/VirtualCompilerTesting.scala | 116 ++++++++++++++++++ 9 files changed, 170 insertions(+), 9 deletions(-) create mode 100644 test/junit/scala/reflect/internal/LongNamesTest.scala create mode 100644 test/junit/scala/tools/testing/VirtualCompilerTesting.scala diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala index ff11f434710b..2d609dcb17a6 100644 --- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala +++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala @@ -26,7 +26,7 @@ trait JavaPlatform extends Platform { private[nsc] var currentClassPath: Option[ClassPath] = None - private[nsc] def classPath: ClassPath = { + protected[nsc] def classPath: ClassPath = { if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result) currentClassPath.get } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 84f54bc0e1e8..31a54e35f4d1 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1270,7 +1270,7 @@ trait Definitions extends api.StandardDefinitions { getMemberIfDefined(owner, name) orElse { if (phase.flatClasses && name.isTypeName && !owner.isPackageObjectOrClass) { val pkg = owner.owner - val flatname = tpnme.flattenedName(owner.name, name) + val flatname = tpnme.flattenedName(owner, name) getMember(pkg, flatname) } else fatalMissingSymbol(owner, name) diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index da4138fa45b7..38b64f63dc56 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -96,8 +96,10 @@ trait StdNames { 
protected val stringToTypeName = null protected implicit def createNameType(name: String): NameType - def flattenedName(segments: Name*): NameType = - compactify(segments mkString NAME_JOIN_STRING) + def flattenedName(owner: Symbol, name: Name): NameType = { + val flat = owner.name.toString + NAME_JOIN_STRING + name.toString + if (owner.isJava) flat else compactify(flat) // scala/bug#11277 + } // TODO: what is the purpose of all this duplication!?!?! // I made these constants because we cannot change them without bumping our major version anyway. diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 3a25d830a20f..2552580a9820 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -2959,7 +2959,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def name: TermName = { if (!isMethod && needsFlatClasses) { if (flatname eq null) - flatname = nme.flattenedName(rawowner.name, rawname) + flatname = nme.flattenedName(rawowner, rawname) flatname } @@ -3380,7 +3380,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def name: TypeName = { if (needsFlatClasses) { if (flatname eq null) - flatname = tpnme.flattenedName(rawowner.name, rawname) + flatname = tpnme.flattenedName(rawowner, rawname) flatname } diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index 7c5dfe17296b..0b4d7131fbeb 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -17,7 +17,7 @@ package runtime trait JavaUniverseForce { self: runtime.JavaUniverse => def force() { Literal(Constant(42)).duplicate - nme.flattenedName() + nme.flattenedName(NoSymbol, nme.NO_NAME) nme.raw WeakTypeTag TypeTag diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala index 4c7f05318c53..106e649ac69f 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala @@ -78,7 +78,7 @@ trait PresentationCompilation { override lazy val platform: ThisPlatform = { new JavaPlatform { lazy val global: self.type = self - override private[nsc] lazy val classPath: ClassPath = mergedFlatClasspath + override lazy val classPath: ClassPath = mergedFlatClasspath } } } diff --git a/test/files/run/t6240-universe-code-gen.scala b/test/files/run/t6240-universe-code-gen.scala index e5a49921577c..f628299a3e4b 100644 --- a/test/files/run/t6240-universe-code-gen.scala +++ b/test/files/run/t6240-universe-code-gen.scala @@ -44,7 +44,7 @@ object Test extends App { |trait JavaUniverseForce { self: runtime.JavaUniverse => | def force() { | Literal(Constant(42)).duplicate - | nme.flattenedName() + | nme.flattenedName(NoSymbol, nme.NO_NAME) | nme.raw | WeakTypeTag | TypeTag diff --git a/test/junit/scala/reflect/internal/LongNamesTest.scala b/test/junit/scala/reflect/internal/LongNamesTest.scala new file mode 100644 index 000000000000..9855a97e5b63 --- /dev/null +++ b/test/junit/scala/reflect/internal/LongNamesTest.scala @@ -0,0 +1,43 @@ +package scala.reflect.internal + +import org.junit._ +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.tools.testing.VirtualCompiler +import scala.language.reflectiveCalls + +@RunWith(classOf[JUnit4]) +class 
LongNamesTest { + + @Test def t11227: Unit = { + val compiler = new VirtualCompiler + + val longClassName = (0 to 512).map(_ => 'X').mkString + + val javaCode = + s"""package pkg; + | + |public class Outer { + | public static class $longClassName {} + |} + """.stripMargin + + val scalaCode = + s"""package pkg + | + |class Test { + | def test = new Outer.$longClassName().getClass.getName + |} + """.stripMargin + + compiler.compileJava("Outer.java" -> javaCode) + + compiler.compileScala("Test.scala" -> scalaCode) + + val testClass = compiler.classloader.loadClass("pkg.Test") + + val output = testClass.newInstance().asInstanceOf[{ def test(): String }].test() + Assert.assertEquals(s"pkg.Outer$$$longClassName", output) + } +} diff --git a/test/junit/scala/tools/testing/VirtualCompilerTesting.scala b/test/junit/scala/tools/testing/VirtualCompilerTesting.scala new file mode 100644 index 000000000000..8025bfcf1932 --- /dev/null +++ b/test/junit/scala/tools/testing/VirtualCompilerTesting.scala @@ -0,0 +1,116 @@ +package scala +package tools +package testing + +import java.io.OutputStreamWriter +import java.net.URI +import java.nio.charset.StandardCharsets +import java.util.Locale + +import javax.tools._ + +import scala.collection.JavaConverters._ +import scala.reflect.internal.util.AbstractFileClassLoader +import scala.reflect.io.{AbstractFile, VirtualDirectory} +import scala.tools.nsc.classpath.{AggregateClassPath, VirtualDirectoryClassPath} +import scala.tools.nsc.{Global, Settings} + +/** Utilities for testing with javac/scalac without using the actual filesystem, + * presumably because one doesn't wish to deal with platform idiosyncracies. + */ +class VirtualCompiler { + /** A java compiler instance that we can use. */ + lazy val javac = ToolProvider.getSystemJavaCompiler + + /** The directory in which are placed classfiles. */ + lazy val output = new VirtualDirectory("out", maybeContainer = None) + + /** A javac file manager that places classfiles in `output`. */ + lazy val fileManager: JavaFileManager = { + val dflt = javac.getStandardFileManager(null, Locale.ENGLISH, StandardCharsets.UTF_8) + new VirtualFileManager(output, dflt) + } + + /** A scala compiler. 
*/ + lazy val scalac: Global = { + val settings = new Settings() + settings.usejavacp.value = true + settings.outputDirs setSingleOutput output + new Global(settings) { + override lazy val platform = new super.GlobalPlatform() { + override val classPath = AggregateClassPath(List( + super.classPath, + VirtualDirectoryClassPath(output), + )) + } + } + } + + def compileJava(sources: (String, String)*): Unit = { + val sourcefiles = sources.map { + case (filename, content) => + new InMemorySourcefile(new URI("vc:/" + filename), content) + } + val writer = new OutputStreamWriter(System.out) + assert { + javac + .getTask(writer, fileManager, null, null, null, sourcefiles.asJava) + .call() + } + } + + def compileScala(sources: (String, String)*): Unit = { + val run = new scalac.Run() + val units = sources.map { + case (filename, content) => scalac.newCompilationUnit(content, filename) + } + run.compileUnits(units.toList, run.parserPhase) + } + + def classloader: ClassLoader = + new AbstractFileClassLoader(output, getClass.getClassLoader) +} + +final class VirtualFileManager(dir: VirtualDirectory, del: StandardJavaFileManager) + extends ForwardingJavaFileManager[StandardJavaFileManager](del) { + import JavaFileManager.Location + import JavaFileObject.Kind + + override def getJavaFileForOutput( + loc: Location, + clasz: String, + kind: Kind, + sibling: FileObject, + ): JavaFileObject = { + assert(loc == StandardLocation.CLASS_OUTPUT, loc) + assert(kind == Kind.CLASS, kind) + val (file, uri) = mkFile(clasz) + new SimpleJavaFileObject(uri, Kind.CLASS) { + override def openOutputStream() = file.output + } + } + + override def getJavaFileForInput(loc: Location, clasz: String, kind: Kind): JavaFileObject = { + if (loc == StandardLocation.CLASS_PATH) { + assert(kind == Kind.CLASS, kind) + val (file, uri) = mkFile(clasz) + new SimpleJavaFileObject(uri, Kind.CLASS) { + override def openInputStream() = file.input + } + } else super.getJavaFileForInput(loc, clasz, kind) + } + + private def mkFile(clasz: String): (AbstractFile, URI) = { + val parts = clasz.split('.') + val pkg = parts.init.foldLeft[AbstractFile](dir)(_ subdirectoryNamed _) + val file = pkg.fileNamed(parts.last + ".class") + val uri = new URI("vc:/" + parts.mkString("/") + ".class") + (file, uri) + } +} + + +final class InMemorySourcefile(uri: URI, contents: String) + extends SimpleJavaFileObject(uri, JavaFileObject.Kind.SOURCE) { + override def getCharContent(ignoreEncodingErrors: Boolean) = contents +} From 8b095a954b2f27038ee773b61123a0628646750d Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 4 Dec 2018 14:06:13 +0100 Subject: [PATCH 1575/2793] Bump starr to 2.12.8 --- build.sbt | 2 +- versions.properties | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/build.sbt b/build.sbt index fa233d7d6a68..10e40da107d8 100644 --- a/build.sbt +++ b/build.sbt @@ -106,7 +106,7 @@ lazy val publishSettings : Seq[Setting[_]] = Seq( // should not be set directly. It is the same as the Maven version and derived automatically from `baseVersion` and // `baseVersionSuffix`. 
globalVersionSettings -baseVersion in Global := "2.12.8" +baseVersion in Global := "2.12.9" baseVersionSuffix in Global := "SNAPSHOT" organization in ThisBuild := "org.scala-lang" homepage in ThisBuild := Some(url("https://www.scala-lang.org")) diff --git a/versions.properties b/versions.properties index a3bee7a23d7f..d3ff92c49a31 100644 --- a/versions.properties +++ b/versions.properties @@ -1,5 +1,5 @@ # Scala version used for bootstrapping (see README.md) -starr.version=2.12.7 +starr.version=2.12.8 # The scala.binary.version determines how modules are resolved. It is set as follows: # - After 2.x.0 is released, the binary version is 2.x From 72886ea2258e8b6d7c51838ff757a9645cd129db Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Wed, 21 Nov 2018 09:47:07 -0500 Subject: [PATCH 1576/2793] avoid some allocations in computing enclosingContextChain --- .../scala/tools/nsc/typechecker/Namers.scala | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 6e7f4fe19ab7..22f8f9057866 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -151,11 +151,15 @@ trait Namers extends MethodSynthesis { setPrivateWithin(tree, sym, tree.mods) def inConstructorFlag: Long = { - val termOwnedContexts: List[Context] = - context.enclosingContextChain.takeWhile(c => c.owner.isTerm && !c.owner.isAnonymousFunction) - val constructorNonSuffix = termOwnedContexts exists (c => c.owner.isConstructor && !c.inConstructorSuffix) - val earlyInit = termOwnedContexts exists (_.owner.isEarlyInitialized) - if (constructorNonSuffix || earlyInit) INCONSTRUCTOR else 0L + var c = context + def inTermOwnedContext = c.owner.isTerm && !c.owner.isAnonymousFunction + def constructorNonSuffix = c.owner.isConstructor && !c.inConstructorSuffix + def earlyInit = c.owner.isEarlyInitialized + while (inTermOwnedContext) { + if (constructorNonSuffix || earlyInit) return INCONSTRUCTOR + c = c.outer + } + 0L } def moduleClassFlags(moduleFlags: Long) = From f4ec73898629f4e7b5cb0c5f5a32781a8fd9f835 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Wed, 21 Nov 2018 11:12:29 -0500 Subject: [PATCH 1577/2793] Avoid allocation of *Ref objects for vars in Context#lookupSymbol Even after 30+ iterations, I was unable to get the JIT to eliminate these allocations. Add in the single-element cache that could be used for scala/scala-dev#498. 
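For background, a hedged, generic illustration of the allocation mentioned above (not the `lookupSymbol` code itself): in Scala 2, a local `var` that is captured by a nested function or closure is compiled to a heap-allocated box such as `scala.runtime.IntRef`, allocated anew on every call.

```scala
object CapturedVarBoxing {
  // `sum` is captured by the closure passed to foreach, so each call
  // allocates an IntRef box to hold it.
  def boxedSum(xs: List[Int]): Int = {
    var sum = 0
    xs.foreach(x => sum += x)
    sum
  }

  // Nothing captures `sum` or `rest`, so they remain plain local slots.
  def plainSum(xs: List[Int]): Int = {
    var sum = 0
    var rest = xs
    while (rest.nonEmpty) { sum += rest.head; rest = rest.tail }
    sum
  }

  def main(args: Array[String]): Unit =
    println((boxedSum(List(1, 2, 3)), plainSum(List(1, 2, 3)))) // (6,6)
}
```

Moving the mutable lookup state into fields of a single cached helper object, handed out through `ReusableInstance` in the patch below, sidesteps those per-call boxes.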
--- .../tools/nsc/typechecker/Contexts.scala | 137 ++++++++++-------- .../internal/util/ReusableInstance.scala | 37 +++++ 2 files changed, 113 insertions(+), 61 deletions(-) create mode 100644 src/reflect/scala/reflect/internal/util/ReusableInstance.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index ad643bc9a9f8..5b543c8f494b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -15,7 +15,7 @@ package typechecker import scala.collection.{ immutable, mutable } import scala.annotation.tailrec -import scala.reflect.internal.util.{ shortClassOfInstance, SomeOfNil } +import scala.reflect.internal.util.{ ReusableInstance, shortClassOfInstance, SomeOfNil } import scala.tools.nsc.reporters.Reporter /** @@ -970,7 +970,7 @@ trait Contexts { self: Analyzer => * package object foo { type InputStream = java.io.InputStream } * import foo._, java.io._ */ - private def resolveAmbiguousImport(name: Name, imp1: ImportInfo, imp2: ImportInfo): Option[ImportInfo] = { + private[Contexts] def resolveAmbiguousImport(name: Name, imp1: ImportInfo, imp2: ImportInfo): Option[ImportInfo] = { val imp1Explicit = imp1 isExplicitImport name val imp2Explicit = imp2 isExplicitImport name val ambiguous = if (imp1.depth == imp2.depth) imp1Explicit == imp2Explicit else !imp1Explicit && imp2Explicit @@ -1020,10 +1020,10 @@ trait Contexts { self: Analyzer => /** The symbol with name `name` imported via the import in `imp`, * if any such symbol is accessible from this context. */ - private def importedAccessibleSymbol(imp: ImportInfo, name: Name, requireExplicit: Boolean, record: Boolean): Symbol = + private[Contexts] def importedAccessibleSymbol(imp: ImportInfo, name: Name, requireExplicit: Boolean, record: Boolean): Symbol = imp.importedSymbol(name, requireExplicit, record) filter (s => isAccessible(s, imp.qual.tpe, superAccess = false)) - private def requiresQualifier(s: Symbol): Boolean = ( + private[Contexts] def requiresQualifier(s: Symbol): Boolean = ( s.owner.isClass && !s.owner.isPackageClass && !s.isTypeParameterOrSkolem @@ -1040,17 +1040,63 @@ trait Contexts { self: Analyzer => def isNameInScope(name: Name) = lookupSymbol(name, _ => true).isSuccess - /** Find the symbol of a simple name starting from this context. - * All names are filtered through the "qualifies" predicate, - * the search continuing as long as no qualifying name is found. - */ - def lookupSymbol(name: Name, qualifies: Symbol => Boolean): NameLookup = { - var lookupError: NameLookup = null // set to non-null if a definite error is encountered - var inaccessible: NameLookup = null // records inaccessible symbol for error reporting in case none is found - var defSym: Symbol = NoSymbol // the directly found symbol - var pre: Type = NoPrefix // the prefix type of defSym, if a class member - var cx: Context = this // the context under consideration - var symbolDepth: Int = -1 // the depth of the directly found symbol + def lookupSymbol(name: Name, qualifies: Symbol => Boolean): NameLookup = + symbolLookupCache.using(_(this, name)(qualifies)) + + final def lookupCompanionInIncompleteOwner(original: Symbol): Symbol = { + // Must have both a class and module symbol, so that `{ class C; def C }` or `{ type T; object T }` are not companions. 
+ def isCompanion(sym: Symbol): Boolean = + (original.isModule && sym.isClass || sym.isModule && original.isClass) && sym.isCoDefinedWith(original) + lookupSibling(original, original.name.companionName).filter(isCompanion) + } + + final def lookupSibling(original: Symbol, name: Name): Symbol = { + /* Search scopes in current and enclosing contexts for the definition of `symbol` */ + def lookupScopeEntry(symbol: Symbol): ScopeEntry = { + var res: ScopeEntry = null + var ctx = this + while (res == null && ctx.outer != ctx) { + val s = ctx.scope lookupSymbolEntry symbol + if (s != null) + res = s + else + ctx = ctx.outer + } + res + } + + // Must be owned by the same Scope, to ensure that in + // `{ class C; { ...; object C } }`, the class is not seen as a companion of the object. + lookupScopeEntry(original) match { + case null => NoSymbol + case entry => + entry.owner.lookupNameInSameScopeAs(original, name) + } + } + + } //class Context + + /** Find the symbol of a simple name starting from this context. + * All names are filtered through the "qualifies" predicate, + * the search continuing as long as no qualifying name is found. + */ + // OPT: moved this into a (cached) object to avoid costly and non-eliminated {Object,Int}Ref allocations + private[Contexts] final val symbolLookupCache = ReusableInstance[SymbolLookup](new SymbolLookup) + private[Contexts] final class SymbolLookup { + private[this] var lookupError: NameLookup = _ // set to non-null if a definite error is encountered + private[this] var inaccessible: NameLookup = _ // records inaccessible symbol for error reporting in case none is found + private[this] var defSym: Symbol = _ // the directly found symbol + private[this] var pre: Type = _ // the prefix type of defSym, if a class member + private[this] var cx: Context = _ // the context under consideration + private[this] var symbolDepth: Int = _ // the depth of the directly found symbol + + def apply(thisContext: Context, name: Name)(qualifies: Symbol => Boolean): NameLookup = { + lookupError = null + inaccessible = null + defSym = NoSymbol + pre = NoPrefix + cx = thisContext + symbolDepth = -1 def finish(qual: Tree, sym: Symbol): NameLookup = ( if (lookupError ne null) lookupError @@ -1061,7 +1107,7 @@ trait Contexts { self: Analyzer => } ) def finishDefSym(sym: Symbol, pre0: Type): NameLookup = - if (requiresQualifier(sym)) + if (thisContext.requiresQualifier(sym)) finish(gen.mkAttributedQualifier(pre0), sym) else finish(EmptyTree, sym) @@ -1069,20 +1115,21 @@ trait Contexts { self: Analyzer => def isPackageOwnedInDifferentUnit(s: Symbol) = ( s.isDefinedInPackage && ( !currentRun.compiles(s) - || unit.exists && s.sourceFile != unit.source.file + || thisContext.unit.exists && s.sourceFile != thisContext.unit.source.file ) ) - def lookupInPrefix(name: Name) = { + def lookupInPrefix(name: Name) = { val sym = pre.member(name).filter(qualifies) def isNonPackageNoModuleClass(sym: Symbol) = sym.isClass && !sym.isModuleClass && !sym.isPackageClass - if (!sym.exists && unit.isJava && isNonPackageNoModuleClass(pre.typeSymbol)) { + if (!sym.exists && thisContext.unit.isJava && isNonPackageNoModuleClass(pre.typeSymbol)) { // TODO factor out duplication with Typer::inCompanionForJavaStatic - val pre1 = companionSymbolOf(pre.typeSymbol, this).typeOfThis + val pre1 = companionSymbolOf(pre.typeSymbol, thisContext).typeOfThis pre1.member(name).filter(qualifies).andAlso(_ => pre = pre1) } else sym } - def accessibleInPrefix(s: Symbol) = isAccessible(s, pre, superAccess = false) + def 
accessibleInPrefix(s: Symbol) = + thisContext.isAccessible(s, pre, superAccess = false) def searchPrefix = { cx = cx.enclClass @@ -1147,11 +1194,11 @@ trait Contexts { self: Analyzer => symbolDepth = cx.depth var impSym: Symbol = NoSymbol - val importCursor = new ImportCursor(this, name) + val importCursor = new ImportCursor(thisContext, name) import importCursor.{imp1, imp2} def lookupImport(imp: ImportInfo, requireExplicit: Boolean) = - importedAccessibleSymbol(imp, name, requireExplicit, record = true) filter qualifies + thisContext.importedAccessibleSymbol(imp, name, requireExplicit, record = true) filter qualifies // Java: A single-type-import declaration d in a compilation unit c of package p // that imports a type named n shadows, throughout c, the declarations of: @@ -1168,7 +1215,7 @@ trait Contexts { self: Analyzer => // 2) Explicit imports have next highest precedence. def depthOk(imp: ImportInfo) = ( imp.depth > symbolDepth - || (unit.isJava && imp.isExplicitImport(name) && imp.depth == symbolDepth) + || (thisContext.unit.isJava && imp.isExplicitImport(name) && imp.depth == symbolDepth) ) while (!impSym.exists && importCursor.imp1Exists && depthOk(importCursor.imp1)) { @@ -1212,13 +1259,13 @@ trait Contexts { self: Analyzer => // actually used. val other = lookupImport(imp2, requireExplicit = !importCursor.sameDepth) - def imp1wins() { importCursor.advanceImp2() } - def imp2wins() { impSym = other; importCursor.advanceImp1Imp2() } + @inline def imp1wins() { importCursor.advanceImp2() } + @inline def imp2wins() { impSym = other; importCursor.advanceImp1Imp2() } if (!other.exists) // imp1 wins; drop imp2 and continue. imp1wins() else if (importCursor.imp2Wins) // imp2 wins; drop imp1 and continue. imp2wins() - else resolveAmbiguousImport(name, imp1, imp2) match { + else thisContext.resolveAmbiguousImport(name, imp1, imp2) match { case Some(imp) => if (imp eq imp1) imp1wins() else imp2wins() case _ => lookupError = ambiguousImports(imp1, imp2) } @@ -1228,39 +1275,7 @@ trait Contexts { self: Analyzer => } else finish(EmptyTree, NoSymbol) } - - final def lookupCompanionInIncompleteOwner(original: Symbol): Symbol = { - // Must have both a class and module symbol, so that `{ class C; def C }` or `{ type T; object T }` are not companions. - def isCompanion(sym: Symbol): Boolean = - (original.isModule && sym.isClass || sym.isModule && original.isClass) && sym.isCoDefinedWith(original) - lookupSibling(original, original.name.companionName).filter(isCompanion) - } - - final def lookupSibling(original: Symbol, name: Name): Symbol = { - /* Search scopes in current and enclosing contexts for the definition of `symbol` */ - def lookupScopeEntry(symbol: Symbol): ScopeEntry = { - var res: ScopeEntry = null - var ctx = this - while (res == null && ctx.outer != ctx) { - val s = ctx.scope lookupSymbolEntry symbol - if (s != null) - res = s - else - ctx = ctx.outer - } - res - } - - // Must be owned by the same Scope, to ensure that in - // `{ class C; { ...; object C } }`, the class is not seen as a companion of the object. 
- lookupScopeEntry(original) match { - case null => NoSymbol - case entry => - entry.owner.lookupNameInSameScopeAs(original, name) - } - } - - } //class Context + } /** A `Context` focussed on an `Import` tree */ trait ImportContext extends Context { @@ -1486,7 +1501,7 @@ trait Contexts { self: Analyzer => var result: Symbol = NoSymbol var renamed = false var selectors = tree.selectors - def current = selectors.head + @inline def current = selectors.head while ((selectors ne Nil) && result == NoSymbol) { if (current.rename == name.toTermName) result = qual.tpe.nonLocalMember( // new to address #2733: consider only non-local members for imports diff --git a/src/reflect/scala/reflect/internal/util/ReusableInstance.scala b/src/reflect/scala/reflect/internal/util/ReusableInstance.scala new file mode 100644 index 000000000000..fa397d4cdf07 --- /dev/null +++ b/src/reflect/scala/reflect/internal/util/ReusableInstance.scala @@ -0,0 +1,37 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala +package reflect +package internal +package util + +/** A wrapper for a re-entrant, cached instance of a value of type `T`. + * + * Not thread safe. + */ +final class ReusableInstance[T <: AnyRef](make: () => T) { + private val cached = make() + private var taken = false + + @inline def using[R](action: T => R): R = + if (taken) action(make()) + else try { + taken = true + action(cached) + } finally taken = false +} + +object ReusableInstance { + def apply[T <: AnyRef](make: => T): ReusableInstance[T] = + new ReusableInstance[T](make _) +} \ No newline at end of file From 4abad550cd46469952ace40ed7cd5cff946919f0 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Wed, 21 Nov 2018 18:15:45 -0500 Subject: [PATCH 1578/2793] Simplify Symbol#name to read from a field (pre-flatten) The previous implementation of `name` had 5 implementations: - delegating to a private `rawname` in `TermSymbol` and `TypeSymbol` - possibly flattening the raw name (post-flatten) in `ClassSymbol` and `ModuleSymbol` - `nme.NO_NAME` in `NoSymbol` `name` shows up as very hot on some profiles, and before this patch compiled to a relatively-expensive virtual dispatch (even after C2). However, the majority of calls to `name` only read (eventually) from the `rawname` field in `TermSymbol` or `TypeSymbol`! Therefore, this patch moves `rawname` into `Symbol` and prefers to directly access that if possible. 
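For illustration only (a simplified sketch with invented names, not the compiler's actual hierarchy), the resulting shape is: a final accessor reading a field in the base class, plus a protected hook that only the flattening subclasses override, so the common case is a plain field read rather than a virtual call.

```
abstract class Sym(initName: String) {
  protected[this] final var _rawname: String = initName  // now lives in the base class
  protected def needsFlatClasses: Boolean = false        // cheap guard, rarely true
  protected def flattenedName: String = _rawname         // hook for the flattening case
  final def rawname: String = _rawname
  final def name: String =                                // hot path: a plain field read
    if (needsFlatClasses) flattenedName else _rawname
}

class TermSym(n: String) extends Sym(n)                   // no override needed

class ModuleSym(owner: String, n: String) extends Sym(n) {
  private[this] var flatname: String = _
  override protected def needsFlatClasses = true
  override protected def flattenedName: String = {        // computed once, then cached
    if (flatname eq null) flatname = owner + "$" + _rawname
    flatname
  }
}

object NameDemo extends App {
  println(new TermSym("x").name)           // x
  println(new ModuleSym("p.C", "M").name)  // p.C$M
}
```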
--- .../scala/reflect/internal/Symbols.scala | 47 ++++++------------- 1 file changed, 14 insertions(+), 33 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 2552580a9820..56ae2d8086d1 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -228,6 +228,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => with HasFlags with Annotatable[Symbol] with Attachable { + protected[this] final var _rawname = initName // makes sure that all symbols that runtime reflection deals with are synchronized private def isSynchronized = this.isInstanceOf[scala.reflect.runtime.SynchronizedSymbols#SynchronizedSymbol] private def isAprioriThreadsafe = isThreadsafe(AllOps) @@ -242,16 +243,15 @@ trait Symbols extends api.Symbols { self: SymbolTable => // Rename as little as possible. Enforce invariants on all renames. type TypeOfClonedSymbol >: Null <: Symbol { type NameType = Symbol.this.NameType } - // Abstract here so TypeSymbol and TermSymbol can have a private[this] field - // with the proper specific type. - def rawname: NameType - def name: NameType + final def rawname: NameType = _rawname.asInstanceOf[NameType] + final def name: NameType = if (needsFlatClasses) flattenedName else _rawname.asInstanceOf[NameType] def name_=(n: Name): Unit = { if (shouldLogAtThisPhase) { def msg = s"In $owner, renaming $name -> $n" if (isSpecialized) debuglog(msg) else log(msg) } } + protected[this] def flattenedName: NameType = rawname def asNameType(n: Name): NameType // Syncnote: need not be protected, as only assignment happens in owner_=, which is not exposed to api @@ -840,7 +840,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isDelambdafyFunction = isSynthetic && (name containsName tpnme.DELAMBDAFY_LAMBDA_CLASS_NAME) final def isDelambdafyTarget = isArtifact && isMethod && hasAttachment[DelambdafyTarget.type] final def isDefinedInPackage = effectiveOwner.isPackageClass - final def needsFlatClasses = phase.flatClasses && (rawowner ne NoSymbol) && !rawowner.isPackageClass + final def needsFlatClasses = !isMethod && phase.flatClasses && (rawowner ne NoSymbol) && !rawowner.isPackageClass // TODO introduce a flag for these? 
final def isPatternTypeVariable: Boolean = @@ -2818,11 +2818,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => type TypeOfClonedSymbol = TermSymbol - private[this] var _rawname: TermName = initName - def rawname = _rawname - def name = { - _rawname - } override def name_=(name: Name) { if (name != rawname) { super.name_=(name) // logging @@ -2944,7 +2939,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** A class for module symbols */ class ModuleSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName) extends TermSymbol(initOwner, initPos, initName) with ModuleSymbolApi { - private var flatname: TermName = null + private var flatname: TermName = _ override def associatedFile = moduleClass.associatedFile override def associatedFile_=(f: AbstractFile) { moduleClass.associatedFile = f } @@ -2956,14 +2951,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (!isMethod && needsFlatClasses) rawowner.owner else rawowner } - override def name: TermName = { - if (!isMethod && needsFlatClasses) { - if (flatname eq null) - flatname = nme.flattenedName(rawowner, rawname) + override protected[this] def flattenedName: TermName = { + if (flatname eq null) + flatname = nme.flattenedName(rawowner, rawname) - flatname - } - else rawname + flatname } } implicit val ModuleSymbolTag = ClassTag[ModuleSymbol](classOf[ModuleSymbol]) @@ -3081,15 +3073,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => abstract class TypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName) extends Symbol(initOwner, initPos, initName) with TypeSymbolApi { privateWithin = NoSymbol - private[this] var _rawname: TypeName = initName type TypeOfClonedSymbol >: Null <: TypeSymbol // cloneSymbolImpl still abstract in TypeSymbol. 
- def rawname = _rawname - def name = { - _rawname - } final def asNameType(n: Name) = n.toTypeName override def isNonClassType = true @@ -3377,14 +3364,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (needsFlatClasses) rawowner.owner else rawowner } - override def name: TypeName = { - if (needsFlatClasses) { - if (flatname eq null) - flatname = tpnme.flattenedName(rawowner, rawname) - - flatname - } - else rawname + override protected[this] def flattenedName: TypeName = { + if (flatname eq null) + flatname = tpnme.flattenedName(rawowner, rawname) + flatname } /** A symbol carrying the self type of the class as its type */ @@ -3573,8 +3556,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => type TypeOfClonedSymbol = NoSymbol def asNameType(n: Name) = n.toTermName - def rawname = nme.NO_NAME - def name = nme.NO_NAME override def name_=(n: Name) = abort("Cannot set NoSymbol's name to " + n) // Syncnote: no need to synchronize this, because NoSymbol's initialization is triggered by JavaUniverse.init From da229e133452b5aaf02fcf2fbcd78b1640484738 Mon Sep 17 00:00:00 2001 From: Anselm von Wangenheim Date: Fri, 16 Nov 2018 01:31:29 +0100 Subject: [PATCH 1579/2793] [nomerge] Fix bug in immutable.HashMap.split --- .../scala/collection/immutable/HashMap.scala | 5 ++++- .../collection/immutable/HashMapTest.scala | 17 +++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 37a4c48e8ef9..1c8157d54bc4 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -295,7 +295,10 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { override def foreach[U](f: ((A, B)) => U): Unit = kvs.foreach(f) override def split: Seq[HashMap[A, B]] = { val (x, y) = kvs.splitAt(kvs.size / 2) - def newhm(lm: ListMap[A, B @uV]) = new HashMapCollision1(hash, lm) + def newhm(lm: ListMap[A, B @uV]) = { + if (lm.size > 1) new HashMapCollision1(hash, lm) + else new HashMap1(lm.head._1, hash, lm.head._2, lm.head) + } List(newhm(x), newhm(y)) } protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[A, B1]): HashMap[A, B1] = { diff --git a/test/junit/scala/collection/immutable/HashMapTest.scala b/test/junit/scala/collection/immutable/HashMapTest.scala index 8b036f26ac4c..7547d0668caf 100644 --- a/test/junit/scala/collection/immutable/HashMapTest.scala +++ b/test/junit/scala/collection/immutable/HashMapTest.scala @@ -55,4 +55,21 @@ class HashMapTest { val expected = HashMap(A(0) -> 1, A(1) -> 1) assertEquals(merged, expected) } + + @Test + def t11257(): Unit = { + case class PoorlyHashed(i: Int) { + override def hashCode(): Int = i match { + case 0 | 1 => 42 + case _ => super.hashCode() + } + } + val hashMapCollision = HashMap(PoorlyHashed(0) -> 0, PoorlyHashed(1) -> 1) + val singleElementMap = hashMapCollision.split.head + assert(singleElementMap.isInstanceOf[HashMap.HashMap1[_, _]]) + val stillSingleElement = singleElementMap.split.head + assert(stillSingleElement.isInstanceOf[HashMap.HashMap1[_, _]]) + val twoElemTrie = stillSingleElement + (PoorlyHashed(2) -> 2) + assert(twoElemTrie.isInstanceOf[HashMap.HashTrieMap[_, _]]) + } } From 24d6fe81086279e2045bc780cad04f08fa1df07c Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 11 Dec 2018 21:12:12 +0100 Subject: [PATCH 1580/2793] [squash] improve test --- 
test/junit/scala/collection/IteratorTest.scala | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/test/junit/scala/collection/IteratorTest.scala b/test/junit/scala/collection/IteratorTest.scala index 5e8ca1e53a59..9099b6b85d10 100644 --- a/test/junit/scala/collection/IteratorTest.scala +++ b/test/junit/scala/collection/IteratorTest.scala @@ -361,26 +361,28 @@ class IteratorTest { import java.lang.ref._ // Array.iterator holds onto array reference; by contrast, iterating over List walks tail. // Avoid reaching seq1 through test class. - val seq1 = new WeakReference(Array("first", "second")) + var seq1 = Array("first", "second") // captured, need to set to null + var seq11: String = null val seq2 = List("third") val it0: Iterator[Int] = Iterator(1, 2) lazy val it: Iterator[String] = it0.flatMap { - case 1 => seq1.get + case 1 => val r = seq1; seq1 = null; seq11 = r(1); r case _ => check() ; seq2 } - def check() = assertNotReachable(seq1.get, it)(()) - def checkHasElement() = assertNotReachable(seq1.get.apply(1), it)(()) + def check() = assertNotReachable(seq1, it)(()) + def checkHasElement() = assertNotReachable(seq11, it)(()) assert(it.hasNext) assertEquals("first", it.next()) // verify that we're in the middle of seq1 assertThrows[AssertionError](checkHasElement()) + seq11 = null assertThrows[AssertionError](check()) assert(it.hasNext) assertEquals("second", it.next()) assert(it.hasNext) - assertNotReachable(seq1.get, it) { + assertNotReachable(seq1, it) { assertEquals("third", it.next()) } assert(!it.hasNext) From 49a77477658a68f24eb4ab627a1a56c9d2141404 Mon Sep 17 00:00:00 2001 From: Alex Vayda Date: Mon, 17 Dec 2018 18:55:27 +0100 Subject: [PATCH 1581/2793] Fix documentation on the Future.reduce() method --- src/library/scala/concurrent/Future.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index 19762042f4b0..8f6983b27d1d 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -792,7 +792,7 @@ object Future { } /** Initiates a non-blocking, asynchronous, fold over the supplied futures - * where the fold-zero is the result value of the `Future` that's completed first. + * where the fold-zero is the result value of the first `Future` in the collection. 
* * Example: * {{{ From 9d792e0483c43cbf881cd4672bd8d7cdc1ffc4f2 Mon Sep 17 00:00:00 2001 From: Enno Runne <458526+ennru@users.noreply.github.com> Date: Fri, 14 Dec 2018 19:37:46 +0100 Subject: [PATCH 1582/2793] [backport] Scaladoc: Introduce new variables to create better links to source Introduces new variables for -doc-source-url FILE_PATH_EXT - same as FILE_PATH, but including the file extension (which might be .java) FILE_EXT - the file extension (.scala or .java) FILE_LINE - containing the line number of the Symbol Fixes FILE_PATH to never contain the file extension (see scala/bug#5388) --- build.sbt | 2 +- .../scala/tools/nsc/doc/Settings.scala | 2 +- .../tools/nsc/doc/model/ModelFactory.scala | 10 +++-- test/scaladoc/resources/doc-source-url.java | 13 +++++++ test/scaladoc/resources/doc-source-url.scala | 13 +++++++ test/scaladoc/run/doc-source-url-java.check | 1 + test/scaladoc/run/doc-source-url-java.scala | 39 +++++++++++++++++++ test/scaladoc/run/doc-source-url.check | 1 + test/scaladoc/run/doc-source-url.scala | 39 +++++++++++++++++++ 9 files changed, 115 insertions(+), 5 deletions(-) create mode 100644 test/scaladoc/resources/doc-source-url.java create mode 100644 test/scaladoc/resources/doc-source-url.scala create mode 100644 test/scaladoc/run/doc-source-url-java.check create mode 100644 test/scaladoc/run/doc-source-url-java.scala create mode 100644 test/scaladoc/run/doc-source-url.check create mode 100644 test/scaladoc/run/doc-source-url.scala diff --git a/build.sbt b/build.sbt index 10e40da107d8..fca522798fa2 100644 --- a/build.sbt +++ b/build.sbt @@ -204,7 +204,7 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings + "-doc-version", versionProperties.value.canonicalVersion, "-doc-title", description.value, "-sourcepath", (baseDirectory in ThisBuild).value.toString, - "-doc-source-url", s"https://github.com/scala/scala/tree/${versionProperties.value.githubTree}€{FILE_PATH}.scala#L1" + "-doc-source-url", s"https://github.com/scala/scala/tree/${versionProperties.value.githubTree}€{FILE_PATH_EXT}#L€{FILE_LINE}" ), incOptions := (incOptions in LocalProject("root")).value, apiURL := Some(url("http://www.scala-lang.org/api/" + versionProperties.value.mavenVersion + "/")), diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala index 5b815fa12404..59e5088ef64c 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Settings.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala @@ -76,7 +76,7 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) val docsourceurl = StringSetting ( "-doc-source-url", "url", - s"A URL pattern used to link to the source file; the following variables are available: €{TPL_NAME}, €{TPL_OWNER} and respectively €{FILE_PATH}. For example, for `scala.collection.Seq`, the variables will be expanded to `Seq`, `scala.collection` and respectively `scala/collection/Seq` (without the backquotes). To obtain a relative path for €{FILE_PATH} instead of an absolute one, use the ${sourcepath.name} setting.", + s"A URL pattern used to link to the source file, with some variables supported: For example, for `scala.collection.Seq` €{TPL_NAME} gives `Seq`, €{TPL_OWNER} gives `scala.collection`, €{FILE_PATH} gives `scala/collection/Seq`, €{FILE_EXT} gives `.scala`, €{FILE_PATH_EXT} gives `scala/collection/Seq.scala`, and €{FILE_LINE} gives `25` (without the backquotes). 
To obtain a relative path for €{FILE_PATH} and €{FILE_PATH_EXT} instead of an absolute one, use the ${sourcepath.name} setting.", "" ) diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala index 03376d8e9b73..dee00a35cf5d 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala @@ -302,13 +302,17 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { val assumedSourceRoot = fixPath(settings.sourcepath.value) stripSuffix "/" if (!settings.docsourceurl.isDefault) - inSource map { case (file, _) => - val filePath = fixPath(file.path).replaceFirst("^" + assumedSourceRoot, "").stripSuffix(".scala") + inSource map { case (file, line) => + val filePathExt = fixPath(file.path).replaceFirst("^" + assumedSourceRoot, "") + val (filePath, fileExt) = filePathExt.splitAt(filePathExt.indexOf(".", filePathExt.lastIndexOf("/"))) val tplOwner = this.inTemplate.qualifiedName val tplName = this.name - val patches = new Regex("""€\{(FILE_PATH|TPL_OWNER|TPL_NAME)\}""") + val patches = new Regex("""€\{(FILE_PATH|FILE_EXT|FILE_PATH_EXT|FILE_LINE|TPL_OWNER|TPL_NAME)\}""") def substitute(name: String): String = name match { case "FILE_PATH" => filePath + case "FILE_EXT" => fileExt + case "FILE_PATH_EXT" => filePathExt + case "FILE_LINE" => line.toString case "TPL_OWNER" => tplOwner case "TPL_NAME" => tplName } diff --git a/test/scaladoc/resources/doc-source-url.java b/test/scaladoc/resources/doc-source-url.java new file mode 100644 index 000000000000..b127cddf02aa --- /dev/null +++ b/test/scaladoc/resources/doc-source-url.java @@ -0,0 +1,13 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +public class WithSource {} diff --git a/test/scaladoc/resources/doc-source-url.scala b/test/scaladoc/resources/doc-source-url.scala new file mode 100644 index 000000000000..2d97d53e7f72 --- /dev/null +++ b/test/scaladoc/resources/doc-source-url.scala @@ -0,0 +1,13 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +class WithSource diff --git a/test/scaladoc/run/doc-source-url-java.check b/test/scaladoc/run/doc-source-url-java.check new file mode 100644 index 000000000000..619c56180bb9 --- /dev/null +++ b/test/scaladoc/run/doc-source-url-java.check @@ -0,0 +1 @@ +Done. diff --git a/test/scaladoc/run/doc-source-url-java.scala b/test/scaladoc/run/doc-source-url-java.scala new file mode 100644 index 000000000000..4c323d41d17b --- /dev/null +++ b/test/scaladoc/run/doc-source-url-java.scala @@ -0,0 +1,39 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +import java.net.URL + +import scala.tools.nsc.ScalaDocReporter +import scala.tools.nsc.doc.Universe +import scala.tools.nsc.doc.html.Page +import scala.tools.nsc.doc.html.page.EntityPage +import scala.tools.nsc.doc.html.page.diagram.{DiagramGenerator, DotDiagramGenerator} +import scala.tools.nsc.doc.model._ +import scala.tools.partest.ScaladocModelTest + +object Test extends ScaladocModelTest { + + override def resourceFile = "test/scaladoc/resources/doc-source-url.java" + + override def model: Option[Universe] = newDocFactory.makeUniverse(Left(List(resourceFile))) + + def scaladocSettings = "-doc-source-url file:€{FILE_PATH}||€{FILE_EXT}||€{FILE_PATH_EXT}||€{FILE_LINE}" + + def testModel(rootPackage: Package) = { + import access._ + + val clazz = rootPackage._class("WithSource") + + val expect = s"file:test/scaladoc/resources/doc-source-url||.java||test/scaladoc/resources/doc-source-url.java||13" + assert(clazz.sourceUrl.contains(new URL(expect)), s"got ${clazz.sourceUrl}") + } +} diff --git a/test/scaladoc/run/doc-source-url.check b/test/scaladoc/run/doc-source-url.check new file mode 100644 index 000000000000..619c56180bb9 --- /dev/null +++ b/test/scaladoc/run/doc-source-url.check @@ -0,0 +1 @@ +Done. diff --git a/test/scaladoc/run/doc-source-url.scala b/test/scaladoc/run/doc-source-url.scala new file mode 100644 index 000000000000..2d104722075d --- /dev/null +++ b/test/scaladoc/run/doc-source-url.scala @@ -0,0 +1,39 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +import java.net.URL + +import scala.tools.nsc.ScalaDocReporter +import scala.tools.nsc.doc.Universe +import scala.tools.nsc.doc.html.Page +import scala.tools.nsc.doc.html.page.EntityPage +import scala.tools.nsc.doc.html.page.diagram.{DiagramGenerator, DotDiagramGenerator} +import scala.tools.nsc.doc.model._ +import scala.tools.partest.ScaladocModelTest + +object Test extends ScaladocModelTest { + + override def resourceFile = "test/scaladoc/resources/doc-source-url.scala" + + override def model: Option[Universe] = newDocFactory.makeUniverse(Left(List(resourceFile))) + + def scaladocSettings = "-doc-source-url file:€{FILE_PATH}||€{FILE_EXT}||€{FILE_PATH_EXT}||€{FILE_LINE}" + + def testModel(rootPackage: Package) = { + import access._ + + val clazz = rootPackage._class("WithSource") + + val expect = s"file:test/scaladoc/resources/doc-source-url||.scala||test/scaladoc/resources/doc-source-url.scala||13" + assert(clazz.sourceUrl.contains(new URL(expect)), s"got ${clazz.sourceUrl}") + } +} From 1a6c691ec051071117dceab4eda37b3778c0c3f7 Mon Sep 17 00:00:00 2001 From: "Aaron S. Hawley" Date: Wed, 2 Jan 2019 22:19:42 -0400 Subject: [PATCH 1583/2793] Fix rvm in Travis config An "rvm" entry, here "2.2", in a Travis configuration file without a "language: ruby" declaration seems to silently ignore the former. Travis doesn't support multiple languages, so it seems manual installation of a ruby version with rvm is necessary. Diagnostic commands for ruby have been added to help avoid this problem in the future. 
--- .travis.yml | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index e83fd018e548..f1ca37a233af 100644 --- a/.travis.yml +++ b/.travis.yml @@ -45,8 +45,14 @@ jobs: # build the spec using jekyll - stage: build - rvm: 2.2 - install: bundle install + language: ruby + install: + - rvm install 2.2 + - rvm use 2.2 + - rvm info + - ruby -v + - bundler --version + - bundle install script: - 'if [ "$TRAVIS_PULL_REQUEST" = "false" ]; then (cd admin && ./init.sh); fi' - bundle exec jekyll build -s spec/ -d build/spec From b6fab5e6f3161f71257fe0ac6e96b3ee566c631e Mon Sep 17 00:00:00 2001 From: "Aaron S. Hawley" Date: Fri, 4 Jan 2019 16:43:36 -0400 Subject: [PATCH 1584/2793] Add caching for Ruby to Travis config --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index f1ca37a233af..25c9a81c3b5e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,6 +8,7 @@ cache: directories: - $HOME/.ivy2/cache - $HOME/.sbt + - $HOME/.rvm/ stages: - name: build From dd3ae5dde09fd78b187b6803ebd54f0084e74582 Mon Sep 17 00:00:00 2001 From: Ryo Fukumuro Date: Tue, 1 Jan 2019 19:15:47 +0900 Subject: [PATCH 1585/2793] Don't make lifted method static if it's synchronized This fixes an issue where the combination of a nested method and `self.synchronized` unexpectedly locks `self.getClass` instead of `self`. ``` class C { self => def f = { def g = self.synchronized {} } } ``` `g` should be compiled into ``` private final synchronized void g$1(); ``` , not ``` private static final synchronized void g$1(); ``` Fixes scala/bug#11331 --- .../tools/nsc/transform/Delambdafy.scala | 3 +- test/files/run/synchronized.check | 12 +++++ test/files/run/synchronized.scala | 48 +++++++++++++++++++ 3 files changed, 62 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index a5ca807db36a..1bfef5aff429 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -306,7 +306,8 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre case dd: DefDef if dd.symbol.isLiftedMethod && !dd.symbol.isDelambdafyTarget => // scala/bug#9390 emit lifted methods that don't require a `this` reference as STATIC // delambdafy targets are excluded as they are made static by `transformFunction`. - if (!dd.symbol.hasFlag(STATIC) && !methodReferencesThis(dd.symbol)) { + // a synchronized method cannot be static (`methodReferencesThis` will not see the implicit this reference due to `this.synchronized`) + if (!dd.symbol.hasFlag(STATIC | SYNCHRONIZED) && !methodReferencesThis(dd.symbol)) { dd.symbol.setFlag(STATIC) dd.symbol.removeAttachment[mixer.NeedStaticImpl.type] } diff --git a/test/files/run/synchronized.check b/test/files/run/synchronized.check index a7d75fa673af..05234cf85534 100644 --- a/test/files/run/synchronized.check +++ b/test/files/run/synchronized.check @@ -4,6 +4,7 @@ .|... c1.ff: OK .|. c1.fl: OK .|... c1.fo: OK + .|. c1.fc: OK |.. c1.g1: OK |.. c1.gi: OK |.... c1.gv: OK @@ -15,6 +16,7 @@ .|... c1.c.fl: OK .|..... c1.c.fo: OK .|... c1.c.fn: OK + .|... c1.c.fc: OK |.... c1.c.g1: OK |.... c1.c.gi: OK |...... c1.c.gv: OK @@ -26,6 +28,7 @@ .|... c1.O.fl: OK .|..... c1.O.fo: OK .|... c1.O.fn: OK + .|... c1.O.fc: OK |.... c1.O.g1: OK |.... c1.O.gi: OK |...... c1.O.gv: OK @@ -36,6 +39,7 @@ .|... O1.ff: OK .|. O1.fl: OK .|... O1.fo: OK + .|. O1.fc: OK |.. 
O1.g1: OK |.. O1.gi: OK |.... O1.gv: OK @@ -47,6 +51,7 @@ .|... O1.c.fl: OK .|..... O1.c.fo: OK .|... O1.c.fn: OK + .|... O1.c.fc: OK |.... O1.c.g1: OK |.... O1.c.gi: OK |...... O1.c.gv: OK @@ -58,6 +63,7 @@ .|... O1.O.fl: OK .|..... O1.O.fo: OK .|... O1.O.fn: OK + .|... O1.O.fc: OK |.... O1.O.g1: OK |.... O1.O.gi: OK |...... O1.O.gv: OK @@ -68,6 +74,7 @@ .|...... c2.ff: OK .|.... c2.fl: OK .|...... c2.fo: OK + .|.... c2.fc: OK |...... c2.g1: OK |...... c2.gi: OK |........ c2.gv: OK @@ -79,6 +86,7 @@ .|....... c2.c.fl: OK .|......... c2.c.fo: OK .|...... c2.c.fn: OK + .|....... c2.c.fc: OK |........ c2.c.g1: OK |........ c2.c.gi: OK |.......... c2.c.gv: OK @@ -90,6 +98,7 @@ .|....... c2.O.fl: OK .|......... c2.O.fo: OK .|...... c2.O.fn: OK + .|....... c2.O.fc: OK |........ c2.O.g1: OK |........ c2.O.gi: OK |.......... c2.O.gv: OK @@ -100,6 +109,7 @@ .|...... O2.ff: OK .|.... O2.fl: OK .|...... O2.fo: OK + .|.... O2.fc: OK |...... O2.g1: OK |...... O2.gi: OK |........ O2.gv: OK @@ -111,6 +121,7 @@ .|....... O2.c.fl: OK .|......... O2.c.fo: OK .|...... O2.c.fn: OK + .|....... O2.c.fc: OK |........ O2.c.g1: OK |........ O2.c.gi: OK |.......... O2.c.gv: OK @@ -122,6 +133,7 @@ .|....... O2.O.fl: OK .|......... O2.O.fo: OK .|...... O2.O.fn: OK + .|....... O2.O.fc: OK |........ O2.O.g1: OK |........ O2.O.gi: OK |.......... O2.O.gv: OK diff --git a/test/files/run/synchronized.scala b/test/files/run/synchronized.scala index d777b85b2c6b..b1457af32c35 100644 --- a/test/files/run/synchronized.scala +++ b/test/files/run/synchronized.scala @@ -39,6 +39,10 @@ class C1 { flv } def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass) } + def fc = { + def fcf(f0: => Boolean) = synchronized { f0 } + fcf(checkLocks(this)(this.getClass)) + } def g1 = checkLocks()(this, this.getClass) @inline final def gi = checkLocks()(this, this.getClass) @@ -66,6 +70,10 @@ class C1 { } def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, C1.this, C1.this.getClass) } def fn = C1.this.synchronized { checkLocks(C1.this)(C1.this.getClass, this, this.getClass) } + def fc = { + def fcf(f0: => Boolean) = synchronized { f0 } + fcf(checkLocks(this)(this.getClass, C1.this, C1.this.getClass)) + } def g1 = checkLocks()(this, this.getClass, C1.this, C1.this.getClass) @inline final def gi = checkLocks()(this, this.getClass, C1.this, C1.this.getClass) @@ -95,6 +103,10 @@ class C1 { } def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, C1.this, C1.this.getClass) } def fn = C1.this.synchronized { checkLocks(C1.this)(C1.this.getClass, this, this.getClass) } + def fc = { + def fcf(f0: => Boolean) = synchronized { f0 } + fcf(checkLocks(this)(this.getClass, C1.this, C1.this.getClass)) + } def g1 = checkLocks()(this, this.getClass, C1.this, C1.this.getClass) @inline final def gi = checkLocks()(this, this.getClass, C1.this, C1.this.getClass) @@ -127,6 +139,10 @@ object O1 { flv } def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass) } + def fc = { + def fcf(f0: => Boolean) = synchronized { f0 } + fcf(checkLocks(this)(this.getClass)) + } def g1 = checkLocks()(this, this.getClass) @inline final def gi = checkLocks()(this, this.getClass) @@ -154,6 +170,10 @@ object O1 { } def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, O1, O1.getClass) } def fn = O1.synchronized { checkLocks(O1)(O1.getClass, this, this.getClass) } + def fc = { + def fcf(f0: => Boolean) = synchronized { f0 } + 
fcf(checkLocks(this)(this.getClass, O1, O1.getClass)) + } def g1 = checkLocks()(this, this.getClass, O1, O1.getClass) @inline final def gi = checkLocks()(this, this.getClass, O1, O1.getClass) @@ -183,6 +203,10 @@ object O1 { } def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, O1, O1.getClass) } def fn = O1.synchronized { checkLocks(O1)(O1.getClass, this, this.getClass) } + def fc = { + def fcf(f0: => Boolean) = synchronized { f0 } + fcf(checkLocks(this)(this.getClass, O1, O1.getClass)) + } def g1 = checkLocks()(this, this.getClass, O1, O1.getClass) @inline final def gi = checkLocks()(this, this.getClass, O1, O1.getClass) @@ -215,6 +239,10 @@ trait T { flv } def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, classOf[T], classOf[C2], O2.getClass) } + def fc = { + def fcf(f0: => Boolean) = synchronized { f0 } + fcf(checkLocks(this)(this.getClass, classOf[T], classOf[C2], O2.getClass)) + } def g1 = checkLocks()(this, this.getClass, classOf[T], classOf[C2], O2, O2.getClass) @inline final def gi = checkLocks()(this, this.getClass, classOf[T], classOf[C2], O2, O2.getClass) @@ -242,6 +270,10 @@ trait T { } def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) } def fn = T.this.synchronized { checkLocks(T.this)(T.this.getClass, this, this.getClass, classOf[T], classOf[C2], O2.getClass) } + def fc = { + def fcf(f0: => Boolean) = synchronized { f0 } + fcf(checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)) + } def g1 = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) @inline final def gi = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) @@ -271,6 +303,10 @@ trait T { } def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) } def fn = T.this.synchronized { checkLocks(T.this)(T.this.getClass, this, this.getClass, classOf[T], classOf[C2], O2.getClass) } + def fc = { + def fcf(f0: => Boolean) = synchronized { f0 } + fcf(checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass)) + } def g1 = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) @inline final def gi = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], classOf[C2], O2, O2.getClass) @@ -301,6 +337,7 @@ object Test extends App { check("c1.ff", c1.ff) check("c1.fl", c1.fl) check("c1.fo", c1.fo) + check("c1.fc", c1.fc) check("c1.g1", c1.g1) check("c1.gi", c1.gi) check("c1.gv", c1.gv()) @@ -314,6 +351,7 @@ object Test extends App { check("c1.c.fl", c1.c.fl) check("c1.c.fo", c1.c.fo) check("c1.c.fn", c1.c.fn) + check("c1.c.fc", c1.c.fc) check("c1.c.g1", c1.c.g1) check("c1.c.gi", c1.c.gi) check("c1.c.gv", c1.c.gv()) @@ -327,6 +365,7 @@ object Test extends App { check("c1.O.fl", c1.O.fl) check("c1.O.fo", c1.O.fo) check("c1.O.fn", c1.O.fn) + check("c1.O.fc", c1.O.fc) check("c1.O.g1", c1.O.g1) check("c1.O.gi", c1.O.gi) check("c1.O.gv", c1.O.gv()) @@ -339,6 +378,7 @@ object Test extends App { check("O1.ff", O1.ff) check("O1.fl", O1.fl) check("O1.fo", O1.fo) + check("O1.fc", O1.fc) check("O1.g1", O1.g1) check("O1.gi", O1.gi) check("O1.gv", O1.gv()) @@ -352,6 +392,7 @@ object Test extends App { check("O1.c.fl", O1.c.fl) check("O1.c.fo", O1.c.fo) 
check("O1.c.fn", O1.c.fn) + check("O1.c.fc", O1.c.fc) check("O1.c.g1", O1.c.g1) check("O1.c.gi", O1.c.gi) check("O1.c.gv", O1.c.gv()) @@ -365,6 +406,7 @@ object Test extends App { check("O1.O.fl", O1.O.fl) check("O1.O.fo", O1.O.fo) check("O1.O.fn", O1.O.fn) + check("O1.O.fc", O1.O.fc) check("O1.O.g1", O1.O.g1) check("O1.O.gi", O1.O.gi) check("O1.O.gv", O1.O.gv()) @@ -378,6 +420,7 @@ object Test extends App { check("c2.ff", c2.ff) check("c2.fl", c2.fl) check("c2.fo", c2.fo) + check("c2.fc", c2.fc) check("c2.g1", c2.g1) check("c2.gi", c2.gi) check("c2.gv", c2.gv()) @@ -391,6 +434,7 @@ object Test extends App { check("c2.c.fl", c2.c.fl) check("c2.c.fo", c2.c.fo) check("c2.c.fn", c2.c.fn) + check("c2.c.fc", c2.c.fc) check("c2.c.g1", c2.c.g1) check("c2.c.gi", c2.c.gi) check("c2.c.gv", c2.c.gv()) @@ -404,6 +448,7 @@ object Test extends App { check("c2.O.fl", c2.O.fl) check("c2.O.fo", c2.O.fo) check("c2.O.fn", c2.O.fn) + check("c2.O.fc", c2.O.fc) check("c2.O.g1", c2.O.g1) check("c2.O.gi", c2.O.gi) check("c2.O.gv", c2.O.gv()) @@ -416,6 +461,7 @@ object Test extends App { check("O2.ff", O2.ff) check("O2.fl", O2.fl) check("O2.fo", O2.fo) + check("O2.fc", O2.fc) check("O2.g1", O2.g1) check("O2.gi", O2.gi) check("O2.gv", O2.gv()) @@ -429,6 +475,7 @@ object Test extends App { check("O2.c.fl", O2.c.fl) check("O2.c.fo", O2.c.fo) check("O2.c.fn", O2.c.fn) + check("O2.c.fc", O2.c.fc) check("O2.c.g1", O2.c.g1) check("O2.c.gi", O2.c.gi) check("O2.c.gv", O2.c.gv()) @@ -442,6 +489,7 @@ object Test extends App { check("O2.O.fl", O2.O.fl) check("O2.O.fo", O2.O.fo) check("O2.O.fn", O2.O.fn) + check("O2.O.fc", O2.O.fc) check("O2.O.g1", O2.O.g1) check("O2.O.gi", O2.O.gi) check("O2.O.gv", O2.O.gv()) From 846ee2b1a47014c69ebd2352d91d467be74918b5 Mon Sep 17 00:00:00 2001 From: Sunil Mishra Date: Sun, 6 Jan 2019 10:26:10 -0800 Subject: [PATCH 1586/2793] [backport] 11330: Port fix for byte array hashcode Issue 10690 fixed this issue for scala 2.13, and a similar fix has been implemented here. There are some additional changes over those in 10690, removing byte array hashing entirely. Hashing is now consistent in that it produces the same result regardless of the underlying numeric type, in so far as the numbers in question fit into the byte width of the type. 
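As a concrete illustration of the new behaviour (a hedged sketch mirroring the added JUnit test, not code from this patch):

```
import scala.util.hashing.MurmurHash3

object ByteHashDemo extends App {
  // Equal sequences must agree on hashCode, whether the bytes live in a
  // List[Byte] or in a wrapped Array[Byte].
  val asList  = List[Byte](10, 20, 30)
  val wrapped = Array[Byte](10, 20, 30).toSeq
  assert(asList == wrapped)                    // element-wise equality
  assert(asList.hashCode == wrapped.hashCode)  // holds after this change

  // bytesHash now simply delegates to arrayHash, so byte arrays are hashed
  // element by element like arrays of any other numeric type.
  val bytes = Array[Byte](10, 20, 30)
  assert(MurmurHash3.bytesHash(bytes) == MurmurHash3.arrayHash(bytes))
}
```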
--- src/library/scala/util/hashing/MurmurHash3.scala | 4 ++-- .../scala/collection/mutable/WrappedArrayTest.scala | 11 +++++++++++ 2 files changed, 13 insertions(+), 2 deletions(-) diff --git a/src/library/scala/util/hashing/MurmurHash3.scala b/src/library/scala/util/hashing/MurmurHash3.scala index 285e9407746b..180a0fb65a0a 100644 --- a/src/library/scala/util/hashing/MurmurHash3.scala +++ b/src/library/scala/util/hashing/MurmurHash3.scala @@ -210,14 +210,14 @@ object MurmurHash3 extends MurmurHash3 { final val setSeed = "Set".hashCode def arrayHash[@specialized T](a: Array[T]): Int = arrayHash(a, arraySeed) - def bytesHash(data: Array[Byte]): Int = bytesHash(data, arraySeed) + def bytesHash(data: Array[Byte]): Int = arrayHash(data, arraySeed) def orderedHash(xs: TraversableOnce[Any]): Int = orderedHash(xs, symmetricSeed) def productHash(x: Product): Int = productHash(x, productSeed) def stringHash(x: String): Int = stringHash(x, stringSeed) def unorderedHash(xs: TraversableOnce[Any]): Int = unorderedHash(xs, traversableSeed) private[scala] def wrappedArrayHash[@specialized T](a: Array[T]): Int = arrayHash(a, seqSeed) - private[scala] def wrappedBytesHash(data: Array[Byte]): Int = bytesHash(data, seqSeed) + private[scala] def wrappedBytesHash(data: Array[Byte]): Int = arrayHash(data, seqSeed) /** To offer some potential for optimization. */ diff --git a/test/junit/scala/collection/mutable/WrappedArrayTest.scala b/test/junit/scala/collection/mutable/WrappedArrayTest.scala index 0786b3f1c368..d2bedd4c4993 100644 --- a/test/junit/scala/collection/mutable/WrappedArrayTest.scala +++ b/test/junit/scala/collection/mutable/WrappedArrayTest.scala @@ -1,9 +1,12 @@ package scala.collection.mutable +import org.junit.Assert._ import org.junit.runner.RunWith import org.junit.runners.JUnit4 import org.junit.Test +import scala.collection.immutable + @RunWith(classOf[JUnit4]) class WrappedArrayTest { @Test @@ -16,4 +19,12 @@ class WrappedArrayTest { assertOfRef(Array(Int.box(65)), Array(Char.box('A'))) assertOfRef(Array(Char.box('A')), Array(Int.box(65))) } + + @Test + def byteArrayHashCodeEquality(): Unit = { + val x = immutable.Seq[Byte](10) + val y = Array[Byte](10).toSeq + assertEquals(x, y) + assertEquals(x.hashCode(), y.hashCode()) + } } From 89287c4d1a05c61b3817a8dfe305246f713bf67d Mon Sep 17 00:00:00 2001 From: Diego Alonso Date: Sun, 13 Jan 2019 06:41:24 +0000 Subject: [PATCH 1587/2793] Add regression tests for bugs 8584, 9485, 9963 These bugs have already been fixed in branch 2.12.8. Scala bug 9963 involved a type annotation on the left-hand side of a generator (<-) inside a for comprehension, which brings `withFilter` into play. Scala bug 8584 was a compiler crash (a NullPointerException in the typer phase) when using named arguments; it is already fixed as of `2.12.7`. We add regression test files for these bugs.
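For background on the t9963 case, a rough sketch (using standard collections, not the test's own types) of why the typed pattern requires `withFilter`: when the left-hand side of `<-` is a pattern such as `j: A` rather than a bare variable, the comprehension is rewritten to filter the generator before mapping, so the generator's collection must provide a `withFilter` method.

```
object WithFilterDesugar extends App {
  val xs = List(1, 2)
  val ys: Vector[Any] = Vector("a", 3, "b")

  // A typed pattern on the left of `<-` ...
  val viaFor = for { x <- xs; y: String <- ys } yield (x, y)

  // ... expands (approximately) into a withFilter call followed by map:
  val manual = xs.flatMap { x =>
    ys.withFilter { case _: String => true; case _ => false }
      .map { case y: String => (x, y) }
  }

  assert(viaFor == manual)
  println(viaFor)  // List((1,a), (1,b), (2,a), (2,b))
}
```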
--- test/files/neg/t9963.check | 4 ++++ test/files/neg/t9963.scala | 16 ++++++++++++++++ test/files/pos/t8584.scala | 19 +++++++++++++++++++ test/files/pos/t9485.scala | 17 +++++++++++++++++ 4 files changed, 56 insertions(+) create mode 100644 test/files/neg/t9963.check create mode 100644 test/files/neg/t9963.scala create mode 100644 test/files/pos/t8584.scala create mode 100644 test/files/pos/t9485.scala diff --git a/test/files/neg/t9963.check b/test/files/neg/t9963.check new file mode 100644 index 000000000000..38f0f7dcd04a --- /dev/null +++ b/test/files/neg/t9963.check @@ -0,0 +1,4 @@ +t9963.scala:14: error: value withFilter is not a member of t9963.MySet[A] + j: A <- new MySet[A]() // must have a typecheck patmat here to trigger this bug + ^ +one error found diff --git a/test/files/neg/t9963.scala b/test/files/neg/t9963.scala new file mode 100644 index 000000000000..8358aa1d2738 --- /dev/null +++ b/test/files/neg/t9963.scala @@ -0,0 +1,16 @@ +object t9963 { + class MyIterable[+A] { + def flatMap[B](f: A => MyIterable[B]): MyIterable[B] = ??? + def map[B](f: A => B): MyIterable[B] = ??? + } + + class MySet[A] { + def map[B: Equiv](f: A => B): MySet[B] = ??? // must have an implicit typeclass here to trigger this bug + def filter(f: A => Boolean): MySet[A] = ??? + } + + def f[A] = for { + i <- new MyIterable[A]() + j: A <- new MySet[A]() // must have a typecheck patmat here to trigger this bug + } yield (i, j) +} diff --git a/test/files/pos/t8584.scala b/test/files/pos/t8584.scala new file mode 100644 index 000000000000..2cffb86356df --- /dev/null +++ b/test/files/pos/t8584.scala @@ -0,0 +1,19 @@ +trait A { + def x: Double + def y: Double + + def thisA: A + def copy( x: Double = 0, y: Double = 0 ): A +} + +class B( in: A ) { + import in._ + + def foo( a: Double, b: Double ) = a + + def bar = thisA.copy( + x = foo( + b = 1, + a = 2 ) + ) +} \ No newline at end of file diff --git a/test/files/pos/t9485.scala b/test/files/pos/t9485.scala new file mode 100644 index 000000000000..62c2f80ce89d --- /dev/null +++ b/test/files/pos/t9485.scala @@ -0,0 +1,17 @@ +trait Traversable[+A] { + def flatMap[B](f: A => Traversable[B]): Traversable[B] = ??? +} + +trait Iterable[+A] extends Traversable[A] { + def flatMap[B](f: A => Iterable[B]): Iterable[B] = ??? +} + +trait Seq[+A] extends Iterable[A] { + def flatMap[B](f: A => Seq[B]): Seq[B] = ??? 
+} + +object Test extends App { + val a: Seq[Int] = new Seq[Int] {} + val b: Iterable[Int] = new Iterable[Int] {} + a.flatMap(i => b) +} From f248e3b891b0a92b8a78007470329ffb6d4809aa Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Mon, 21 Jan 2019 13:39:36 +0100 Subject: [PATCH 1588/2793] LMF only builds bridges for SAMmy --- src/compiler/scala/tools/nsc/transform/Delambdafy.scala | 6 +++++- test/files/run/t11373/Fun0.java | 5 +++++ test/files/run/t11373/Fun0Impl.java | 3 +++ test/files/run/t11373/Test.scala | 3 +++ 4 files changed, 16 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t11373/Fun0.java create mode 100644 test/files/run/t11373/Fun0Impl.java create mode 100644 test/files/run/t11373/Test.scala diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index a5ca807db36a..2ecc5200162a 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -115,7 +115,11 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre val samBridges = logResultIf[List[Symbol]](s"will add SAM bridges for $fun", _.nonEmpty) { userSamCls.fold[List[Symbol]](Nil) { - _.info.findMembers(excludedFlags = 0L, requiredFlags = BRIDGE).toList + _.info.findMember(sam.name, excludedFlags = 0L, requiredFlags = BRIDGE, stableOnly = false) match { + case NoSymbol => Nil + case bridges if bridges.isOverloaded => bridges.alternatives + case bridge => bridge :: Nil + } } } diff --git a/test/files/run/t11373/Fun0.java b/test/files/run/t11373/Fun0.java new file mode 100644 index 000000000000..ff55caaabeaf --- /dev/null +++ b/test/files/run/t11373/Fun0.java @@ -0,0 +1,5 @@ +public interface Fun0 { + String ap(); + + default Fun0 test(Fun0 b) { return null; } +} diff --git a/test/files/run/t11373/Fun0Impl.java b/test/files/run/t11373/Fun0Impl.java new file mode 100644 index 000000000000..22f0edf42117 --- /dev/null +++ b/test/files/run/t11373/Fun0Impl.java @@ -0,0 +1,3 @@ +public interface Fun0Impl extends Fun0 { + default Fun0Impl test(Fun0 b) { return null; } +} diff --git a/test/files/run/t11373/Test.scala b/test/files/run/t11373/Test.scala new file mode 100644 index 000000000000..eb05b461e946 --- /dev/null +++ b/test/files/run/t11373/Test.scala @@ -0,0 +1,3 @@ +object Test extends App { + val f: Fun0Impl = () => null +} From 94877e10f34838572bfe6f09122dff9e5f9d8638 Mon Sep 17 00:00:00 2001 From: "Aaron S. Hawley" Date: Tue, 22 Jan 2019 08:16:48 -0500 Subject: [PATCH 1589/2793] Link useful Option functions --- src/library/scala/Option.scala | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala index 720ce453fce1..c8d4b390e5fd 100644 --- a/src/library/scala/Option.scala +++ b/src/library/scala/Option.scala @@ -63,6 +63,27 @@ object Option { * This allows for sophisticated chaining of $option values without * having to check for the existence of a value. * + * These are useful helper functions that exist for both $some and $none. 
+ * - [[isDefined]] — True if not empty + * - [[isEmpty]] — True if empty + * - [[nonEmpty]] — True if not empty + * - [[orElse]] — Return default optional value if empty + * - [[getOrElse]] — Return default value if empty + * - [[get]] — Return value, throw exception if empty + * - [[fold]] — Apply function on optional value, return default if empty + * - [[map]] — Apply a function on the optional value + * - [[flatMap]] — Same as map but function must return an optional value + * - [[foreach]] — Apply a procedure on option value + * - [[collect]] — Apply partial pattern match on optional value + * - [[filter]] — An optional value satisfies predicate + * - [[filterNot]] — An optional value doesn't satisfy predicate + * - [[exists]] — Apply predicate on optional value, or false if empty + * - [[forall]] — Apply predicate on optional value, or true if empty + * - [[contains]] — Checks if value equals optional value, or false if empty + * - [[toList]] — Unary list of optional value, otherwise the empty list + * - [[toRight]] — Sum type for optional value is "Right", otherwise default value is "Left" + * - [[toLeft]] — Sum type for optional value is "Left", otherwise default value is "Right" + * * A less-idiomatic way to use $option values is via pattern matching: {{{ * val nameMaybe = request getParameter "name" * nameMaybe match { From a35f0d00647f123e2c571ceb54bfc5a3aa6369da Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 23 Jan 2019 13:22:03 +1000 Subject: [PATCH 1590/2793] Annotations in Java may be inner classes. --- .../scala/tools/nsc/typechecker/Typers.scala | 4 ++-- test/scaladoc/resources/t11365.java | 12 ++++++++++++ test/scaladoc/run/t11365.check | 1 + test/scaladoc/run/t11365.scala | 18 ++++++++++++++++++ 4 files changed, 33 insertions(+), 2 deletions(-) create mode 100644 test/scaladoc/resources/t11365.java create mode 100644 test/scaladoc/run/t11365.check create mode 100644 test/scaladoc/run/t11365.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 1fa8add09001..66f840882e63 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1845,7 +1845,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (clazz.isTrait && clazz.info.parents.nonEmpty && clazz.info.firstParent.typeSymbol == AnyClass) checkEphemeral(clazz, impl2.body) - if ((clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) { + if (!clazz.isJavaDefined && (clazz isNonBottomSubClass ClassfileAnnotationClass) && (clazz != ClassfileAnnotationClass)) { if (!clazz.owner.isPackageClass) context.error(clazz.pos, "inner classes cannot be classfile annotations") // Ignore @SerialVersionUID, because it is special-cased and handled completely differently. 
@@ -2008,7 +2008,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (clazz.isTrait && hasSuperArgs(parents1.head)) ConstrArgsInParentOfTraitError(parents1.head, clazz) - if ((clazz isSubClass ClassfileAnnotationClass) && !clazz.isTopLevel) + if (!clazz.isJavaDefined && (clazz isSubClass ClassfileAnnotationClass) && !clazz.isTopLevel) context.error(clazz.pos, "inner classes cannot be classfile annotations") if (!phase.erasedTypes && !clazz.info.resultType.isError) // @S: prevent crash for duplicated type members diff --git a/test/scaladoc/resources/t11365.java b/test/scaladoc/resources/t11365.java new file mode 100644 index 000000000000..61828a41ed89 --- /dev/null +++ b/test/scaladoc/resources/t11365.java @@ -0,0 +1,12 @@ +/** + * A package header + */ +package test.scaladoc; + +/** + * Testing java comments don't flag Scala specific errors + */ +public class JavaComments { + static @interface Annot { + } +} diff --git a/test/scaladoc/run/t11365.check b/test/scaladoc/run/t11365.check new file mode 100644 index 000000000000..619c56180bb9 --- /dev/null +++ b/test/scaladoc/run/t11365.check @@ -0,0 +1 @@ +Done. diff --git a/test/scaladoc/run/t11365.scala b/test/scaladoc/run/t11365.scala new file mode 100644 index 000000000000..2de3844fab94 --- /dev/null +++ b/test/scaladoc/run/t11365.scala @@ -0,0 +1,18 @@ +import scala.tools.nsc.doc.Universe +import scala.tools.nsc.doc.model._ +import scala.tools.partest.ScaladocJavaModelTest + +object Test extends ScaladocJavaModelTest { + + override def resourceFile = "t11365.java" + override def scaladocSettings = "" + + def testModel(rootPackage: Package) = { + import access._ + val Tag = ":marker:" + + val base = rootPackage._package("test")._package("scaladoc") + val clazz = base._class("JavaComments") + // Just testing that we haven't hit a compiler error. + } +} From fdc49a108d485b2968c202250ce27aa6a95ef8d6 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 23 Jan 2019 13:34:20 +1000 Subject: [PATCH 1591/2793] Don't check for escaping privates in Java sources --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 +- test/scaladoc/resources/t11365.java | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 66f840882e63..9fcf3ec17659 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -131,7 +131,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * (`owner` tells where the type occurs). 
*/ def privates[T <: Tree](typer: Typer, owner: Symbol, tree: T): T = - check(typer, owner, EmptyScope, WildcardType, tree) + if (owner.isJavaDefined) tree else check(typer, owner, EmptyScope, WildcardType, tree) private def check[T <: Tree](typer: Typer, owner: Symbol, scope: Scope, pt: Type, tree: T): T = { this.owner = owner diff --git a/test/scaladoc/resources/t11365.java b/test/scaladoc/resources/t11365.java index 61828a41ed89..e5f54b87aa0a 100644 --- a/test/scaladoc/resources/t11365.java +++ b/test/scaladoc/resources/t11365.java @@ -9,4 +9,8 @@ public class JavaComments { static @interface Annot { } + + private class Route {} + final java.util.List routes = null; + } From 43385c26dbbf78e507b6439d24bc89d8cfe56b58 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 23 Jan 2019 13:42:39 +1000 Subject: [PATCH 1592/2793] Java classes may implement Java annotations --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 4 +++- test/scaladoc/resources/t11365.java | 2 ++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 9fcf3ec17659..ce298b78f3d7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1759,7 +1759,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper checkStablePrefixClassType(parent) if (psym != superclazz) { - if (psym.isTrait) { + if (context.unit.isJava && context.unit.isJava && psym.isJavaAnnotation) { + // allowed + } else if (psym.isTrait) { val ps = psym.info.parents if (!ps.isEmpty && !superclazz.isSubClass(ps.head.typeSymbol)) pending += ParentSuperSubclassError(parent, superclazz, ps.head.typeSymbol, psym) diff --git a/test/scaladoc/resources/t11365.java b/test/scaladoc/resources/t11365.java index e5f54b87aa0a..ea5edf9f8d38 100644 --- a/test/scaladoc/resources/t11365.java +++ b/test/scaladoc/resources/t11365.java @@ -13,4 +13,6 @@ public class JavaComments { private class Route {} final java.util.List routes = null; + abstract class AnnotImpl implements Annot {} + } From b92fed12d7029aec08308745ea8a4d5d0b6d0d06 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 23 Jan 2019 21:22:23 -0500 Subject: [PATCH 1593/2793] Change "helper functions" to "methods" --- src/library/scala/Option.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala index c8d4b390e5fd..4448dd3484b7 100644 --- a/src/library/scala/Option.scala +++ b/src/library/scala/Option.scala @@ -63,7 +63,7 @@ object Option { * This allows for sophisticated chaining of $option values without * having to check for the existence of a value. * - * These are useful helper functions that exist for both $some and $none. + * These are useful methods that exist for both $some and $none. 
* - [[isDefined]] — True if not empty * - [[isEmpty]] — True if empty * - [[nonEmpty]] — True if not empty From 60cf518caaaf5950e02f71d1e2d5b25e15083386 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 23 Jan 2019 18:29:23 -0800 Subject: [PATCH 1594/2793] bump copyright year to 2019 --- LICENSE | 4 ++-- NOTICE | 4 ++-- doc/LICENSE.md | 4 ++-- doc/License.rtf | 4 ++-- project/VersionUtil.scala | 2 +- src/library/scala/util/Properties.scala | 2 +- src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala | 2 +- src/scalap/decoder.properties | 2 +- 8 files changed, 12 insertions(+), 12 deletions(-) diff --git a/LICENSE b/LICENSE index 8a51149ff94b..5c0cb71d6b61 100644 --- a/LICENSE +++ b/LICENSE @@ -187,8 +187,8 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright (c) 2002-2018 EPFL - Copyright (c) 2011-2018 Lightbend, Inc. + Copyright (c) 2002-2019 EPFL + Copyright (c) 2011-2019 Lightbend, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. diff --git a/NOTICE b/NOTICE index 2c4ab263d386..3472e558a794 100644 --- a/NOTICE +++ b/NOTICE @@ -1,6 +1,6 @@ Scala -Copyright (c) 2002-2018 EPFL -Copyright (c) 2011-2018 Lightbend, Inc. +Copyright (c) 2002-2019 EPFL +Copyright (c) 2011-2019 Lightbend, Inc. Scala includes software developed at LAMP/EPFL (https://lamp.epfl.ch/) and diff --git a/doc/LICENSE.md b/doc/LICENSE.md index cd337666c94d..d4f343394e87 100644 --- a/doc/LICENSE.md +++ b/doc/LICENSE.md @@ -2,9 +2,9 @@ Scala is licensed under the [Apache License Version 2.0](https://www.apache.org/ ## Scala License -Copyright (c) 2002-2018 EPFL +Copyright (c) 2002-2019 EPFL -Copyright (c) 2011-2018 Lightbend, Inc. +Copyright (c) 2011-2019 Lightbend, Inc. All rights reserved. diff --git a/doc/License.rtf b/doc/License.rtf index 3d0f81fa68ee..f9a3897cb472 100644 --- a/doc/License.rtf +++ b/doc/License.rtf @@ -23,8 +23,8 @@ Scala is licensed under the\'a0{\field{\*\fldinst{HYPERLINK "https://www.apache. \fs48 \cf2 Scala License\ \pard\pardeftab720\sl360\sa320\partightenfactor0 -\f0\b0\fs28 \cf2 Copyright (c) 2002-2018 EPFL\ -Copyright (c) 2011-2018 Lightbend, Inc.\ +\f0\b0\fs28 \cf2 Copyright (c) 2002-2019 EPFL\ +Copyright (c) 2011-2019 Lightbend, Inc.\ All rights reserved.\ \pard\pardeftab720\sl360\sa320\partightenfactor0 \cf2 \cb4 Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at {\field{\*\fldinst{HYPERLINK "http://www.apache.org/licenses/LICENSE-2.0"}}{\fldrslt http://www.apache.org/licenses/LICENSE-2.0}}.\ diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala index 233bfc633346..dd8e18dd8c16 100644 --- a/project/VersionUtil.scala +++ b/project/VersionUtil.scala @@ -27,7 +27,7 @@ object VersionUtil { ) lazy val generatePropertiesFileSettings = Seq[Setting[_]]( - copyrightString := "Copyright 2002-2018, LAMP/EPFL and Lightbend, Inc.", + copyrightString := "Copyright 2002-2019, LAMP/EPFL and Lightbend, Inc.", shellWelcomeString := """ | ________ ___ / / ___ | / __/ __// _ | / / / _ | diff --git a/src/library/scala/util/Properties.scala b/src/library/scala/util/Properties.scala index d70d2d174345..b55c08d1483b 100644 --- a/src/library/scala/util/Properties.scala +++ b/src/library/scala/util/Properties.scala @@ -108,7 +108,7 @@ private[scala] trait PropertiesTrait { * or "version (unknown)" if it cannot be determined. 
*/ val versionString = "version " + scalaPropOrElse("version.number", "(unknown)") - val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2018, LAMP/EPFL and Lightbend, Inc.") + val copyrightString = scalaPropOrElse("copyright.string", "Copyright 2002-2019, LAMP/EPFL and Lightbend, Inc.") /** This is the encoding to use reading in source files, overridden with -encoding. * Note that it uses "prop" i.e. looks in the scala jar, not the system properties. diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala index 767a79a97ae2..d3c27057d46a 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala @@ -401,7 +401,7 @@ trait EntityPage extends HtmlPage { { if (Set("epfl", "EPFL").contains(tpl.universe.settings.docfooter.value)) - + else } diff --git a/src/scalap/decoder.properties b/src/scalap/decoder.properties index 44dcaeabb105..cbd7ad3c2d6b 100644 --- a/src/scalap/decoder.properties +++ b/src/scalap/decoder.properties @@ -1,2 +1,2 @@ version.number=2.0.1 -copyright.string=(c) 2002-2018 LAMP/EPFL +copyright.string=(c) 2002-2019 LAMP/EPFL From 607c414c3f453ffa31f3fe754e17b7aab01d40e3 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 23 Jan 2019 21:44:28 -0500 Subject: [PATCH 1595/2793] Improve wording of orElse and getOrElse --- src/library/scala/Option.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala index 4448dd3484b7..ea3fa3bab01c 100644 --- a/src/library/scala/Option.scala +++ b/src/library/scala/Option.scala @@ -67,8 +67,8 @@ object Option { * - [[isDefined]] — True if not empty * - [[isEmpty]] — True if empty * - [[nonEmpty]] — True if not empty - * - [[orElse]] — Return default optional value if empty - * - [[getOrElse]] — Return default value if empty + * - [[orElse]] — Evaluate and return alternate optional value if empty + * - [[getOrElse]] — Evaluate and return alternate value if empty * - [[get]] — Return value, throw exception if empty * - [[fold]] — Apply function on optional value, return default if empty * - [[map]] — Apply a function on the optional value From 6c8efe908caa4d64f44417f48894202071ed836a Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 23 Jan 2019 21:45:16 -0500 Subject: [PATCH 1596/2793] Drop toRight and toLeft from list --- src/library/scala/Option.scala | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala index ea3fa3bab01c..dddc57f29864 100644 --- a/src/library/scala/Option.scala +++ b/src/library/scala/Option.scala @@ -81,8 +81,6 @@ object Option { * - [[forall]] — Apply predicate on optional value, or true if empty * - [[contains]] — Checks if value equals optional value, or false if empty * - [[toList]] — Unary list of optional value, otherwise the empty list - * - [[toRight]] — Sum type for optional value is "Right", otherwise default value is "Left" - * - [[toLeft]] — Sum type for optional value is "Left", otherwise default value is "Right" * * A less-idiomatic way to use $option values is via pattern matching: {{{ * val nameMaybe = request getParameter "name" From cb53550485f1b72cb2a24a34856b2653c4194c66 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 30 Jan 2019 22:47:51 -0800 Subject: [PATCH 1597/2793] upgrade MiMa to 0.1.18 (was 0.1.15) motivation: older versions are incompatible with sbt-whitesource (reference: 
https://github.com/lightbend/migration-manager/releases/tag/0.1.18) it would be nice to further upgrade to a newer version like 0.2.0 or 0.3.0, but for now I just want to resolve the incompatibility with minimum fuss. --- project/plugins.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/plugins.sbt b/project/plugins.sbt index 7a95a915a763..d4461ac6ce88 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -19,7 +19,7 @@ buildInfoKeys := Seq[BuildInfoKey](buildClasspath) buildInfoPackage := "scalabuild" -addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.15") +addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.18") libraryDependencies ++= Seq( "org.eclipse.jgit" % "org.eclipse.jgit" % "4.6.0.201612231935-r", From 5125215b8f2e5cd269ed39c38418e8172a3a841f Mon Sep 17 00:00:00 2001 From: "Aaron S. Hawley" Date: Thu, 31 Jan 2019 19:10:02 -0500 Subject: [PATCH 1598/2793] Add doc to Option on pattern matches --- src/library/scala/Option.scala | 148 ++++++++++++- test/scalacheck/scala/OptionTest.scala | 284 +++++++++++++++++++++++++ 2 files changed, 430 insertions(+), 2 deletions(-) create mode 100644 test/scalacheck/scala/OptionTest.scala diff --git a/src/library/scala/Option.scala b/src/library/scala/Option.scala index dddc57f29864..3ee6582f9772 100644 --- a/src/library/scala/Option.scala +++ b/src/library/scala/Option.scala @@ -135,14 +135,38 @@ sealed abstract class Option[+A] extends Product with Serializable { self => /** Returns true if the option is $none, false otherwise. + * + * This is equivalent to: + * {{{ + * option match { + * case Some(_) => false + * case None => true + * } + * }}} */ def isEmpty: Boolean /** Returns true if the option is an instance of $some, false otherwise. + * + * This is equivalent to: + * {{{ + * option match { + * case Some(_) => true + * case None => false + * } + * }}} */ def isDefined: Boolean = !isEmpty /** Returns the option's value. + * + * This is equivalent to: + * {{{ + * option match { + * case Some(x) => x + * case None => throw new Exception + * } + * }}} * @note The option must be nonempty. * @throws java.util.NoSuchElementException if the option is empty. */ @@ -151,6 +175,14 @@ sealed abstract class Option[+A] extends Product with Serializable { /** Returns the option's value if the option is nonempty, otherwise * return the result of evaluating `default`. * + * This is equivalent to: + * {{{ + * option match { + * case Some(x) => x + * case None => default + * } + * }}} + * * @param default the default expression. */ @inline final def getOrElse[B >: A](default: => B): B = @@ -158,8 +190,17 @@ sealed abstract class Option[+A] extends Product with Serializable { /** Returns the option's value if it is nonempty, * or `null` if it is empty. + * * Although the use of null is discouraged, code written to use * $option must often interface with code that expects and returns nulls. + * + * This is equivalent to: + * {{{ + * option match { + * case Some(x) => x + * case None => null + * } + * }}} * @example {{{ * val initialText: Option[String] = getInitialText * val textField = new JComponent(initialText.orNull,20) @@ -171,6 +212,13 @@ sealed abstract class Option[+A] extends Product with Serializable { * value if this $option is nonempty. * Otherwise return $none. * + * This is equivalent to: + * {{{ + * option match { + * case Some(x) => Some(f(x)) + * case None => None + * } + * }}} * @note This is similar to `flatMap` except here, * $f does not need to wrap its result in an $option. 
* @@ -185,8 +233,17 @@ sealed abstract class Option[+A] extends Product with Serializable { * value if the $option is nonempty. Otherwise, evaluates * expression `ifEmpty`. * - * @note This is equivalent to `$option map f getOrElse ifEmpty`. - * + * This is equivalent to: + * {{{ + * option match { + * case Some(x) => f(x) + * case None => ifEmpty + * } + * }}} + * This is also equivalent to: + * {{{ + * option map f getOrElse ifEmpty + * }}} * @param ifEmpty the expression to evaluate if empty. * @param f the function to apply if nonempty. */ @@ -199,6 +256,13 @@ sealed abstract class Option[+A] extends Product with Serializable { * Slightly different from `map` in that $f is expected to * return an $option (which could be $none). * + * This is equivalent to: + * {{{ + * option match { + * case Some(x) => f(x) + * case None => None + * } + * }}} * @param f the function to apply * @see map * @see foreach @@ -212,6 +276,13 @@ sealed abstract class Option[+A] extends Product with Serializable { /** Returns this $option if it is nonempty '''and''' applying the predicate $p to * this $option's value returns true. Otherwise, return $none. * + * This is equivalent to: + * {{{ + * option match { + * case Some(x) if p(x) => Some(x) + * case _ => None + * } + * }}} * @param p the predicate used for testing. */ @inline final def filter(p: A => Boolean): Option[A] = @@ -220,12 +291,27 @@ sealed abstract class Option[+A] extends Product with Serializable { /** Returns this $option if it is nonempty '''and''' applying the predicate $p to * this $option's value returns false. Otherwise, return $none. * + * This is equivalent to: + * {{{ + * option match { + * case Some(x) if !p(x) => Some(x) + * case _ => None + * } + * }}} * @param p the predicate used for testing. */ @inline final def filterNot(p: A => Boolean): Option[A] = if (isEmpty || !p(this.get)) this else None /** Returns false if the option is $none, true otherwise. + * + * This is equivalent to: + * {{{ + * option match { + * case Some(_) => true + * case None => false + * } + * }}} * @note Implemented here to avoid the implicit conversion to Iterable. */ final def nonEmpty = isDefined @@ -248,6 +334,13 @@ sealed abstract class Option[+A] extends Product with Serializable { /** Tests whether the option contains a given value as an element. * + * This is equivalent to: + * {{{ + * option match { + * case Some(x) => x == elem + * case None => false + * } + * }}} * @example {{{ * // Returns true because Some instance contains string "something" which equals "something". * Some("something") contains "something" @@ -270,6 +363,13 @@ sealed abstract class Option[+A] extends Product with Serializable { * $p returns true when applied to this $option's value. * Otherwise, returns false. * + * This is equivalent to: + * {{{ + * option match { + * case Some(x) => p(x) + * case None => false + * } + * }}} * @param p the predicate to test */ @inline final def exists(p: A => Boolean): Boolean = @@ -278,6 +378,13 @@ sealed abstract class Option[+A] extends Product with Serializable { /** Returns true if this option is empty '''or''' the predicate * $p returns true when applied to this $option's value. 
* + * This is equivalent to: + * {{{ + * option match { + * case Some(x) => p(x) + * case None => true + * } + * }}} * @param p the predicate to test */ @inline final def forall(p: A => Boolean): Boolean = isEmpty || p(this.get) @@ -285,6 +392,13 @@ sealed abstract class Option[+A] extends Product with Serializable { /** Apply the given procedure $f to the option's value, * if it is nonempty. Otherwise, do nothing. * + * This is equivalent to: + * {{{ + * option match { + * case Some(x) => f(x) + * case None => () + * } + * }}} * @param f the procedure to apply. * @see map * @see flatMap @@ -319,6 +433,14 @@ sealed abstract class Option[+A] extends Product with Serializable { /** Returns this $option if it is nonempty, * otherwise return the result of evaluating `alternative`. + * + * This is equivalent to: + * {{{ + * option match { + * case Some(x) => Some(x) + * case None => alternative + * } + * }}} * @param alternative the alternative expression. */ @inline final def orElse[B >: A](alternative: => Option[B]): Option[B] = @@ -332,6 +454,14 @@ sealed abstract class Option[+A] extends Product with Serializable { /** Returns a singleton list containing the $option's value * if it is nonempty, or the empty list if the $option is empty. + * + * This is equivalent to: + * {{{ + * option match { + * case Some(x) => List(x) + * case None => Nil + * } + * }}} */ def toList: List[A] = if (isEmpty) List() else new ::(this.get, Nil) @@ -341,6 +471,13 @@ sealed abstract class Option[+A] extends Product with Serializable { * a [[scala.util.Right]] containing this $option's value if * this is nonempty. * + * This is equivalent to: + * {{{ + * option match { + * case Some(x) => Right(x) + * case None => Left(left) + * } + * }}} * @param left the expression to evaluate and return if this is empty * @see toLeft */ @@ -352,6 +489,13 @@ sealed abstract class Option[+A] extends Product with Serializable { * a [[scala.util.Left]] containing this $option's value * if this $option is nonempty. * + * This is equivalent to: + * {{{ + * option match { + * case Some(x) => Left(x) + * case None => Right(right) + * } + * }}} * @param right the expression to evaluate and return if this is empty * @see toRight */ diff --git a/test/scalacheck/scala/OptionTest.scala b/test/scalacheck/scala/OptionTest.scala new file mode 100644 index 000000000000..45effce7934a --- /dev/null +++ b/test/scalacheck/scala/OptionTest.scala @@ -0,0 +1,284 @@ +package scala + +import org.scalacheck.Prop +import org.scalacheck.Properties +import org.scalacheck.Prop.AnyOperators + +/** + * Property tests for code in [[scala.Option]]'s documentation. 
+ */ +object OptionTest extends Properties("scala.Option") { + + property("map") = { + Prop.forAll { (option: Option[Int], i: Int) => + val f: Function1[Int,Int] = (_ => i) + option.map(f(_)) ?= { + option match { + case Some(x) => Some(f(x)) + case None => None + } + } + } + } + + property("flatMap") = { + Prop.forAll { (option: Option[Int], i: Int) => + val f: Function1[Int,Option[Int]] = (_ => Some(i)) + option.flatMap(f(_)) ?= { + option match { + case Some(x) => f(x) + case None => None + } + } + } + } + + property("foreach") = { + Prop.forAll { (option: Option[Int], unit: Unit) => + val proc: Function1[Int,Unit] = (_ => unit) + option.foreach(proc(_)) ?= { + option match { + case Some(x) => proc(x) + case None => () + } + } + } + } + + property("fold") = { + Prop.forAll { (option: Option[Int], i: Int, y: Int) => + val f: Function1[Int,Int] = (_ => i) + option.fold(y)(f(_)) ?= { + option match { + case Some(x) => f(x) + case None => y + } + } + } + } + + property("foldLeft") = { + Prop.forAll { (option: Option[Int], i: Int, y: Int) => + val f: Function2[Int,Int,Int] = ((_, _) => i) + option.foldLeft(y)(f(_, _)) ?= { + option match { + case Some(x) => f(y, x) + case None => y + } + } + } + } + + property("foldRight") = { + Prop.forAll { (option: Option[Int], i: Int, y: Int) => + val f: Function2[Int,Int,Int] = ((_, _) => i) + option.foldRight(y)(f(_, _)) ?= { + option match { + case Some(x) => f(x, y) + case None => y + } + } + } + } + + property("collect") = { + Prop.forAll { (option: Option[Int], i: Int) => + val pf: PartialFunction[Int,Int] = { + case x if x > 0 => i + } + option.collect(pf) ?= { + option match { + case Some(x) if pf.isDefinedAt(x) => Some(pf(x)) + case _ => None + } + } + } + } + + property("isDefined") = { + Prop.forAll { option: Option[Int] => + option.isDefined ?= { + option match { + case Some(_) => true + case None => false + } + } + } + } + + property("isEmpty") = { + Prop.forAll { option: Option[Int] => + option.isEmpty ?= { + option match { + case Some(_) => false + case None => true + } + } + } + } + + property("nonEmpty") = { + Prop.forAll { option: Option[Int] => + option.nonEmpty ?= { + option match { + case Some(_) => true + case None => false + } + } + } + } + + property("orElse") = { + Prop.forAll { (option: Option[Int], y: Option[Int]) => + option.orElse(y) ?= { + option match { + case Some(x) => Some(x) + case None => y + } + } + } + } + + property("getOrElse") = { + Prop.forAll { (option: Option[Int], y: Int) => + option.getOrElse(y) ?= { + option match { + case Some(x) => x + case None => y + } + } + } + } + + property("get") = { + Prop.forAll { (option: Option[Int]) => + Prop.iff[Option[Int]](option, { + case Some(x) => + option.get ?= { + option match { + case Some(x) => x + case None => throw new Exception + } + } + case None => + Prop.throws(classOf[Exception]) { + option.get + } + }) + } + } + + property("orNull") = { + Prop.forAll { (option: Option[String]) => + option.orNull ?= { + option match { + case Some(s) => s + case None => null + } + } + } + } + + property("filter") = { + Prop.forAll { (option: Option[Int], bool: Boolean) => + val pred: Function1[Int,Boolean] = (_ => bool) + option.filter(pred(_)) ?= { + option match { + case Some(x) if pred(x) => Some(x) + case _ => None + } + } + } + } + + property("filterNot") = { + Prop.forAll { (option: Option[Int], bool: Boolean) => + val pred: Function1[Int,Boolean] = (_ => bool) + option.filterNot(pred(_)) ?= { + option match { + case Some(x) if !pred(x) => Some(x) + case _ => None + } + } 
+ } + } + + property("exists") = { + Prop.forAll { (option: Option[Int], bool: Boolean) => + val pred: Function1[Int,Boolean] = (_ => bool) + option.exists(pred(_)) ?= { + option match { + case Some(x) => pred(x) + case None => false + } + } + } + } + + property("forall") = { + Prop.forAll { (option: Option[Int], bool: Boolean) => + val pred: Function1[Int,Boolean] = (_ => bool) + option.forall(pred(_)) ?= { + option match { + case Some(x) => pred(x) + case None => true + } + } + } + } + + property("contains") = { + Prop.forAll { (option: Option[Int], y: Int) => + option.contains(y) ?= { + option match { + case Some(x) => x == y + case None => false + } + } + } + } + + property("size") = { + Prop.forAll { option: Option[Int] => + option.size ?= { + option match { + case Some(x) => 1 + case None => 0 + } + } + } + } + + property("toList") = { + Prop.forAll { option: Option[Int] => + option.toList ?= { + option match { + case Some(x) => List(x) + case None => Nil + } + } + } + } + + property("toRight") = { + Prop.forAll { (option: Option[Int], i: Int) => + option.toRight(i) ?= { + option match { + case Some(x) => scala.util.Right(x) + case None => scala.util.Left(i) + } + } + } + } + + property("toLeft") = { + Prop.forAll { (option: Option[Int], i: Int) => + option.toLeft(i) ?= { + option match { + case Some(x) => scala.util.Left(x) + case None => scala.util.Right(i) + } + } + } + } +} From 390612dcac8ceb7d0a831283bbd0c2a14269bc36 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 1 Feb 2019 11:17:58 +1000 Subject: [PATCH 1599/2793] Move isMethod virtual call after cheaper phase.flatClasses Also makes a few methods on Symbol final. --- src/reflect/scala/reflect/internal/Symbols.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 56ae2d8086d1..d56c5988da85 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -260,12 +260,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => private[this] var _rawowner = if (initOwner eq null) this else initOwner private[this] var _rawflags: Long = _ - def rawowner = _rawowner - def rawflags = _rawflags + final def rawowner = _rawowner + final def rawflags = _rawflags rawatt = initPos - val id = nextId() // identity displayed when -uniqid + final val id = nextId() // identity displayed when -uniqid //assert(id != 3390, initName) private[this] var _validTo: Period = NoPeriod @@ -840,7 +840,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isDelambdafyFunction = isSynthetic && (name containsName tpnme.DELAMBDAFY_LAMBDA_CLASS_NAME) final def isDelambdafyTarget = isArtifact && isMethod && hasAttachment[DelambdafyTarget.type] final def isDefinedInPackage = effectiveOwner.isPackageClass - final def needsFlatClasses = !isMethod && phase.flatClasses && (rawowner ne NoSymbol) && !rawowner.isPackageClass + final def needsFlatClasses = phase.flatClasses && (rawowner ne NoSymbol) && !rawowner.isPackageClass && !isMethod // TODO introduce a flag for these? 
final def isPatternTypeVariable: Boolean = From eacc4e6ea19016852671dc42096e3427cedf99c0 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 5 Feb 2019 17:16:07 -0800 Subject: [PATCH 1600/2793] [nomerge] sbt 0.13.18 (was 0.13.17) just keeping current --- project/build.properties | 2 +- scripts/common | 2 +- src/intellij/scala.ipr.SAMPLE | 66 ++++++++++++------------ test/benchmarks/project/build.properties | 2 +- 4 files changed, 36 insertions(+), 36 deletions(-) diff --git a/project/build.properties b/project/build.properties index 133a8f197e36..8e682c526d5d 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=0.13.17 +sbt.version=0.13.18 diff --git a/scripts/common b/scripts/common index c05ddef34144..75a27aca03bb 100644 --- a/scripts/common +++ b/scripts/common @@ -15,7 +15,7 @@ rm -rf "$WORKSPACE/resolutionScratch_" mkdir -p "$WORKSPACE/resolutionScratch_" SBT_CMD=${SBT_CMD-sbt} -SBT_CMD="$SBT_CMD -sbt-version 0.13.17" +SBT_CMD="$SBT_CMD -sbt-version 0.13.18" # repo to publish builds integrationRepoUrl=${integrationRepoUrl-"https://scala-ci.typesafe.com/artifactory/scala-integration/"} diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index 632fc64940cf..10985a2de98e 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -351,31 +351,31 @@ - - - - - - - + + + + + + + - - + + - - - - - - - - - + + + + + + + + + - - - + + + @@ -385,20 +385,20 @@ - - - - - - - + + + + + + + - - - + + + - - + +
diff --git a/test/benchmarks/project/build.properties b/test/benchmarks/project/build.properties index 133a8f197e36..8e682c526d5d 100644 --- a/test/benchmarks/project/build.properties +++ b/test/benchmarks/project/build.properties @@ -1 +1 @@ -sbt.version=0.13.17 +sbt.version=0.13.18 From 11ba5d13b41aca60ce49c726e7c91615ab776882 Mon Sep 17 00:00:00 2001 From: Dale Wijnand Date: Thu, 7 Feb 2019 16:52:13 +0100 Subject: [PATCH 1601/2793] Type-annotate & fully qualify ExecutionContext in its implicitNotFound message Not type annotating an implicit val is only safe in method-local vals, which isn't necessarily where this val will end up, so let's err on the side of caution. And for the same reason, let's fully-qualify the names. --- src/library/scala/concurrent/ExecutionContext.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala index f53add40f1de..7590226e9dcf 100644 --- a/src/library/scala/concurrent/ExecutionContext.scala +++ b/src/library/scala/concurrent/ExecutionContext.scala @@ -68,7 +68,7 @@ If your application does not define an ExecutionContext elsewhere, consider using Scala's global ExecutionContext by defining the following: -implicit val ec = ExecutionContext.global""") +implicit val ec: scala.concurrent.ExecutionContext = scala.concurrent.ExecutionContext.global""") trait ExecutionContext { /** Runs a block of code on this execution context. From fff4ec3539ac58f56fdc8f1382c365f32a9fd25a Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Fri, 8 Feb 2019 14:07:01 -0800 Subject: [PATCH 1602/2793] restore verbatim Apache license the custom copyright info goes in NOTICE, not here --- LICENSE | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/LICENSE b/LICENSE index 5c0cb71d6b61..261eeb9e9f8b 100644 --- a/LICENSE +++ b/LICENSE @@ -1,4 +1,3 @@ - Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ @@ -187,8 +186,7 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright (c) 2002-2019 EPFL - Copyright (c) 2011-2019 Lightbend, Inc. + Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. From 91bf0c78fcbb58bfa0849eb4fa14f8f0a2c3ff7f Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 14 Jan 2019 10:30:01 +1000 Subject: [PATCH 1603/2793] [backport] Use Java rules for member lookup in .java sources - Inherited type declarations are in scope in Java code - For static innner classes, we need to check in the companion module of each base classes. 
- Incorporate and accomodate test case from #6053 - Tests to java code referring to module-class owned classes via companion class prefix Backport of scala/scala#7671 --- .../tools/nsc/typechecker/Contexts.scala | 53 ++++++++++++++----- .../scala/tools/nsc/typechecker/Typers.scala | 37 +++++++------ .../pos/java-inherited-type/Client.scala | 19 +++++++ test/files/pos/java-inherited-type/Test.java | 30 +++++++++++ test/files/pos/java-inherited-type1/J.java | 9 ++++ test/files/pos/java-inherited-type1/S.scala | 9 ++++ .../files/pos/java-inherited-type1/Test.scala | 8 +++ test/files/run/t10490-2.check | 1 + test/files/run/t10490-2/JavaClass.java | 4 ++ test/files/run/t10490-2/ScalaClass.scala | 18 +++++++ test/files/run/t10490.check | 1 + test/files/run/t10490/JavaClass.java | 4 ++ test/files/run/t10490/ScalaClass.scala | 13 +++++ 13 files changed, 178 insertions(+), 28 deletions(-) create mode 100644 test/files/pos/java-inherited-type/Client.scala create mode 100644 test/files/pos/java-inherited-type/Test.java create mode 100644 test/files/pos/java-inherited-type1/J.java create mode 100644 test/files/pos/java-inherited-type1/S.scala create mode 100644 test/files/pos/java-inherited-type1/Test.scala create mode 100644 test/files/run/t10490-2.check create mode 100644 test/files/run/t10490-2/JavaClass.java create mode 100644 test/files/run/t10490-2/ScalaClass.scala create mode 100644 test/files/run/t10490.check create mode 100644 test/files/run/t10490/JavaClass.java create mode 100644 test/files/run/t10490/ScalaClass.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 5b543c8f494b..5b970fe7e79e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -50,6 +50,7 @@ trait Contexts { self: Analyzer => val javaAndScalaList = JavaLangPackage :: ScalaPackage :: Nil val completeList = JavaLangPackage :: ScalaPackage :: PredefModule :: Nil } + private lazy val NoJavaMemberFound = (NoType, NoSymbol) def ambiguousImports(imp1: ImportInfo, imp2: ImportInfo) = LookupAmbiguous(s"it is imported twice in the same scope by\n$imp1\nand $imp2") @@ -1024,7 +1025,7 @@ trait Contexts { self: Analyzer => imp.importedSymbol(name, requireExplicit, record) filter (s => isAccessible(s, imp.qual.tpe, superAccess = false)) private[Contexts] def requiresQualifier(s: Symbol): Boolean = ( - s.owner.isClass + s.owner.isClass && !s.owner.isPackageClass && !s.isTypeParameterOrSkolem && !s.isExistentiallyBound @@ -1074,6 +1075,31 @@ trait Contexts { self: Analyzer => } } + final def javaFindMember(pre: Type, name: Name, qualifies: Symbol => Boolean): (Type, Symbol) = { + val sym = pre.member(name).filter(qualifies) + val preSym = pre.typeSymbol + if (sym.exists || preSym.isPackageClass || !preSym.isClass) (pre, sym) + else { + // In Java code, static innner classes, which we model as members of the companion object, + // can be referenced from an ident in a subclass or by a selection prefixed by the subclass. 
+ val toSearch = if (preSym.isModuleClass) companionSymbolOf(pre.typeSymbol.sourceModule, this).baseClasses else preSym.baseClasses + toSearch.iterator.map { bc => + val pre1 = bc.typeOfThis + val found = pre1.decl(name) + found.filter(qualifies) match { + case NoSymbol => + val pre2 = companionSymbolOf(pre1.typeSymbol, this).typeOfThis + val found = pre2.decl(name).filter(qualifies) + found match { + case NoSymbol => NoJavaMemberFound + case sym => (pre2, sym) + } + case sym => (pre1, sym) + } + }.find(_._2 ne NoSymbol).getOrElse(NoJavaMemberFound) + } + } + } //class Context /** Find the symbol of a simple name starting from this context. @@ -1107,7 +1133,7 @@ trait Contexts { self: Analyzer => } ) def finishDefSym(sym: Symbol, pre0: Type): NameLookup = - if (thisContext.requiresQualifier(sym)) + if (!thisContext.unit.isJava && thisContext.requiresQualifier(sym)) finish(gen.mkAttributedQualifier(pre0), sym) else finish(EmptyTree, sym) @@ -1119,15 +1145,19 @@ trait Contexts { self: Analyzer => ) ) def lookupInPrefix(name: Name) = { - val sym = pre.member(name).filter(qualifies) - def isNonPackageNoModuleClass(sym: Symbol) = - sym.isClass && !sym.isModuleClass && !sym.isPackageClass - if (!sym.exists && thisContext.unit.isJava && isNonPackageNoModuleClass(pre.typeSymbol)) { - // TODO factor out duplication with Typer::inCompanionForJavaStatic - val pre1 = companionSymbolOf(pre.typeSymbol, thisContext).typeOfThis - pre1.member(name).filter(qualifies).andAlso(_ => pre = pre1) - } else sym + if (thisContext.unit.isJava) { + thisContext.javaFindMember(pre, name, qualifies) match { + case (_, NoSymbol) => + NoSymbol + case (pre1, sym) => + pre = pre1 + sym + } + } else { + pre.member(name).filter(qualifies) + } } + def accessibleInPrefix(s: Symbol) = thisContext.isAccessible(s, pre, superAccess = false) @@ -1237,8 +1267,7 @@ trait Contexts { self: Analyzer => } // At this point only one or the other of defSym and impSym might be set. - if (defSym.exists) - finishDefSym(defSym, pre) + if (defSym.exists) finishDefSym(defSym, pre) else if (impSym.exists) { // If we find a competitor imp2 which imports the same name, possible outcomes are: // diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 1fa8add09001..a039d65bbc27 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -554,7 +554,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * @return modified tree and new prefix type */ private def makeAccessible(tree: Tree, sym: Symbol, pre: Type, site: Tree): (Tree, Type) = - if (context.isInPackageObject(sym, pre.typeSymbol)) { + if (!unit.isJava && context.isInPackageObject(sym, pre.typeSymbol)) { if (pre.typeSymbol == ScalaPackageClass && sym.isTerm) { // short cut some aliases. 
It seems pattern matching needs this // to notice exhaustiveness and to generate good code when @@ -671,16 +671,16 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } - /** The member with given name of given qualifier tree */ - def member(qual: Tree, name: Name) = { + /** The member with given name of given qualifier type */ + def member(qual: Type, name: Name): Symbol = { def callSiteWithinClass(clazz: Symbol) = context.enclClass.owner hasTransOwner clazz - val includeLocals = qual.tpe match { + val includeLocals = qual match { case ThisType(clazz) if callSiteWithinClass(clazz) => true case SuperType(clazz, _) if callSiteWithinClass(clazz.typeSymbol) => true case _ => phase.next.erasedTypes } - if (includeLocals) qual.tpe member name - else qual.tpe nonLocalMember name + if (includeLocals) qual member name + else qual nonLocalMember name } def silent[T](op: Typer => T, @@ -1160,7 +1160,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def vanillaAdapt(tree: Tree) = { def applyPossible = { - def applyMeth = member(adaptToName(tree, nme.apply), nme.apply) + def applyMeth = member(adaptToName(tree, nme.apply).tpe, nme.apply) def hasPolymorphicApply = applyMeth.alternatives exists (_.tpe.typeParams.nonEmpty) def hasMonomorphicApply = applyMeth.alternatives exists (_.tpe.paramSectionCount > 0) @@ -1364,7 +1364,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * If no conversion is found, return `qual` unchanged. */ def adaptToName(qual: Tree, name: Name) = - if (member(qual, name) != NoSymbol) qual + if (member(qual.tpe, name) != NoSymbol) qual else adaptToMember(qual, HasMember(name)) private def validateNoCaseAncestor(clazz: Symbol) = { @@ -3380,6 +3380,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (!context.owner.isPackageClass) checkNoDoubleDefs(scope) + // Note that Java units don't have synthetics, but there's no point in making a special case (for performance or correctness), + // as we only type check Java units when running Scaladoc on Java sources. addSynthetics(stats1, scope) } } @@ -5009,11 +5011,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // For Java, instance and static members are in the same scope, but we put the static ones in the companion object // so, when we can't find a member in the class scope, check the companion - def inCompanionForJavaStatic(pre: Type, cls: Symbol, name: Name): Symbol = - if (!(context.unit.isJava && cls.isClass && !cls.isModuleClass)) NoSymbol else { - val companion = companionSymbolOf(cls, context) - if (!companion.exists) NoSymbol - else member(gen.mkAttributedRef(pre, companion), name) // assert(res.isStatic, s"inCompanionForJavaStatic($pre, $cls, $name) = $res ${res.debugFlagString}") + def inCompanionForJavaStatic(cls: Symbol, name: Name): Symbol = + if (!(context.unit.isJava && cls.isClass)) NoSymbol else { + context.javaFindMember(cls.typeOfThis, name, _ => true)._2 } /* Attribute a selection where `tree` is `qual.name`. 
@@ -5032,7 +5032,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper wrapErrors(t, (_.typed1(t, mode, pt))) } - val sym = tree.symbol orElse member(qual, name) orElse inCompanionForJavaStatic(qual.tpe.prefix, qual.symbol, name) + val sym = tree.symbol orElse member(qual.tpe, name) orElse inCompanionForJavaStatic(qual.symbol, name) if ((sym eq NoSymbol) && name != nme.CONSTRUCTOR && mode.inAny(EXPRmode | PATTERNmode)) { // symbol not found? --> try to convert implicitly to a type that does have the required // member. Added `| PATTERNmode` to allow enrichment in patterns (so we can add e.g., an @@ -5149,7 +5149,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (name.isTypeName) { val qualTyped = typedTypeSelectionQualifier(tree.qualifier, WildcardType) val qualStableOrError = - if (qualTyped.isErrorTyped || treeInfo.admitsTypeSelection(qualTyped)) qualTyped + if (qualTyped.isErrorTyped || unit.isJava || treeInfo.admitsTypeSelection(qualTyped)) qualTyped else UnstableTreeError(qualTyped) typedSelect(tree, qualStableOrError, name) } else { @@ -5203,6 +5203,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } // ignore current variable scope in patterns to enforce linearity val startContext = if (mode.typingPatternOrTypePat) context.outer else context + + def asTypeName = if (mode.inAll(MonoQualifierModes) && unit.isJava && name.isTermName) { + startContext.lookupSymbol(name.toTypeName, qualifies).symbol + } else NoSymbol + val nameLookup = tree.symbol match { case NoSymbol => startContext.lookupSymbol(name, qualifies) case sym => LookupSucceeded(EmptyTree, sym) @@ -5212,7 +5217,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case LookupAmbiguous(msg) => issue(AmbiguousIdentError(tree, name, msg)) case LookupInaccessible(sym, msg) => issue(AccessError(tree, sym, context, msg)) case LookupNotFound => - inEmptyPackage orElse lookupInRoot(name) match { + asTypeName orElse inEmptyPackage orElse lookupInRoot(name) match { case NoSymbol => issue(SymbolNotFoundError(tree, name, context.owner, startContext)) case sym => typed1(tree setSymbol sym, mode, pt) } diff --git a/test/files/pos/java-inherited-type/Client.scala b/test/files/pos/java-inherited-type/Client.scala new file mode 100644 index 000000000000..a644363cdd4c --- /dev/null +++ b/test/files/pos/java-inherited-type/Client.scala @@ -0,0 +1,19 @@ +object Client { + def test= { + Test.Outer.Nested.sig + Test.Outer.Nested.sig1 + Test.Outer.Nested.sig2 + val o = new Test.Outer + new o.Nested1().sig + new o.Nested1().sig1 + new o.Nested1().sig2 + } + + def test1 = { + val t = new Test + val o = new t.Outer1 + new o.Nested1().sig + new o.Nested1().sig1 + new o.Nested1().sig2 + } +} diff --git a/test/files/pos/java-inherited-type/Test.java b/test/files/pos/java-inherited-type/Test.java new file mode 100644 index 000000000000..ae89a6559a67 --- /dev/null +++ b/test/files/pos/java-inherited-type/Test.java @@ -0,0 +1,30 @@ +public class Test { + static class OuterBase implements OuterBaseInterface { + static class StaticInner {} + class Inner {} + } + interface OuterBaseInterface { + interface InnerFromInterface {} + } + public static class Outer extends OuterBase { + public static class Nested { + public static P sig; // was: "type StaticInner", "not found: type Inner", "not found: type InnerFromInterface" + public static P sig1; // was: "type StaticInner is not a member of Test.Outer" + public static P sig2; 
+ + } + public class Nested1 { + public P sig; // was: "not found: type StaticInner" + public P sig1; // was: "type StaticInner is not a member of Test.Outer" + public P sig2; + } + } + public class Outer1 extends OuterBase { + public class Nested1 { + public P sig; // was: "not found: type StaticInner" + public P sig1; // was: "type StaticInner is not a member of Test.Outer" + public P sig2; + } + } + public static class P{} +} diff --git a/test/files/pos/java-inherited-type1/J.java b/test/files/pos/java-inherited-type1/J.java new file mode 100644 index 000000000000..ba9963104699 --- /dev/null +++ b/test/files/pos/java-inherited-type1/J.java @@ -0,0 +1,9 @@ +class J extends S { + // These references all work in Javac because `object O { class I }` erases to `O$I` + + void select1(S1.Inner1 i) { new S1.Inner1(); } + void ident(Inner i) {} + + void ident1(Inner1 i) {} + void select(S.Inner i) { new S.Inner(); } +} diff --git a/test/files/pos/java-inherited-type1/S.scala b/test/files/pos/java-inherited-type1/S.scala new file mode 100644 index 000000000000..155efc0e06ba --- /dev/null +++ b/test/files/pos/java-inherited-type1/S.scala @@ -0,0 +1,9 @@ +class S extends S1 +object S { + class Inner +} + +class S1 +object S1 { + class Inner1 +} diff --git a/test/files/pos/java-inherited-type1/Test.scala b/test/files/pos/java-inherited-type1/Test.scala new file mode 100644 index 000000000000..75a171b592af --- /dev/null +++ b/test/files/pos/java-inherited-type1/Test.scala @@ -0,0 +1,8 @@ +object Test { + val j = new J + // force completion of these signatures + j.ident(null); + j.ident1(null); + j.select(null); + j.select1(null); +} diff --git a/test/files/run/t10490-2.check b/test/files/run/t10490-2.check new file mode 100644 index 000000000000..473ecde25dba --- /dev/null +++ b/test/files/run/t10490-2.check @@ -0,0 +1 @@ +Foo$Bar was instantiated! diff --git a/test/files/run/t10490-2/JavaClass.java b/test/files/run/t10490-2/JavaClass.java new file mode 100644 index 000000000000..9774c05a0d91 --- /dev/null +++ b/test/files/run/t10490-2/JavaClass.java @@ -0,0 +1,4 @@ +public class JavaClass { + // This is defined in ScalaClass + public static final Foo.Bar bar = new Foo.Bar(); +} \ No newline at end of file diff --git a/test/files/run/t10490-2/ScalaClass.scala b/test/files/run/t10490-2/ScalaClass.scala new file mode 100644 index 000000000000..0528133cbf2c --- /dev/null +++ b/test/files/run/t10490-2/ScalaClass.scala @@ -0,0 +1,18 @@ +/* Similar to t10490 -- but defines `Foo` in the object. + * Placing this test within t10490 makes it work without a fix, that's why it's independent. + * Note that this was already working, we add it to make sure we don't regress + */ + +class Foo +object Foo { + class Bar { + override def toString: String = "Foo$Bar was instantiated!" + } +} + +object Test { + def main(args: Array[String]): Unit = { + // JavaClass is the user of the Scala defined classes + println(JavaClass.bar) + } +} \ No newline at end of file diff --git a/test/files/run/t10490.check b/test/files/run/t10490.check new file mode 100644 index 000000000000..473ecde25dba --- /dev/null +++ b/test/files/run/t10490.check @@ -0,0 +1 @@ +Foo$Bar was instantiated! 
diff --git a/test/files/run/t10490/JavaClass.java b/test/files/run/t10490/JavaClass.java new file mode 100644 index 000000000000..08b9e0bd55d4 --- /dev/null +++ b/test/files/run/t10490/JavaClass.java @@ -0,0 +1,4 @@ +public class JavaClass { + // This is defined in ScalaClass + public static final Foo.Bar bar = (new Foo()).new Bar(); +} \ No newline at end of file diff --git a/test/files/run/t10490/ScalaClass.scala b/test/files/run/t10490/ScalaClass.scala new file mode 100644 index 000000000000..da3c682b5033 --- /dev/null +++ b/test/files/run/t10490/ScalaClass.scala @@ -0,0 +1,13 @@ +class Foo { + class Bar { + override def toString: String = "Foo$Bar was instantiated!" + } +} + +object Test { + def main(args: Array[String]): Unit = { + // JavaClass is the user of the Scala defined classes + println(JavaClass.bar) + //println(JavaClass.baz) + } +} \ No newline at end of file From d71fc28c007d9d660f920e70572cf2f581d1031c Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 12 Feb 2019 09:48:32 +0100 Subject: [PATCH 1604/2793] Un-deprecate eta-expansion of 0-ary methods --- .../scala/tools/nsc/typechecker/Typers.scala | 50 ++++++------------- test/files/neg/t7187-2.13.check | 6 --- test/files/neg/t7187-2.13.scala | 4 -- test/files/neg/t7187.check | 13 ----- test/files/run/t7187-2.13.scala | 7 --- 5 files changed, 15 insertions(+), 65 deletions(-) delete mode 100644 test/files/neg/t7187-2.13.check delete mode 100644 test/files/neg/t7187-2.13.scala delete mode 100644 test/files/run/t7187-2.13.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 1fa8add09001..cc440700854c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -921,43 +921,23 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper else if (isFunctionType(pt) || (!mt.params.isEmpty && samOf(pt).exists)) { // scala/bug#9536 `!mt.params.isEmpty &&`: for backwards compatibility with 2.11, // we don't adapt a zero-arg method value to a SAM - // In 2.13, we won't do any eta-expansion for zero-arg methods, but we should deprecate first - - debuglog(s"eta-expanding $tree: ${tree.tpe} to $pt") checkParamsConvertible(tree, tree.tpe) - // method values (`m _`) are always eta-expanded (this syntax will disappear once we eta-expand regardless of expected type, at least for arity > 0) - // a "naked" method reference (`m`) may or not be eta expanded -- currently, this depends on the expected type and the arity (the conditions for this are in flux) - def isMethodValue = tree.getAndRemoveAttachment[MethodValueAttachment.type].isDefined - val nakedZeroAryMethod = mt.params.isEmpty && !isMethodValue - - // scala/bug#7187 eta-expansion of zero-arg method value is deprecated - // 2.13 will switch order of (4.3) and (4.2), always inserting () before attempting eta expansion - // (This effectively disables implicit eta-expansion of 0-ary methods.) 
- // See mind-bending stuff like scala/bug#9178 - if (nakedZeroAryMethod && settings.isScala213) emptyApplication - else { - // eventually, we will deprecate insertion of `()` (except for java-defined methods) -- this is already the case in dotty - // Once that's done, we can more aggressively eta-expand method references, even if they are 0-arity - // 2.13 will already eta-expand non-zero-arity methods regardless of expected type (whereas 2.12 requires a function-equivalent type) - if (nakedZeroAryMethod && settings.isScala212) { - currentRun.reporting.deprecationWarning(tree.pos, NoSymbol, - s"Eta-expansion of zero-argument methods is deprecated. To avoid this warning, write ${Function(Nil, Apply(tree, Nil))}.", "2.12.0") - } - - val tree0 = etaExpand(context.unit, tree, this) - - // #2624: need to infer type arguments for eta expansion of a polymorphic method - // context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand) - // need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null - // can't type with the expected type, as we can't recreate the setup in (3) without calling typed - // (note that (3) does not call typed to do the polymorphic type instantiation -- - // it is called after the tree has been typed with a polymorphic expected result type) - if (hasUndets) - instantiate(typed(tree0, mode), mode, pt) - else - typed(tree0, mode, pt) - } + // We changed our mind on deprecating 0-arity eta expansion in https://github.com/scala/scala/pull/7660 + // For history on this, see scala/bug#7187, scala/bug#9178 + // We will deprecate insertion of `()` in 2.13 (except for java-defined methods) and remove it in 2.14 + // Once that's done, we can more aggressively eta-expand method references, even if they are 0-arity. + + val tree0 = etaExpand(context.unit, tree, this) + + // #2624: need to infer type arguments for eta expansion of a polymorphic method + // context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand) + // need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null + // can't type with the expected type, as we can't recreate the setup in (3) without calling typed + // (note that (3) does not call typed to do the polymorphic type instantiation -- + // it is called after the tree has been typed with a polymorphic expected result type) + if (hasUndets) instantiate(typed(tree0, mode), mode, pt) + else typed(tree0, mode, pt) } // (4.3) apply to empty argument list else if (mt.params.isEmpty) emptyApplication diff --git a/test/files/neg/t7187-2.13.check b/test/files/neg/t7187-2.13.check deleted file mode 100644 index e319077612d8..000000000000 --- a/test/files/neg/t7187-2.13.check +++ /dev/null @@ -1,6 +0,0 @@ -t7187-2.13.scala:3: error: type mismatch; - found : String - required: () => Any - val f: () => Any = foo - ^ -one error found diff --git a/test/files/neg/t7187-2.13.scala b/test/files/neg/t7187-2.13.scala deleted file mode 100644 index 6b458dbccbae..000000000000 --- a/test/files/neg/t7187-2.13.scala +++ /dev/null @@ -1,4 +0,0 @@ -class EtaExpandZeroArg { - def foo() = "" - val f: () => Any = foo -} diff --git a/test/files/neg/t7187.check b/test/files/neg/t7187.check index 7290256a5e6c..f6a03e81a615 100644 --- a/test/files/neg/t7187.check +++ b/test/files/neg/t7187.check @@ -1,6 +1,3 @@ -t7187.scala:4: warning: Eta-expansion of zero-argument methods is deprecated. To avoid this warning, write (() => EtaExpandZeroArg.this.foo()). 
- val t1b: () => Any = foo // eta-expansion (deprecated) in 2.12, `()`-insertion in 2.13 - ^ t7187.scala:8: error: _ must follow method; cannot follow () => String val t1f: Any = foo() _ // error: _ must follow method ^ @@ -17,17 +14,7 @@ t7187.scala:15: error: not enough arguments for method apply: (index: Int)Char i Unspecified value parameter index. val t2e: Any = bar() _ // error: not enough arguments for method apply ^ -t7187.scala:18: warning: Eta-expansion of zero-argument methods is deprecated. To avoid this warning, write (() => EtaExpandZeroArg.this.baz()). - val t3a: () => Any = baz // eta-expansion (deprecated) in 2.12, error in 2.13 - ^ t7187.scala:21: error: _ must follow method; cannot follow String val t3d: Any = baz() _ // error: _ must follow method ^ -t7187.scala:24: warning: Eta-expansion of zero-argument methods is deprecated. To avoid this warning, write (() => EtaExpandZeroArg.this.zap()). - val t4a: () => Any = zap // eta-expansion (deprecated) in 2.12, error in 2.13 - ^ -t7187.scala:25: warning: Eta-expansion of zero-argument methods is deprecated. To avoid this warning, write (() => EtaExpandZeroArg.this.zap()()). - val t4b: () => Any = zap() // ditto - ^ -four warnings found 5 errors found diff --git a/test/files/run/t7187-2.13.scala b/test/files/run/t7187-2.13.scala deleted file mode 100644 index e6e2dd9cd654..000000000000 --- a/test/files/run/t7187-2.13.scala +++ /dev/null @@ -1,7 +0,0 @@ -object Test { - def foo(): () => String = () => "" - val f: () => Any = foo - def main(args: Array[String]): Unit = { - assert(f() == "") - } -} From 628cd44be94fb3d4b2ed835a21ae0314c43d81e3 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 13 Feb 2019 17:51:10 -0800 Subject: [PATCH 1605/2793] fix regression in handling of local `synchronized` methods references scala/bug#11331 (the original bug) references scala/scala#7593 (PR that fixed the bug but also caused a regression) --- src/compiler/scala/tools/nsc/transform/Delambdafy.scala | 4 +++- test/files/run/pr7593.check | 1 + test/files/run/pr7593.scala | 7 +++++++ 3 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 test/files/run/pr7593.check create mode 100644 test/files/run/pr7593.scala diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index 1bfef5aff429..4957ac5e89b4 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -307,7 +307,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // scala/bug#9390 emit lifted methods that don't require a `this` reference as STATIC // delambdafy targets are excluded as they are made static by `transformFunction`. 
// a synchronized method cannot be static (`methodReferencesThis` will not see the implicit this reference due to `this.synchronized`) - if (!dd.symbol.hasFlag(STATIC | SYNCHRONIZED) && !methodReferencesThis(dd.symbol)) { + if (!dd.symbol.hasFlag(STATIC) && !methodReferencesThis(dd.symbol)) { dd.symbol.setFlag(STATIC) dd.symbol.removeAttachment[mixer.NeedStaticImpl.type] } @@ -392,6 +392,8 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre private var currentMethod: Symbol = NoSymbol override def traverse(tree: Tree) = tree match { + case _: DefDef if tree.symbol.hasFlag(SYNCHRONIZED) => + thisReferringMethods add tree.symbol case DefDef(_, _, _, _, _, _) if tree.symbol.isDelambdafyTarget || tree.symbol.isLiftedMethod => // we don't expect defs within defs. At this phase trees should be very flat if (currentMethod.exists) devWarning("Found a def within a def at a phase where defs are expected to be flattened out.") diff --git a/test/files/run/pr7593.check b/test/files/run/pr7593.check new file mode 100644 index 000000000000..5716ca5987cb --- /dev/null +++ b/test/files/run/pr7593.check @@ -0,0 +1 @@ +bar diff --git a/test/files/run/pr7593.scala b/test/files/run/pr7593.scala new file mode 100644 index 000000000000..eac03abf8673 --- /dev/null +++ b/test/files/run/pr7593.scala @@ -0,0 +1,7 @@ +object Test { + def main(args: Array[String]): Unit = { + def foo = synchronized { "bar" } + val eta = foo _ + println(eta()) + } +} From 7b648b3a732aac5cefb22011b0198e007a6c4384 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 14 Feb 2019 14:52:23 +1000 Subject: [PATCH 1606/2793] Make inferred packedType-s determistic in bytecode Prior to this patch, the enclosed test failed with: ``` --- a/C.class.scalap +++ b/C.class.scalap @@ -1,4 +1,4 @@ class C extends scala.AnyRef { def this() = { /* compiled code */ } - def foo: scala.Tuple2[scala.Tuple5[A.type, B.type, C.type, D.type, E.type], scala.Tuple5[A.type, B.type, C.type, D.type, E.type]] forSome {type C.type <: scala.AnyRef with scala.Singleton; type E.type <: scala.AnyRef with scala.Singleton; type D.type <: scala.AnyRef with scala.Singleton; type A.type <: scala.AnyRef with scala.Singleton; type B.type <: scala.AnyRef with scala.Singleton} = { /* compiled code */ } + def foo: scala.Tuple2[scala.Tuple5[A.type, B.type, C.type, D.type, E.type], scala.Tuple5[A.type, B.type, C.type, D.type, E.type]] forSome {type C.type <: scala.AnyRef with scala.Singleton; type B.type <: scala.AnyRef with scala.Singleton; type E.type <: scala.AnyRef with scala.Singleton; type A.type <: scala.AnyRef with scala.Singleton; type D.type <: scala.AnyRef with scala.Singleton} = { /* compiled code */ } } ``` --- .../scala/tools/nsc/typechecker/Typers.scala | 4 ++-- test/junit/scala/tools/nsc/DeterminismTest.scala | 13 +++++++++++++ 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 1fa8add09001..05f4a25b2f12 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4063,8 +4063,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper while (o != owner && o != NoSymbol && !o.hasPackageFlag) o = o.owner o == owner && !isVisibleParameter(sym) } - var localSyms = immutable.Set[Symbol]() - var boundSyms = immutable.Set[Symbol]() + val localSyms = mutable.LinkedHashSet[Symbol]() + val boundSyms = 
mutable.LinkedHashSet[Symbol]() def isLocal(sym: Symbol): Boolean = if (sym == NoSymbol || sym.isRefinementClass || sym.isLocalDummy) false else if (owner == NoSymbol) tree exists (defines(_, sym)) diff --git a/test/junit/scala/tools/nsc/DeterminismTest.scala b/test/junit/scala/tools/nsc/DeterminismTest.scala index fabd2eb9e87f..9f79709cca58 100644 --- a/test/junit/scala/tools/nsc/DeterminismTest.scala +++ b/test/junit/scala/tools/nsc/DeterminismTest.scala @@ -263,6 +263,19 @@ class DeterminismTest { test(List(javaAnnots) :: code :: Nil) } + @Test def testPackedType(): Unit = { + def code = List[SourceFile]( + source("a.scala", + """ + | class C { + | def foo = { object A; object B; object C; object D; object E; object F; def foo[A](a: A) = (a, a); foo((A, B, C, D, E))} + | } + | + """.stripMargin) + ) + test(List(code)) + } + def source(name: String, code: String): SourceFile = new BatchSourceFile(name, code) private def test(groups: List[List[SourceFile]]): Unit = { val referenceOutput = Files.createTempDirectory("reference") From 7c4747703fec3f6015941b4f867c7c58ed5d4fdd Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 17 Feb 2019 15:37:11 +1000 Subject: [PATCH 1607/2793] Support scalac -release on JDK 12+ The ct.sym file now contains the module name in the paths. --- .../scala/tools/nsc/classpath/DirectoryClassPath.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala index 9f51672e79a6..44a67ab5d849 100644 --- a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala @@ -247,9 +247,11 @@ final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends Clas // e.g. 
"java.lang" -> Seq(/876/java/lang, /87/java/lang, /8/java/lang)) private val packageIndex: scala.collection.Map[String, Seq[Path]] = { val index = collection.mutable.AnyRefMap[String, collection.mutable.ListBuffer[Path]]() + val isJava12OrHigher = scala.util.Properties.isJavaAtLeast("12") rootsForRelease.foreach(root => Files.walk(root).iterator().asScala.filter(Files.isDirectory(_)).foreach { p => - if (p.getNameCount > 1) { - val packageDotted = p.subpath(1, p.getNameCount).toString.replace('/', '.') + val moduleNamePathElementCount = if (isJava12OrHigher) 1 else 0 + if (p.getNameCount > root.getNameCount + moduleNamePathElementCount) { + val packageDotted = p.subpath(moduleNamePathElementCount + root.getNameCount, p.getNameCount).toString.replace('/', '.') index.getOrElseUpdate(packageDotted, new collection.mutable.ListBuffer) += p } }) From ccdf5ffa5fd0c771ad2a117ff4a1e32dcd4f3a5e Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 19 Feb 2019 10:13:53 +1000 Subject: [PATCH 1608/2793] Remove duplicated check for unit.isJava Co-Authored-By: retronym --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index ce298b78f3d7..0a5f69880cd5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1759,7 +1759,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper checkStablePrefixClassType(parent) if (psym != superclazz) { - if (context.unit.isJava && context.unit.isJava && psym.isJavaAnnotation) { + if (context.unit.isJava && psym.isJavaAnnotation) { // allowed } else if (psym.isTrait) { val ps = psym.info.parents From cfbb2a2d16846f1f366a8596fbaba1c5f62e5ced Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 15 Jan 2019 10:03:09 +1000 Subject: [PATCH 1609/2793] [backport] Resource management for macro/plugin classloaders, classpath JARs Backports: - https://github.com/scala/scala/pull/7366 - https://github.com/scala/scala/pull/7644 --- .../macros/runtime/MacroRuntimes.scala | 13 +- .../scala/tools/nsc/CloseableRegistry.scala | 34 +++++ .../tools/nsc/GenericRunnerSettings.scala | 9 +- src/compiler/scala/tools/nsc/Global.scala | 13 +- .../tools/nsc/backend/JavaPlatform.scala | 2 +- .../nsc/classpath/ClassPathFactory.scala | 12 +- .../nsc/classpath/DirectoryClassPath.scala | 16 +- .../ZipAndJarFileLookupFactory.scala | 139 +++++++++++++++--- .../nsc/classpath/ZipArchiveFileLookup.scala | 6 +- .../scala/tools/nsc/plugins/Plugin.scala | 75 ++-------- .../scala/tools/nsc/plugins/Plugins.scala | 80 +++++++++- .../tools/nsc/settings/ScalaSettings.scala | 4 +- .../scala/tools/nsc/typechecker/Macros.scala | 48 +----- .../scala/tools/reflect/ReflectGlobal.scala | 20 +-- .../scala/tools/reflect/ReflectMain.scala | 6 +- .../scala/tools/util/PathResolver.scala | 30 ++-- .../scala/tools/partest/BytecodeTest.scala | 3 +- .../scala/reflect/internal/SymbolTable.scala | 18 +++ src/reflect/scala/reflect/io/ZipArchive.scala | 17 +++ .../scala/tools/nsc/interpreter/IMain.scala | 8 +- .../interpreter/PresentationCompilation.scala | 11 +- .../tools/nsc/interpreter/ReplGlobal.scala | 17 +-- src/scalap/scala/tools/scalap/Main.scala | 29 ++-- .../nsc/GlobalCustomizeClassloaderTest.scala | 72 +++++++++ .../nsc/classpath/JrtClassPathTest.scala | 11 +- .../nsc/classpath/PathResolverBaseTest.scala | 6 +- 
.../ZipAndJarFileLookupFactoryTest.scala | 8 +- .../symtab/SymbolTableForUnitTesting.scala | 2 +- 28 files changed, 486 insertions(+), 223 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/CloseableRegistry.scala create mode 100644 test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala index 73520dffb925..557385744ac2 100644 --- a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala +++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala @@ -54,19 +54,8 @@ trait MacroRuntimes extends JavaReflectionRuntimes { /** Macro classloader that is used to resolve and run macro implementations. * Loads classes from from -cp (aka the library classpath). * Is also capable of detecting REPL and reusing its classloader. - * - * When -Xmacro-jit is enabled, we sometimes fallback to on-the-fly compilation of macro implementations, - * which compiles implementations into a virtual directory (very much like REPL does) and then conjures - * a classloader mapped to that virtual directory. */ - private lazy val defaultMacroClassloaderCache = { - def attemptClose(loader: ClassLoader): Unit = loader match { - case u: URLClassLoader => debuglog("Closing macro runtime classloader"); u.close() - case afcl: AbstractFileClassLoader => attemptClose(afcl.getParent) - case _ => ??? - } - perRunCaches.newGeneric(findMacroClassLoader, attemptClose _) - } + private lazy val defaultMacroClassloaderCache: () => ClassLoader = perRunCaches.newGeneric(findMacroClassLoader()) def defaultMacroClassloader: ClassLoader = defaultMacroClassloaderCache() /** Abstracts away resolution of macro runtimes. diff --git a/src/compiler/scala/tools/nsc/CloseableRegistry.scala b/src/compiler/scala/tools/nsc/CloseableRegistry.scala new file mode 100644 index 000000000000..9812a2136263 --- /dev/null +++ b/src/compiler/scala/tools/nsc/CloseableRegistry.scala @@ -0,0 +1,34 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. 
+ */ + +package scala.tools.nsc + +import scala.util.control.NonFatal + +/** Registry for resources to close when `Global` is closed */ +final class CloseableRegistry { + private[this] var closeables: List[java.io.Closeable] = Nil + final def registerClosable(c: java.io.Closeable): Unit = { + closeables ::= c + } + + def close(): Unit = { + for (c <- closeables) { + try { + c.close() + } catch { + case NonFatal(_) => + } + } + closeables = Nil + } +} diff --git a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala index cb26b4d9d666..fcc829b2e64d 100644 --- a/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala +++ b/src/compiler/scala/tools/nsc/GenericRunnerSettings.scala @@ -16,7 +16,14 @@ import java.net.URL import scala.tools.util.PathResolver class GenericRunnerSettings(error: String => Unit) extends Settings(error) { - lazy val classpathURLs: Seq[URL] = new PathResolver(this).resultAsURLs + lazy val classpathURLs: Seq[URL] = { + val registry = new CloseableRegistry + try { + new PathResolver(this, registry).resultAsURLs + } finally { + registry.close() + } + } val howtorun = ChoiceSetting( diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 93fd46d01887..8b578c8ba9ea 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -40,9 +40,11 @@ import scala.language.postfixOps import scala.tools.nsc.ast.{TreeGen => AstTreeGen} import scala.tools.nsc.classpath._ import scala.tools.nsc.profile.Profiler +import java.io.Closeable class Global(var currentSettings: Settings, reporter0: Reporter) extends SymbolTable + with Closeable with CompilationUnits with Plugins with PhaseAssembly @@ -817,7 +819,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) /** Extend classpath of `platform` and rescan updated packages. */ def extendCompilerClassPath(urls: URL*): Unit = { - val urlClasspaths = urls.map(u => ClassPathFactory.newClassPath(AbstractFile.getURL(u), settings)) + val urlClasspaths = urls.map(u => ClassPathFactory.newClassPath(AbstractFile.getURL(u), settings, closeableRegistry)) val newClassPath = AggregateClassPath.createAggregate(platform.classPath +: urlClasspaths : _*) platform.currentClassPath = Some(newClassPath) invalidateClassPathEntries(urls.map(_.getPath): _*) @@ -879,7 +881,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } entries(classPath) find matchesCanonical match { case Some(oldEntry) => - Some(oldEntry -> ClassPathFactory.newClassPath(dir, settings)) + Some(oldEntry -> ClassPathFactory.newClassPath(dir, settings, closeableRegistry)) case None => error(s"Error adding entry to classpath. 
During invalidation, no entry named $path in classpath $classPath") None @@ -1706,6 +1708,13 @@ class Global(var currentSettings: Settings, reporter0: Reporter) } def createJavadoc = false + + final val closeableRegistry: CloseableRegistry = new CloseableRegistry + + def close(): Unit = { + perRunCaches.clearAll() + closeableRegistry.close() + } } object Global { diff --git a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala index 2d609dcb17a6..9cbdf1dcadab 100644 --- a/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala +++ b/src/compiler/scala/tools/nsc/backend/JavaPlatform.scala @@ -27,7 +27,7 @@ trait JavaPlatform extends Platform { private[nsc] var currentClassPath: Option[ClassPath] = None protected[nsc] def classPath: ClassPath = { - if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings).result) + if (currentClassPath.isEmpty) currentClassPath = Some(new PathResolver(settings, global.closeableRegistry).result) currentClassPath.get } diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala index fa9166483594..f2fb2b0224d7 100644 --- a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala @@ -14,7 +14,7 @@ package scala.tools.nsc.classpath import scala.reflect.io.{AbstractFile, VirtualDirectory} import scala.reflect.io.Path.string2path -import scala.tools.nsc.Settings +import scala.tools.nsc.{CloseableRegistry, Settings} import FileUtils.AbstractFileOps import scala.tools.nsc.util.ClassPath @@ -22,11 +22,11 @@ import scala.tools.nsc.util.ClassPath * Provides factory methods for classpath. When creating classpath instances for a given path, * it uses proper type of classpath depending on a types of particular files containing sources or classes. */ -class ClassPathFactory(settings: Settings) { +class ClassPathFactory(settings: Settings, closeableRegistry: CloseableRegistry) { /** * Create a new classpath based on the abstract file. */ - def newClassPath(file: AbstractFile): ClassPath = ClassPathFactory.newClassPath(file, settings) + def newClassPath(file: AbstractFile): ClassPath = ClassPathFactory.newClassPath(file, settings, closeableRegistry) /** * Creators for sub classpaths which preserve this context. 
@@ -70,7 +70,7 @@ class ClassPathFactory(settings: Settings) { private def createSourcePath(file: AbstractFile): ClassPath = if (file.isJarOrZip) - ZipAndJarSourcePathFactory.create(file, settings) + ZipAndJarSourcePathFactory.create(file, settings, closeableRegistry) else if (file.isDirectory) DirectorySourcePath(file.file) else @@ -78,11 +78,11 @@ class ClassPathFactory(settings: Settings) { } object ClassPathFactory { - def newClassPath(file: AbstractFile, settings: Settings): ClassPath = file match { + def newClassPath(file: AbstractFile, settings: Settings, closeableRegistry: CloseableRegistry): ClassPath = file match { case vd: VirtualDirectory => VirtualDirectoryClassPath(vd) case _ => if (file.isJarOrZip) - ZipAndJarClassPathFactory.create(file, settings) + ZipAndJarClassPathFactory.create(file, settings, closeableRegistry) else if (file.isDirectory) DirectoryClassPath(file.file) else diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala index 44a67ab5d849..96a4b51e9f0c 100644 --- a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala @@ -12,7 +12,7 @@ package scala.tools.nsc.classpath -import java.io.File +import java.io.{Closeable, File} import java.net.{URI, URL} import java.nio.file.{FileSystems, Files, SimpleFileVisitor} import java.util.function.IntFunction @@ -25,6 +25,7 @@ import FileUtils._ import scala.collection.JavaConverters._ import scala.collection.immutable import scala.reflect.internal.JDK9Reflectors +import scala.tools.nsc.CloseableRegistry import scala.tools.nsc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames} /** @@ -61,6 +62,7 @@ trait DirectoryLookup[FileEntryType <: ClassRepresentation] extends ClassPath { private[nsc] def packages(inPackage: String): Seq[PackageEntry] = { val dirForPackage = getDirectory(inPackage) + val nestedDirs: Array[F] = dirForPackage match { case None => emptyFiles case Some(directory) => listChildren(directory, Some(isPackage)) @@ -137,7 +139,7 @@ trait JFileDirectoryLookup[FileEntryType <: ClassRepresentation] extends Directo object JrtClassPath { import java.nio.file._, java.net.URI - def apply(release: Option[String]): Option[ClassPath] = { + def apply(release: Option[String], closeableRegistry: CloseableRegistry): Option[ClassPath] = { import scala.util.Properties._ if (!isJavaAtLeast("9")) None else { @@ -154,7 +156,11 @@ object JrtClassPath { try { val ctSym = Paths.get(javaHome).resolve("lib").resolve("ct.sym") if (Files.notExists(ctSym)) None - else Some(new CtSymClassPath(ctSym, v.toInt)) + else { + val classPath = new CtSymClassPath(ctSym, v.toInt) + closeableRegistry.registerClosable(classPath) + Some(classPath) + } } catch { case _: Throwable => None } @@ -230,7 +236,7 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No /** * Implementation `ClassPath` based on the $JAVA_HOME/lib/ct.sym backing http://openjdk.java.net/jeps/247 */ -final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths { +final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends ClassPath with NoSourcePaths with Closeable { import java.nio.file.Path, java.nio.file._ private val fileSystem: FileSystem = FileSystems.newFileSystem(ctSym, null) @@ -278,7 +284,7 @@ final class CtSymClassPath(ctSym: java.nio.file.Path, release: Int) extends Clas def asURLs: Seq[URL] = Nil def 
asClassPathStrings: Seq[String] = Nil - + override def close(): Unit = fileSystem.close() def findClassFile(className: String): Option[AbstractFile] = { if (!className.contains(".")) None else { diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 6f8b9a55c0cd..acb41185353e 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -12,16 +12,19 @@ package scala.tools.nsc.classpath -import java.io.File +import java.io.{Closeable, File} import java.net.URL import java.nio.file.Files import java.nio.file.attribute.{BasicFileAttributes, FileTime} +import java.util.{Timer, TimerTask} +import java.util.concurrent.atomic.AtomicInteger import scala.annotation.tailrec import scala.reflect.io.{AbstractFile, FileZipArchive, ManifestResources} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} -import scala.tools.nsc.Settings +import scala.tools.nsc.{CloseableRegistry, Settings} import FileUtils._ +import scala.tools.nsc.io.Jar /** * A trait providing an optional cache for classpath entries obtained from zip and jar files. @@ -29,18 +32,20 @@ import FileUtils._ * when there are a lot of projects having a lot of common dependencies. */ sealed trait ZipAndJarFileLookupFactory { - private val cache = new FileBasedCache[ClassPath] - - def create(zipFile: AbstractFile, settings: Settings): ClassPath = { - if (settings.YdisableFlatCpCaching || zipFile.file == null) createForZipFile(zipFile, settings.releaseValue) - else createUsingCache(zipFile, settings) + private val cache = new FileBasedCache[ClassPath with Closeable] + + def create(zipFile: AbstractFile, settings: Settings, closeableRegistry: CloseableRegistry): ClassPath = { + cache.checkCacheability(zipFile.toURL :: Nil, checkStamps = true, disableCache = settings.YdisableFlatCpCaching.value || zipFile.file == null) match { + case Left(_) => + val result: ClassPath with Closeable = createForZipFile(zipFile, settings.releaseValue) + closeableRegistry.registerClosable(result) + result + case Right(Seq(path)) => + cache.getOrCreate(List(path), () => createForZipFile(zipFile, settings.releaseValue), closeableRegistry, checkStamps = true) + } } - protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath - - private def createUsingCache(zipFile: AbstractFile, settings: Settings): ClassPath = { - cache.getOrCreate(List(zipFile.file.toPath), () => createForZipFile(zipFile, settings.releaseValue)) - } + protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath with Closeable } /** @@ -75,7 +80,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { * with a particularly prepared scala-library.jar. It should have all classes listed in the manifest like e.g. 
this entry: * Name: scala/Function2$mcFJD$sp.class */ - private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths { + private case class ManifestResourcesClassPath(file: ManifestResources) extends ClassPath with NoSourcePaths with Closeable { override def findClassFile(className: String): Option[AbstractFile] = { val (pkg, simpleClassName) = PackageNameUtils.separatePkgAndClassNames(className) classes(pkg).find(_.name == simpleClassName).map(_.file) @@ -84,6 +89,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { override def asClassPathStrings: Seq[String] = Seq(file.path) override def asURLs: Seq[URL] = file.toURLs() + override def close(): Unit = file.close() import ManifestResourcesClassPath.PackageFileInfo import ManifestResourcesClassPath.PackageInfo @@ -152,7 +158,7 @@ object ZipAndJarClassPathFactory extends ZipAndJarFileLookupFactory { case class PackageInfo(packageName: String, subpackages: List[AbstractFile]) } - override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = + override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath with Closeable = if (zipFile.file == null) createWithoutUnderlyingFile(zipFile) else ZipArchiveClassPath(zipFile.file, release) @@ -183,28 +189,107 @@ object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { override protected def isRequiredFileType(file: AbstractFile): Boolean = file.isScalaOrJavaSource } - override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath = ZipArchiveSourcePath(zipFile.file) + override protected def createForZipFile(zipFile: AbstractFile, release: Option[String]): ClassPath with Closeable = ZipArchiveSourcePath(zipFile.file) } final class FileBasedCache[T] { import java.nio.file.Path private case class Stamp(lastModified: FileTime, fileKey: Object) - private val cache = collection.mutable.Map.empty[Seq[Path], (Seq[Stamp], T)] + private case class Entry(stamps: Seq[Stamp], t: T) { + val referenceCount: AtomicInteger = new AtomicInteger(1) + } + private val cache = collection.mutable.Map.empty[Seq[Path], Entry] + + private def referenceCountDecrementer(e: Entry, paths: Seq[Path]): Closeable = new Closeable { + var closed = false + override def close(): Unit = { + if (!closed) { + closed = true + val count = e.referenceCount.decrementAndGet() + if (count == 0) { + e.t match { + case cl: Closeable => + FileBasedCache.timer match { + case Some(timer) => + val task = new TimerTask { + override def run(): Unit = { + cache.synchronized { + if (e.referenceCount.compareAndSet(0, -1)) { + cache.remove(paths) + cl.close() + } + } + } + } + timer.schedule(task, FileBasedCache.deferCloseMs.toLong) + case None => + cl.close() + } + case _ => + } + } + } + } + } - def getOrCreate(paths: Seq[Path], create: () => T): T = cache.synchronized { - val stamps = paths.map { path => + def checkCacheability(urls: Seq[URL], checkStamps: Boolean, disableCache: Boolean): Either[String, Seq[java.nio.file.Path]] = { + import scala.reflect.io.{AbstractFile, Path} + lazy val urlsAndFiles = urls.filterNot(_.getProtocol == "jrt").map(u => u -> AbstractFile.getURL(u)) + lazy val paths = urlsAndFiles.map(t => Path(t._2.file).jfile.toPath) + if (!checkStamps) Right(paths) + else if (disableCache) Left("caching is disabled due to a policy setting") + else { + val nonJarZips = urlsAndFiles.filter { case (url, file) => file == null || 
!Jar.isJarOrZip(file.file) } + if (nonJarZips.nonEmpty) Left(s"caching is disabled because of the following classpath elements: ${nonJarZips.map(_._1).mkString(", ")}.") + else Right(paths) + } + } + + def getOrCreate(paths: Seq[Path], create: () => T, closeableRegistry: CloseableRegistry, checkStamps: Boolean): T = cache.synchronized { + val stamps = if (!checkStamps) Nil else paths.map { path => + try { val attrs = Files.readAttributes(path, classOf[BasicFileAttributes]) val lastModified = attrs.lastModifiedTime() // only null on some platforms, but that's okay, we just use the last modified timestamp as our stamp val fileKey = attrs.fileKey() Stamp(lastModified, fileKey) + } catch { + case ex: java.nio.file.NoSuchFileException => + // Dummy stamp for (currently) non-existent file. + Stamp(FileTime.fromMillis(0), new Object) + } } cache.get(paths) match { - case Some((cachedStamps, cached)) if cachedStamps == stamps => cached + case Some(e@Entry(cachedStamps, cached)) => + if (!checkStamps || cachedStamps == stamps) { + // Cache hit + val count = e.referenceCount.incrementAndGet() + assert(count > 0, (stamps, count)) + closeableRegistry.registerClosable(referenceCountDecrementer(e, paths)) + cached + } else { + // Cache miss: we found an entry but the underlying files have been modified + cached match { + case c: Closeable => + if (e.referenceCount.get() == 0) { + c.close() + } else { + // TODO: What do do here? Maybe add to a list of closeables polled by a cleanup thread? + } + } + val value = create() + val entry = Entry(stamps, value) + cache.put(paths, entry) + closeableRegistry.registerClosable(referenceCountDecrementer(entry, paths)) + value + } case _ => + // Cache miss val value = create() - cache.put(paths, (stamps, value)) + val entry = Entry(stamps, value) + cache.put(paths, entry) + closeableRegistry.registerClosable(referenceCountDecrementer(entry, paths)) value } } @@ -215,3 +300,17 @@ final class FileBasedCache[T] { cache.clear() } } + +object FileBasedCache { + // The tension here is that too long a delay could lead to an error (on Windows) with an inability + // to overwrite the JAR. To short a delay and the entry could be evicted before a subsequent + // sub-project compilation is able to get a cache hit. A more comprehensive solution would be to + // involve build tools in the policy: they could close entries with refcount of zero when that + // entry's JAR is about to be overwritten. + private val deferCloseMs = Integer.getInteger("scalac.filebasedcache.defer.close.ms", 1000) + private val timer: Option[Timer] = { + if (deferCloseMs > 0) + Some(new java.util.Timer(true)) + else None + } +} diff --git a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala index 32ec4cde4485..c658d4c01664 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipArchiveFileLookup.scala @@ -12,7 +12,7 @@ package scala.tools.nsc.classpath -import java.io.File +import java.io.{Closeable, File} import java.net.URL import scala.collection.Seq import scala.reflect.io.AbstractFile @@ -25,7 +25,7 @@ import scala.tools.nsc.util.{ClassPath, ClassRepresentation} * It provides common logic for classes handling class and source files. * It's aware of things like e.g. META-INF directory which is correctly skipped. 
*/ -trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPath { +trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPath with Closeable { val zipFile: File def release: Option[String] @@ -33,8 +33,8 @@ trait ZipArchiveFileLookup[FileEntryType <: ClassRepresentation] extends ClassPa override def asURLs: Seq[URL] = Seq(zipFile.toURI.toURL) override def asClassPathStrings: Seq[String] = Seq(zipFile.getPath) - private val archive = new FileZipArchive(zipFile, release) + override def close(): Unit = archive.close() override private[nsc] def packages(inPackage: String): Seq[PackageEntry] = { val prefix = PackageNameUtils.packagePrefix(inPackage) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index b76f67ccf6aa..9c0f2db89446 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -93,40 +93,7 @@ object Plugin { private val PluginXML = "scalac-plugin.xml" - private val pluginClassLoadersCache = new FileBasedCache[ScalaClassLoader]() - - /** Create a class loader with the specified locations plus - * the loader that loaded the Scala compiler. - * - * If the class loader has already been created before and the - * file stamps are the same, the previous loader is returned to - * mitigate the cost of dynamic classloading as it has been - * measured in https://github.com/scala/scala-dev/issues/458. - */ - private def loaderFor(locations: Seq[Path], disableCache: Boolean): ScalaClassLoader = { - def newLoader = () => { - val compilerLoader = classOf[Plugin].getClassLoader - val urls = locations map (_.toURL) - ScalaClassLoader fromURLs (urls, compilerLoader) - } - - if (disableCache || locations.exists(!Jar.isJarOrZip(_))) newLoader() - else pluginClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader) - } - - /** Try to load a plugin description from the specified location. 
- */ - private def loadDescriptionFromJar(jarp: Path): Try[PluginDescription] = { - // XXX Return to this once we have more ARM support - def read(is: Option[InputStream]) = is match { - case None => throw new PluginLoadException(jarp.path, s"Missing $PluginXML in $jarp") - case Some(is) => PluginDescription.fromXML(is) - } - Try(new Jar(jarp.jfile).withEntryStream(PluginXML)(read)) - } - - private def loadDescriptionFromFile(f: Path): Try[PluginDescription] = - Try(PluginDescription.fromXML(new java.io.FileInputStream(f.jfile))) + private[nsc] val pluginClassLoadersCache = new FileBasedCache[ScalaClassLoader.URLClassLoader]() type AnyClass = Class[_] @@ -155,40 +122,26 @@ object Plugin { paths: List[List[Path]], dirs: List[Path], ignoring: List[String], - disableClassLoaderCache: Boolean): List[Try[AnyClass]] = + findPluginClassloader: (Seq[Path] => ClassLoader)): List[Try[AnyClass]] = { - // List[(jar, Try(descriptor))] in dir - def scan(d: Directory) = - d.files.toList sortBy (_.name) filter (Jar isJarOrZip _) map (j => (j, loadDescriptionFromJar(j))) - type PDResults = List[Try[(PluginDescription, ScalaClassLoader)]] - // scan plugin dirs for jars containing plugins, ignoring dirs with none and other jars - val fromDirs: PDResults = dirs filter (_.isDirectory) flatMap { d => - scan(d.toDirectory) collect { - case (j, Success(pd)) => Success((pd, loaderFor(Seq(j), disableClassLoaderCache))) + val fromLoaders = paths.map {path => + val loader = findPluginClassloader(path) + loader.getResource(PluginXML) match { + case null => Failure(new MissingPluginException(path)) + case url => + val inputStream = url.openStream + try { + Try((PluginDescription.fromXML(inputStream), loader)) + } finally { + inputStream.close() + } } } - // scan jar paths for plugins, taking the first plugin you find. - // a path element can be either a plugin.jar or an exploded dir. 
- def findDescriptor(ps: List[Path]) = { - def loop(qs: List[Path]): Try[PluginDescription] = qs match { - case Nil => Failure(new MissingPluginException(ps)) - case p :: rest => - if (p.isDirectory) loadDescriptionFromFile(p.toDirectory / PluginXML) orElse loop(rest) - else if (p.isFile) loadDescriptionFromJar(p.toFile) orElse loop(rest) - else loop(rest) - } - loop(ps) - } - val fromPaths: PDResults = paths map (p => (p, findDescriptor(p))) map { - case (p, Success(pd)) => Success((pd, loaderFor(p, disableClassLoaderCache))) - case (_, Failure(e)) => Failure(e) - } - val seen = mutable.HashSet[String]() - val enabled = (fromPaths ::: fromDirs) map { + val enabled = fromLoaders map { case Success((pd, loader)) if seen(pd.classname) => // a nod to scala/bug#7494, take the plugin classes distinctly Failure(new PluginLoadException(pd.name, s"Ignoring duplicate plugin ${pd.name} (${pd.classname})")) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index bba855ba541a..386bdc4ab1a8 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -13,7 +13,14 @@ package scala.tools.nsc package plugins +import java.net.URL + +import scala.reflect.internal.util.ScalaClassLoader import scala.reflect.io.Path +import scala.tools.nsc +import scala.tools.nsc.io.Jar +import scala.tools.nsc.plugins.Plugin.pluginClassLoadersCache +import scala.tools.nsc.typechecker.Macros import scala.tools.nsc.util.ClassPath import scala.tools.util.PathResolver.Defaults @@ -37,7 +44,7 @@ trait Plugins { global: Global => def injectDefault(s: String) = if (s.isEmpty) Defaults.scalaPluginPath else s asPath(settings.pluginsDir.value) map injectDefault map Path.apply } - val maybes = Plugin.loadAllFrom(paths, dirs, settings.disable.value, settings.YcachePluginClassLoader.value == settings.CachePolicy.None.name) + val maybes = Plugin.loadAllFrom(paths, dirs, settings.disable.value, findPluginClassLoader(_)) val (goods, errors) = maybes partition (_.isSuccess) // Explicit parameterization of recover to avoid -Xlint warning about inferred Any errors foreach (_.recover[Any] { @@ -53,6 +60,43 @@ trait Plugins { global: Global => classes map (Plugin.instantiate(_, this)) } + /** + * Locate or create the classloader to load a compiler plugin with `classpath`. + * + * Subclasses may override to customise the behaviour. + * + * @param classpath + * @return + */ + protected def findPluginClassLoader(classpath: Seq[Path]): ClassLoader = { + val policy = settings.YcachePluginClassLoader.value + val disableCache = policy == settings.CachePolicy.None.name + def newLoader = () => { + val compilerLoader = classOf[Plugin].getClassLoader + val urls = classpath map (_.toURL) + ScalaClassLoader fromURLs (urls, compilerLoader) + } + + // Create a class loader with the specified locations plus + // the loader that loaded the Scala compiler. + // + // If the class loader has already been created before and the + // file stamps are the same, the previous loader is returned to + // mitigate the cost of dynamic classloading as it has been + // measured in https://github.com/scala/scala-dev/issues/458. 
+ + val cache = pluginClassLoadersCache + val checkStamps = policy == settings.CachePolicy.LastModified.name + cache.checkCacheability(classpath.map(_.toURL), checkStamps, disableCache) match { + case Left(msg) => + val loader = newLoader() + closeableRegistry.registerClosable(loader) + loader + case Right(paths) => + cache.getOrCreate(classpath.map(_.jfile.toPath()), newLoader, closeableRegistry, checkStamps) + } + } + protected lazy val roughPluginsList: List[Plugin] = loadRoughPluginsList() /** Load all available plugins. Skips plugins that @@ -123,4 +167,38 @@ trait Plugins { global: Global => (for (plug <- roughPluginsList ; help <- plug.optionsHelp) yield { "\nOptions for plugin '%s':\n%s\n".format(plug.name, help) }).mkString + + /** Obtains a `ClassLoader` instance used for macro expansion. + * + * By default a new `ScalaClassLoader` is created using the classpath + * from global and the classloader of self as parent. + * + * Mirrors with runtime definitions (e.g. Repl) need to adjust this method. + */ + protected[scala] def findMacroClassLoader(): ClassLoader = { + val classpath: Seq[URL] = if (settings.YmacroClasspath.isSetByUser) { + for { + file <- scala.tools.nsc.util.ClassPath.expandPath(settings.YmacroClasspath.value, true) + af <- Option(nsc.io.AbstractFile getDirectory file) + } yield af.file.toURI.toURL + } else global.classPath.asURLs + def newLoader: () => ScalaClassLoader.URLClassLoader = () => { + analyzer.macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) + ScalaClassLoader.fromURLs(classpath, getClass.getClassLoader) + } + + val policy = settings.YcacheMacroClassLoader.value + val cache = Macros.macroClassLoadersCache + val disableCache = policy == settings.CachePolicy.None.name + val checkStamps = policy == settings.CachePolicy.LastModified.name + cache.checkCacheability(classpath, checkStamps, disableCache) match { + case Left(msg) => + analyzer.macroLogVerbose(s"macro classloader: $msg.") + val loader = newLoader() + closeableRegistry.registerClosable(loader) + loader + case Right(paths) => + cache.getOrCreate(paths, newLoader, closeableRegistry, checkStamps) + } + } } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 5f46d0606710..f14a5cd68d8e 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -246,6 +246,7 @@ trait ScalaSettings extends AbsScalaSettings val exposeEmptyPackage = BooleanSetting ("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly() val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "method") + val YmacroClasspath = PathSetting ("-Ymacro-classpath", "The classpath used to reflectively load macro implementations, default is the compilation classpath.", "") val YaddBackendThreads = IntSetting ("-Ybackend-parallelism", "maximum worker threads for backend", 1, Some((1,16)), (x: String) => None ) val YmaxQueue = IntSetting ("-Ybackend-worker-queue", "backend threads worker queue size", 0, Some((0,1000)), (x: String) => None ) @@ -257,9 +258,10 @@ trait ScalaSettings extends AbsScalaSettings def setting(style: String, styleLong: String) = ChoiceSetting(s"-Ycache-$style-class-loader", "policy", s"Policy for caching class loaders for $styleLong that are dynamically loaded.", values.map(_.name), None.name, values.map(_.help)) object 
None extends CachePolicy("none", "Don't cache class loader") object LastModified extends CachePolicy("last-modified", "Cache class loader, using file last-modified time to invalidate") + object Always extends CachePolicy("always", "Cache class loader with no invalidation") // TODO Jorge to add new policy. Think about whether there is a benefit to the user on offering this as a separate policy or unifying with the previous one. // object ZipMetadata extends CachePolicy("zip-metadata", "Cache classloade, using file last-modified time, then ZIP file metadata to invalidate") - def values: List[CachePolicy] = List(None, LastModified) + def values: List[CachePolicy] = List(None, LastModified, Always) } object optChoices extends MultiChoiceEnumeration { diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 10382720089b..5d0e51cd2ea9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -64,49 +64,6 @@ trait Macros extends MacroRuntimes with Traces with Helpers { def globalSettings = global.settings - private final val macroClassLoadersCache = - new scala.tools.nsc.classpath.FileBasedCache[ScalaClassLoader]() - - /** Obtains a `ClassLoader` instance used for macro expansion. - * - * By default a new `ScalaClassLoader` is created using the classpath - * from global and the classloader of self as parent. - * - * Mirrors with runtime definitions (e.g. Repl) need to adjust this method. - */ - protected def findMacroClassLoader(): ClassLoader = { - val classpath = global.classPath.asURLs - def newLoader = () => { - macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) - ScalaClassLoader.fromURLs(classpath, self.getClass.getClassLoader) - } - - val disableCache = settings.YcacheMacroClassLoader.value == settings.CachePolicy.None.name - if (disableCache) newLoader() - else { - import scala.tools.nsc.io.Jar - import scala.reflect.io.{AbstractFile, Path} - - val urlsAndFiles = classpath.map(u => u -> AbstractFile.getURL(u)) - val hasNullURL = urlsAndFiles.filter(_._2 eq null) - if (hasNullURL.nonEmpty) { - // TODO if the only null is jrt:// we can still cache - // TODO filter out classpath elements pointing to non-existing files before we get here, that's another source of null - macroLogVerbose(s"macro classloader: caching is disabled because `AbstractFile.getURL` returned `null` for ${hasNullURL.map(_._1).mkString(", ")}.") - newLoader() - } else { - val locations = urlsAndFiles.map(t => Path(t._2.file)) - val nonJarZips = locations.filterNot(Jar.isJarOrZip(_)) - if (nonJarZips.nonEmpty) { - macroLogVerbose(s"macro classloader: caching is disabled because the following paths are not supported: ${nonJarZips.mkString(",")}.") - newLoader() - } else { - macroClassLoadersCache.getOrCreate(locations.map(_.jfile.toPath()), newLoader) - } - } - } - } - /** `MacroImplBinding` and its companion module are responsible for * serialization/deserialization of macro def -> impl bindings. 
* @@ -974,6 +931,11 @@ trait Macros extends MacroRuntimes with Traces with Helpers { }.transform(expandee) } +object Macros { + final val macroClassLoadersCache = + new scala.tools.nsc.classpath.FileBasedCache[ScalaClassLoader.URLClassLoader]() +} + trait MacrosStats { self: TypesStats with Statistics => val macroExpandCount = newCounter ("#macro expansions", "typer") diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala index e1cf834c6fb6..2efd699e9f44 100644 --- a/src/compiler/scala/tools/reflect/ReflectGlobal.scala +++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala @@ -25,18 +25,14 @@ import scala.tools.nsc.typechecker.Analyzer class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val rootClassLoader: ClassLoader) extends Global(currentSettings, reporter) with scala.tools.reflect.ReflectSetup with scala.reflect.runtime.SymbolTable { - override lazy val analyzer = new { - val global: ReflectGlobal.this.type = ReflectGlobal.this - } with Analyzer { - /** Obtains the classLoader used for runtime macro expansion. - * - * Macro expansion can use everything available in [[global.classPath]] or [[rootClassLoader]]. - * The [[rootClassLoader]] is used to obtain runtime defined macros. - */ - override protected def findMacroClassLoader(): ClassLoader = { - val classpath = global.classPath.asURLs - ScalaClassLoader.fromURLs(classpath, rootClassLoader) - } + /** Obtains the classLoader used for runtime macro expansion. + * + * Macro expansion can use everything available in `global.classPath` or `rootClassLoader`. + * The `rootClassLoader` is used to obtain runtime defined macros. + */ + override protected[scala] def findMacroClassLoader(): ClassLoader = { + val classpath = classPath.asURLs + perRunCaches.recordClassloader(ScalaClassLoader.fromURLs(classpath, rootClassLoader)) } override def transformedType(sym: Symbol) = diff --git a/src/compiler/scala/tools/reflect/ReflectMain.scala b/src/compiler/scala/tools/reflect/ReflectMain.scala index 3abd5f390761..a290c6bfafc8 100644 --- a/src/compiler/scala/tools/reflect/ReflectMain.scala +++ b/src/compiler/scala/tools/reflect/ReflectMain.scala @@ -14,15 +14,13 @@ package scala.tools package reflect import scala.reflect.internal.util.ScalaClassLoader -import scala.tools.nsc.Driver -import scala.tools.nsc.Global -import scala.tools.nsc.Settings +import scala.tools.nsc.{Driver, Global, CloseableRegistry, Settings} import scala.tools.util.PathResolver object ReflectMain extends Driver { private def classloaderFromSettings(settings: Settings) = { - val classPathURLs = new PathResolver(settings).resultAsURLs + val classPathURLs = new PathResolver(settings, new CloseableRegistry).resultAsURLs ScalaClassLoader.fromURLs(classPathURLs, getClass.getClassLoader) } diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index 1ad471e40f8b..cf454d5854f8 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -15,8 +15,9 @@ package tools package util import java.net.URL + import scala.tools.reflect.WrappedProperties.AccessControl -import scala.tools.nsc.Settings +import scala.tools.nsc.{CloseableRegistry, Settings} import scala.tools.nsc.util.ClassPath import scala.reflect.io.{Directory, File, Path} import PartialFunction.condOpt @@ -189,19 +190,24 @@ object PathResolver { } else { val settings = new Settings() val rest = 
settings.processArguments(args.toList, processAll = false)._2 - val pr = new PathResolver(settings) - println("COMMAND: 'scala %s'".format(args.mkString(" "))) - println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" "))) - - pr.result match { - case cp: AggregateClassPath => - println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}") + val registry = new CloseableRegistry + try { + val pr = new PathResolver(settings, registry) + println("COMMAND: 'scala %s'".format(args.mkString(" "))) + println("RESIDUAL: 'scala %s'\n".format(rest.mkString(" "))) + + pr.result match { + case cp: AggregateClassPath => + println(s"ClassPath has ${cp.aggregates.size} entries and results in:\n${cp.asClassPathStrings}") + } + } finally { + registry.close() } } } -final class PathResolver(settings: Settings) { - private val classPathFactory = new ClassPathFactory(settings) +final class PathResolver(settings: Settings, closeableRegistry: CloseableRegistry) { + private val classPathFactory = new ClassPathFactory(settings, closeableRegistry) import PathResolver.{ AsLines, Defaults, ppcp } @@ -250,7 +256,7 @@ final class PathResolver(settings: Settings) { // Assemble the elements! def basis = List[Traversable[ClassPath]]( - JrtClassPath.apply(settings.releaseValue), // 0. The Java 9 classpath (backed by the jrt:/ virtual system, if available) + jrt, // 0. The Java 9+ classpath (backed by the ct.sym or jrt:/ virtual system, if available) classesInPath(javaBootClassPath), // 1. The Java bootstrap class path. contentsOfDirsInPath(javaExtDirs), // 2. The Java extension class path. classesInExpandedPath(javaUserClassPath), // 3. The Java application class path. @@ -261,6 +267,8 @@ final class PathResolver(settings: Settings) { sourcesInPath(sourcePath) // 7. The Scala source path. ) + private def jrt: Option[ClassPath] = JrtClassPath.apply(settings.releaseValue, closeableRegistry) + lazy val containers = basis.flatten.distinct override def toString = s""" diff --git a/src/partest-extras/scala/tools/partest/BytecodeTest.scala b/src/partest-extras/scala/tools/partest/BytecodeTest.scala index b016778bf428..309a6d49c482 100644 --- a/src/partest-extras/scala/tools/partest/BytecodeTest.scala +++ b/src/partest-extras/scala/tools/partest/BytecodeTest.scala @@ -18,6 +18,7 @@ import scala.tools.asm.tree._ import java.io.{InputStream, File => JFile} import AsmNode._ +import scala.tools.nsc.CloseableRegistry /** * Provides utilities for inspecting bytecode using ASM library. 
@@ -144,7 +145,7 @@ abstract class BytecodeTest { import scala.tools.nsc.Settings // logic inspired by scala.tools.util.PathResolver implementation // `Settings` is used to check YdisableFlatCpCaching in ZipArchiveFlatClassPath - val factory = new ClassPathFactory(new Settings()) + val factory = new ClassPathFactory(new Settings(), new CloseableRegistry) val containers = factory.classesInExpandedPath(sys.props("partest.output") + java.io.File.pathSeparator + Defaults.javaUserClassPath) new AggregateClassPath(containers) } diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index aba70c2449f6..fe18347d15a7 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -14,6 +14,8 @@ package scala package reflect package internal +import java.net.URLClassLoader + import scala.annotation.elidable import scala.collection.mutable import util._ @@ -429,6 +431,22 @@ abstract class SymbolTable extends macros.Universe cache } + /** Closes the provided classloader at the conclusion of this Run */ + final def recordClassloader(loader: ClassLoader): ClassLoader = { + def attemptClose(loader: ClassLoader): Unit = { + loader match { + case u: URLClassLoader => debuglog("Closing classloader " + u); u.close() + case _ => + } + } + caches ::= new WeakReference((new Clearable { + def clear(): Unit = { + attemptClose(loader) + } + })) + loader + } + /** * Removes a cache from the per-run caches. This is useful for testing: it allows running the * compiler and then inspect the state of a cache. diff --git a/src/reflect/scala/reflect/io/ZipArchive.scala b/src/reflect/scala/reflect/io/ZipArchive.scala index ee109799f3d5..05c591b9d535 100644 --- a/src/reflect/scala/reflect/io/ZipArchive.scala +++ b/src/reflect/scala/reflect/io/ZipArchive.scala @@ -149,6 +149,7 @@ abstract class ZipArchive(override val file: JFile, release: Option[String]) ext if (entry.isDirectory) ensureDir(dirs, entry.getName, entry) else ensureDir(dirs, dirName(entry.getName), null) } + def close(): Unit } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArchive(file, release) { @@ -232,6 +233,7 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch } } finally { if (ZipArchive.closeZipFile) zipFile.close() + else closeables ::= zipFile } root } @@ -259,6 +261,10 @@ final class FileZipArchive(file: JFile, release: Option[String]) extends ZipArch case x: FileZipArchive => file.getAbsoluteFile == x.file.getAbsoluteFile case _ => false } + private[this] var closeables: List[java.io.Closeable] = Nil + override def close(): Unit = { + closeables.foreach(_.close) + } } /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ final class URLZipArchive(val url: URL) extends ZipArchive(null) { @@ -266,6 +272,7 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) { val root = new DirEntry("/") val dirs = mutable.HashMap[String, DirEntry]("" -> root) val in = new ZipInputStream(new ByteArrayInputStream(Streamable.bytes(input))) + closeables ::= in @tailrec def loop() { val zipEntry = in.getNextEntry() @@ -327,6 +334,10 @@ final class URLZipArchive(val url: URL) extends ZipArchive(null) { case x: URLZipArchive => url == x.url case _ => false } + private[this] var closeables: 
List[java.io.Closeable] = Nil + def close(): Unit = { + closeables.foreach(_.close()) + } } final class ManifestResources(val url: URL) extends ZipArchive(null) { @@ -334,6 +345,8 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) { val root = new DirEntry("/") val dirs = mutable.HashMap[String, DirEntry]("" -> root) val manifest = new Manifest(input) + closeables ::= input + val iter = manifest.getEntries().keySet().iterator().asScala.filter(_.endsWith(".class")).map(new ZipEntry(_)) for (zipEntry <- iter) { @@ -385,4 +398,8 @@ final class ManifestResources(val url: URL) extends ZipArchive(null) { } } } + private[this] var closeables: List[java.io.Closeable] = Nil + override def close(): Unit = { + closeables.foreach(_.close()) + } } diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index 764bb4d48543..73cbc828eea4 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -28,6 +28,7 @@ import scala.tools.nsc.util._ import ScalaClassLoader.URLClassLoader import scala.tools.nsc.util.Exceptional.unwrap import java.net.URL +import java.io.Closeable import scala.tools.util.PathResolver import scala.util.{Try => Trying} @@ -63,7 +64,7 @@ import scala.util.{Try => Trying} * @author Moez A. Abdel-Gawad * @author Lex Spoon */ -class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports with PresentationCompilation { +class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends Imports with PresentationCompilation with Closeable { imain => def this(initialSettings: Settings) = this(initialSettings, IMain.defaultOut) @@ -100,7 +101,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def compilerClasspath: Seq[java.net.URL] = ( if (isInitializeComplete) global.classPath.asURLs - else new PathResolver(settings).resultAsURLs // the compiler's classpath + else new PathResolver(settings, global.closeableRegistry).resultAsURLs // the compiler's classpath ) def settings = initialSettings // Run the code body with the given boolean settings flipped to true. @@ -683,6 +684,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends */ def close() { reporter.flush() + if (isInitializeComplete) { + global.close() + } } /** Here is where we: diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala index 106e649ac69f..7a601ab65750 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala @@ -16,7 +16,7 @@ import scala.reflect.internal.util.RangePosition import scala.reflect.io.AbstractFile import scala.tools.nsc.backend.JavaPlatform import scala.tools.nsc.util.ClassPath -import scala.tools.nsc.{interactive, Settings} +import scala.tools.nsc.{interactive, CloseableRegistry, Settings} import scala.tools.nsc.reporters.StoreReporter import scala.tools.nsc.classpath._ @@ -63,10 +63,6 @@ trait PresentationCompilation { * You may downcast the `reporter` to `StoreReporter` to access type errors. 
*/ def newPresentationCompiler(): interactive.Global = { - def mergedFlatClasspath = { - val replOutClasspath = ClassPathFactory.newClassPath(replOutput.dir, settings) - AggregateClassPath(replOutClasspath :: global.platform.classPath :: Nil) - } def copySettings: Settings = { val s = new Settings(_ => () /* ignores "bad option -nc" errors, etc */) s.processArguments(global.settings.recreateArgs, processAll = false) @@ -75,6 +71,11 @@ trait PresentationCompilation { } val storeReporter: StoreReporter = new StoreReporter val interactiveGlobal = new interactive.Global(copySettings, storeReporter) { self => + def mergedFlatClasspath = { + val replOutClasspath = ClassPathFactory.newClassPath(replOutput.dir, settings, closeableRegistry) + AggregateClassPath(replOutClasspath :: global.platform.classPath :: Nil) + } + override lazy val platform: ThisPlatform = { new JavaPlatform { lazy val global: self.type = self diff --git a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala index 1273d6ac92fb..72b5a7424ceb 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala @@ -30,16 +30,11 @@ trait ReplGlobal extends Global { super.abort(msg) } - override lazy val analyzer = new { - val global: ReplGlobal.this.type = ReplGlobal.this - } with Analyzer { - - override protected def findMacroClassLoader(): ClassLoader = { - val loader = super.findMacroClassLoader - macroLogVerbose("macro classloader: initializing from a REPL classloader: %s".format(global.classPath.asURLs)) - val virtualDirectory = globalSettings.outputDirs.getSingleOutput.get - new util.AbstractFileClassLoader(virtualDirectory, loader) {} - } + override protected[scala] def findMacroClassLoader(): ClassLoader = { + val loader = super.findMacroClassLoader + analyzer.macroLogVerbose("macro classloader: initializing from a REPL classloader: %s".format(classPath.asURLs)) + val virtualDirectory = analyzer.globalSettings.outputDirs.getSingleOutput.get + new util.AbstractFileClassLoader(virtualDirectory, loader) {} } override def optimizerClassPath(base: ClassPath): ClassPath = { @@ -47,7 +42,7 @@ trait ReplGlobal extends Global { case None => base case Some(out) => // Make bytecode of previous lines available to the inliner - val replOutClasspath = ClassPathFactory.newClassPath(settings.outputDirs.getSingleOutput.get, settings) + val replOutClasspath = ClassPathFactory.newClassPath(settings.outputDirs.getSingleOutput.get, settings, closeableRegistry) AggregateClassPath.createAggregate(platform.classPath, replOutClasspath) } } diff --git a/src/scalap/scala/tools/scalap/Main.scala b/src/scalap/scala/tools/scalap/Main.scala index 42b0fdfb236a..5e3d633d429f 100644 --- a/src/scalap/scala/tools/scalap/Main.scala +++ b/src/scalap/scala/tools/scalap/Main.scala @@ -14,8 +14,9 @@ package scala package tools.scalap import java.io.{ByteArrayOutputStream, OutputStreamWriter, PrintStream} + import scala.reflect.NameTransformer -import scala.tools.nsc.Settings +import scala.tools.nsc.{CloseableRegistry, Settings} import scala.tools.nsc.classpath.{AggregateClassPath, ClassPathFactory} import scala.tools.nsc.util.ClassPath import scala.tools.util.PathResolver @@ -185,14 +186,18 @@ object Main extends Main { settings.YdisableFlatCpCaching.value = arguments contains opts.disableFlatClassPathCaching settings.Ylogcp.value = arguments contains opts.logClassPath - val path = createClassPath(cpArg, settings) - - // print the classpath if 
output is verbose - if (verbose) - Console.println(Console.BOLD + "CLASSPATH" + Console.RESET + " = " + path.asClassPathString) - - // process all given classes - arguments.getOthers foreach process(arguments, path) + val registry = new CloseableRegistry + try { + val path = createClassPath(cpArg, settings, registry) + // print the classpath if output is verbose + if (verbose) + Console.println(Console.BOLD + "CLASSPATH" + Console.RESET + " = " + path.asClassPathString) + + // process all given classes + arguments.getOthers foreach process(arguments, path) + } finally { + registry.close() + } } private def parseArguments(args: Array[String]) = @@ -208,11 +213,11 @@ object Main extends Main { .withOption(opts.logClassPath) .parse(args) - private def createClassPath(cpArg: Option[String], settings: Settings) = cpArg match { + private def createClassPath(cpArg: Option[String], settings: Settings, closeableRegistry: CloseableRegistry) = cpArg match { case Some(cp) => - AggregateClassPath(new ClassPathFactory(settings).classesInExpandedPath(cp)) + AggregateClassPath(new ClassPathFactory(settings, closeableRegistry).classesInExpandedPath(cp)) case _ => settings.classpath.value = "." // include '.' in the default classpath scala/bug#6669 - new PathResolver(settings).result + new PathResolver(settings, closeableRegistry).result } } diff --git a/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala b/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala new file mode 100644 index 000000000000..500379706090 --- /dev/null +++ b/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala @@ -0,0 +1,72 @@ +package scala.tools.nsc + +import org.junit.{Assert, Test} +import org.junit.runner.RunWith +import org.junit.runners.JUnit4 + +import scala.reflect.internal.util.{AbstractFileClassLoader, NoSourceFile} +import scala.reflect.io.{Path, VirtualDirectory} +import scala.tools.nsc.plugins.{Plugin, PluginComponent} + +@RunWith(classOf[JUnit4]) +class GlobalCustomizeClassloaderTest { + // Demonstrate extension points to customise creation of the classloaders used to load compiler + // plugins and macro implementations. + // + // A use case could be for a build tool to take control of caching of these classloaders in a way + // that properly closes them before one of the elements needs to be overwritten. 
+ @Test def test(): Unit = { + val g = new Global(new Settings) { + override protected[scala] def findMacroClassLoader(): ClassLoader = getClass.getClassLoader + override protected def findPluginClassLoader(classpath: Seq[Path]): ClassLoader = { + val d = new VirtualDirectory("", None) + val xml = d.fileNamed("scalac-plugin.xml") + val out = xml.bufferedOutput + out.write( + s"""<plugin> + |<name>sample-plugin</name> + |<classname>${classOf[SamplePlugin].getName}</classname> + |</plugin> + |""".stripMargin.getBytes()) + out.close() + new AbstractFileClassLoader(d, getClass.getClassLoader) + } + } + g.settings.usejavacp.value = true + g.settings.plugin.value = List("sample") + new g.Run + assert(g.settings.log.value == List("typer")) + + val unit = new g.CompilationUnit(NoSourceFile) + val context = g.analyzer.rootContext(unit) + val typer = g.analyzer.newTyper(context) + import g._ + SampleMacro.data = "in this classloader" + val typed = typer.typed(q"scala.tools.nsc.SampleMacro.m") + assert(!reporter.hasErrors) + typed match { + case Typed(Literal(Constant(s: String)), _) => Assert.assertEquals(SampleMacro.data, s) + case _ => Assert.fail() + } + g.close() + } +} + +object SampleMacro { + var data: String = _ + import language.experimental.macros + import scala.reflect.macros.blackbox.Context + def m: String = macro impl + def impl(c: Context): c.Tree = c.universe.Literal(c.universe.Constant(data)) +} + +class SamplePlugin(val global: Global) extends Plugin { + override val name: String = "sample" + override val description: String = "sample" + override val components: List[PluginComponent] = Nil + override def init(options: List[String], error: String => Unit): Boolean = { + val result = super.init(options, error) + global.settings.log.value = List("typer") + result + } +} diff --git a/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala b/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala index b46677d6d477..fdc2b9caae69 100644 --- a/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala +++ b/test/junit/scala/tools/nsc/classpath/JrtClassPathTest.scala @@ -8,7 +8,7 @@ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import scala.tools.nsc.Settings +import scala.tools.nsc.{CloseableRegistry, Settings} import scala.tools.nsc.backend.jvm.AsmUtils import scala.tools.nsc.util.ClassPath import scala.tools.util.PathResolver @@ -19,14 +19,15 @@ class JrtClassPathTest { @Test def lookupJavaClasses(): Unit = { val specVersion = scala.util.Properties.javaSpecVersion // Run the test using the JDK8 or 9 provider for rt.jar depending on the platform the test is running on.
+ val closeableRegistry = new CloseableRegistry val cp: ClassPath = if (specVersion == "" || specVersion == "1.8") { val settings = new Settings() - val resolver = new PathResolver(settings) - val elements = new ClassPathFactory(settings).classesInPath(resolver.Calculated.javaBootClassPath) + val resolver = new PathResolver(settings, closeableRegistry) + val elements = new ClassPathFactory(settings, closeableRegistry).classesInPath(resolver.Calculated.javaBootClassPath) AggregateClassPath(elements) } - else JrtClassPath(None).get + else JrtClassPath(None, closeableRegistry).get assertEquals(Nil, cp.classes("")) assertTrue(cp.packages("java").toString, cp.packages("java").exists(_.name == "java.lang")) @@ -37,5 +38,7 @@ class JrtClassPathTest { assertTrue(cp.list("java.lang").classesAndSources.exists(_.name == "Object")) assertTrue(cp.findClass("java.lang.Object").isDefined) assertTrue(cp.findClassFile("java.lang.Object").isDefined) + + closeableRegistry.close() } } diff --git a/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala b/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala index d3d4289d8b94..e8025ec69ec5 100644 --- a/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala +++ b/test/junit/scala/tools/nsc/classpath/PathResolverBaseTest.scala @@ -4,13 +4,15 @@ package scala.tools.nsc.classpath import java.io.File + import org.junit.Assert._ import org.junit._ import org.junit.rules.TemporaryFolder import org.junit.runner.RunWith import org.junit.runners.JUnit4 + import scala.tools.nsc.util.ClassPath -import scala.tools.nsc.Settings +import scala.tools.nsc.{CloseableRegistry, Settings} import scala.tools.util.PathResolver @RunWith(classOf[JUnit4]) @@ -57,7 +59,7 @@ class PathResolverBaseTest { def deleteTempDir: Unit = tempDir.delete() private def createFlatClassPath(settings: Settings) = - new PathResolver(settings).result + new PathResolver(settings, new CloseableRegistry).result @Test def testEntriesFromListOperationAgainstSeparateMethods: Unit = { diff --git a/test/junit/scala/tools/nsc/classpath/ZipAndJarFileLookupFactoryTest.scala b/test/junit/scala/tools/nsc/classpath/ZipAndJarFileLookupFactoryTest.scala index f49f04d2c56d..b58effbcfa36 100644 --- a/test/junit/scala/tools/nsc/classpath/ZipAndJarFileLookupFactoryTest.scala +++ b/test/junit/scala/tools/nsc/classpath/ZipAndJarFileLookupFactoryTest.scala @@ -14,7 +14,8 @@ class ZipAndJarFileLookupFactoryTest { Files.delete(f) val g = new scala.tools.nsc.Global(new scala.tools.nsc.Settings()) assert(!g.settings.YdisableFlatCpCaching.value) // we're testing with our JAR metadata caching enabled. - def createCp = ZipAndJarClassPathFactory.create(AbstractFile.getFile(f.toFile), g.settings) + val closeableRegistry = new CloseableRegistry + def createCp = ZipAndJarClassPathFactory.create(AbstractFile.getFile(f.toFile), g.settings, closeableRegistry) try { createZip(f, Array(), "p1/C.class") createZip(f, Array(), "p2/X.class") @@ -41,7 +42,10 @@ class ZipAndJarFileLookupFactoryTest { // And that instance should see D, not C, in package p1. 
assert(cp3.findClass("p1.C").isEmpty) assert(cp3.findClass("p1.D").isDefined) - } finally Files.delete(f) + } finally { + Files.delete(f) + closeableRegistry.close() + } } def createZip(zipLocation: Path, content: Array[Byte], internalPath: String): Unit = { diff --git a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala index cbd5634f292f..e2b11cfecd29 100644 --- a/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala +++ b/test/junit/scala/tools/nsc/symtab/SymbolTableForUnitTesting.scala @@ -36,7 +36,7 @@ class SymbolTableForUnitTesting extends SymbolTable { def platformPhases: List[SubComponent] = Nil - private[nsc] lazy val classPath: ClassPath = new PathResolver(settings).result + private[nsc] lazy val classPath: ClassPath = new PathResolver(settings, new CloseableRegistry).result def isMaybeBoxed(sym: Symbol): Boolean = ??? def needCompile(bin: AbstractFile, src: AbstractFile): Boolean = ??? From 8efcb4236b5cd7ddfce9b92e21c9a5e5a84ca1fb Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 15 Jan 2019 10:54:54 +1000 Subject: [PATCH 1610/2793] unpickler cycle avoidance --- src/reflect/scala/reflect/internal/pickling/UnPickler.scala | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala index 7b82aa3e9f24..c1fc858cef11 100644 --- a/src/reflect/scala/reflect/internal/pickling/UnPickler.scala +++ b/src/reflect/scala/reflect/internal/pickling/UnPickler.scala @@ -249,6 +249,9 @@ abstract class UnPickler { else NoSymbol } + if (owner == definitions.ScalaPackageClass && name == tpnme.AnyRef) + return definitions.AnyRefClass + // (1) Try name. localDummy orElse fromName(name) orElse { // (2) Try with expanded name. 
Can happen if references to private From a4d9599f1df7ac48ba2aad5e942a71b197f45de8 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 15 Jan 2019 10:42:05 +1000 Subject: [PATCH 1611/2793] Utility to abstract over JAR and directory I/O --- src/reflect/scala/reflect/io/RootPath.scala | 39 +++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 src/reflect/scala/reflect/io/RootPath.scala diff --git a/src/reflect/scala/reflect/io/RootPath.scala b/src/reflect/scala/reflect/io/RootPath.scala new file mode 100644 index 000000000000..51273a9c3f37 --- /dev/null +++ b/src/reflect/scala/reflect/io/RootPath.scala @@ -0,0 +1,39 @@ +package scala.reflect.io + +import java.io.Closeable +import java.nio +import java.nio.file.{FileSystems, Files} + + +abstract class RootPath extends Closeable { + def root: nio.file.Path +} + +object RootPath { + def apply(path: nio.file.Path, writable: Boolean): RootPath = { + if (path.getFileName.toString.endsWith(".jar")) { + import java.net.URI + val zipFile = URI.create("jar:file:" + path.toUri.getPath) + val env = new java.util.HashMap[String, String]() + if (!Files.exists(path.getParent)) + Files.createDirectories(path.getParent) + if (writable) { + env.put("create", "true") + if (Files.exists(path)) + Files.delete(path) + } + val zipfs = FileSystems.newFileSystem(zipFile, env) + new RootPath { + def root = zipfs.getRootDirectories.iterator().next() + def close(): Unit = { + zipfs.close() + } + } + } else { + new RootPath { + override def root: nio.file.Path = path + override def close(): Unit = () + } + } + } +} \ No newline at end of file From 69d3c000272ff2f238e4efdffb02be697cb1caac Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 15 Jan 2019 10:42:18 +1000 Subject: [PATCH 1612/2793] Utility to extract Pickles from Scala compiled classes .sig files, containing the pickle bytes, are output in place of .class files emitted by Scalac. Java defined .class files are emitted after stripping them of code. 
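As a rough illustration only: once this patch is applied, the extractor introduced below can be driven directly through its `process(input, output)` entry point (a directory or a `.jar` on either side). The paths in this sketch are made up and not part of the patch.

```scala
import java.nio.file.Paths
import scala.tools.nsc.PickleExtractor

object ExtractPicklesDemo {
  def main(args: Array[String]): Unit = {
    // Walks a compiled output tree (directory or .jar) and writes a mirror tree in which
    // Scala-compiled .class files are replaced by .sig files holding only the pickle bytes,
    // while Java-defined .class files are kept but stripped of their code.
    PickleExtractor.process(Paths.get("target/classes"), Paths.get("target/classes-pickles.jar"))
  }
}
```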
--- .../scala/tools/nsc/PickleExtractor.scala | 121 ++++++++++++++++++ .../tools/nsc/symtab/classfile/Pickler.scala | 4 +- .../scala/reflect/internal/Definitions.scala | 2 +- 3 files changed, 125 insertions(+), 2 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/PickleExtractor.scala diff --git a/src/compiler/scala/tools/nsc/PickleExtractor.scala b/src/compiler/scala/tools/nsc/PickleExtractor.scala new file mode 100644 index 000000000000..53a54b12e12a --- /dev/null +++ b/src/compiler/scala/tools/nsc/PickleExtractor.scala @@ -0,0 +1,121 @@ +package scala.tools.nsc + +import java.io.Closeable +import java.nio.file.attribute.BasicFileAttributes +import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor, _} + +import scala.collection.JavaConverters.{asScalaBufferConverter, bufferAsJavaListConverter, collectionAsScalaIterableConverter} +import scala.reflect.internal.pickling.ByteCodecs +import scala.reflect.io.RootPath +import scala.tools.asm.tree.ClassNode +import scala.tools.asm.{ClassReader, ClassWriter, Opcodes} + +object PickleExtractor { + + def main(args: Array[String]): Unit = { + args.toList match { + case input :: output :: Nil => + process(Paths.get(input), Paths.get(output)) + case _ => + } + } + def process(input: Path, output: Path): Unit = { + val inputPath = RootPath(input, writable = false) + val outputPath = RootPath(output, writable = true) + try { + val root = inputPath.root + Files.createDirectories(outputPath.root) + val visitor = new SimpleFileVisitor[Path] { + override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = { + if (dir != root) { + val outputDir = outputPath.root.resolve(root.relativize(dir).toString) + Files.createDirectories(outputDir) + } + FileVisitResult.CONTINUE + } + override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = { + if (file.getFileName.toString.endsWith(".class")) { + stripClassFile(Files.readAllBytes(file)) match { + case Class(out) => + Files.write(outputPath.root.resolve(root.relativize(file).toString), out) + case Pickle(out) => + Files.write(outputPath.root.resolve(root.relativize(file).toString.replaceAll(".class$", ".sig")), out) + case Skip => + } + } + FileVisitResult.CONTINUE + } + } + Files.walkFileTree(root, visitor) + } finally { + inputPath.close() + outputPath.close() + } + } + + def stripClassFile(classfile: Array[Byte]): OutputFile = { + val input = new ClassNode() + new ClassReader(classfile).accept(input, ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES | ClassReader.SKIP_CODE) + var output = new ClassNode() + output.name = input.name + output.access = input.access + output.version = input.version + + var foundScalaSig = false + + def isScalaAnnotation(desc: String) = (desc == "Lscala/reflect/ScalaSignature;" || desc == "Lscala/reflect/ScalaLongSignature;") && { + foundScalaSig = true + + true + } + + var pickleData: Array[Byte] = null + if (input.visibleAnnotations != null) { + input.visibleAnnotations.asScala.foreach { node => + if (node.desc == "Lscala/reflect/ScalaSignature;") { + val Array("bytes", data: String) = node.values.toArray() + val bytes = data.getBytes(java.nio.charset.StandardCharsets.UTF_8) + val len = ByteCodecs.decode(bytes) + pickleData = bytes.take(len) + } else if (node.desc == "Lscala/reflect/ScalaLongSignature;") { + val Array("bytes", data: java.util.Collection[String @unchecked]) = node.values.toArray() + val encoded = data.asScala.toArray flatMap (_.getBytes(java.nio.charset.StandardCharsets.UTF_8)) + val len = 
ByteCodecs.decode(encoded) + pickleData = encoded.take(len) + } + } + output.visibleAnnotations = input.visibleAnnotations.asScala.filter(node => isScalaAnnotation(node.desc) && { + true + }).asJava + } + var foundScalaAttr = false + if (input.attrs != null) { + output.attrs = input.attrs.asScala.filter(attr => (attr.`type` == "Scala" || attr.`type` == "ScalaSig") && { + foundScalaAttr = true; + true + }).asJava + } + val writer = new ClassWriter(Opcodes.ASM7_EXPERIMENTAL) + val isScalaRaw = foundScalaAttr && !foundScalaSig + if (isScalaRaw) Skip + else { + if (pickleData == null) { + output = input + output.accept(writer) + Class(writer.toByteArray) + } else { + output.accept(writer) + Pickle(pickleData) + } + } + } + + sealed abstract class OutputFile + + case object Skip extends OutputFile + + case class Class(content: Array[Byte]) extends OutputFile + + case class Pickle(content: Array[Byte]) extends OutputFile + +} diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 947b95f57baa..fa0bb189892a 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -90,6 +90,8 @@ abstract class Pickler extends SubComponent { throw e } } + + override protected def shouldSkipThisPhaseForJava: Boolean = false //from some -Y ?? } private class Pickle(root: Symbol) extends PickleBuffer(new Array[Byte](4096), -1, 0) { @@ -213,7 +215,7 @@ abstract class Pickler extends SubComponent { // initially, but seems not to work, as the bug shows). // Adding the LOCAL_CHILD is necessary to retain exhaustivity warnings under separate // compilation. See test neg/aladdin1055. - val parents = (if (sym.isTrait) List(definitions.ObjectTpe) else Nil) ::: List(sym.tpe) + val parents = if (sym.isTrait) List(definitions.ObjectTpe, sym.tpe) else List(sym.tpe) globals + sym.newClassWithInfo(tpnme.LOCAL_CHILD, parents, EmptyScope, pos = sym.pos) } diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 31a54e35f4d1..b27bca907b08 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -1236,7 +1236,7 @@ trait Definitions extends api.StandardDefinitions { lazy val AnnotationDefaultAttr: ClassSymbol = { val sym = RuntimePackageClass.newClassSymbol(tpnme.AnnotationDefaultATTR, NoPosition, 0L) - sym setInfo ClassInfoType(List(AnnotationClass.tpe), newScope, sym) + sym setInfo ClassInfoType(List(StaticAnnotationClass.tpe), newScope, sym) markAllCompleted(sym) RuntimePackageClass.info.decls.toList.filter(_.name == sym.name) match { case existing :: _ => From b066d7e6402820879a970d6a88635018b8512dfe Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 15 Jan 2019 10:50:53 +1000 Subject: [PATCH 1613/2793] Compute pickles for .java sources If a new hidden setting is enabled. Build tools that want to implement build pipelining can use this to feed to downstream classpaths. 
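A hedged sketch of how a build tool might combine the new hidden setting with the existing `-Ystop-after:pickler` stop point to produce signatures (including for `.java` sources) without generating bytecode; the output directory and source file names here are illustrative, not taken from the patch.

```scala
import scala.tools.nsc.{Global, Settings}
import scala.tools.nsc.reporters.ConsoleReporter

object PickleOnlyCompile {
  def main(args: Array[String]): Unit = {
    val settings = new Settings(msg => Console.err.println(msg))
    settings.usejavacp.value = true
    settings.outdir.value = "target/pickle-out"    // illustrative output directory
    settings.YpickleJava.value = true              // also pickle .java-defined symbols
    settings.stopAfter.value = List("pickler")     // stop before the back end runs
    val global = new Global(settings, new ConsoleReporter(settings))
    new global.Run().compile(List("src/A.scala", "src/B.java")) // illustrative sources
  }
}
```

Downstream modules could then put the resulting pickles on their classpath instead of waiting for full bytecode.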
--- src/compiler/scala/tools/nsc/CompilationUnits.scala | 4 ++-- src/compiler/scala/tools/nsc/Global.scala | 10 +++++++--- .../scala/tools/nsc/settings/ScalaSettings.scala | 1 + .../scala/tools/nsc/symtab/classfile/Pickler.scala | 2 +- 4 files changed, 11 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index 159021bdacaf..46386beb58e7 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -21,7 +21,7 @@ trait CompilationUnits { global: Global => /** An object representing a missing compilation unit. */ object NoCompilationUnit extends CompilationUnit(NoSourceFile) { - override lazy val isJava = false + override val isJava = false override def exists = false override def toString() = "NoCompilationUnit" } @@ -153,7 +153,7 @@ trait CompilationUnits { global: Global => final def comment(pos: Position, msg: String): Unit = {} /** Is this about a .java source file? */ - lazy val isJava = source.file.name.endsWith(".java") + val isJava = source.file.name.endsWith(".java") override def toString() = source.toString() } diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 8b578c8ba9ea..47bd41e37b09 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -402,12 +402,16 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def apply(unit: CompilationUnit): Unit + // run only the phases needed + protected def shouldSkipThisPhaseForJava: Boolean = { + this.id > (if (createJavadoc) currentRun.typerPhase.id + else currentRun.namerPhase.id) + } + /** Is current phase cancelled on this unit? 
*/ def cancelled(unit: CompilationUnit) = { - // run the typer only if in `createJavadoc` mode - val maxJavaPhase = if (createJavadoc) currentRun.typerPhase.id else currentRun.namerPhase.id if (Thread.interrupted()) reporter.cancelled = true - reporter.cancelled || unit.isJava && this.id > maxJavaPhase + reporter.cancelled || unit.isJava && shouldSkipThisPhaseForJava } private def beforeUnit(unit: CompilationUnit): Unit = { diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index f14a5cd68d8e..3428582806fb 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -252,6 +252,7 @@ trait ScalaSettings extends AbsScalaSettings val YmaxQueue = IntSetting ("-Ybackend-worker-queue", "backend threads worker queue size", 0, Some((0,1000)), (x: String) => None ) val YjarCompressionLevel = IntSetting("-Yjar-compression-level", "compression level to use when writing jar files", Deflater.DEFAULT_COMPRESSION, Some((Deflater.DEFAULT_COMPRESSION,Deflater.BEST_COMPRESSION)), (x: String) => None) + val YpickleJava = BooleanSetting("-Ypickle-java", "Pickler phase should compute pickles for .java defined symbols for use by build tools").internalOnly() sealed abstract class CachePolicy(val name: String, val help: String) object CachePolicy { diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index fa0bb189892a..1fd7690763e5 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -91,7 +91,7 @@ abstract class Pickler extends SubComponent { } } - override protected def shouldSkipThisPhaseForJava: Boolean = false //from some -Y ?? 
+ override protected def shouldSkipThisPhaseForJava: Boolean = !settings.YpickleJava.value } private class Pickle(root: Symbol) extends PickleBuffer(new Array[Byte](4096), -1, 0) { From 22eae50c8fef20d8c3e387729e2ee08794be0a26 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 15 Jan 2019 10:58:31 +1000 Subject: [PATCH 1614/2793] .sig file support --- .../scala/tools/nsc/classpath/FileUtils.scala | 2 +- .../nsc/symtab/classfile/AbstractFileReader.scala | 7 ++----- .../nsc/symtab/classfile/ClassfileParser.scala | 15 +++++++++++---- 3 files changed, 14 insertions(+), 10 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala index 059a83da796c..aa4d81736195 100644 --- a/src/compiler/scala/tools/nsc/classpath/FileUtils.scala +++ b/src/compiler/scala/tools/nsc/classpath/FileUtils.scala @@ -24,7 +24,7 @@ object FileUtils { implicit class AbstractFileOps(val file: AbstractFile) extends AnyVal { def isPackage: Boolean = file.isDirectory && mayBeValidPackage(file.name) - def isClass: Boolean = !file.isDirectory && file.hasExtension("class") + def isClass: Boolean = !file.isDirectory && (file.hasExtension("class") || file.hasExtension("sig")) def isScalaOrJavaSource: Boolean = !file.isDirectory && (file.hasExtension("scala") || file.hasExtension("java")) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala index a8d673663e8d..19be00dd686a 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala @@ -25,11 +25,8 @@ import scala.tools.nsc.io.AbstractFile * @author Philippe Altherr * @version 1.0, 23/03/2004 */ -class AbstractFileReader(val file: AbstractFile) { - - /** the buffer containing the file - */ - val buf: Array[Byte] = file.toByteArray +class AbstractFileReader(val file: AbstractFile, val buf: Array[Byte]) { + def this(file: AbstractFile) = this(file, file.toByteArray) /** the current input pointer */ diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 935a100effe8..c855f1c11bb6 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -152,14 +152,21 @@ abstract class ClassfileParser { def parse(file: AbstractFile, clazz: ClassSymbol, module: ModuleSymbol): Unit = { this.file = file pushBusy(clazz) { - this.in = new AbstractFileReader(file) this.clazz = clazz this.staticModule = module this.isScala = false - parseHeader() - this.pool = newConstantPool - parseClass() + this.in = new AbstractFileReader(file) + val magic = in.getInt(in.bp) + if (magic != JAVA_MAGIC && file.name.endsWith(".sig")) { + currentClass = TermName(clazz.javaClassName) + isScala = true + unpickler.unpickle(in.buf, 0, clazz, staticModule, file.name) + } else { + parseHeader() + this.pool = newConstantPool + parseClass() + } } } From 27a976c08468ccafcc90bc5c7f266e41677df87b Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 15 Jan 2019 10:55:35 +1000 Subject: [PATCH 1615/2793] Experimental support for outline typing Under this mode, the RHS of defs and vals are only typechecked if the result type of the definition is inferred and the definition's signature is forced. 
The movitivation is to create a fast path to extract the pickles for the API for use on the classpath of downstream compiles, which could include parallel compilation of chunks of the the current set of source files. --- .../scala/tools/nsc/settings/ScalaSettings.scala | 1 + .../scala/tools/nsc/typechecker/Analyzer.scala | 12 +++++++----- .../scala/tools/nsc/typechecker/Typers.scala | 4 ++-- 3 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 3428582806fb..804481ef709a 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -243,6 +243,7 @@ trait ScalaSettings extends AbsScalaSettings val YcacheMacroClassLoader = CachePolicy.setting("macro", "macros") val YpartialUnification = BooleanSetting ("-Ypartial-unification", "Enable partial unification in type constructor inference") val Yvirtpatmat = BooleanSetting ("-Yvirtpatmat", "Enable pattern matcher virtualization") + val Youtline = BooleanSetting ("-Youtline", "Don't compile method bodies. Use together with `-Ystop-afer:pickler to generate the pickled signatures for all source files.").internalOnly() val exposeEmptyPackage = BooleanSetting ("-Yexpose-empty-package", "Internal only: expose the empty package.").internalOnly() val Ydelambdafy = ChoiceSetting ("-Ydelambdafy", "strategy", "Strategy used for translating lambdas into JVM code.", List("inline", "method"), "method") diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index b068e43d1ad4..bc5ffd0ccd7c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -112,11 +112,13 @@ trait Analyzer extends AnyRef try { val typer = newTyper(rootContext(unit)) unit.body = typer.typed(unit.body) - for (workItem <- unit.toCheck) workItem() - if (settings.warnUnusedImport) - warnUnusedImports(unit) - if (settings.warnUnused.isSetByUser) - new checkUnused(typer).apply(unit) + if (!settings.Youtline.value) { + for (workItem <- unit.toCheck) workItem() + if (settings.warnUnusedImport) + warnUnusedImports(unit) + if (settings.warnUnused.isSetByUser) + new checkUnused(typer).apply(unit) + } } finally { unit.toCheck.clear() diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index a4a0a0c8b8bf..7f32eda84cd0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2075,7 +2075,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } // use typedValDef instead. 
this version is called after creating a new context for the ValDef - private def typedValDefImpl(vdef: ValDef) = { + private def typedValDefImpl(vdef: ValDef): ValDef = { val sym = vdef.symbol.initialize val typedMods = if (nme.isLocalName(sym.name) && sym.isPrivateThis && !vdef.mods.isPrivateLocal) { // scala/bug#10009 This tree has been given a field symbol by `enterGetterSetter`, patch up the @@ -5946,7 +5946,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper final def transformedOrTyped(tree: Tree, mode: Mode, pt: Type): Tree = { lookupTransformed(tree) match { case Some(tree1) => tree1 - case _ => typed(tree, mode, pt) + case _ => if (settings.Youtline.value) EmptyTree else typed(tree, mode, pt) } } final def lookupTransformed(tree: Tree): Option[Tree] = From c6e290ad4ac9dd98f07ffb9c02adff6ce32aaa94 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 15 Jan 2019 11:01:06 +1000 Subject: [PATCH 1616/2793] Driver to build a set of compile @argsfile-s in pipeline. --- .../scala/tools/nsc/PipelineMain.scala | 686 ++++++++++++++++++ 1 file changed, 686 insertions(+) create mode 100644 src/compiler/scala/tools/nsc/PipelineMain.scala diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala new file mode 100644 index 000000000000..b4d7943166f5 --- /dev/null +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -0,0 +1,686 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2019 LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc + +import java.io.File +import java.lang.Thread.UncaughtExceptionHandler +import java.nio.file.attribute.FileTime +import java.nio.file.{Files, Path, Paths} +import java.time.Instant +import java.util.Collections +import java.util.concurrent.atomic.AtomicInteger + +import javax.tools.ToolProvider + +import scala.collection.JavaConverters.asScalaIteratorConverter +import scala.collection.{immutable, mutable, parallel} +import scala.concurrent._ +import scala.concurrent.duration.Duration +import scala.reflect.internal.pickling.PickleBuffer +import scala.reflect.internal.util.FakePos +import scala.reflect.io.RootPath +import scala.tools.nsc.io.AbstractFile +import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} +import scala.tools.nsc.util.ClassPath +import scala.util.{Failure, Success, Try} + +class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy, argFiles: Seq[Path], useJars: Boolean) { + private val pickleCacheConfigured = System.getProperty("scala.pipeline.picklecache") + private val pickleCache: Path = { + if (pickleCacheConfigured == null) Files.createTempDirectory("scala.picklecache") + else { + Paths.get(pickleCacheConfigured) + } + } + private def cachePath(file: Path): Path = { + val newExtension = if (useJars) ".jar" else "" + changeExtension(pickleCache.resolve("./" + file).normalize(), newExtension) + } + + private val strippedAndExportedClassPath = mutable.HashMap[Path, Path]() + + /** Forward errors to the (current) reporter. 
*/ + protected def scalacError(msg: String): Unit = { + reporter.error(FakePos("scalac"), msg + "\n scalac -help gives more information") + } + + private var reporter: Reporter = _ + + private object handler extends UncaughtExceptionHandler { + override def uncaughtException(t: Thread, e: Throwable): Unit = { + e.printStackTrace() + System.exit(-1) + } + } + + implicit val executor = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(parallelism), t => handler.uncaughtException(Thread.currentThread(), t)) + val fileManager = ToolProvider.getSystemJavaCompiler.getStandardFileManager(null, null, null) + def changeExtension(p: Path, newExtension: String): Path = { + val fileName = p.getFileName.toString + val changedFileName = fileName.lastIndexOf('.') match { + case -1 => fileName + newExtension + case n => fileName.substring(0, n) + newExtension + } + p.getParent.resolve(changedFileName) + } + + def registerPickleClassPath[G <: Global](output: Path, data: mutable.AnyRefMap[G#Symbol, PickleBuffer]): Unit = { + val jarPath = cachePath(output) + val root = RootPath(jarPath, writable = true) + Files.createDirectories(root.root) + + val dirs = mutable.Map[G#Symbol, Path]() + def packageDir(packSymbol: G#Symbol): Path = { + if (packSymbol.isEmptyPackageClass) root.root + else if (dirs.contains(packSymbol)) dirs(packSymbol) + else if (packSymbol.owner.isRoot) { + val subDir = root.root.resolve(packSymbol.encodedName) + Files.createDirectories(subDir) + dirs.put(packSymbol, subDir) + subDir + } else { + val base = packageDir(packSymbol.owner) + val subDir = base.resolve(packSymbol.encodedName) + Files.createDirectories(subDir) + dirs.put(packSymbol, subDir) + subDir + } + } + val written = new java.util.IdentityHashMap[AnyRef, Unit]() + try { + for ((symbol, pickle) <- data) { + if (!written.containsKey(pickle)) { + val base = packageDir(symbol.owner) + val primary = base.resolve(symbol.encodedName + ".sig") + Files.write(primary, pickle.bytes) + written.put(pickle, ()) + } + } + } finally { + root.close() + } + Files.setLastModifiedTime(jarPath, FileTime.from(Instant.now())) + strippedAndExportedClassPath.put(output.toRealPath().normalize(), jarPath) + } + + + def writeDotFile(dependsOn: mutable.LinkedHashMap[Task, List[Dependency]]): Unit = { + val builder = new java.lang.StringBuilder() + builder.append("digraph projects {\n") + for ((p, deps) <- dependsOn) { + //builder.append(" node \"[]").append(p.label).append("\";\n") + for (dep <- deps) { + builder.append(" \"").append(p.label).append("\" -> \"").append(dep.t.label).append("\" [") + if (dep.isMacro) builder.append("label=M") + else if (dep.isPlugin) builder.append("label=P") + builder.append("];\n") + } + } + builder.append("}\n") + val path = Paths.get("projects.dot") + Files.write(path, builder.toString.getBytes(java.nio.charset.StandardCharsets.UTF_8)) + println("Wrote project dependency graph to: " + path.toAbsolutePath) + } + + private case class Dependency(t: Task, isMacro: Boolean, isPlugin: Boolean) + + def process(): Boolean = { + println(s"parallelism = $parallelism, strategy = $strategy") + + reporter = new ConsoleReporter(new Settings(scalacError)) + + def commandFor(argFileArg: Path): Task = { + val ss = new Settings(scalacError) + val command = new CompilerCommand(("@" + argFileArg) :: Nil, ss) + Task(argFileArg, command, command.files) + } + + val projects: List[Task] = argFiles.toList.map(commandFor) + val numProjects = projects.size + val produces = mutable.LinkedHashMap[Path, Task]() + for (p <- projects) 
{ + produces(p.outputDir) = p + } + val dependsOn = mutable.LinkedHashMap[Task, List[Dependency]]() + for (p <- projects) { + val macroDeps = p.macroClassPath.flatMap(p => produces.get(p)).toList.filterNot(_ == p).map(t => Dependency(t, isMacro = true, isPlugin = false)) + val pluginDeps = p.pluginClassPath.flatMap(p => produces.get(p)).toList.filterNot(_ == p).map(t => Dependency(t, isMacro = false, isPlugin = true)) + val classPathDeps = p.classPath.flatMap(p => produces.get(p)).toList.filterNot(_ == p).filterNot(p => macroDeps.exists(_.t == p)).map(t => Dependency(t, isMacro = false, isPlugin = false)) + dependsOn(p) = classPathDeps ++ macroDeps ++ pluginDeps + } + val dependedOn: Set[Task] = dependsOn.valuesIterator.flatten.map(_.t).toSet + val externalClassPath = projects.iterator.flatMap(_.classPath).filter(p => !produces.contains(p) && Files.exists(p)).toSet + + if (strategy != Traditional) { + val exportTimer = new Timer + exportTimer.start() + for (entry <- externalClassPath) { + val extracted = cachePath(entry) + val sourceTimeStamp = Files.getLastModifiedTime(entry) + if (Files.exists(extracted) && Files.getLastModifiedTime(extracted) == sourceTimeStamp) { + // println(s"Skipped export of pickles from $entry to $extracted (up to date)") + } else { + PickleExtractor.process(entry, extracted) + Files.setLastModifiedTime(extracted, sourceTimeStamp) + println(s"Exported pickles from $entry to $extracted") + Files.setLastModifiedTime(extracted, sourceTimeStamp) + } + strippedAndExportedClassPath(entry) = extracted + } + exportTimer.stop() + println(f"Exported external classpath in ${exportTimer.durationMs}%.0f ms") + } + + writeDotFile(dependsOn) + + val timer = new Timer + timer.start() + + def awaitAll(fs: Seq[Future[_]]): Future[_] = { + val done = Promise[Any]() + val allFutures = projects.flatMap(_.futures) + val count = allFutures.size + val counter = new AtomicInteger(count) + val handler = (a: Try[_]) => a match { + case f @ Failure(_) => + done.complete(f) + case Success(_) => + val remaining = counter.decrementAndGet() + if (remaining == 0) done.success(()) + } + + allFutures.foreach(_.onComplete(handler)) + done.future + } + + def awaitDone(): Unit = { + val allFutures: immutable.Seq[Future[_]] = projects.flatMap(_.futures) + val numAllFutures = allFutures.size + val awaitAllFutures: Future[_] = awaitAll(allFutures) + val numTasks = awaitAllFutures + var lastNumCompleted = allFutures.count(_.isCompleted) + while (true) try { + Await.result(awaitAllFutures, Duration(60, "s")) + timer.stop() + val numCompleted = allFutures.count(_.isCompleted) + println(s"PROGRESS: $numCompleted / $numAllFutures") + return + } catch { + case _: TimeoutException => + val numCompleted = allFutures.count(_.isCompleted) + if (numCompleted == lastNumCompleted) { + println(s"STALLED: $numCompleted / $numAllFutures") + println("Outline/Scala/Javac") + projects.map { + p => + def toX(b: Future[_]): String = b.value match { case None => "-"; case Some(Success(_)) => "x"; case Some(Failure(_)) => "!" 
} + val s = List(p.outlineDoneFuture, p.groupsDoneFuture, p.javaDoneFuture).map(toX).mkString(" ") + println(s + " " + p.label) + } + } else { + println(s"PROGRESS: $numCompleted / $numAllFutures") + } + } + } + strategy match { + case OutlineTypePipeline => + projects.foreach { p => + val isLeaf = !dependedOn.contains(p) + val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map { task => p.dependencyReadyFuture(task) }) + val f = if (isLeaf) { + for { + _ <- depsReady + _ <- { + p.outlineDone.complete(Success(())) + p.fullCompile() + Future.sequence(p.groups.map(_.done.future)) + } + } yield { + p.javaCompile() + } + } else { + for { + _ <- depsReady + _ <- { + p.outlineCompile() + p.outlineDone.future + } + _ <- { + p.fullCompile() + Future.sequence(p.groups.map(_.done.future)) + } + } yield { + p.javaCompile() + } + } + f.onComplete { _ => p.compiler.close() } + } + + awaitDone() + + for (p <- projects) { + val dependencies = dependsOn(p).map(_.t) + + def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max + + val maxOutlineCriticalPathMs = maxByOrZero(dependencies)(_.outlineCriticalPathMs) + p.outlineCriticalPathMs = maxOutlineCriticalPathMs + p.outlineTimer.durationMs + p.regularCriticalPathMs = maxOutlineCriticalPathMs + maxByOrZero(p.groups)(_.timer.durationMs) + p.fullCriticalPathMs = maxByOrZero(dependencies)(_.fullCriticalPathMs) + p.groups.map(_.timer.durationMs).sum + } + + if (parallelism == 1) { + val criticalPath = projects.maxBy(_.regularCriticalPathMs) + println(f"Critical path: ${criticalPath.regularCriticalPathMs}%.0f ms. Wall Clock: ${timer.durationMs}%.0f ms") + } else + println(f" Wall Clock: ${timer.durationMs}%.0f ms") + case Pipeline => + projects.foreach { p => + val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map(task => p.dependencyReadyFuture(task))) + val f = for { + _ <- depsReady + _ <- { + val isLeaf = !dependedOn.contains(p) + if (isLeaf) { + p.outlineDone.complete(Success(())) + p.fullCompile() + } else + p.fullCompileExportPickles() + // Start javac after scalac has completely finished + Future.sequence(p.groups.map(_.done.future)) + } + } yield { + p.javaCompile() + } + f.onComplete { _ => p.compiler.close() } + } + awaitDone() + + for (p <- projects) { + val dependencies = dependsOn(p).map(_.t) + + def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max + + val maxOutlineCriticalPathMs = maxByOrZero(dependencies)(_.outlineCriticalPathMs) + p.outlineCriticalPathMs = maxOutlineCriticalPathMs + p.outlineTimer.durationMs + p.regularCriticalPathMs = maxOutlineCriticalPathMs + maxByOrZero(p.groups)(_.timer.durationMs) + p.fullCriticalPathMs = maxByOrZero(dependencies)(_.fullCriticalPathMs) + p.groups.map(_.timer.durationMs).sum + } + + if (parallelism == 1) { + val criticalPath = projects.maxBy(_.regularCriticalPathMs) + println(f"Critical path: ${criticalPath.regularCriticalPathMs}%.0f ms. 
Wall Clock: ${timer.durationMs}%.0f ms") + } else + println(f" Wall Clock: ${timer.durationMs}%.0f ms") + case Traditional => + projects.foreach { p => + val f1 = Future.sequence(dependsOn.getOrElse(p, Nil).map(_.t.javaDone.future)) + val f2 = f1.flatMap { _ => + p.outlineDone.complete(Success(())) + p.fullCompile() + Future.sequence(p.groups.map(_.done.future)).map(_ => p.javaCompile()) + } + f2.onComplete { _ => p.compiler.close() } + } + awaitDone() + + for (p <- projects) { + val dependencies = dependsOn(p).map(_.t) + + def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max + + p.fullCriticalPathMs = maxByOrZero(dependencies)(_.fullCriticalPathMs) + p.groups.map(_.timer.durationMs).sum + } + if (parallelism == 1) { + val maxFullCriticalPath: Double = projects.map(_.fullCriticalPathMs).max + println(f"Critical path: $maxFullCriticalPath%.0f ms. Wall Clock: ${timer.durationMs}%.0f ms") + } else { + println(f"Wall Clock: ${timer.durationMs}%.0f ms") + } + } + + writeChromeTrace(projects) + deleteTempPickleCache() + true + } + + private def deleteTempPickleCache(): Unit = { + if (pickleCacheConfigured == null) { + AbstractFile.getDirectory(pickleCache.toFile).delete() + } + } + + private def writeChromeTrace(projects: List[Task]) = { + val trace = new java.lang.StringBuilder() + trace.append("""{"traceEvents": [""") + val sb = new mutable.StringBuilder(trace) + + def durationEvent(name: String, cat: String, t: Timer): String = { + s"""{"name": "$name", "cat": "$cat", "ph": "X", "ts": ${(t.startMicros).toLong}, "dur": ${(t.durationMicros).toLong}, "pid": 0, "tid": ${t.thread.getId}}""" + } + + def projectEvents(p: Task): List[String] = { + val events = List.newBuilder[String] + if (p.outlineTimer.durationMicros > 0d) { + val desc = if (strategy == OutlineTypePipeline) "outline-type" else "parser-to-pickler" + events += durationEvent(p.label, desc, p.outlineTimer) + events += durationEvent(p.label, "pickle-export", p.pickleExportTimer) + } + for ((g, ix) <- p.groups.zipWithIndex) { + if (g.timer.durationMicros > 0d) + events += durationEvent(p.label, "compile-" + ix, g.timer) + } + if (p.javaTimer.durationMicros > 0d) { + val desc = "javac" + events += durationEvent(p.label, desc, p.javaTimer) + } + events.result() + } + + projects.iterator.flatMap(projectEvents).addString(sb, ",\n") + trace.append("]}") + val traceFile = Paths.get(s"build-${label}.trace") + Files.write(traceFile, trace.toString.getBytes()) + println("Chrome trace written to " + traceFile.toAbsolutePath) + } + + case class Group(files: List[String]) { + val timer = new Timer + val done = Promise[Unit]() + } + + private case class Task(argsFile: Path, command: CompilerCommand, files: List[String]) { + val label = argsFile.toString.replaceAll("target/", "").replaceAll("""(.*)/(.*).args""", "$1:$2") + override def toString: String = argsFile.toString + def outputDir: Path = command.settings.outputDirs.getSingleOutput.get.file.toPath.toAbsolutePath.normalize() + private def expand(s: command.settings.PathSetting): List[Path] = { + ClassPath.expandPath(s.value, expandStar = true).map(s => Paths.get(s).toAbsolutePath.normalize()) + } + lazy val classPath: Seq[Path] = expand(command.settings.classpath) + lazy val macroClassPath: Seq[Path] = expand(command.settings.YmacroClasspath) + lazy val macroClassPathSet: Set[Path] = macroClassPath.toSet + lazy val pluginClassPath: Set[Path] = { + def asPath(p: String) = ClassPath split p + + val paths = command.settings.plugin.value filter (_ != "") 
flatMap (s => asPath(s) map (s => Paths.get(s))) + paths.toSet + } + def dependencyReadyFuture(dependency: Dependency) = if (dependency.isMacro) { + log(s"dependency is on macro classpath, will wait for .class files: ${dependency.t.label}") + dependency.t.javaDone.future + } else if (dependency.isPlugin) { + log(s"dependency is on plugin classpath, will wait for .class files: ${dependency.t.label}") + dependency.t.javaDone.future + } else + dependency.t.outlineDone.future + + + val cacheMacro = java.lang.Boolean.getBoolean("scala.pipeline.cache.macro.classloader") + val cachePlugin = java.lang.Boolean.getBoolean("scala.pipeline.cache.plugin.classloader") + if (cacheMacro) + command.settings.YcacheMacroClassLoader.value = "always" + if (cachePlugin) + command.settings.YcachePluginClassLoader.value = "always" + + if (strategy != Traditional) { + command.settings.YpickleJava.value = true + } + + val groups: List[Group] = { + val isScalaLibrary = files.exists(_.endsWith("Predef.scala")) + if (strategy != OutlineTypePipeline || isScalaLibrary) { + Group(files) :: Nil + } else { + command.settings.classpath.value = command.settings.outputDirs.getSingleOutput.get.toString + File.pathSeparator + command.settings.classpath.value + val length = files.length + val groups = (length.toDouble / 128).toInt.max(1) + files.grouped((length.toDouble / groups).ceil.toInt.max(1)).toList.map(Group(_)) + } + } + command.settings.outputDirs.getSingleOutput.get.file.mkdirs() + + val isGrouped = groups.size > 1 + + val outlineTimer = new Timer() + val pickleExportTimer = new Timer + val javaTimer = new Timer() + + var outlineCriticalPathMs = 0d + var regularCriticalPathMs = 0d + var fullCriticalPathMs = 0d + val outlineDone: Promise[Unit] = Promise[Unit]() + val outlineDoneFuture = outlineDone.future + val javaDone: Promise[Unit] = Promise[Unit]() + val javaDoneFuture: Future[_] = javaDone.future + val groupsDoneFuture: Future[List[Unit]] = Future.sequence(groups.map(_.done.future)) + val futures: List[Future[_]] = { + outlineDone.future :: javaDone.future :: groups.map(_.done.future) + } + + val originalClassPath: String = command.settings.classpath.value + + lazy val compiler: Global = try { + val result = newCompiler(command.settings) + val reporter = result.reporter + if (reporter.hasErrors) + reporter.flush() + else if (command.shouldStopWithInfo) + reporter.echo(command.getInfoMessage(result)) + result + } catch { + case t: Throwable => + t.printStackTrace() + throw t + } + + def outlineCompile(): Unit = { + outlineTimer.start() + try { + log("scalac outline: start") + command.settings.Youtline.value = true + command.settings.stopAfter.value = List("pickler") + command.settings.Ymacroexpand.value = command.settings.MacroExpand.None + val run1 = new compiler.Run() + run1 compile files + registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, run1.symData) + outlineTimer.stop() + reporter.finish() + if (reporter.hasErrors) { + log("scalac outline: failed") + outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) + } else { + log(f"scala outline: done ${outlineTimer.durationMs}%.0f ms") + outlineDone.complete(Success(())) + } + } catch { + case t: Throwable => + t.printStackTrace() + outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) + } + } + + def fullCompile(): Unit = { + command.settings.Youtline.value = false + command.settings.stopAfter.value = Nil + command.settings.Ymacroexpand.value = 
command.settings.MacroExpand.Normal + + val groupCount = groups.size + for ((group, ix) <- groups.zipWithIndex) { + group.done.completeWith { + Future { + log(s"scalac (${ix + 1}/$groupCount): start") + group.timer.start() + val compiler2 = newCompiler(command.settings) + try { + val run2 = new compiler2.Run() + run2 compile group.files + compiler2.reporter.finish() + if (compiler2.reporter.hasErrors) { + group.done.complete(Failure(new RuntimeException(label + ": compile failed: "))) + } else { + group.done.complete(Success(())) + } + } finally { + compiler2.close() + group.timer.stop() + } + log(f"scalac (${ix + 1}/$groupCount): done ${group.timer.durationMs}%.0f ms") + } + } + } + } + + def fullCompileExportPickles(): Unit = { + assert(groups.size == 1) + val group = groups.head + log("scalac: start") + outlineTimer.start() + try { + val run2 = new compiler.Run() { + + override def advancePhase(): Unit = { + if (compiler.phase == this.picklerPhase) { + outlineTimer.stop() + log(f"scalac outline: done ${outlineTimer.durationMs}%.0f ms") + pickleExportTimer.start() + registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, symData) + pickleExportTimer.stop() + log(f"scalac: exported pickles ${pickleExportTimer.durationMs}%.0f ms") + outlineDone.complete(Success(())) + group.timer.start() + } + super.advancePhase() + } + } + + run2 compile group.files + compiler.reporter.finish() + group.timer.stop() + if (compiler.reporter.hasErrors) { + log("scalac: failed") + if (!outlineDone.isCompleted) + outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) + group.done.complete(Failure(new RuntimeException(label + ": compile failed: "))) + } else { + log(f"scalac: done ${group.timer.durationMs}%.0f ms") + // outlineDone.complete(Success(())) + group.done.complete(Success(())) + } + } catch { + case t: Throwable => + t.printStackTrace() + if (!outlineDone.isCompleted) + outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) + if (!group.done.isCompleted) + group.done.complete(Failure(new RuntimeException(label + ": compile failed: "))) + } + } + + def javaCompile(): Unit = { + val javaSources = files.filter(_.endsWith(".java")) + if (javaSources.nonEmpty) { + log("javac: start") + javaTimer.start() + javaDone.completeWith(Future { + val opts = java.util.Arrays.asList("-d", command.settings.outdir.value, "-cp", command.settings.outdir.value + File.pathSeparator + originalClassPath) + val compileTask = ToolProvider.getSystemJavaCompiler.getTask(null, null, null, opts, null, fileManager.getJavaFileObjects(javaSources.toArray: _*)) + compileTask.setProcessors(Collections.emptyList()) + compileTask.call() + javaTimer.stop() + log(f"javac: done ${javaTimer.durationMs}%.0f ms") + () + }) + } else { + javaDone.complete(Success(())) + } + } + def log(msg: String): Unit = println(this.label + ": " + msg) + } + + final class Timer() { + private var startNanos: Long = 0 + private var endNanos: Long = 0 + def start(): Unit = { + assert(startNanos == 0L) + startNanos = System.nanoTime + } + var thread: Thread = Thread.currentThread() + def stop(): Unit = { + thread = Thread.currentThread() + endNanos = System.nanoTime() + } + def startMs: Double = startNanos.toDouble / 1000 / 1000 + def durationMs: Double = { + val result = (endNanos - startNanos).toDouble / 1000 / 1000 + if (result < 0) + getClass + result + } + def startMicros: Double = startNanos.toDouble / 1000d + def durationMicros: Double = (endNanos - startNanos).toDouble / 
1000d + } + + protected def newCompiler(settings: Settings): Global = { + if (strategy != Traditional) { + val classPath = ClassPath.expandPath(settings.classpath.value, expandStar = true) + val modifiedClassPath = classPath.map { entry => + val entryPath = Paths.get(entry) + if (Files.exists(entryPath)) + strippedAndExportedClassPath.getOrElse(entryPath.toRealPath().normalize(), entryPath).toString + else + entryPath + } + settings.classpath.value = modifiedClassPath.mkString(java.io.File.pathSeparator) + } + Global(settings) + } +} + +sealed abstract class BuildStrategy + +/** Outline type check to compute type signatures as pickles as an input to downstream compilation. */ +case object OutlineTypePipeline extends BuildStrategy + +case object Pipeline extends BuildStrategy + +/** Emit class files before triggering downstream compilation */ +case object Traditional extends BuildStrategy + +object PipelineMain { + def main(args: Array[String]): Unit = { + val strategies = List(OutlineTypePipeline, Pipeline, Traditional) + val strategy = strategies.find(_.productPrefix.equalsIgnoreCase(System.getProperty("scala.pipeline.strategy", "pipeline"))).get + val parallelism = java.lang.Integer.getInteger("scala.pipeline.parallelism", parallel.availableProcessors) + val useJars = java.lang.Boolean.getBoolean("scala.pipeline.use.jar") + val argFiles: Seq[Path] = args match { + case Array(path) if Files.isDirectory(Paths.get(path)) => + Files.walk(Paths.get(path)).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList + case _ => + args.map(Paths.get(_)) + } + val main = new PipelineMainClass("1", parallelism, strategy, argFiles, useJars) + val result = main.process() + if (!result) + System.exit(1) + else + System.exit(0) + } +} + +//object PipelineMainTest { +// def main(args: Array[String]): Unit = { +// var i = 0 +// val argsFiles = Files.walk(Paths.get("/code/guardian-frontend")).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList +// for (_ <- 1 to 2; n <- List(parallel.availableProcessors); strat <- List(Pipeline)) { +// i += 1 +// val main = new PipelineMainClass(strat + "-" + i, n, strat, argsFiles, useJars = false) +// println(s"====== ITERATION $i=======") +// val result = main.process() +// if (!result) +// System.exit(1) +// } +// System.exit(0) +// } +//} From 8e58ea0bc65792706a9809f7be65fc94b075d5ac Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 15 Jan 2019 11:01:31 +1000 Subject: [PATCH 1617/2793] Valid URL for VirtualDirectory classpath --- .../scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala index 5b157e9b386e..04ddc61b2107 100644 --- a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala @@ -35,7 +35,7 @@ case class VirtualDirectoryClassPath(dir: VirtualDirectory) extends ClassPath wi def isPackage(f: AbstractFile): Boolean = f.isPackage // mimic the behavior of the old nsc.util.DirectoryClassPath - def asURLs: Seq[URL] = Seq(new URL(dir.name)) + def asURLs: Seq[URL] = Seq(new URL("file://_VIRTUAL_/" + dir.name)) def asClassPathStrings: Seq[String] = Seq(dir.path) override def findClass(className: String): Option[ClassRepresentation] = findClassFile(className) map ClassFileEntryImpl From 
141a72f6fabb5bea096736f7ff668377bc1f63f6 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 4 Feb 2019 13:42:03 +1000 Subject: [PATCH 1618/2793] Add missing copyright headers --- src/compiler/scala/tools/nsc/PickleExtractor.scala | 12 ++++++++++++ src/compiler/scala/tools/nsc/PipelineMain.scala | 14 +++++++++++--- src/reflect/scala/reflect/io/RootPath.scala | 12 ++++++++++++ 3 files changed, 35 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PickleExtractor.scala b/src/compiler/scala/tools/nsc/PickleExtractor.scala index 53a54b12e12a..23ae8f4338fb 100644 --- a/src/compiler/scala/tools/nsc/PickleExtractor.scala +++ b/src/compiler/scala/tools/nsc/PickleExtractor.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.tools.nsc import java.io.Closeable diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index b4d7943166f5..a36f64cda7f4 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -1,7 +1,15 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2019 LAMP/EPFL - * @author Martin Odersky +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. */ + package scala.tools.nsc import java.io.File diff --git a/src/reflect/scala/reflect/io/RootPath.scala b/src/reflect/scala/reflect/io/RootPath.scala index 51273a9c3f37..6634d323481e 100644 --- a/src/reflect/scala/reflect/io/RootPath.scala +++ b/src/reflect/scala/reflect/io/RootPath.scala @@ -1,3 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + package scala.reflect.io import java.io.Closeable From 65857146ca97b47763ec3b5067ea3d0edcf8ac65 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 19 Feb 2019 18:38:12 -0800 Subject: [PATCH 1619/2793] partest 1.1.9 (was 1.1.7) there aren't any real changes in this version. 
the context is to test that we are able to publish Scala modules using sbt 1 now --- versions.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/versions.properties b/versions.properties index d3ff92c49a31..144cb4005571 100644 --- a/versions.properties +++ b/versions.properties @@ -22,6 +22,6 @@ scala.binary.version=2.12 scala-xml.version.number=1.0.6 scala-parser-combinators.version.number=1.0.7 scala-swing.version.number=2.0.3 -partest.version.number=1.1.7 +partest.version.number=1.1.9 scala-asm.version=6.2.0-scala-2 jline.version=2.14.6 From a996dc49ebde8ec34b9ebbcf77205a4b0f5ebb54 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 21 Feb 2019 14:30:24 -0800 Subject: [PATCH 1620/2793] misc minor readme tweaks --- README.md | 21 +++++++++++---------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index b212ee773cc2..f4d9fb5c7b19 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,13 @@ # Welcome! -This is the official repository for the [Scala Programming Language](http://www.scala-lang.org). + +This is the official repository for the [Scala Programming Language](http://www.scala-lang.org) +standard library, compiler, and language spec. # How to contribute -To contribute to the Scala standard library, Scala compiler, and Scala Language Specification, please send us a [pull request](https://help.github.com/articles/using-pull-requests/#fork--pull) from your fork of this repository. +To contribute in this repo, please open a [pull request](https://help.github.com/articles/using-pull-requests/#fork--pull) from your fork of this repository. -We do have to ask you to sign the [Scala CLA](http://www.lightbend.com/contribute/cla/scala) before we can merge any of your work into our code base, to protect its open source nature. +We do have to ask you to sign the [Scala CLA](http://www.lightbend.com/contribute/cla/scala) before we can merge any of your work, to protect its open source nature. For more information on building and developing the core of Scala, make sure to read the rest of this README! @@ -50,13 +52,13 @@ scala/ +---/library Scala Standard Library +---/reflect Scala Reflection +---/compiler Scala Compiler - +---/eclipse Eclipse project files +---/intellij IntelliJ project templates +--spec/ The Scala language specification +--scripts/ Scripts for the CI jobs (including building releases) +--test/ The Scala test suite +---/files Partest tests +---/junit JUnit tests + +---/scalacheck ScalaCheck tests +--build/ [Generated] Build output directory ``` @@ -68,10 +70,9 @@ You need the following tools: - Java SDK. The baseline version is 8 for both 2.12.x and 2.13.x. It may be possible to use a later SDK for local development, but the CI will verify against the baseline version. - - sbt. We recommend the [sbt-extras](https://github.com/paulp/sbt-extras) runner - script. It provides sensible default jvm options (stack and heap size). + - sbt (sbt 0.13 on the 2.12.x branch, sbt 1 on the 2.13.x branch) -Mac OS X and Linux work. Windows may work if you use Cygwin. Community help with keeping +MacOS and Linux work. Windows may work if you use Cygwin. Community help with keeping the build working on Windows is appreciated. ## Tools we use @@ -87,7 +88,7 @@ We are grateful for the following OSS licenses: During ordinary development, a new Scala build is built by the previously released version. For short we call the previous release -"starr": the stable reference Scala release. 
Building with starr is +"starr": the stable reference release. Building with starr is sufficient for most kinds of changes. However, a full build of Scala (a *bootstrap*, as performed by our CI) @@ -258,14 +259,14 @@ after an LGTM comment is in the [scala/scabot](https://github.com/scala/scabot) ## Community build -The Scala community build is a central element for testing Scala +The Scala community build is an important method for testing Scala releases. A community build can be launched for any Scala commit, even before the commit's PR has been merged. That commit is then used to build a large number of open-source projects from source and run their test suites. To request a community build run on your PR, just ask in a comment on -the PR and a Scala team member will take care of +the PR and a Scala team member (probably @SethTisue) will take care of it. ([details](https://github.com/scala/community-builds/wiki#can-i-run-it-against-a-pull-request-in-scalascala)) Community builds run on the Scala Jenkins instance. The jobs are From 5ad9e03fde432df99cee89df4cbe47681cfca94f Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 21 Feb 2019 14:31:15 -0800 Subject: [PATCH 1621/2793] remove inactive maintainers --- README.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/README.md b/README.md index f4d9fb5c7b19..716acc554366 100644 --- a/README.md +++ b/README.md @@ -31,10 +31,6 @@ If you need some help with your PR at any time, please feel free to @-mention an | [`@retronym`](https://github.com/retronym) | compiler performance, weird compiler bugs, Java 8 lambdas, REPL | | [`@Ichoran`](https://github.com/Ichoran) | collections library, performance | | [`@lrytz`](https://github.com/lrytz) | optimizer, named & default arguments | - | [`@VladUreche`](https://github.com/VladUreche) | specialization, Scaladoc tool | - | [`@densh`](https://github.com/densh) | quasiquotes, parser, string interpolators, macros in standard library | - | [`@xeno-by`](https://github.com/xeno-by) | macros and reflection | - | [`@heathermiller`](https://github.com/heathermiller) | documentation | | [`@dragos`](https://github.com/dragos) | specialization, back end | | [`@axel22`](https://github.com/axel22) | collections, concurrency, specialization | | [`@janekdb`](https://github.com/janekdb) | documentation | From 1bb9b7482c2c873a51285febb256ee303634e3f2 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 21 Feb 2019 14:35:07 -0800 Subject: [PATCH 1622/2793] readme: reorder maintainers, add Stefan and Viktor --- README.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 716acc554366..1408d04add4a 100644 --- a/README.md +++ b/README.md @@ -27,12 +27,14 @@ If you need some help with your PR at any time, please feel free to @-mention an | | username | talk to me about... 
| --------------------------------------------------------------------------------------------------|----------------------------------------------------------------|---------------------------------------------------| | [`@adriaanm`](https://github.com/adriaanm) | type checker, pattern matcher, infrastructure, language spec | - | [`@SethTisue`](https://github.com/SethTisue) | build, developer docs, community build, Jenkins, library, the welcome-to-Scala experience | + | [`@SethTisue`](https://github.com/SethTisue) | getting started, build, developer docs, community build, Jenkins, library | | [`@retronym`](https://github.com/retronym) | compiler performance, weird compiler bugs, Java 8 lambdas, REPL | + | [`@szeiger`](https://github.com/szeiger) | collections, build | + | [`@lrytz`](https://github.com/lrytz) | back end, optimizer, named & default arguments | | [`@Ichoran`](https://github.com/Ichoran) | collections library, performance | - | [`@lrytz`](https://github.com/lrytz) | optimizer, named & default arguments | + | [`@viktorklang`](https://github.com/viktorklang) | concurrency, futures | + | [`@axel22`](https://github.com/axel22) | concurrency, parallel collections, specialization | | [`@dragos`](https://github.com/dragos) | specialization, back end | - | [`@axel22`](https://github.com/axel22) | collections, concurrency, specialization | | [`@janekdb`](https://github.com/janekdb) | documentation | P.S.: If you have some spare time to help out around here, we would be delighted to add your name to this list! From 6a8177b4e1f5a45a81be9103cb64968a08425934 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Wed, 31 Oct 2018 16:52:47 +0100 Subject: [PATCH 1623/2793] [backport] Upgrade to ASM 7 --- .../scala/tools/nsc/backend/jvm/PostProcessor.scala | 6 +----- .../nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala | 6 +++--- .../nsc/backend/jvm/analysis/TypeFlowInterpreter.scala | 2 +- test/files/run/large_class.check | 3 ++- test/files/run/large_code.check | 3 ++- versions.properties | 2 +- 6 files changed, 10 insertions(+), 12 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index 60652c0bcd65..c42a02c58439 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -73,15 +73,11 @@ abstract class PostProcessor extends PerRunInit { setInnerClasses(classNode) serializeClass(classNode) } catch { - case e: java.lang.RuntimeException if e.getMessage != null && (e.getMessage contains "too large!") => - backendReporting.error(NoPosition, - s"Could not write class ${internalName} because it exceeds JVM code size limits. ${e.getMessage}") - null case ex: InterruptedException => throw ex case ex: Throwable => // TODO fail fast rather than continuing to write the rest of the class files? 
if (frontendAccess.compilerSettings.debug) ex.printStackTrace() - backendReporting.error(NoPosition, s"Error while emitting ${internalName}\n${ex.getMessage}") + backendReporting.error(NoPosition, s"Error while emitting $internalName\n${ex.getMessage}") null } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala index 8e29f5082c10..dd75484afdb7 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzerImpl.scala @@ -464,16 +464,16 @@ case class ParameterProducer(local: Int) case class UninitializedLocalProducer(local: Int) extends InitialProducer case class ExceptionProducer[V <: Value](handlerLabel: LabelNode, handlerStackTop: Int) extends InitialProducer -class InitialProducerSourceInterpreter extends SourceInterpreter(scala.tools.asm.Opcodes.ASM7_EXPERIMENTAL) { +class InitialProducerSourceInterpreter extends SourceInterpreter(scala.tools.asm.Opcodes.ASM7) { override def newParameterValue(isInstanceMethod: Boolean, local: Int, tp: Type): SourceValue = { new SourceValue(tp.getSize, ParameterProducer(local)) } - override def newEmptyNonParameterLocalValue(local: Int): SourceValue = { + override def newEmptyValue(local: Int): SourceValue = { new SourceValue(1, UninitializedLocalProducer(local)) } - override def newExceptionValue(tryCatchBlockNode: TryCatchBlockNode, handlerFrame: Frame[_ <: Value], exceptionType: Type): SourceValue = { + override def newExceptionValue(tryCatchBlockNode: TryCatchBlockNode, handlerFrame: Frame[SourceValue], exceptionType: Type): SourceValue = { val handlerStackTop = handlerFrame.stackTop + 1 // +1 because this value is about to be pushed onto `handlerFrame`. new SourceValue(1, ExceptionProducer(tryCatchBlockNode.handler, handlerStackTop)) } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala index 7adc5f28cd42..baa4450c5bb3 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/TypeFlowInterpreter.scala @@ -17,7 +17,7 @@ package analysis import scala.tools.asm.Type import scala.tools.asm.tree.analysis.{BasicValue, BasicInterpreter} -abstract class TypeFlowInterpreter extends BasicInterpreter(scala.tools.asm.Opcodes.ASM7_EXPERIMENTAL) { +abstract class TypeFlowInterpreter extends BasicInterpreter(scala.tools.asm.Opcodes.ASM7) { override def newValue(tp: Type) = { if (tp == null) super.newValue(tp) else if (isRef(tp)) new BasicValue(tp) diff --git a/test/files/run/large_class.check b/test/files/run/large_class.check index babe24db94e7..f5a569d880eb 100644 --- a/test/files/run/large_class.check +++ b/test/files/run/large_class.check @@ -1 +1,2 @@ -error: Could not write class BigEnoughToFail because it exceeds JVM code size limits. Class file too large! +error: Error while emitting BigEnoughToFail +Class too large: BigEnoughToFail diff --git a/test/files/run/large_code.check b/test/files/run/large_code.check index 42bf4909423d..c19862f68981 100644 --- a/test/files/run/large_code.check +++ b/test/files/run/large_code.check @@ -1 +1,2 @@ -error: Could not write class BigEnoughToFail because it exceeds JVM code size limits. Method tooLong's code too large! 
+error: Error while emitting BigEnoughToFail +Method too large: BigEnoughToFail.tooLong ()V diff --git a/versions.properties b/versions.properties index 144cb4005571..83a1cd644fe2 100644 --- a/versions.properties +++ b/versions.properties @@ -23,5 +23,5 @@ scala-xml.version.number=1.0.6 scala-parser-combinators.version.number=1.0.7 scala-swing.version.number=2.0.3 partest.version.number=1.1.9 -scala-asm.version=6.2.0-scala-2 +scala-asm.version=7.0.0-scala-1 jline.version=2.14.6 From 1220d3c915d8066f835d542a702681c8d0e6c795 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Thu, 25 Oct 2018 15:17:50 +0200 Subject: [PATCH 1624/2793] [backport] Nicer branch-sensitive nullness --- .../jvm/analysis/NullnessAnalyzer.scala | 31 +++++++++++++++---- 1 file changed, 25 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala index f55bd730c0e7..e23afd8a4a03 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/NullnessAnalyzer.scala @@ -18,7 +18,7 @@ import java.util import scala.annotation.switch import scala.tools.asm.tree.analysis._ -import scala.tools.asm.tree.{AbstractInsnNode, LdcInsnNode, MethodInsnNode, MethodNode} +import scala.tools.asm.tree.{AbstractInsnNode, LdcInsnNode, MethodInsnNode, MethodNode, LabelNode} import scala.tools.asm.{Opcodes, Type} import scala.tools.nsc.backend.jvm.opt.BytecodeUtils import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ @@ -146,15 +146,37 @@ final class NullnessInterpreter(knownNonNullInvocation: MethodInsnNode => Boolea } class NullnessFrame(nLocals: Int, nStack: Int) extends AliasingFrame[NullnessValue](nLocals, nStack) { + private[this] var ifNullAliases: AliasSet = null + // Auxiliary constructor required for implementing `NullnessAnalyzer.newFrame` def this(src: Frame[_ <: NullnessValue]) { this(src.getLocals, src.getMaxStackSize) init(src) } + private def setNullness(s: AliasSet, v: NullnessValue) = { + val it = s.iterator + while (it.hasNext) + this.setValue(it.next(), v) + } + + override def initJumpTarget(opcode: Int, target: LabelNode): Unit = { + // when `target` is defined, we're in the case where the branch condition is true + val conditionTrue = target != null + if (opcode == Opcodes.IFNULL) + setNullness(ifNullAliases, if (conditionTrue) NullValue else NotNullValue) + else if (opcode == Opcodes.IFNONNULL) + setNullness(ifNullAliases, if (conditionTrue) NotNullValue else NullValue) + } + override def execute(insn: AbstractInsnNode, interpreter: Interpreter[NullnessValue]): Unit = { import Opcodes._ + ifNullAliases = insn.getOpcode match { + case IFNULL | IFNONNULL => aliasesOf(this.stackTop) + case _ => null + } + // get the alias set the object that is known to be not-null after this operation. // alias sets are mutable / mutated, so after super.execute, this set contains the remaining // aliases of the value that becomes not-null. @@ -203,11 +225,8 @@ class NullnessFrame(nLocals: Int, nStack: Int) extends AliasingFrame[NullnessVal super.execute(insn, interpreter) - if (nullCheckedAliases != null) { - val it = nullCheckedAliases.iterator - while (it.hasNext) - this.setValue(it.next(), NotNullValue) - } + if (nullCheckedAliases != null) + setNullness(nullCheckedAliases, NotNullValue) } } From a1539c6934a1fdbba1c674cc271ee61999d9f0c9 Mon Sep 17 00:00:00 2001 From: "Diego E. 
Alonso-Blas" Date: Sat, 23 Feb 2019 20:07:26 +0000 Subject: [PATCH 1625/2793] Back-ports changes to add upperBound, lowerBound. Back-ports the changes from https://github.com/scala/scala/pull/7142 We add an `upperBound` and `lowerBound` method to the Type class, and replace as many calls as we can of `.bounds` with calls to these. --- .../tools/nsc/transform/SpecializeTypes.scala | 24 +++++++++---------- .../scala/tools/nsc/transform/UnCurry.scala | 2 +- .../transform/patmat/MatchTranslation.scala | 2 +- .../tools/nsc/typechecker/ContextErrors.scala | 2 +- .../tools/nsc/typechecker/Implicits.scala | 16 ++++++------- .../scala/tools/nsc/typechecker/Infer.scala | 2 +- .../tools/nsc/typechecker/RefChecks.scala | 6 ++--- .../scala/tools/nsc/typechecker/Typers.scala | 8 +++---- .../scala/reflect/internal/Definitions.scala | 4 ++-- .../internal/ExistentialsAndSkolems.scala | 4 ++-- .../scala/reflect/internal/Symbols.scala | 6 ++--- .../scala/reflect/internal/Types.scala | 23 ++++++++++++++---- .../scala/reflect/internal/tpe/GlbLubs.scala | 8 +++---- .../reflect/internal/tpe/TypeComparers.scala | 6 ++--- .../internal/tpe/TypeConstraints.scala | 10 ++++---- .../scala/reflect/internal/tpe/TypeMaps.scala | 4 ++-- 16 files changed, 70 insertions(+), 57 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 10d733d04378..0e3ad97af6c6 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -532,7 +532,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { sClassMap.getOrElseUpdate(tparam, tparam.cloneSymbol(sClass, tparam.flags, tparam.name append tpnme.SPECIALIZED_SUFFIX) - modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefTpe)) + modifyInfo (info => TypeBounds(info.lowerBound, AnyRefTpe)) ).tpe } @@ -562,11 +562,11 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { */ def produceTypeParameters(syms: List[Symbol], nowner: Symbol, env: TypeEnv) = { val cloned = for (s <- syms) yield if (!env.contains(s)) s.cloneSymbol(nowner) else env(s).typeSymbol - // log("producing type params: " + cloned.map(t => (t, t.tpe.bounds.hi))) + // log("producing type params: " + cloned.map(t => (t, t.tpe.upperBound))) foreach2(syms, cloned) { (orig, cln) => cln.removeAnnotation(SpecializedClass) if (env.contains(orig)) - cln modifyInfo (info => TypeBounds(info.bounds.lo, AnyRefTpe)) + cln modifyInfo (info => TypeBounds(info.lowerBound, AnyRefTpe)) } cloned map (_ substInfo (syms, cloned)) } @@ -633,7 +633,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { val specializedInfoType: Type = { oldClassTParams = survivingParams(clazz.info.typeParams, env) newClassTParams = produceTypeParameters(oldClassTParams, sClass, env) map subst(env) - // log("new tparams " + newClassTParams.zip(newClassTParams map {s => (s.tpe, s.tpe.bounds.hi)}) + ", in env: " + env) + // log("new tparams " + newClassTParams.zip(newClassTParams map {s => (s.tpe, s.tpe.upperBound)}) + ", in env: " + env) def applyContext(tpe: Type) = subst(env, tpe).instantiateTypeParams(oldClassTParams, newClassTParams map (_.tpe)) @@ -1280,7 +1280,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { * A conflicting type environment could still be satisfiable. 
*/ def nonConflicting(env: TypeEnv) = env forall { case (tvar, tpe) => - (subst(env, tvar.info.bounds.lo) <:< tpe) && (tpe <:< subst(env, tvar.info.bounds.hi)) + (subst(env, tvar.info.lowerBound) <:< tpe) && (tpe <:< subst(env, tvar.info.upperBound)) } /** The type environment is sound w.r.t. to all type bounds or only soft @@ -1300,15 +1300,15 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } env forall { case (tvar, tpe) => - matches(tvar.info.bounds.lo, tpe) && matches(tpe, tvar.info.bounds.hi) || { + matches(tvar.info.lowerBound, tpe) && matches(tpe, tvar.info.upperBound) || { if (warnings) reporter.warning(tvar.pos, s"Bounds prevent specialization of $tvar") debuglog("specvars: " + - tvar.info.bounds.lo + ": " + - specializedTypeVars(tvar.info.bounds.lo) + " " + - subst(env, tvar.info.bounds.hi) + ": " + - specializedTypeVars(subst(env, tvar.info.bounds.hi)) + tvar.info.lowerBound + ": " + + specializedTypeVars(tvar.info.lowerBound) + " " + + subst(env, tvar.info.upperBound) + ": " + + specializedTypeVars(subst(env, tvar.info.upperBound)) ) false } @@ -1332,8 +1332,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { env.foldLeft[Option[TypeEnv]](noconstraints) { case (constraints, (tvar, tpe)) => - val loconstraints = matches(tvar.info.bounds.lo, tpe) - val hiconstraints = matches(tpe, tvar.info.bounds.hi) + val loconstraints = matches(tvar.info.lowerBound, tpe) + val hiconstraints = matches(tpe, tvar.info.upperBound) val allconstraints = for (c <- constraints; l <- loconstraints; h <- hiconstraints) yield c ++ l ++ h allconstraints } diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index b1893487893b..4849d85f84cf 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -276,7 +276,7 @@ abstract class UnCurry extends InfoTransform // Don't want bottom types getting any further than this (scala/bug#4024) if (tp.typeSymbol.isBottomClass) getClassTag(AnyTpe) else if (!tag.isEmpty) tag - else if (tp.bounds.hi ne tp) getClassTag(tp.bounds.hi) + else if (tp.upperBound ne tp) getClassTag(tp.upperBound) else localTyper.TyperErrorGen.MissingClassTagError(tree, tp) } def traversableClassTag(tpe: Type): Tree = { diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala index 6db93de2c6dd..46a4d06a00d8 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTranslation.scala @@ -173,7 +173,7 @@ trait MatchTranslation { true } - private def concreteType = tpe.bounds.hi + private def concreteType = tpe.upperBound private def unbound = unbind(tree) private def tpe_s = if (pt <:< concreteType) "" + pt else s"$pt (binder: $tpe)" private def at_s = unbound match { diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index d0653a9ae755..b62ec028b0b8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -720,7 +720,7 @@ trait ContextErrors { // SelectFromTypeTree def TypeSelectionFromVolatileTypeError(tree: Tree, qual: Tree) = { - val hiBound = qual.tpe.bounds.hi + val hiBound = qual.tpe.upperBound val addendum = if (hiBound =:= qual.tpe) "" else s" 
(with upper bound ${hiBound})" issueNormalTypeError(tree, s"illegal type selection from volatile type ${qual.tpe}${addendum}") setError(tree) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 2cc7fa729899..3cdd2633f559 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -354,7 +354,7 @@ trait Implicits { sym.tpe match { case MethodType(params, restpe) if (params forall (_.tpe.isInstanceOf[BoundedWildcardType])) => - Some((sym.name, params map (_.tpe.bounds.lo), restpe)) + Some((sym.name, params map (_.tpe.lowerBound), restpe)) case _ => None } case _ => None @@ -462,8 +462,8 @@ trait Implicits { def core(tp: Type): Type = tp.dealiasWiden match { case RefinedType(parents, defs) => intersectionType(parents map core, tp.typeSymbol.owner) case AnnotatedType(annots, tp) => core(tp) - case ExistentialType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.bounds.hi))) - case PolyType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.bounds.hi))) + case ExistentialType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.upperBound))) + case PolyType(tparams, result) => core(result).subst(tparams, tparams map (t => core(t.info.upperBound))) case _ => tp } def stripped(tp: Type): Type = { @@ -624,7 +624,7 @@ trait Implicits { else pt match { case tr @ TypeRef(pre, sym, args) => if (sym.isAliasType) loop(tp, pt.dealias) - else if (sym.isAbstractType) loop(tp, pt.bounds.lo) + else if (sym.isAbstractType) loop(tp, pt.lowerBound) else { val ptFunctionArity = functionArity(pt) ptFunctionArity > 0 && hasLength(params, ptFunctionArity) && { @@ -668,7 +668,7 @@ trait Implicits { // We only know enough to rule out a subtype relationship if the left hand side is a class. case tr1@TypeRef(_, sym1, args1) if sym1.isClass => val tp2Wide = - tp2.dealiasWiden.bounds.hi match { + tp2.dealiasWiden.upperBound match { case et: ExistentialType => et.underlying // OPT meant as cheap approximation of skolemizeExistential? 
case tp => tp } @@ -1195,7 +1195,7 @@ trait Implicits { // SLS 2.12, section 7.2: // - if `T` is an abstract type, the parts of its upper bound; - getParts(tp.bounds.hi) + getParts(tp.upperBound) if (isScala213) { // - if `T` is a parameterized type `S[T1,…,Tn]`, the union of the parts of `S` and `T1,…,Tn` @@ -1382,7 +1382,7 @@ trait Implicits { else findSubManifest(pre) :: suffix): _*) } else if (sym.isExistentiallyBound && full) { manifestFactoryCall("wildcardType", tp, - findManifest(tp.bounds.lo), findManifest(tp.bounds.hi)) + findManifest(tp.lowerBound), findManifest(tp.upperBound)) } // looking for a manifest of a type parameter that hasn't been inferred by now, // can't do much, but let's not fail @@ -1447,7 +1447,7 @@ trait Implicits { private def materializeImplicit(pt: Type): SearchResult = pt match { case TypeRef(_, sym, _) if sym.isAbstractType => - materializeImplicit(pt.dealias.bounds.lo) // #3977: use pt.dealias, not pt (if pt is a type alias, pt.bounds.lo == pt) + materializeImplicit(pt.dealias.lowerBound) // #3977: use pt.dealias, not pt (if pt is a type alias, pt.lowerBound == pt) case pt @ TypeRef(pre, sym, arg :: Nil) => sym match { case sym if ManifestSymbols(sym) => manifestOfType(arg, sym) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 4c32bf9678d5..2e66eff3c82d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -531,7 +531,7 @@ trait Infer extends Checkable { // explicitly anywhere amongst the formal, argument, result, or expected type. // ...or lower bound of a type param, since they're asking for it. def canWarnAboutAny = { - val loBounds = tparams map (_.info.bounds.lo) + val loBounds = tparams map (_.info.lowerBound) def containsAny(t: Type) = (t contains AnyClass) || (t contains AnyValClass) val hasAny = pt :: restpe :: formals ::: argtpes ::: loBounds exists (_.dealiasWidenChain exists containsAny) !hasAny diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index ab821eedb94d..a3ab364998b4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -527,7 +527,7 @@ abstract class RefChecks extends Transform { kindErrors.toList.mkString("\n", ", ", "")) } } - else if (low.isAbstractType && lowType.isVolatile && !highInfo.bounds.hi.isVolatile) + else if (low.isAbstractType && lowType.isVolatile && !highInfo.upperBound.isVolatile) overrideError("is a volatile type; cannot override a type with non-volatile upper bound") } def checkOverrideTerm() { @@ -992,7 +992,7 @@ abstract class RefChecks extends Transform { } def underlyingClass(tp: Type): Symbol = { val sym = tp.widen.typeSymbol - if (sym.isAbstractType) underlyingClass(sym.info.bounds.hi) + if (sym.isAbstractType) underlyingClass(sym.info.upperBound) else sym } val actual = underlyingClass(other.tpe) @@ -1359,7 +1359,7 @@ abstract class RefChecks extends Transform { // types of the value parameters mapParamss(member)(p => checkAccessibilityOfType(p.tpe)) // upper bounds of type parameters - member.typeParams.map(_.info.bounds.hi.widen) foreach checkAccessibilityOfType + member.typeParams.map(_.info.upperBound.widen) foreach checkAccessibilityOfType } private def checkByNameRightAssociativeDef(tree: DefDef) { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala 
b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index a4a0a0c8b8bf..aef595df5071 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -536,7 +536,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper def expectsStable = ( pt.isStable || mode.inQualMode && !tree.symbol.isConstant - || !(tree.tpe <:< pt) && (ptSym.isAbstractType && pt.bounds.lo.isStable || ptSym.isRefinementClass) + || !(tree.tpe <:< pt) && (ptSym.isAbstractType && pt.lowerBound.isStable || ptSym.isRefinementClass) ) ( isNarrowable(tree.tpe) @@ -2268,7 +2268,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper log(s"""checking $tp0 in refinement$parentString at ${meth.owner.owner.fullLocationString}""") ( (!sym.hasTransOwner(meth.owner) && failStruct(paramPos, "an abstract type defined outside that refinement", what)) || (!sym.hasTransOwner(meth) && failStruct(paramPos, "a type member of that refinement", what)) - || checkAbstract(sym.info.bounds.hi, "Type bound") + || checkAbstract(sym.info.upperBound, "Type bound") ) } tp0.dealiasWidenChain forall (t => check(t.typeSymbol)) @@ -3401,7 +3401,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper * in an argument closure overlaps with an uninstantiated formal? */ def needsInstantiation(tparams: List[Symbol], formals: List[Type], args: List[Tree]) = { - def isLowerBounded(tparam: Symbol) = !tparam.info.bounds.lo.typeSymbol.isBottomClass + def isLowerBounded(tparam: Symbol) = !tparam.info.lowerBound.typeSymbol.isBottomClass exists2(formals, args) { case (formal, Function(vparams, _)) => @@ -3791,7 +3791,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper val args1 = map2(args, formals)(typedArgToPoly) if (args1 exists { _.isErrorTyped }) duplErrTree else { - debuglog("infer method inst " + fun + ", tparams = " + tparams + ", args = " + args1.map(_.tpe) + ", pt = " + pt + ", lobounds = " + tparams.map(_.tpe.bounds.lo) + ", parambounds = " + tparams.map(_.info)) //debug + debuglog("infer method inst " + fun + ", tparams = " + tparams + ", args = " + args1.map(_.tpe) + ", pt = " + pt + ", lobounds = " + tparams.map(_.tpe.lowerBound) + ", parambounds = " + tparams.map(_.info)) //debug // define the undetparams which have been fixed by this param list, replace the corresponding symbols in "fun" // returns those undetparams which have not been instantiated. 
val undetparams = inferMethodInstance(fun, tparams, args1, pt) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 31a54e35f4d1..95c5914626f9 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -788,7 +788,7 @@ trait Definitions extends api.StandardDefinitions { case _: SingletonType => true case NoPrefix => true case TypeRef(_, NothingClass | SingletonClass, _) => true - case TypeRef(_, sym, _) if sym.isAbstractType => tp.bounds.hi.typeSymbol isSubClass SingletonClass + case TypeRef(_, sym, _) if sym.isAbstractType => tp.upperBound.typeSymbol isSubClass SingletonClass case TypeRef(pre, sym, _) if sym.isModuleClass => isStable(pre) case TypeRef(_, _, _) => val normalize = tp.normalize; (normalize ne tp) && isStable(normalize) case TypeVar(origin, _) => isStable(origin) @@ -803,7 +803,7 @@ trait Definitions extends api.StandardDefinitions { // indirectly upper-bounded by itself. See #2918 def isVolatileAbstractType: Boolean = { def sym = tp.typeSymbol - def volatileUpperBound = isVolatile(tp.bounds.hi) + def volatileUpperBound = isVolatile(tp.upperBound) def safeIsVolatile = ( if (volatileRecursions < TypeConstants.LogVolatileThreshold) volatileUpperBound diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala index 776f4e31fa65..34db867060a8 100644 --- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala +++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala @@ -56,9 +56,9 @@ trait ExistentialsAndSkolems { */ private def existentialBoundsExcludingHidden(hidden: List[Symbol]): Map[Symbol, Type] = { def safeBound(t: Type): Type = - if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.bounds.hi) else t + if (hidden contains t.typeSymbol) safeBound(t.typeSymbol.existentialBound.upperBound) else t - def hiBound(s: Symbol): Type = safeBound(s.existentialBound.bounds.hi) match { + def hiBound(s: Symbol): Type = safeBound(s.existentialBound.upperBound) match { case tp @ RefinedType(parents, decls) => val parents1 = parents mapConserve safeBound if (parents eq parents1) tp diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index d56c5988da85..6a792c11c6ff 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1922,7 +1922,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ final def isLess(that: Symbol): Boolean = { def baseTypeSeqLength(sym: Symbol) = - if (sym.isAbstractType) 1 + sym.info.bounds.hi.baseTypeSeq.length + if (sym.isAbstractType) 1 + sym.info.upperBound.baseTypeSeq.length else sym.info.baseTypeSeq.length if (this.isType) (that.isType && @@ -2800,12 +2800,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => private def compose(ss: String*) = ss filter (_ != "") mkString " " def isSingletonExistential = - nme.isSingletonName(name) && (info.bounds.hi.typeSymbol isSubClass SingletonClass) + nme.isSingletonName(name) && (info.upperBound.typeSymbol isSubClass SingletonClass) /** String representation of existentially bound variable */ def existentialToString = if (isSingletonExistential && !settings.debug.value) - "val " + tpnme.dropSingletonName(name) + ": " + dropSingletonType(info.bounds.hi) + "val " + tpnme.dropSingletonName(name) + ": " + 
dropSingletonType(info.upperBound) else defString } implicit val SymbolTag = ClassTag[Symbol](classOf[Symbol]) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 81e77790e851..1c20dd98df94 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -163,6 +163,8 @@ trait Types override def widen = underlying.widen override def typeOfThis = underlying.typeOfThis override def bounds = underlying.bounds + override def lowerBound = underlying.lowerBound + override def upperBound = underlying.upperBound override def parents = underlying.parents override def prefix = underlying.prefix override def decls = underlying.decls @@ -408,7 +410,9 @@ trait Types * for a reference denoting an abstract type, its bounds, * for all other types, a TypeBounds type all of whose bounds are this type. */ - def bounds: TypeBounds = TypeBounds(this, this) + def bounds: TypeBounds = TypeBounds(lowerBound, upperBound) + def lowerBound: Type = this + def upperBound: Type = this /** For a class or intersection type, its parents. * For a TypeBounds type, the parents of its hi bound. @@ -1151,6 +1155,8 @@ trait Types * BoundedWildcardTypes. */ case class BoundedWildcardType(override val bounds: TypeBounds) extends Type with BoundedWildcardTypeApi { + override def upperBound: Type = bounds.hi + override def lowerBound: Type = bounds.lo override def isWildcard = true override def safeToString: String = "?" + bounds override def kind = "BoundedWildcardType" @@ -1310,6 +1316,8 @@ trait Types def supertype = hi override def isTrivial: Boolean = lo.isTrivial && hi.isTrivial override def bounds: TypeBounds = this + override def upperBound: Type = hi + override def lowerBound: Type = lo def containsType(that: Type) = that match { case TypeBounds(_, _) => that <:< this case _ => lo <:< that && that <:< hi @@ -2090,6 +2098,8 @@ trait Types override def baseClasses = relativeInfo.baseClasses override def decls = relativeInfo.decls override def bounds = relativeInfo.bounds + override def upperBound = relativeInfo.upperBound + override def lowerBound = relativeInfo.lowerBound // TODO: this deviates from the spec "The base types of an abstract type are the base types of its upper bound." 
override protected[Types] def baseTypeSeqImpl: BaseTypeSeq = bounds.hi.baseTypeSeq prepend this @@ -2671,9 +2681,8 @@ trait Types * to represent a higher-kinded type parameter * wrap lo&hi in polytypes to bind variables */ - override def bounds: TypeBounds = - TypeBounds(typeFun(typeParams, resultType.bounds.lo), - typeFun(typeParams, resultType.bounds.hi)) + override def lowerBound: Type = typeFun(typeParams, resultType.lowerBound) + override def upperBound: Type = typeFun(typeParams, resultType.upperBound) override def isHigherKinded = !typeParams.isEmpty @@ -2710,7 +2719,9 @@ trait Types override protected def rewrap(newtp: Type) = existentialAbstraction(quantified, newtp) override def isTrivial = false - override def bounds = TypeBounds(maybeRewrap(underlying.bounds.lo), maybeRewrap(underlying.bounds.hi)) + override def lowerBound = maybeRewrap(underlying.lowerBound) + override def upperBound = maybeRewrap(underlying.upperBound) + override def parents = underlying.parents map maybeRewrap @deprecated("No longer used in the compiler implementation", since = "2.12.3") override def boundSyms = quantified.toSet @@ -3460,6 +3471,8 @@ trait Types case TypeBounds(_: this.type, _: this.type) => TypeBounds(this, this) case oftp => oftp } + override def lowerBound: Type = bounds.lo + override def upperBound: Type = bounds.hi // ** Replace formal type parameter symbols with actual type arguments. * / override def instantiateTypeParams(formals: List[Symbol], actuals: List[Type]) = { diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index f5c89217953c..16f80793a7af 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -304,7 +304,7 @@ private[internal] trait GlbLubs { case ts @ NullaryMethodType(_) :: rest => NullaryMethodType(lub0(matchingRestypes(ts, Nil))) case ts @ TypeBounds(_, _) :: rest => - TypeBounds(glb(ts map (_.bounds.lo), depth), lub(ts map (_.bounds.hi), depth)) + TypeBounds(glb(ts map (_.lowerBound), depth), lub(ts map (_.upperBound), depth)) case ts @ AnnotatedType(annots, tpe) :: rest => annotationsLub(lub0(ts map (_.withoutAnnotations)), ts) case ts => @@ -466,7 +466,7 @@ private[internal] trait GlbLubs { case ts @ NullaryMethodType(_) :: rest => NullaryMethodType(glbNorm(matchingRestypes(ts, Nil), depth)) case ts @ TypeBounds(_, _) :: rest => - TypeBounds(lub(ts map (_.bounds.lo), depth), glb(ts map (_.bounds.hi), depth)) + TypeBounds(lub(ts map (_.lowerBound), depth), glb(ts map (_.upperBound), depth)) case ts => glbResults get ((depth, ts)) match { case Some(glbType) => @@ -515,8 +515,8 @@ private[internal] trait GlbLubs { case _ => false } def glbBounds(bnds: List[Type]): TypeBounds = { - val lo = lub(bnds map (_.bounds.lo), depth.decr) - val hi = glb(bnds map (_.bounds.hi), depth.decr) + val lo = lub(bnds map (_.lowerBound), depth.decr) + val hi = glb(bnds map (_.upperBound), depth.decr) if (lo <:< hi) TypeBounds(lo, hi) else throw GlbFailure } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala index c481ae38fa00..44bec946bd8e 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeComparers.scala @@ -496,7 +496,7 @@ trait TypeComparers { isSubType(tp1.withoutAnnotations, tp2.withoutAnnotations, depth) && annotationsConform(tp1, tp2) case BoundedWildcardType(bounds) => - 
isSubType(tp1.bounds.lo, tp2, depth) + isSubType(tp1.lowerBound, tp2, depth) case tv @ TypeVar(_,_) => tv.registerBound(tp2, isLowerBound = false) case ExistentialType(_, _) => @@ -522,7 +522,7 @@ trait TypeComparers { sym2 match { case SingletonClass => tp1.isStable || fourthTry case _: ClassSymbol => classOnRight - case _: TypeSymbol if sym2.isDeferred => abstractTypeOnRight(tp2.bounds.lo) || fourthTry + case _: TypeSymbol if sym2.isDeferred => abstractTypeOnRight(tp2.lowerBound) || fourthTry case _: TypeSymbol => retry(normalizePlus(tp1), normalizePlus(tp2)) case _ => fourthTry } @@ -593,7 +593,7 @@ trait TypeComparers { case _: ClassSymbol if isRawType(tp1) => retry(normalizePlus(tp1), normalizePlus(tp2)) case _: ClassSymbol if sym1.isModuleClass => retry(normalizePlus(tp1), normalizePlus(tp2)) case _: ClassSymbol if sym1.isRefinementClass => retry(sym1.info, tp2) - case _: TypeSymbol if sym1.isDeferred => abstractTypeOnLeft(tp1.bounds.hi) + case _: TypeSymbol if sym1.isDeferred => abstractTypeOnLeft(tp1.upperBound) case _: TypeSymbol => retry(normalizePlus(tp1), normalizePlus(tp2)) case _ => false } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala index 9fd742c2eb03..bc3d9794a37b 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala @@ -207,14 +207,14 @@ private[internal] trait TypeConstraints { if (tvar.constr.inst == NoType) { val up = if (variance.isContravariant) !upper else upper tvar.constr.inst = null - val bound: Type = if (up) tparam.info.bounds.hi else tparam.info.bounds.lo + val bound: Type = if (up) tparam.info.upperBound else tparam.info.lowerBound //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound)) var cyclic = bound contains tparam foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => { val ok = (tparam2 != tparam) && ( (bound contains tparam2) - || up && (tparam2.info.bounds.lo =:= tparam.tpeHK) - || !up && (tparam2.info.bounds.hi =:= tparam.tpeHK) + || up && (tparam2.info.lowerBound =:= tparam.tpeHK) + || !up && (tparam2.info.upperBound =:= tparam.tpeHK) ) if (ok) { if (tvar2.constr.inst eq null) cyclic = true @@ -228,7 +228,7 @@ private[internal] trait TypeConstraints { tvar addHiBound bound.instantiateTypeParams(tparams, tvars) } for (tparam2 <- tparams) - tparam2.info.bounds.lo.dealias match { + tparam2.info.lowerBound.dealias match { case TypeRef(_, `tparam`, _) => debuglog(s"$tvar addHiBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)") tvar addHiBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars) @@ -240,7 +240,7 @@ private[internal] trait TypeConstraints { tvar addLoBound bound.instantiateTypeParams(tparams, tvars) } for (tparam2 <- tparams) - tparam2.info.bounds.hi.dealias match { + tparam2.info.upperBound.dealias match { case TypeRef(_, `tparam`, _) => debuglog(s"$tvar addLoBound $tparam2.tpeHK.instantiateTypeParams($tparams, $tvars)") tvar addLoBound tparam2.tpeHK.instantiateTypeParams(tparams, tvars) diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index 0ba1db60decb..dd6ab0081f9f 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -58,7 +58,7 @@ private[internal] trait TypeMaps { object abstractTypesToBounds extends TypeMap { def apply(tp: Type): Type = tp match { case TypeRef(_, 
sym, _) if sym.isAliasType => apply(tp.dealias) - case TypeRef(_, sym, _) if sym.isAbstractType => apply(tp.bounds.hi) + case TypeRef(_, sym, _) if sym.isAbstractType => apply(tp.upperBound) case rtp @ RefinedType(parents, decls) => copyRefinedType(rtp, parents mapConserve this, decls) case AnnotatedType(_, _) => mapOver(tp) case _ => tp // no recursion - top level only @@ -409,7 +409,7 @@ private[internal] trait TypeMaps { if (variance.isInvariant) tp1 else tp1 match { case TypeRef(pre, sym, args) if tparams contains sym => - val repl = if (variance.isPositive) dropSingletonType(tp1.bounds.hi) else tp1.bounds.lo + val repl = if (variance.isPositive) dropSingletonType(tp1.upperBound) else tp1.lowerBound val count = occurCount(sym) val containsTypeParam = tparams exists (repl contains _) def msg = { From b37c0a42b3097d621686ef4daa106464e643c017 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Sat, 23 Feb 2019 18:00:20 +0000 Subject: [PATCH 1626/2793] Small optimisation in glbNorm function. The section of code being modified had two inefficiencies: - It created a `syms` list that was only used in the `map` immediatily afterwards. - It was performing two calls to the `glbThisType.memberInfo` method. We change this code to fix these inefficiencies: we replace the for comprehensions with a set of foreach statements, insert on ListBuffer. --- .../scala/reflect/internal/tpe/GlbLubs.scala | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index f5c89217953c..46692fc7c05c 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -501,11 +501,17 @@ private[internal] trait GlbLubs { val glbThisType = glbRefined.typeSymbol.thisType def glbsym(proto: Symbol): Symbol = { val prototp = glbThisType.memberInfo(proto) - val syms = for (t <- ts; - alt <- (t.nonPrivateMember(proto.name).alternatives) - if glbThisType.memberInfo(alt) matches prototp - ) yield alt - val symtypes = syms map glbThisType.memberInfo + val symtypes: List[Type] = { + var res = mutable.ListBuffer.empty[Type] + ts foreach { t => + t.nonPrivateMember(proto.name).alternatives foreach { alt => + val mi = glbThisType.memberInfo(alt) + if (mi matches prototp) + res += mi + } + } + res.toList + } assert(!symtypes.isEmpty) proto.cloneSymbol(glbRefined.typeSymbol).setInfoOwnerAdjusted( if (proto.isTerm) glb(symtypes, depth.decr) From ecfa63154533a14a911002ed8c96597a60a696e3 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Sun, 24 Feb 2019 14:36:04 +0000 Subject: [PATCH 1627/2793] Small performance tweak to glbNorm. We change some auxiliary methods of glbNorm. We replace the recursive `refinedToParentsList` function, that was using nested List.flatMap operations, with a custom function that uses a mutable ListBuffer and iterates recursively through the RefinedType elements. We replace the `refinedToDecls` method, which was building a list of scopes that was later iterated over, by a method refinedDeclsForeach that iterates over the elements that would be added to that list. 
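(For illustration only, with made-up stand-in types rather than the compiler's `Type`/`RefinedType` classes: a minimal sketch of the pattern described above, where a nested `flatMap` traversal that allocates an intermediate list per recursive call is replaced by a single `ListBuffer` accumulator filled by a recursive `foreach`.)

```scala
import scala.collection.mutable.ListBuffer

object FlattenDemo extends App {
  // Hypothetical stand-ins for RefinedType and other types, for illustration only.
  sealed trait Ty
  final case class Refined(parents: List[Ty]) extends Ty
  final case class Leaf(name: String) extends Ty

  // Old shape: each recursive call allocates an intermediate List via flatMap.
  def parentsViaFlatMap(t: Ty): List[Ty] = t match {
    case Refined(ps) => ps.flatMap(parentsViaFlatMap)
    case other       => List(other)
  }

  // New shape: one ListBuffer is filled by a recursive foreach traversal.
  def parentsViaBuffer(ts: List[Ty]): List[Ty] = {
    val res = ListBuffer.empty[Ty]
    def loop(t: Ty): Unit = t match {
      case Refined(ps) => ps.foreach(loop)
      case other       => res += other
    }
    ts.foreach(loop)
    res.toList
  }

  val ts = List(Refined(List(Leaf("A"), Refined(List(Leaf("B"))))), Leaf("C"))
  // Both traversals produce the same flattened result: List(Leaf(A), Leaf(B), Leaf(C)).
  assert(ts.flatMap(parentsViaFlatMap) == parentsViaBuffer(ts))
}
```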
Signed-off-by: Diego Alonso --- .../scala/reflect/internal/tpe/GlbLubs.scala | 49 ++++++++++--------- 1 file changed, 27 insertions(+), 22 deletions(-) diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index 46692fc7c05c..e1f7bb01efc2 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -482,27 +482,25 @@ private[internal] trait GlbLubs { try { val (ts, tparams) = stripExistentialsAndTypeVars(ts0) val glbOwner = commonOwner(ts) - def refinedToParents(t: Type): List[Type] = t match { - case RefinedType(ps, _) => ps flatMap refinedToParents - case _ => List(t) - } - def refinedToDecls(t: Type): List[Scope] = t match { - case RefinedType(ps, decls) => - val dss = ps flatMap refinedToDecls - if (decls.isEmpty) dss else decls :: dss - case _ => List() + val ts1 = { + val res = mutable.ListBuffer.empty[Type] + def loop(ty: Type): Unit = ty match { + case RefinedType(ps, _) => ps.foreach(loop) + case _ => res += ty + } + ts foreach loop + res.toList } - val ts1 = ts flatMap refinedToParents - val glbBase = intersectionType(ts1, glbOwner) val glbType = - if (phase.erasedTypes || depth.isZero) glbBase + if (phase.erasedTypes || depth.isZero) + intersectionType(ts1, glbOwner) else { val glbRefined = refinedType(ts1, glbOwner) val glbThisType = glbRefined.typeSymbol.thisType def glbsym(proto: Symbol): Symbol = { val prototp = glbThisType.memberInfo(proto) val symtypes: List[Type] = { - var res = mutable.ListBuffer.empty[Type] + val res = mutable.ListBuffer.empty[Type] ts foreach { t => t.nonPrivateMember(proto.name).alternatives foreach { alt => val mi = glbThisType.memberInfo(alt) @@ -540,18 +538,25 @@ private[internal] trait GlbLubs { if (globalGlbDepth < globalGlbLimit) try { globalGlbDepth = globalGlbDepth.incr - val dss = ts flatMap refinedToDecls - for (ds <- dss; sym <- ds.iterator) - if (globalGlbDepth < globalGlbLimit && !specializesSym(glbThisType, sym, depth)) - try { - addMember(glbThisType, glbRefined, glbsym(sym), depth) - } catch { - case ex: NoCommonType => - } + def foreachRefinedDecls(ty: Type): Unit = ty match { + case RefinedType(ps, decls) => + ps foreach foreachRefinedDecls + if (! decls.isEmpty) + decls.iterator.foreach { sym => + if (globalGlbDepth < globalGlbLimit && !specializesSym(glbThisType, sym, depth)) + try { + addMember(glbThisType, glbRefined, glbsym(sym), depth) + } catch { + case ex: NoCommonType => + } + } + case _ => + } + ts foreach foreachRefinedDecls } finally { globalGlbDepth = globalGlbDepth.decr } - if (glbRefined.decls.isEmpty) glbBase else glbRefined + if (glbRefined.decls.isEmpty) intersectionType(ts1, glbOwner) else glbRefined } existentialAbstraction(tparams, glbType) } catch { From 6ff01aec2b8c39b279c8aa08d26363ae9d89cfb3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Doeraene?= Date: Tue, 26 Feb 2019 15:43:02 +0100 Subject: [PATCH 1628/2793] Add sjrd to the maintainer list, for interactions with Scala.js. 
--- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 1408d04add4a..9112cef4511e 100644 --- a/README.md +++ b/README.md @@ -36,6 +36,7 @@ If you need some help with your PR at any time, please feel free to @-mention an | [`@axel22`](https://github.com/axel22) | concurrency, parallel collections, specialization | | [`@dragos`](https://github.com/dragos) | specialization, back end | | [`@janekdb`](https://github.com/janekdb) | documentation | + | [`@sjrd`](https://github.com/sjrd) | interactions with Scala.js | P.S.: If you have some spare time to help out around here, we would be delighted to add your name to this list! From 9a04c4d9b7017ae5401a321992de4e73d6a1ab60 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Mon, 25 Feb 2019 05:13:58 +0000 Subject: [PATCH 1629/2793] Complexity: fold map into the sum. Merges the use of a `map` function into the `sum` function, which avoids allocating a list of objects. --- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 2cc7fa729899..bbd2a071c754 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -473,12 +473,16 @@ trait Implicits { val syms = for (t <- tp; if t.typeSymbol.isTypeParameter) yield t.typeSymbol deriveTypeWithWildcards(syms.distinct)(tp) } + @annotation.tailrec def sumComplexity(acc: Int, xs: List[Type]): Int = xs match { + case h :: t => sumComplexity(acc + complexity(h), t) + case _: Nil.type => acc + } def complexity(tp: Type): Int = tp.dealias match { case NoPrefix => 0 case SingleType(pre, sym) => if (sym.hasPackageFlag) 0 else complexity(tp.dealiasWiden) case ThisType(sym) => if (sym.hasPackageFlag) 0 else 1 - case TypeRef(pre, sym, args) => complexity(pre) + (args map complexity).sum + 1 - case RefinedType(parents, _) => (parents map complexity).sum + 1 + case TypeRef(pre, sym, args) => 1 + complexity(pre) + sumComplexity(0, args) + case RefinedType(parents, _) => 1 + sumComplexity(0, parents) case _ => 1 } def overlaps(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match { From e22e12da28a2f9364ec7e1945281b2cdf94d466c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 27 Feb 2019 13:44:15 +1000 Subject: [PATCH 1630/2793] Restore API in typer used by scala-meta --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index a4a0a0c8b8bf..424c3dbd8d78 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -671,6 +671,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } + @deprecated("Use the overload accepting a Type.", "2.12.9") + def member(qual: Tree, name: Name): Symbol = member(qual.tpe, name) /** The member with given name of given qualifier type */ def member(qual: Type, name: Name): Symbol = { def callSiteWithinClass(clazz: Symbol) = context.enclClass.owner hasTransOwner clazz From 7707a763fc09f38c760d10454696ca294fd1c0ec Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 28 Feb 2019 11:27:38 +1000 Subject: [PATCH 1631/2793] [nomerge] Restore findMacroClassloader into 
Analyzer for the 2.12.x series --- .../scala/tools/nsc/plugins/Plugins.scala | 2 +- .../scala/tools/nsc/typechecker/Macros.scala | 37 +++++++++++++++++++ .../scala/tools/reflect/ReflectGlobal.scala | 20 ++++++---- .../tools/nsc/interpreter/ReplGlobal.scala | 14 ++++--- 4 files changed, 59 insertions(+), 14 deletions(-) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 386bdc4ab1a8..d30cf712f8ac 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -175,7 +175,7 @@ trait Plugins { global: Global => * * Mirrors with runtime definitions (e.g. Repl) need to adjust this method. */ - protected[scala] def findMacroClassLoader(): ClassLoader = { + protected def findMacroClassLoader(): ClassLoader = { val classpath: Seq[URL] = if (settings.YmacroClasspath.isSetByUser) { for { file <- scala.tools.nsc.util.ClassPath.expandPath(settings.YmacroClasspath.value, true) diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 5d0e51cd2ea9..6d8d87b8ef7d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -64,6 +64,43 @@ trait Macros extends MacroRuntimes with Traces with Helpers { def globalSettings = global.settings + /** Obtains a `ClassLoader` instance used for macro expansion. + * + * By default a new `ScalaClassLoader` is created using the classpath + * from global and the classloader of self as parent. + * + * Mirrors with runtime definitions (e.g. Repl) need to adjust this method. + */ + protected def findMacroClassLoader(): ClassLoader = { + import java.net.URL + import scala.tools.nsc.io.AbstractFile + + val classpath: Seq[URL] = if (settings.YmacroClasspath.isSetByUser) { + for { + file <- scala.tools.nsc.util.ClassPath.expandPath(settings.YmacroClasspath.value, true) + af <- Option(AbstractFile getDirectory file) + } yield af.file.toURI.toURL + } else global.classPath.asURLs + def newLoader: () => ScalaClassLoader.URLClassLoader = () => { + analyzer.macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) + ScalaClassLoader.fromURLs(classpath, getClass.getClassLoader) + } + + val policy = settings.YcacheMacroClassLoader.value + val cache = Macros.macroClassLoadersCache + val disableCache = policy == settings.CachePolicy.None.name + val checkStamps = policy == settings.CachePolicy.LastModified.name + cache.checkCacheability(classpath, checkStamps, disableCache) match { + case Left(msg) => + analyzer.macroLogVerbose(s"macro classloader: $msg.") + val loader = newLoader() + closeableRegistry.registerClosable(loader) + loader + case Right(paths) => + cache.getOrCreate(paths, newLoader, closeableRegistry, checkStamps) + } + } + /** `MacroImplBinding` and its companion module are responsible for * serialization/deserialization of macro def -> impl bindings. 
* diff --git a/src/compiler/scala/tools/reflect/ReflectGlobal.scala b/src/compiler/scala/tools/reflect/ReflectGlobal.scala index 2efd699e9f44..9fea65d111ee 100644 --- a/src/compiler/scala/tools/reflect/ReflectGlobal.scala +++ b/src/compiler/scala/tools/reflect/ReflectGlobal.scala @@ -25,14 +25,18 @@ import scala.tools.nsc.typechecker.Analyzer class ReflectGlobal(currentSettings: Settings, reporter: Reporter, override val rootClassLoader: ClassLoader) extends Global(currentSettings, reporter) with scala.tools.reflect.ReflectSetup with scala.reflect.runtime.SymbolTable { - /** Obtains the classLoader used for runtime macro expansion. - * - * Macro expansion can use everything available in `global.classPath` or `rootClassLoader`. - * The `rootClassLoader` is used to obtain runtime defined macros. - */ - override protected[scala] def findMacroClassLoader(): ClassLoader = { - val classpath = classPath.asURLs - perRunCaches.recordClassloader(ScalaClassLoader.fromURLs(classpath, rootClassLoader)) + override lazy val analyzer = new { + val global: ReflectGlobal.this.type = ReflectGlobal.this + } with Analyzer { + /** Obtains the classLoader used for runtime macro expansion. + * + * Macro expansion can use everything available in [[global.classPath]] or [[rootClassLoader]]. + * The [[rootClassLoader]] is used to obtain runtime defined macros. + */ + override protected def findMacroClassLoader(): ClassLoader = { + val classpath = global.classPath.asURLs + ScalaClassLoader.fromURLs(classpath, rootClassLoader) + } } override def transformedType(sym: Symbol) = diff --git a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala index 72b5a7424ceb..f3455a2b094a 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala @@ -30,11 +30,15 @@ trait ReplGlobal extends Global { super.abort(msg) } - override protected[scala] def findMacroClassLoader(): ClassLoader = { - val loader = super.findMacroClassLoader - analyzer.macroLogVerbose("macro classloader: initializing from a REPL classloader: %s".format(classPath.asURLs)) - val virtualDirectory = analyzer.globalSettings.outputDirs.getSingleOutput.get - new util.AbstractFileClassLoader(virtualDirectory, loader) {} + override lazy val analyzer = new { + val global: ReplGlobal.this.type = ReplGlobal.this + } with Analyzer { + override protected def findMacroClassLoader(): ClassLoader = { + val loader = super.findMacroClassLoader + macroLogVerbose("macro classloader: initializing from a REPL classloader: %s".format(global.classPath.asURLs)) + val virtualDirectory = globalSettings.outputDirs.getSingleOutput.get + new util.AbstractFileClassLoader(virtualDirectory, loader) {} + } } override def optimizerClassPath(base: ClassPath): ClassPath = { From 0b1974c8e744d06469b17065067e68d5bf9aabc2 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 28 Feb 2019 12:18:48 +1000 Subject: [PATCH 1632/2793] Refactor PipelineMain - Scope the build strategies inside object PipelineMain - Prefer Future.traverse to Future.sequence --- .../scala/tools/nsc/PipelineMain.scala | 34 ++++++++++--------- 1 file changed, 18 insertions(+), 16 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index a36f64cda7f4..0fe47f8bcc68 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -33,6 +33,7 @@ import scala.tools.nsc.io.AbstractFile import 
scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.util.ClassPath import scala.util.{Failure, Success, Try} +import PipelineMain.{BuildStrategy, Traditional, OutlineTypePipeline, Pipeline} class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy, argFiles: Seq[Path], useJars: Boolean) { private val pickleCacheConfigured = System.getProperty("scala.pipeline.picklecache") @@ -234,16 +235,16 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } strategy match { case OutlineTypePipeline => - projects.foreach { p => + projects.foreach { p: Task => val isLeaf = !dependedOn.contains(p) - val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map { task => p.dependencyReadyFuture(task) }) + val depsReady = Future.traverse(dependsOn.getOrElse(p, Nil))(task => p.dependencyReadyFuture(task)) val f = if (isLeaf) { for { _ <- depsReady _ <- { p.outlineDone.complete(Success(())) p.fullCompile() - Future.sequence(p.groups.map(_.done.future)) + Future.traverse(p.groups)(_.done.future) } } yield { p.javaCompile() @@ -257,7 +258,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } _ <- { p.fullCompile() - Future.sequence(p.groups.map(_.done.future)) + Future.traverse(p.groups)(_.done.future) } } yield { p.javaCompile() @@ -286,7 +287,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy println(f" Wall Clock: ${timer.durationMs}%.0f ms") case Pipeline => projects.foreach { p => - val depsReady = Future.sequence(dependsOn.getOrElse(p, Nil).map(task => p.dependencyReadyFuture(task))) + val depsReady = Future.traverse(dependsOn.getOrElse(p, Nil))(task => p.dependencyReadyFuture(task)) val f = for { _ <- depsReady _ <- { @@ -297,7 +298,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } else p.fullCompileExportPickles() // Start javac after scalac has completely finished - Future.sequence(p.groups.map(_.done.future)) + Future.traverse(p.groups)(_.done.future) } } yield { p.javaCompile() @@ -324,11 +325,11 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy println(f" Wall Clock: ${timer.durationMs}%.0f ms") case Traditional => projects.foreach { p => - val f1 = Future.sequence(dependsOn.getOrElse(p, Nil).map(_.t.javaDone.future)) + val f1 = Future.traverse(dependsOn.getOrElse(p, Nil))(_.t.javaDone.future) val f2 = f1.flatMap { _ => p.outlineDone.complete(Success(())) p.fullCompile() - Future.sequence(p.groups.map(_.done.future)).map(_ => p.javaCompile()) + Future.traverse(p.groups)(_.done.future).map(_ => p.javaCompile()) } f2.onComplete { _ => p.compiler.close() } } @@ -462,7 +463,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val outlineDoneFuture = outlineDone.future val javaDone: Promise[Unit] = Promise[Unit]() val javaDoneFuture: Future[_] = javaDone.future - val groupsDoneFuture: Future[List[Unit]] = Future.sequence(groups.map(_.done.future)) + val groupsDoneFuture: Future[List[Unit]] = Future.traverse(groups)(_.done.future) val futures: List[Future[_]] = { outlineDone.future :: javaDone.future :: groups.map(_.done.future) } @@ -646,17 +647,18 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } } -sealed abstract class BuildStrategy -/** Outline type check to compute type signatures as pickles as an input to downstream compilation. 
*/ -case object OutlineTypePipeline extends BuildStrategy +object PipelineMain { + sealed abstract class BuildStrategy + + /** Outline type check to compute type signatures as pickles as an input to downstream compilation. */ + case object OutlineTypePipeline extends BuildStrategy -case object Pipeline extends BuildStrategy + case object Pipeline extends BuildStrategy -/** Emit class files before triggering downstream compilation */ -case object Traditional extends BuildStrategy + /** Emit class files before triggering downstream compilation */ + case object Traditional extends BuildStrategy -object PipelineMain { def main(args: Array[String]): Unit = { val strategies = List(OutlineTypePipeline, Pipeline, Traditional) val strategy = strategies.find(_.productPrefix.equalsIgnoreCase(System.getProperty("scala.pipeline.strategy", "pipeline"))).get From d9b98b8d743c17d695be8d3b38fa47f93a23d310 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 11 Feb 2019 13:36:16 +1000 Subject: [PATCH 1633/2793] Whitelist some binary changes to internals of scala-reflect --- src/reflect/mima-filters/2.12.0.backwards.excludes | 2 ++ src/reflect/mima-filters/2.12.0.forwards.excludes | 9 ++++++++- 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/src/reflect/mima-filters/2.12.0.backwards.excludes b/src/reflect/mima-filters/2.12.0.backwards.excludes index ffa7f91a7eb8..ed9dc507eea4 100644 --- a/src/reflect/mima-filters/2.12.0.backwards.excludes +++ b/src/reflect/mima-filters/2.12.0.backwards.excludes @@ -14,3 +14,5 @@ ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.IOStats") ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.IOStats$") ProblemFilters.exclude[MissingTypesProblem]("scala.reflect.runtime.JavaUniverse") + +ProblemFilters.exclude[ReversedMissingMethodProblem]("scala.reflect.io.ZipArchive.close") diff --git a/src/reflect/mima-filters/2.12.0.forwards.excludes b/src/reflect/mima-filters/2.12.0.forwards.excludes index ee7ce7fb19e8..0f3b81cd3cc7 100644 --- a/src/reflect/mima-filters/2.12.0.forwards.excludes +++ b/src/reflect/mima-filters/2.12.0.forwards.excludes @@ -24,4 +24,11 @@ ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.Settin ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.FileZipArchive.this") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.this") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.getDir") -ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.FileZipArchive.allDirsByDottedName") \ No newline at end of file +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.FileZipArchive.allDirsByDottedName") + +ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.RootPath") +ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.RootPath$") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.URLZipArchive.close") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.FileZipArchive.close") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ManifestResources.close") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.close") \ No newline at end of file From 693f3a724b8a67c9121c78f6764c80d08add5ea1 Mon Sep 17 00:00:00 2001 From: "Diego E. 
Alonso-Blas" Date: Sun, 3 Mar 2019 12:26:45 +0000 Subject: [PATCH 1634/2793] Typers: merge treesInResult with errorInResult In the Typers file, in the `tryTypedApply` function, there was a function treesInResult that was generating a list that contained each tree and many of its subtrees. The result of this function was a list that was immediately afterwards put into an exists function, thus consuming the generated list right away. To avoid list allocations, we replace that list generation and traversal with a tree traversal, that carries out the exists function directly on the nodes of the tree. --- .../scala/tools/nsc/typechecker/Typers.scala | 35 ++++++++++--------- 1 file changed, 18 insertions(+), 17 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 424c3dbd8d78..12570fdf2da0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -4747,28 +4747,29 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } } // TODO: case to recurse into Function? - def treesInResult(tree: Tree): List[Tree] = tree :: (tree match { - case Block(_, r) => treesInResult(r) - case Match(_, cases) => cases - case CaseDef(_, _, r) => treesInResult(r) - case Annotated(_, r) => treesInResult(r) - case If(_, t, e) => treesInResult(t) ++ treesInResult(e) - case Try(b, catches, _) => treesInResult(b) ++ catches - case MethodValue(r) => treesInResult(r) - case Select(qual, name) => treesInResult(qual) - case Apply(fun, args) => treesInResult(fun) ++ args.flatMap(treesInResult) - case TypeApply(fun, args) => treesInResult(fun) ++ args.flatMap(treesInResult) - case _ => Nil - }) /* Only retry if the error hails from a result expression of `tree` * (for instance, it makes no sense to retry on an error from a block statement) * compare with `samePointAs` since many synthetic trees are made with * offset positions even under -Yrangepos. */ - def errorInResult(tree: Tree) = - treesInResult(tree).exists(err => typeErrors.exists(_.errPos samePointAs err.pos)) - - val retry = (typeErrors.forall(_.errPos != null)) && (fun :: tree :: args exists errorInResult) + def errorInResult(tree: Tree): Boolean = { + def pred(tree: Tree) = typeErrors.exists(_.errPos samePointAs tree.pos) + def loop(tree: Tree): Boolean = pred(tree) || (tree match { + case Block(_, r) => loop(r) + case Match(_, cases) => cases.exists(pred) + case CaseDef(_, _, r) => loop(r) + case Annotated(_, r) => loop(r) + case If(_, t, e) => loop(t) || loop(e) + case Try(b, catches, _) => loop(b) || catches.exists(pred) + case MethodValue(r) => loop(r) + case Select(qual, name) => loop(qual) + case Apply(fun, args) => loop(fun) || args.exists(loop) + case TypeApply(fun, args) => loop(fun) || args.exists(loop) + case _ => false + }) + loop(tree) + } + val retry = typeErrors.forall(_.errPos != null) && (errorInResult(fun) || errorInResult(tree) || args.exists(errorInResult)) typingStack.printTyping({ val funStr = ptTree(fun) + " and " + (args map ptTree mkString ", ") if (retry) "second try: " + funStr From ea0e5c8dafb32589b06a8cbf8483c9f893d0d963 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Sun, 3 Mar 2019 23:21:06 +0000 Subject: [PATCH 1635/2793] Remove the bothNames method from the Name class. The Name class defined a bothNames method which always returned a list of two elements, the termName and the typeName, both publicly accessible. 
To avoid needless List allocations, we remove this method and replace any use of it by a direct call to the termName and typeName methods. --- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 3 ++- src/reflect/scala/reflect/internal/Names.scala | 1 - src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala | 6 +++++- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 22f8f9057866..bed4c6a8c3da 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -559,7 +559,8 @@ trait Namers extends MethodSynthesis { def checkSelector(s: ImportSelector) = { val ImportSelector(from, fromPos, to, _) = s def isValid(original: Name) = - original.bothNames forall (x => (base nonLocalMember x) == NoSymbol) + (base nonLocalMember original.toTermName) == NoSymbol && + (base nonLocalMember original.toTypeName) == NoSymbol if (from != nme.WILDCARD && base != ErrorType) { if (isValid(from)) { diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index fc6596a52c3f..b33cc232d656 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -214,7 +214,6 @@ trait Names extends api.Names { def toTermName: TermName def toTypeName: TypeName def companionName: Name - def bothNames: List[Name] = List(toTermName, toTypeName) /** Return the subname with characters from from to to-1. */ def subName(from: Int, to: Int): Name with ThisNameType diff --git a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala index 058bfc756d07..cdceefee1a8d 100644 --- a/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala +++ b/src/repl/scala/tools/nsc/interpreter/MemberHandlers.scala @@ -244,7 +244,11 @@ trait MemberHandlers { def importedSymbols = individualSymbols ++ wildcardSymbols lazy val importableSymbolsWithRenames = { - val selectorRenameMap = individualSelectors.flatMap(x => x.name.bothNames zip x.rename.bothNames).toMap + val selectorRenameMap: mutable.HashMap[Name, Name] = mutable.HashMap.empty[Name, Name] + individualSelectors foreach { x => + selectorRenameMap.put(x.name.toTermName, x.rename.toTermName) + selectorRenameMap.put(x.name.toTypeName, x.rename.toTypeName) + } importableTargetMembers flatMap (m => selectorRenameMap.get(m.name) map (m -> _)) } From 2c6a4b22e4a8ce73e5d06a2be8cc95f354b31ef2 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 4 Mar 2019 11:43:22 +1000 Subject: [PATCH 1636/2793] Remove outline typechecking for now We need to typecheck the RHS of type-ascribed definitions if they contain Super trees that require super-accessors to be added to the enclosing template. Rather than take that on right now, I'm removing this feature to focus on the other form of build pipelining. 
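A minimal standalone sketch (hypothetical code, not taken from this patch) of the kind of definition that defeats outline typing as described above: the body could be skipped because the result type is ascribed, yet it contains a `super` call that still needs a super-accessor in the enclosing template.

```scala
// Hypothetical illustration: under outline typing the RHS of `doubled` would be
// skipped (its result type is already ascribed), but the Super tree inside it
// still requires a super-accessor to be synthesized in trait Counter.
trait Base {
  def size: Int = 0
}
trait Counter extends Base {
  def doubled: Int = super.size * 2 // super call => needs a super-accessor
}
```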
--- .../scala/tools/nsc/PipelineMain.scala | 89 ++----------------- .../tools/nsc/typechecker/Analyzer.scala | 12 ++- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- 3 files changed, 11 insertions(+), 92 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 0fe47f8bcc68..4fbcfd099ef8 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -33,7 +33,7 @@ import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.util.ClassPath import scala.util.{Failure, Success, Try} -import PipelineMain.{BuildStrategy, Traditional, OutlineTypePipeline, Pipeline} +import PipelineMain.{BuildStrategy, Traditional, Pipeline} class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy, argFiles: Seq[Path], useJars: Boolean) { private val pickleCacheConfigured = System.getProperty("scala.pipeline.picklecache") @@ -234,57 +234,6 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } } strategy match { - case OutlineTypePipeline => - projects.foreach { p: Task => - val isLeaf = !dependedOn.contains(p) - val depsReady = Future.traverse(dependsOn.getOrElse(p, Nil))(task => p.dependencyReadyFuture(task)) - val f = if (isLeaf) { - for { - _ <- depsReady - _ <- { - p.outlineDone.complete(Success(())) - p.fullCompile() - Future.traverse(p.groups)(_.done.future) - } - } yield { - p.javaCompile() - } - } else { - for { - _ <- depsReady - _ <- { - p.outlineCompile() - p.outlineDone.future - } - _ <- { - p.fullCompile() - Future.traverse(p.groups)(_.done.future) - } - } yield { - p.javaCompile() - } - } - f.onComplete { _ => p.compiler.close() } - } - - awaitDone() - - for (p <- projects) { - val dependencies = dependsOn(p).map(_.t) - - def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max - - val maxOutlineCriticalPathMs = maxByOrZero(dependencies)(_.outlineCriticalPathMs) - p.outlineCriticalPathMs = maxOutlineCriticalPathMs + p.outlineTimer.durationMs - p.regularCriticalPathMs = maxOutlineCriticalPathMs + maxByOrZero(p.groups)(_.timer.durationMs) - p.fullCriticalPathMs = maxByOrZero(dependencies)(_.fullCriticalPathMs) + p.groups.map(_.timer.durationMs).sum - } - - if (parallelism == 1) { - val criticalPath = projects.maxBy(_.regularCriticalPathMs) - println(f"Critical path: ${criticalPath.regularCriticalPathMs}%.0f ms. 
Wall Clock: ${timer.durationMs}%.0f ms") - } else - println(f" Wall Clock: ${timer.durationMs}%.0f ms") case Pipeline => projects.foreach { p => val depsReady = Future.traverse(dependsOn.getOrElse(p, Nil))(task => p.dependencyReadyFuture(task)) @@ -373,7 +322,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy def projectEvents(p: Task): List[String] = { val events = List.newBuilder[String] if (p.outlineTimer.durationMicros > 0d) { - val desc = if (strategy == OutlineTypePipeline) "outline-type" else "parser-to-pickler" + val desc = "parser-to-pickler" events += durationEvent(p.label, desc, p.outlineTimer) events += durationEvent(p.label, "pickle-export", p.pickleExportTimer) } @@ -439,7 +388,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val groups: List[Group] = { val isScalaLibrary = files.exists(_.endsWith("Predef.scala")) - if (strategy != OutlineTypePipeline || isScalaLibrary) { + if (isScalaLibrary) { Group(files) :: Nil } else { command.settings.classpath.value = command.settings.outputDirs.getSingleOutput.get.toString + File.pathSeparator + command.settings.classpath.value @@ -484,34 +433,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy throw t } - def outlineCompile(): Unit = { - outlineTimer.start() - try { - log("scalac outline: start") - command.settings.Youtline.value = true - command.settings.stopAfter.value = List("pickler") - command.settings.Ymacroexpand.value = command.settings.MacroExpand.None - val run1 = new compiler.Run() - run1 compile files - registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, run1.symData) - outlineTimer.stop() - reporter.finish() - if (reporter.hasErrors) { - log("scalac outline: failed") - outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) - } else { - log(f"scala outline: done ${outlineTimer.durationMs}%.0f ms") - outlineDone.complete(Success(())) - } - } catch { - case t: Throwable => - t.printStackTrace() - outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) - } - } def fullCompile(): Unit = { - command.settings.Youtline.value = false command.settings.stopAfter.value = Nil command.settings.Ymacroexpand.value = command.settings.MacroExpand.Normal @@ -651,16 +574,14 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy object PipelineMain { sealed abstract class BuildStrategy - /** Outline type check to compute type signatures as pickles as an input to downstream compilation. */ - case object OutlineTypePipeline extends BuildStrategy - + /** Begin compilation as soon as the pickler phase is complete on all dependencies. 
*/ case object Pipeline extends BuildStrategy /** Emit class files before triggering downstream compilation */ case object Traditional extends BuildStrategy def main(args: Array[String]): Unit = { - val strategies = List(OutlineTypePipeline, Pipeline, Traditional) + val strategies = List(Pipeline, Traditional) val strategy = strategies.find(_.productPrefix.equalsIgnoreCase(System.getProperty("scala.pipeline.strategy", "pipeline"))).get val parallelism = java.lang.Integer.getInteger("scala.pipeline.parallelism", parallel.availableProcessors) val useJars = java.lang.Boolean.getBoolean("scala.pipeline.use.jar") diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index bc5ffd0ccd7c..b068e43d1ad4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -112,13 +112,11 @@ trait Analyzer extends AnyRef try { val typer = newTyper(rootContext(unit)) unit.body = typer.typed(unit.body) - if (!settings.Youtline.value) { - for (workItem <- unit.toCheck) workItem() - if (settings.warnUnusedImport) - warnUnusedImports(unit) - if (settings.warnUnused.isSetByUser) - new checkUnused(typer).apply(unit) - } + for (workItem <- unit.toCheck) workItem() + if (settings.warnUnusedImport) + warnUnusedImports(unit) + if (settings.warnUnused.isSetByUser) + new checkUnused(typer).apply(unit) } finally { unit.toCheck.clear() diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 7f32eda84cd0..b4277d3a90f0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5946,7 +5946,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper final def transformedOrTyped(tree: Tree, mode: Mode, pt: Type): Tree = { lookupTransformed(tree) match { case Some(tree1) => tree1 - case _ => if (settings.Youtline.value) EmptyTree else typed(tree, mode, pt) + case _ => typed(tree, mode, pt) } } final def lookupTransformed(tree: Tree): Option[Tree] = From 3e500b21a59f648081fb0cc8566b187779b31075 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 4 Mar 2019 11:48:14 +1000 Subject: [PATCH 1637/2793] Use file size as part of the cache invalidation --- .../tools/nsc/classpath/ZipAndJarFileLookupFactory.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index acb41185353e..6a20b0311bfd 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -194,7 +194,7 @@ object ZipAndJarSourcePathFactory extends ZipAndJarFileLookupFactory { final class FileBasedCache[T] { import java.nio.file.Path - private case class Stamp(lastModified: FileTime, fileKey: Object) + private case class Stamp(lastModified: FileTime, size: Long, fileKey: Object) private case class Entry(stamps: Seq[Stamp], t: T) { val referenceCount: AtomicInteger = new AtomicInteger(1) } @@ -252,11 +252,11 @@ final class FileBasedCache[T] { val lastModified = attrs.lastModifiedTime() // only null on some platforms, but that's okay, we just use the last modified timestamp as our stamp val fileKey = attrs.fileKey() - Stamp(lastModified, fileKey) + Stamp(lastModified, attrs.size(), fileKey) } 
catch { case ex: java.nio.file.NoSuchFileException => // Dummy stamp for (currently) non-existent file. - Stamp(FileTime.fromMillis(0), new Object) + Stamp(FileTime.fromMillis(0), -1, new Object) } } From 3e0ab870ceaf0ba9deb6201b29ce37b9d6cc9f32 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 5 Mar 2019 07:56:41 +1000 Subject: [PATCH 1638/2793] Honour CachePolicy.None in classloader/classpath caching I broke this in a recent refactoring. --- .../tools/nsc/classpath/ZipAndJarFileLookupFactory.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 6a20b0311bfd..2321f0ff80f0 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -236,8 +236,8 @@ final class FileBasedCache[T] { import scala.reflect.io.{AbstractFile, Path} lazy val urlsAndFiles = urls.filterNot(_.getProtocol == "jrt").map(u => u -> AbstractFile.getURL(u)) lazy val paths = urlsAndFiles.map(t => Path(t._2.file).jfile.toPath) - if (!checkStamps) Right(paths) - else if (disableCache) Left("caching is disabled due to a policy setting") + if (disableCache) Left("caching is disabled due to a policy setting") + else if (!checkStamps) Right(paths) else { val nonJarZips = urlsAndFiles.filter { case (url, file) => file == null || !Jar.isJarOrZip(file.file) } if (nonJarZips.nonEmpty) Left(s"caching is disabled because of the following classpath elements: ${nonJarZips.map(_._1).mkString(", ")}.") From 57277be9a7f8034b48e7a6b49c862f1063986efc Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Tue, 5 Mar 2019 00:32:07 +0000 Subject: [PATCH 1639/2793] Restore deprecated bothnames method. We restore the method bothNames, with the deprecated annotation, to prevent compiler plugins inadvertently falling into a binary crash. --- src/reflect/scala/reflect/internal/Names.scala | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index b33cc232d656..b4cde7b6a3bf 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -214,6 +214,8 @@ trait Names extends api.Names { def toTermName: TermName def toTypeName: TypeName def companionName: Name + @deprecated("Use either toTermName or toTypeName", "2.12.9") + def bothNames: List[Name] = List(toTermName, toTypeName) /** Return the subname with characters from from to to-1. */ def subName(from: Int, to: Int): Name with ThisNameType From 4e718b33599ff35a7c93fa5e7e8c418920823f88 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Wed, 6 Mar 2019 16:11:13 +0000 Subject: [PATCH 1640/2793] Selectively Backport some changes We "backport", but with some changes, the improvements to the `noDuplicates` function already introduced in the 2.13.x branch. Importantly, this avoids the allocation of two mapped lists. 
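A standalone sketch of the allocation pattern being avoided (generic toy code, not from the patch): rather than building `selectors map (_.name)` and `selectors map (_.rename)` just to scan them for duplicates, the check can walk the original list once per field.

```scala
// Generic sketch: detect a duplicate projection of the elements without first
// materializing items.map(key) as a separate list. Quadratic, like the original.
def hasDuplicateBy[A, K](items: List[A])(key: A => K): Boolean = {
  @annotation.tailrec
  def loop(xs: List[A]): Boolean = xs match {
    case hd :: tl => if (tl.exists(x => key(x) == key(hd))) true else loop(tl)
    case Nil      => false
  }
  loop(items)
}
```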
--- .../scala/tools/nsc/typechecker/Namers.scala | 22 +++++++++++-------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index bed4c6a8c3da..a7d46c358af7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -582,20 +582,24 @@ trait Namers extends MethodSynthesis { } } - def noDuplicates(names: List[Name], check: DuplicatesErrorKinds.Value) { - def loop(xs: List[Name]): Unit = xs match { + selectors foreach checkSelector + + def noDuplicates(): Unit = { + @inline def isRename(hd: ImportSelector): Boolean = + hd.rename != null && hd.rename != nme.WILDCARD && hd.rename != hd.name + def loop(xs: List[ImportSelector]): Unit = xs match { case Nil => () case hd :: tl => - if (hd == nme.WILDCARD || !(tl contains hd)) loop(tl) - else DuplicatesError(tree, hd, check) + if (hd.name != nme.WILDCARD && tl.exists(x => ! (x.name == nme.WILDCARD) && x.name == hd.name)) + DuplicatesError(tree, hd.name, RenamedTwice) + else if (isRename(hd) && tl.exists(x => isRename(hd) && x.rename == hd.rename)) + DuplicatesError(tree, hd.rename, AppearsTwice) + else loop(tl) } - loop(names filterNot (x => x == null || x == nme.WILDCARD)) + loop(selectors) } - selectors foreach checkSelector - // checks on the whole set - noDuplicates(selectors map (_.name), RenamedTwice) - noDuplicates(selectors map (_.rename), AppearsTwice) + noDuplicates() } def copyMethodCompleter(copyDef: DefDef): TypeCompleter = { From 1494b64d74ba4378e386703eb48240ae0a7191ab Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Thu, 7 Mar 2019 00:26:22 +0000 Subject: [PATCH 1641/2793] Avoid the parentSymbols method: less allocations. The "Type" abstract class defines a method "parentSymbols" that is implemented by getting the list of parents of the type, and then performing a "map" to get each parent's type symbol. This map is allocating a list. Looking at the usages of this method, we confirmed that most of the calls to this method were followed by a "fold" on the list, such as an "contains", or a "find". In some cases, the list is not used at all. To save allocations, we replace the calls to "parentSymbols" with the calls to its implementation, "info.parents", and replace the "contains" methods with "exists(_.typesymbol)". 
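The shape of the rewrite, as a self-contained sketch with toy types (the real code uses the compiler's `Symbol`/`Type` API):

```scala
// map-then-contains allocates a throwaway List just to ask a yes/no question;
// exists answers the same question in a single pass with no intermediate list.
final case class Tpe(typeSymbol: String)

def hasParentOld(parents: List[Tpe], target: String): Boolean =
  parents.map(_.typeSymbol).contains(target)   // allocates a List[String]

def hasParentNew(parents: List[Tpe], target: String): Boolean =
  parents.exists(_.typeSymbol == target)       // no allocation
```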
--- .../scala/tools/nsc/backend/jvm/BCodeHelpers.scala | 2 +- src/compiler/scala/tools/nsc/transform/Erasure.scala | 7 ++++--- src/compiler/scala/tools/nsc/transform/Mixin.scala | 4 ++-- .../scala/tools/nsc/transform/SpecializeTypes.scala | 4 ++-- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 4 ++-- .../scala/tools/nsc/typechecker/SuperAccessors.scala | 4 ++-- src/reflect/scala/reflect/internal/SymbolTable.scala | 3 ++- 7 files changed, 15 insertions(+), 13 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index a6c8eb7f5229..03589bc4aef8 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -1123,7 +1123,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { */ def isAndroidParcelableClass(sym: Symbol) = (AndroidParcelableInterface != NoSymbol) && - (sym.parentSymbols contains AndroidParcelableInterface) + (sym.info.parents.exists( _.typeSymbol == AndroidParcelableInterface)) /* * must-single-thread diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 2bc6daa393e4..1582c9d66e27 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -1213,7 +1213,7 @@ abstract class Erasure extends InfoTransform // class if `m` is defined in Java. This avoids the need for having the Java class as // a direct parent (scala-dev#143). if (qual.isInstanceOf[Super]) { - val qualSym = accessibleOwnerOrParentDefiningMember(sym, qual.tpe.typeSymbol.parentSymbols, context) match { + val qualSym = accessibleOwnerOrParentDefiningMember(sym, qual.tpe.typeSymbol.info.parents, context) match { case Some(p) => p case None => // There is no test for this warning, I have been unable to come up with an example that would trigger it. @@ -1395,13 +1395,14 @@ abstract class Erasure extends InfoTransform * - For Java-defined members we prefer a direct parent over of the owner, even if the owner is * accessible. This way the owner doesn't need to be added as a direct parent, see scala-dev#143. 
*/ - final def accessibleOwnerOrParentDefiningMember(member: Symbol, parents: List[Symbol], context: Context): Option[Symbol] = { + final def accessibleOwnerOrParentDefiningMember(member: Symbol, parents: List[Type], context: Context): Option[Symbol] = { def eraseAny(cls: Symbol) = if (cls == AnyClass || cls == AnyValClass) ObjectClass else cls if (member.isConstructor || !member.isJavaDefined) Some(eraseAny(member.owner)) else parents.find { p => - val e = eraseAny(p) + val e = eraseAny(p.typeSymbol) isJvmAccessible(e, context) && definesMemberAfterErasure(e, member) + } map { _.typeSymbol } orElse { val e = eraseAny(member.owner) if (isJvmAccessible(e, context)) Some(e) else None diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 76f03d4b2fed..6338c6b09b46 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -225,7 +225,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes def genForwarder(required: Boolean): Unit = { val owner = member.owner val isJavaInterface = owner.isJavaDefined && owner.isInterface - if (isJavaInterface && !clazz.parentSymbols.contains(owner)) { + if (isJavaInterface && !clazz.info.parents.exists(_.typeSymbol == owner)) { if (required) { val text = s"Unable to implement a mixin forwarder for $member in $clazz unless interface ${owner.name} is directly extended by $clazz." reporter.error(clazz.pos, text) @@ -302,7 +302,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes mixinMember.alias, mixinClass)) case alias1 => if (alias1.owner.isJavaDefined && alias1.owner.isInterface) { - if (!clazz.parentSymbols.contains(alias1.owner)) { + if (!clazz.info.parents.exists(_.typeSymbol eq alias1.owner)) { val suggestedParent = exitingTyper(clazz.info.baseType(alias1.owner)) reporter.error(clazz.pos, s"Unable to implement a super accessor required by trait ${mixinClass.name} unless $suggestedParent is directly extended by $clazz.") } else diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 0e3ad97af6c6..0f1af59d9c70 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1453,7 +1453,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def illegalSpecializedInheritance(clazz: Symbol): Boolean = ( clazz.isSpecialized - && originalClass(clazz).parentSymbols.exists(p => hasSpecializedParams(p) && !p.isTrait) + && originalClass(clazz).info.parents.exists(p => hasSpecializedParams(p.typeSymbol) && !p.typeSymbol.isTrait) ) class SpecializationTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { @@ -1938,7 +1938,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } if (hasSpecializedFields) { - val isSpecializedInstance = sClass :: sClass.parentSymbols exists (_ hasFlag SPECIALIZED) + val isSpecializedInstance = (sClass hasFlag SPECIALIZED) || sClass.info.parents.exists(_.typeSymbol hasFlag SPECIALIZED) val sym = sClass.newMethod(nme.SPECIALIZED_INSTANCE, sClass.pos) setInfoAndEnter MethodType(Nil, BooleanTpe) mbrs += DefDef(sym, Literal(Constant(isSpecializedInstance)).setType(BooleanTpe)).setType(NoType) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala 
b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index a3ab364998b4..5b96eb6cc327 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -394,11 +394,11 @@ abstract class RefChecks extends Transform { //Console.println(infoString(member) + " shadows1 " + infoString(other) " in " + clazz);//DEBUG return } - if (clazz.parentSymbols exists (p => subOther(p) && subMember(p) && deferredCheck)) { + if (clazz.info.parents exists (p => subOther(p.typeSymbol) && subMember(p.typeSymbol) && deferredCheck)) { //Console.println(infoString(member) + " shadows2 " + infoString(other) + " in " + clazz);//DEBUG return } - if (clazz.parentSymbols forall (p => subOther(p) == subMember(p))) { + if (clazz.info.parents forall (p => subOther(p.typeSymbol) == subMember(p.typeSymbol))) { //Console.println(infoString(member) + " shadows " + infoString(other) + " in " + clazz);//DEBUG return } diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index 4f2010d66ee1..40cd0822fd70 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -184,8 +184,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT // There is no test left for this warning, as I have been unable to come up with an example that would trigger it. // For a `super.m` selection, there must be a direct parent from which `m` can be selected. This parent will be used // as receiver in the invokespecial call. - val receiverInBytecode = erasure.accessibleOwnerOrParentDefiningMember(sym, sup.tpe.typeSymbol.parentSymbols, localTyper.context.asInstanceOf[erasure.Context]).getOrElse(sym.owner) - if (!clazz.parentSymbols.contains(receiverInBytecode)) + val receiverInBytecode = erasure.accessibleOwnerOrParentDefiningMember(sym, sup.tpe.typeSymbol.info.parents, localTyper.context.asInstanceOf[erasure.Context]).getOrElse(sym.owner) + if (!clazz.info.parents.exists(_.typeSymbol == receiverInBytecode)) reporter.error(sel.pos, s"unable to emit super call unless interface ${owner.name} (which declares $sym) is directly extended by $clazz.") } } diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index fe18347d15a7..444b35d5c5c4 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -368,7 +368,8 @@ abstract class SymbolTable extends macros.Universe } } // enter decls of parent classes - for (p <- container.parentSymbols) { + for (px <- container.info.parents) { + val p = px.typeSymbol if (p != definitions.ObjectClass) { openPackageModule(p, dest) } From d62f26c9827648dd3d8949be4e4739b2ed61477d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 7 Mar 2019 11:02:35 +1000 Subject: [PATCH 1642/2793] Avoid NPE during global initialization under -verbose/-Ylogcp ``` $ git clone wheaties/TwoTails ``` ``` sbt:root> ; ++2.12.9-bin-88ed07f-SNAPSHOT! 
; Test/compile [error] java.lang.NullPointerException [error] at scala.tools.nsc.classpath.FileBasedCache.getOrCreate(ZipAndJarFileLookupFactory.scala:269) [error] at scala.tools.nsc.classpath.ZipAndJarFileLookupFactory.create(ZipAndJarFileLookupFactory.scala:44) [error] at scala.tools.nsc.classpath.ZipAndJarFileLookupFactory.create$(ZipAndJarFileLookupFactory.scala:37) [error] at scala.tools.nsc.classpath.ZipAndJarClassPathFactory$.create(ZipAndJarFileLookupFactory.scala:55) [error] at scala.tools.nsc.classpath.ClassPathFactory$.newClassPath(ClassPathFactory.scala:85) [error] at scala.tools.nsc.classpath.ClassPathFactory.newClassPath(ClassPathFactory.scala:29) [error] at scala.tools.nsc.classpath.ClassPathFactory.$anonfun$classesInPathImpl$3(ClassPathFactory.scala:69) [error] at scala.tools.nsc.classpath.ClassPathFactory.$anonfun$classesInPathImpl$1(ClassPathFactory.scala:65) [error] at scala.tools.nsc.classpath.ClassPathFactory.classesInPathImpl(ClassPathFactory.scala:64) [error] at scala.tools.nsc.classpath.ClassPathFactory.classesInPath(ClassPathFactory.scala:55) [error] at scala.tools.util.PathResolver$Calculated$.basis(PathResolver.scala:260) [error] at scala.tools.util.PathResolver$Calculated$.containers$lzycompute(PathResolver.scala:272) [error] at scala.tools.util.PathResolver$Calculated$.containers(PathResolver.scala:272) [error] at scala.tools.util.PathResolver.containers(PathResolver.scala:288) [error] at scala.tools.util.PathResolver.computeResult(PathResolver.scala:310) [error] at scala.tools.util.PathResolver.result(PathResolver.scala:293) [error] at scala.tools.nsc.backend.JavaPlatform.classPath(JavaPlatform.scala:30) [error] at scala.tools.nsc.backend.JavaPlatform.classPath$(JavaPlatform.scala:29) [error] at scala.tools.nsc.Global$GlobalPlatform.classPath(Global.scala:127) [error] at scala.tools.nsc.Global.classPath(Global.scala:138) [error] at scala.tools.nsc.Global.(Global.scala:364) [error] at xsbt.CallbackGlobal.(CallbackGlobal.scala:21) [error] at xsbt.ZincCompiler.(CallbackGlobal.scala:60) [error] at xsbt.CachedCompilerCompat.newCompiler(Compat.scala:31) [error] at xsbt.CachedCompilerCompat.newCompiler$(Compat.scala:30) [error] at xsbt.CachedCompiler0.newCompiler(CompilerInterface.scala:55) [error] at xsbt.CachedCompiler0.(CompilerInterface.scala:84) [error] at xsbt.CompilerInterface.newCompiler(CompilerInterface.scala:22) ``` --- src/compiler/scala/tools/nsc/Global.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 47bd41e37b09..eaaba1e99b2e 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1713,7 +1713,7 @@ class Global(var currentSettings: Settings, reporter0: Reporter) def createJavadoc = false - final val closeableRegistry: CloseableRegistry = new CloseableRegistry + final lazy val closeableRegistry: CloseableRegistry = new CloseableRegistry def close(): Unit = { perRunCaches.clearAll() From ca3e491114471286266eeb1fb8fa67a2a3b23086 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Thu, 7 Mar 2019 03:53:52 +0000 Subject: [PATCH 1643/2793] Avoid using List flatMap withi normalizeImpl The normalizeImpl was using a `flatten` method for flattening out the list of parents of a refined type. This `flatten` method combined a list map with a list flatMap and a recursive loop, which could give a lot of list allocations discarded. 
We replace the code by a simple `foreach` loop that uses a mutable list buffer. To further avoid the call to the "distinct" method, we also check for repeated elements before insertion. --- src/reflect/scala/reflect/internal/Types.scala | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 1c20dd98df94..1680b3479a3a 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1632,14 +1632,17 @@ trait Types private def normalizeImpl = { // TODO see comments around def intersectionType and def merge // scala/bug#8575 The dealias is needed here to keep subtyping transitive, example in run/t8575b.scala - def flatten(tps: List[Type]): List[Type] = { + val flattened: List[Type] = { + @inline def dealiasRefinement(tp: Type) = if (tp.dealias.isInstanceOf[RefinedType]) tp.dealias else tp - tps map dealiasRefinement flatMap { - case RefinedType(parents, ds) if ds.isEmpty => flatten(parents) - case tp => List(tp) + val buf: ListBuffer[Type] = ListBuffer.empty[Type] + def loop(tp: Type): Unit = dealiasRefinement(tp) match { + case RefinedType(parents, ds) if ds.isEmpty => parents.foreach(loop) + case tp => if (buf contains tp) () else buf += tp } + parents foreach loop + buf.toList } - val flattened = flatten(parents).distinct if (decls.isEmpty && hasLength(flattened, 1)) { flattened.head } else if (flattened != parents) { From ca8c69da8e69afc0b2536a68e367591fafb46b37 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 6 Mar 2019 16:26:25 -0800 Subject: [PATCH 1644/2793] restore compat for PathResolver, ClassPathFactory constructors restore source compat and bincompat small followup to #7712. the community build found that a couple of projects (mima, classpath-shrinker) were using the old constructors. since it's easy to do, let's keep both source compat (with the default arguments) and bincompat (with the extra constructors, which we can toss for 2.13) --- .../scala/tools/nsc/classpath/ClassPathFactory.scala | 11 +++++++++-- src/compiler/scala/tools/util/PathResolver.scala | 6 +++++- 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala index f2fb2b0224d7..39f2bb88541f 100644 --- a/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ClassPathFactory.scala @@ -22,7 +22,11 @@ import scala.tools.nsc.util.ClassPath * Provides factory methods for classpath. When creating classpath instances for a given path, * it uses proper type of classpath depending on a types of particular files containing sources or classes. */ -class ClassPathFactory(settings: Settings, closeableRegistry: CloseableRegistry) { +class ClassPathFactory(settings: Settings, closeableRegistry: CloseableRegistry = new CloseableRegistry) { + + @deprecated("for bincompat in 2.12.x series", "2.12.9") // TODO remove from 2.13.x + def this(settings: Settings) = this(settings, new CloseableRegistry) + /** * Create a new classpath based on the abstract file. 
*/ @@ -78,7 +82,10 @@ class ClassPathFactory(settings: Settings, closeableRegistry: CloseableRegistry) } object ClassPathFactory { - def newClassPath(file: AbstractFile, settings: Settings, closeableRegistry: CloseableRegistry): ClassPath = file match { + @deprecated("for bincompat in 2.12.x series", "2.12.9") // TODO remove from 2.13.x + def newClassPath(file: AbstractFile, settings: Settings): ClassPath = + newClassPath(file, settings, new CloseableRegistry) + def newClassPath(file: AbstractFile, settings: Settings, closeableRegistry: CloseableRegistry = new CloseableRegistry): ClassPath = file match { case vd: VirtualDirectory => VirtualDirectoryClassPath(vd) case _ => if (file.isJarOrZip) diff --git a/src/compiler/scala/tools/util/PathResolver.scala b/src/compiler/scala/tools/util/PathResolver.scala index cf454d5854f8..21f541babdac 100644 --- a/src/compiler/scala/tools/util/PathResolver.scala +++ b/src/compiler/scala/tools/util/PathResolver.scala @@ -206,7 +206,11 @@ object PathResolver { } } -final class PathResolver(settings: Settings, closeableRegistry: CloseableRegistry) { +final class PathResolver(settings: Settings, closeableRegistry: CloseableRegistry = new CloseableRegistry) { + + @deprecated("for bincompat in 2.12.x series", "2.12.9") // TODO remove from 2.13.x + def this(settings: Settings) = this(settings, new CloseableRegistry) + private val classPathFactory = new ClassPathFactory(settings, closeableRegistry) import PathResolver.{ AsLines, Defaults, ppcp } From 8763166e8f4ffac6c2e8937c60a449a0cd132354 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 7 Mar 2019 15:27:54 +1000 Subject: [PATCH 1645/2793] Avoid trailing zero bytes in .sig files written in PipelineMain --- src/compiler/scala/tools/nsc/PipelineMain.scala | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 4fbcfd099ef8..24f8f8881771 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -12,7 +12,7 @@ package scala.tools.nsc -import java.io.File +import java.io.{BufferedOutputStream, File} import java.lang.Thread.UncaughtExceptionHandler import java.nio.file.attribute.FileTime import java.nio.file.{Files, Path, Paths} @@ -33,7 +33,7 @@ import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.util.ClassPath import scala.util.{Failure, Success, Try} -import PipelineMain.{BuildStrategy, Traditional, Pipeline} +import PipelineMain.{BuildStrategy, Pipeline, Traditional} class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy, argFiles: Seq[Path], useJars: Boolean) { private val pickleCacheConfigured = System.getProperty("scala.pipeline.picklecache") @@ -103,7 +103,12 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy if (!written.containsKey(pickle)) { val base = packageDir(symbol.owner) val primary = base.resolve(symbol.encodedName + ".sig") - Files.write(primary, pickle.bytes) + val writer = new BufferedOutputStream(Files.newOutputStream(primary)) + try { + writer.write(pickle.bytes, 0, pickle.writeIndex) + } finally { + writer.close() + } written.put(pickle, ()) } } From 9fcb2f34fcd63617faa7699844c1e30b4cbd78d4 Mon Sep 17 00:00:00 2001 From: "Diego E. 
Alonso-Blas" Date: Thu, 7 Mar 2019 00:46:38 +0000 Subject: [PATCH 1646/2793] Avoid using List.flatten The method `List.flatten`, which is applied to a list of lists of elements, results in a lot of allocations for the concatenation. In this commit, we remove one use of a call to flatten in the Typers. A for loop on the flattened list is same as a for loop on the outer list and one on each inner list. An "exists" on the flattened list is same as an outer exists of inner exists. In the same way, when computing the sum of lengths of the lists in a list of list, we replace the flatten with a custom function for that. --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 8 +++----- src/reflect/scala/reflect/internal/util/Collections.scala | 3 +++ 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 812b5bb5cea3..a59a87a140d0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2353,10 +2353,9 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper StarWithDefaultError(meth) if (!isPastTyper) { - val allParams = meth.paramss.flatten - for (p <- allParams) { + for (pp <- meth.paramss ; p <- pp){ for (n <- p.deprecatedParamName) { - if (allParams.exists(p1 => p != p1 && (p1.name == n || p1.deprecatedParamName.exists(_ == n)))) + if (mexists(meth.paramss)(p1 => p != p1 && (p1.name == n || p1.deprecatedParamName.exists(_ == n)))) DeprecatedParamNameError(p, n) } } @@ -4012,8 +4011,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case _ => reportAnnotationError(UnexpectedTreeAnnotationError(t, typedAnn)) } - - if (annType.typeSymbol == DeprecatedAttr && argss.flatten.size < 2) + if (annType.typeSymbol == DeprecatedAttr && sumSize(argss, 0) < 2) context.deprecationWarning(ann.pos, DeprecatedAttr, "@deprecated now takes two arguments; see the scaladoc.", "2.11.0") if ((typedAnn.tpe == null) || typedAnn.tpe.isErroneous) ErroneousAnnotation diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index ca5cad827855..544f66c8db0c 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -331,6 +331,9 @@ trait Collections { /** Again avoiding calling length, but the lengthCompare interface is clunky. */ final def hasLength(xs: List[_], len: Int) = xs.lengthCompare(len) == 0 + + @tailrec final def sumSize(xss: List[List[_]], acc: Int): Int = + if (xss.isEmpty) acc else sumSize(xss.tail, acc + xss.head.size) } object Collections extends Collections From a7891444110c27af7b1312c2a93c27e0e73bdc43 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Thu, 7 Mar 2019 13:30:07 +0000 Subject: [PATCH 1647/2793] Add parentSymbolsIterator method. Use it instead of the info. 
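A toy sketch of why an iterator-returning accessor helps here (assumed names, not the real `reflect.internal.Symbols` API): the mapped iterator is lazy, so `contains`/`exists` short-circuit without ever building a `List`, while call sites keep the readability of the old `parentSymbols` queries.

```scala
object ParentIteratorSketch {
  final case class Parent(typeSymbol: String)
  final case class Sym(parents: List[Parent]) {
    // lazy view over the parents' type symbols; no per-call List allocation
    def parentSymbolsIterator: Iterator[String] = parents.iterator.map(_.typeSymbol)
  }

  // reads like the old `sym.parentSymbols contains X`, but allocation-free
  def isAndroidParcelable(sym: Sym): Boolean =
    sym.parentSymbolsIterator.contains("AndroidParcelableInterface")
}
```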
--- .../scala/tools/nsc/backend/jvm/BCodeHelpers.scala | 2 +- src/compiler/scala/tools/nsc/transform/Erasure.scala | 7 +++---- src/compiler/scala/tools/nsc/transform/Mixin.scala | 4 ++-- .../scala/tools/nsc/transform/SpecializeTypes.scala | 4 ++-- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 4 ++-- .../scala/tools/nsc/typechecker/SuperAccessors.scala | 4 ++-- src/reflect/scala/reflect/internal/SymbolTable.scala | 3 +-- src/reflect/scala/reflect/internal/Symbols.scala | 1 + 8 files changed, 14 insertions(+), 15 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 03589bc4aef8..5fe51011b856 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -1123,7 +1123,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { */ def isAndroidParcelableClass(sym: Symbol) = (AndroidParcelableInterface != NoSymbol) && - (sym.info.parents.exists( _.typeSymbol == AndroidParcelableInterface)) + (sym.parentSymbolsIterator contains AndroidParcelableInterface) /* * must-single-thread diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 1582c9d66e27..ff428cc156b4 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -1213,7 +1213,7 @@ abstract class Erasure extends InfoTransform // class if `m` is defined in Java. This avoids the need for having the Java class as // a direct parent (scala-dev#143). if (qual.isInstanceOf[Super]) { - val qualSym = accessibleOwnerOrParentDefiningMember(sym, qual.tpe.typeSymbol.info.parents, context) match { + val qualSym = accessibleOwnerOrParentDefiningMember(sym, qual.tpe.typeSymbol.parentSymbolsIterator, context) match { case Some(p) => p case None => // There is no test for this warning, I have been unable to come up with an example that would trigger it. @@ -1395,14 +1395,13 @@ abstract class Erasure extends InfoTransform * - For Java-defined members we prefer a direct parent over of the owner, even if the owner is * accessible. This way the owner doesn't need to be added as a direct parent, see scala-dev#143. 
*/ - final def accessibleOwnerOrParentDefiningMember(member: Symbol, parents: List[Type], context: Context): Option[Symbol] = { + final def accessibleOwnerOrParentDefiningMember(member: Symbol, parents: Iterator[Symbol], context: Context): Option[Symbol] = { def eraseAny(cls: Symbol) = if (cls == AnyClass || cls == AnyValClass) ObjectClass else cls if (member.isConstructor || !member.isJavaDefined) Some(eraseAny(member.owner)) else parents.find { p => - val e = eraseAny(p.typeSymbol) + val e = eraseAny(p) isJvmAccessible(e, context) && definesMemberAfterErasure(e, member) - } map { _.typeSymbol } orElse { val e = eraseAny(member.owner) if (isJvmAccessible(e, context)) Some(e) else None diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index 6338c6b09b46..d6c5aa5e2888 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -225,7 +225,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes def genForwarder(required: Boolean): Unit = { val owner = member.owner val isJavaInterface = owner.isJavaDefined && owner.isInterface - if (isJavaInterface && !clazz.info.parents.exists(_.typeSymbol == owner)) { + if (isJavaInterface && !clazz.parentSymbolsIterator.contains(owner)) { if (required) { val text = s"Unable to implement a mixin forwarder for $member in $clazz unless interface ${owner.name} is directly extended by $clazz." reporter.error(clazz.pos, text) @@ -302,7 +302,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes mixinMember.alias, mixinClass)) case alias1 => if (alias1.owner.isJavaDefined && alias1.owner.isInterface) { - if (!clazz.info.parents.exists(_.typeSymbol eq alias1.owner)) { + if (!clazz.parentSymbolsIterator.contains(alias1.owner)) { val suggestedParent = exitingTyper(clazz.info.baseType(alias1.owner)) reporter.error(clazz.pos, s"Unable to implement a super accessor required by trait ${mixinClass.name} unless $suggestedParent is directly extended by $clazz.") } else diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 0f1af59d9c70..207a9fcefb8b 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1453,7 +1453,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { def illegalSpecializedInheritance(clazz: Symbol): Boolean = ( clazz.isSpecialized - && originalClass(clazz).info.parents.exists(p => hasSpecializedParams(p.typeSymbol) && !p.typeSymbol.isTrait) + && originalClass(clazz).parentSymbolsIterator.exists(p => hasSpecializedParams(p) && !p.isTrait) ) class SpecializationTransformer(unit: CompilationUnit) extends TypingTransformer(unit) { @@ -1938,7 +1938,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } if (hasSpecializedFields) { - val isSpecializedInstance = (sClass hasFlag SPECIALIZED) || sClass.info.parents.exists(_.typeSymbol hasFlag SPECIALIZED) + val isSpecializedInstance = (sClass hasFlag SPECIALIZED) || sClass.parentSymbolsIterator.exists(_ hasFlag SPECIALIZED) val sym = sClass.newMethod(nme.SPECIALIZED_INSTANCE, sClass.pos) setInfoAndEnter MethodType(Nil, BooleanTpe) mbrs += DefDef(sym, Literal(Constant(isSpecializedInstance)).setType(BooleanTpe)).setType(NoType) diff --git 
a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 5b96eb6cc327..e3e3bf7737fe 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -394,11 +394,11 @@ abstract class RefChecks extends Transform { //Console.println(infoString(member) + " shadows1 " + infoString(other) " in " + clazz);//DEBUG return } - if (clazz.info.parents exists (p => subOther(p.typeSymbol) && subMember(p.typeSymbol) && deferredCheck)) { + if (clazz.parentSymbolsIterator exists (p => subOther(p) && subMember(p) && deferredCheck)) { //Console.println(infoString(member) + " shadows2 " + infoString(other) + " in " + clazz);//DEBUG return } - if (clazz.info.parents forall (p => subOther(p.typeSymbol) == subMember(p.typeSymbol))) { + if (clazz.parentSymbolsIterator forall (p => subOther(p) == subMember(p))) { //Console.println(infoString(member) + " shadows " + infoString(other) + " in " + clazz);//DEBUG return } diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index 40cd0822fd70..68ee0eb86416 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -184,8 +184,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT // There is no test left for this warning, as I have been unable to come up with an example that would trigger it. // For a `super.m` selection, there must be a direct parent from which `m` can be selected. This parent will be used // as receiver in the invokespecial call. - val receiverInBytecode = erasure.accessibleOwnerOrParentDefiningMember(sym, sup.tpe.typeSymbol.info.parents, localTyper.context.asInstanceOf[erasure.Context]).getOrElse(sym.owner) - if (!clazz.info.parents.exists(_.typeSymbol == receiverInBytecode)) + val receiverInBytecode = erasure.accessibleOwnerOrParentDefiningMember(sym, sup.tpe.typeSymbol.parentSymbolsIterator, localTyper.context.asInstanceOf[erasure.Context]).getOrElse(sym.owner) + if (!clazz.parentSymbolsIterator.contains(receiverInBytecode)) reporter.error(sel.pos, s"unable to emit super call unless interface ${owner.name} (which declares $sym) is directly extended by $clazz.") } } diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 444b35d5c5c4..1fcc0f575137 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -368,8 +368,7 @@ abstract class SymbolTable extends macros.Universe } } // enter decls of parent classes - for (px <- container.info.parents) { - val p = px.typeSymbol + for (p <- container.parentSymbolsIterator) { if (p != definitions.ObjectClass) { openPackageModule(p, dest) } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 6a792c11c6ff..17e651a78de0 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -2170,6 +2170,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def superClass: Symbol = if (info.parents.isEmpty) NoSymbol else info.parents.head.typeSymbol def parentSymbols: List[Symbol] = info.parents map (_.typeSymbol) + def parentSymbolsIterator: Iterator[Symbol] = info.parents.iterator.map(_.typeSymbol) /** The directly 
or indirectly inherited mixins of this class * except for mixin classes inherited by the superclass. Mixin classes appear * in linearization order. From d27a93af6e246b977470f5df7918904eef4b58a2 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Thu, 7 Mar 2019 14:42:00 +0000 Subject: [PATCH 1648/2793] Use LinkedHashSet instead of ListBuffer: better query times --- .../scala/reflect/internal/Types.scala | 24 ++++++++----------- 1 file changed, 10 insertions(+), 14 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 1680b3479a3a..772ce537d77b 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -18,7 +18,7 @@ import java.util.Objects import scala.collection.{immutable, mutable} import scala.ref.WeakReference -import mutable.ListBuffer +import mutable.{ListBuffer, LinkedHashSet} import Flags._ import scala.util.control.ControlThrowable import scala.annotation.tailrec @@ -1632,21 +1632,17 @@ trait Types private def normalizeImpl = { // TODO see comments around def intersectionType and def merge // scala/bug#8575 The dealias is needed here to keep subtyping transitive, example in run/t8575b.scala - val flattened: List[Type] = { - @inline - def dealiasRefinement(tp: Type) = if (tp.dealias.isInstanceOf[RefinedType]) tp.dealias else tp - val buf: ListBuffer[Type] = ListBuffer.empty[Type] - def loop(tp: Type): Unit = dealiasRefinement(tp) match { - case RefinedType(parents, ds) if ds.isEmpty => parents.foreach(loop) - case tp => if (buf contains tp) () else buf += tp - } - parents foreach loop - buf.toList + val flattened: LinkedHashSet[Type] = LinkedHashSet.empty[Type] + def dealiasRefinement(tp: Type) = if (tp.dealias.isInstanceOf[RefinedType]) tp.dealias else tp + def loop(tp: Type): Unit = dealiasRefinement(tp) match { + case RefinedType(parents, ds) if ds.isEmpty => parents.foreach(loop) + case tp => flattened.add(tp) } - if (decls.isEmpty && hasLength(flattened, 1)) { + parents foreach loop + if (decls.isEmpty && flattened.size == 1) { flattened.head - } else if (flattened != parents) { - refinedType(flattened, if (typeSymbol eq NoSymbol) NoSymbol else typeSymbol.owner, decls, NoPosition) + } else if (!flattened.sameElements(parents)) { + refinedType(flattened.toList, if (typeSymbol eq NoSymbol) NoSymbol else typeSymbol.owner, decls, NoPosition) } else if (isHigherKinded) { etaExpand } else super.normalize From 403e5412701ad91b627f36f4db5e1b03476a4412 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 8 Mar 2019 11:06:14 +1000 Subject: [PATCH 1649/2793] Assert that non-overloaded constructors don't need this type substitution (In checkAccessible) --- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 2e66eff3c82d..bd3e3097d3b4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -282,8 +282,11 @@ trait Infer extends Checkable { catch { case ex: MalformedType => malformed(ex, pre memberType underlyingSymbol(sym)) } ) tree setSymbol sym1 setType ( - pre match { - case _: SuperType => owntype map (tp => if (tp eq pre) site.symbol.thisType else tp) + pre match { + case _: SuperType => + val result = owntype map ((tp: Type) => if (tp eq pre) site.symbol.thisType else tp) + if (result ne 
owntype) assert(!sym.isConstructor || owntype.isInstanceOf[OverloadedType], (sym, owntype, result)) + result case _ => owntype } ) From 4f419e7a7f83175ac8f3689bd5897f27ff3cde81 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 8 Mar 2019 11:32:44 +1000 Subject: [PATCH 1650/2793] Optimize checkAccessible for super constructor calls --- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index bd3e3097d3b4..246913589723 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -284,9 +284,8 @@ trait Infer extends Checkable { tree setSymbol sym1 setType ( pre match { case _: SuperType => - val result = owntype map ((tp: Type) => if (tp eq pre) site.symbol.thisType else tp) - if (result ne owntype) assert(!sym.isConstructor || owntype.isInstanceOf[OverloadedType], (sym, owntype, result)) - result + if (!sym.isConstructor && !owntype.isInstanceOf[OverloadedType]) owntype // OPT: avoid lambda allocation and Type.map + else owntype map ((tp: Type) => if (tp eq pre) site.symbol.thisType else tp) case _ => owntype } ) From e9bd65d7f84864ba16bde7b0537d60043798eb3f Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 8 Mar 2019 14:31:55 +1000 Subject: [PATCH 1651/2793] Fix completion of explicitly imported type names The logic under all `allImportedSymbols` differed from `importedSymbol` by fruitlessly copmaring the import selector (a TermName) to the TypeName of the member of the import's prefix. --- .../scala/tools/nsc/typechecker/Contexts.scala | 2 +- .../scala/tools/nsc/interpreter/CompletionTest.scala | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 5b970fe7e79e..3b1d75567f02 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1567,7 +1567,7 @@ trait Contexts { self: Analyzer => private def transformImport(selectors: List[ImportSelector], sym: Symbol): List[Symbol] = selectors match { case List() => List() case List(ImportSelector(nme.WILDCARD, _, _, _)) => List(sym) - case ImportSelector(from, _, to, _) :: _ if from == sym.name => + case ImportSelector(from, _, to, _) :: _ if from == (if (from.isTermName) sym.name.toTermName else sym.name.toTypeName) => if (to == nme.WILDCARD) List() else List(sym.cloneSymbol(sym.owner, sym.rawflags, to)) case _ :: rest => transformImport(rest, sym) diff --git a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala index 1eb2558880f3..d130f133e6d3 100644 --- a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala +++ b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala @@ -202,6 +202,16 @@ class CompletionTest { checkExact(completer, "p1.p2.p3.Ping.Po")("Pong") } + @Test + def constructor(): Unit = { + val intp = newIMain() + val completer = new PresentationCompilerCompleter(intp) + checkExact(completer, "class Shazam{}; new Shaz")("Shazam") + + intp.interpret("class Shazam {}") + checkExact(completer, "new Shaz")("Shazam") + } + @Test def performanceOfLenientMatch(): Unit = { val intp = newIMain() From 55ba885d2a149996df02fb4caa100c225cf6134f Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 8 Mar 
2019 15:30:19 +1000 Subject: [PATCH 1652/2793] Show constructor def strings TAB,TAB, as we do for methods --- .../nsc/interactive/CompilerControl.scala | 2 +- .../interpreter/PresentationCompilation.scala | 13 +++++---- .../PresentationCompilerCompleter.scala | 29 +++++++++++++++---- .../nsc/interpreter/CompletionTest.scala | 11 +++++++ 4 files changed, 43 insertions(+), 12 deletions(-) diff --git a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala index 4ad122148993..b75d61a22092 100644 --- a/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala +++ b/src/interactive/scala/tools/nsc/interactive/CompilerControl.scala @@ -281,7 +281,7 @@ trait CompilerControl { self: Global => val tpe: Type val accessible: Boolean def implicitlyAdded = false - def symNameDropLocal: Name = sym.name.dropLocal + def symNameDropLocal: Name = if (sym.name.isTermName) sym.name.dropLocal else sym.name private def accessible_s = if (accessible) "" else "[inaccessible] " def forceInfoString = { diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala index 7a601ab65750..e03f4cdc3c2d 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala @@ -12,11 +12,11 @@ package scala.tools.nsc.interpreter -import scala.reflect.internal.util.RangePosition +import scala.reflect.internal.util.{Position, RangePosition} import scala.reflect.io.AbstractFile import scala.tools.nsc.backend.JavaPlatform import scala.tools.nsc.util.ClassPath -import scala.tools.nsc.{interactive, CloseableRegistry, Settings} +import scala.tools.nsc.{CloseableRegistry, Settings, interactive} import scala.tools.nsc.reporters.StoreReporter import scala.tools.nsc.classpath._ @@ -98,10 +98,13 @@ trait PresentationCompilation { import compiler.CompletionResult def completionsAt(cursor: Int): CompletionResult = { - val pos = unit.source.position(preambleLength + cursor) - compiler.completionsAt(pos) + compiler.completionsAt(positionOf(cursor)) } - def typedTreeAt(code: String, selectionStart: Int, selectionEnd: Int): compiler.Tree = { + + def positionOf(cursor: Int): Position = + unit.source.position(preambleLength + cursor) + + def typedTreeAt(selectionStart: Int, selectionEnd: Int): compiler.Tree = { val start = selectionStart + preambleLength val end = selectionEnd + preambleLength val pos = new RangePosition(unit.source, start, start, end) diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala index e941192a9086..9e469041d54d 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala @@ -12,7 +12,7 @@ package scala.tools.nsc.interpreter -import scala.reflect.internal.util.StringOps +import scala.reflect.internal.util.{RangePosition, StringOps} import scala.tools.nsc.interpreter.Completion.Candidates import scala.util.control.NonFatal @@ -57,17 +57,17 @@ class PresentationCompilerCompleter(intp: IMain) extends Completion { Candidates(cursor, "" :: printed :: Nil) } def typeAt(result: Result, start: Int, end: Int) = { - val tpString = result.compiler.exitingTyper(result.typedTreeAt(buf, start, end).tpe.toString) + val tpString = 
result.compiler.exitingTyper(result.typedTreeAt(start, end).tpe.toString) Candidates(cursor, "" :: tpString :: Nil) } def candidates(result: Result): Candidates = { import result.compiler._ import CompletionResult._ - def defStringCandidates(matching: List[Member], name: Name): Candidates = { + def defStringCandidates(matching: List[Member], name: Name, isNew: Boolean): Candidates = { val defStrings = for { member <- matching if member.symNameDropLocal == name - sym <- member.sym.alternatives + sym <- if (member.sym.isClass && isNew) member.sym.info.decl(nme.CONSTRUCTOR).alternatives else member.sym.alternatives sugared = sym.sugaredSymbolOrSelf } yield { val tp = member.prefix memberType sym @@ -94,8 +94,25 @@ class PresentationCompilerCompleter(intp: IMain) extends Completion { val matching = r.matchingResults().filterNot(shouldHide) val tabAfterCommonPrefixCompletion = lastCommonPrefixCompletion.contains(buf.substring(0, cursor)) && matching.exists(_.symNameDropLocal == r.name) val doubleTab = tabCount > 0 && matching.forall(_.symNameDropLocal == r.name) - if (tabAfterCommonPrefixCompletion || doubleTab) defStringCandidates(matching, r.name) - else if (matching.isEmpty) { + if (tabAfterCommonPrefixCompletion || doubleTab) { + val offset = result.preambleLength + val pos1 = result.positionOf(cursor) + import result.compiler._ + val locator = new Locator(pos1) + val tree = locator locateIn result.unit.body + var isNew = false + new TreeStackTraverser { + override def traverse(t: Tree): Unit = { + if (t eq tree) { + isNew = path.dropWhile { case _: Select | _: Annotated => true; case _ => false}.headOption match { + case Some(_: New) => true + case _ => false + } + } else super.traverse(t) + } + }.traverse(result.unit.body) + defStringCandidates(matching, r.name, isNew) + } else if (matching.isEmpty) { // Lenient matching based on camel case and on eliding JavaBean "get" / "is" boilerplate val camelMatches: List[Member] = r.matchingResults(CompletionResult.camelMatch(_)).filterNot(shouldHide) val memberCompletions = camelMatches.map(_.symNameDropLocal.decoded).distinct.sorted diff --git a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala index d130f133e6d3..2873bca8c668 100644 --- a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala +++ b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala @@ -176,6 +176,17 @@ class CompletionTest { checkExact(completer, "trait T[A] { def foo: A }; (t: T[Int]) => t.foo")(EmptyString, "def foo: Int") } + @Test + def defStringConstructor(): Unit = { + val intp = newIMain() + val completer = new PresentationCompilerCompleter(intp) + checkExact(completer, "class Shazam(i: Int); new Shaza")("Shazam") + checkExact(completer, "class Shazam(i: Int); new Shazam")(EmptyString, "def (i: Int): Shazam") + + checkExact(completer, "class Shazam(i: Int) { def this(x: String) = this(0) }; new Shaza")("Shazam") + checkExact(completer, "class Shazam(i: Int) { def this(x: String) = this(0) }; new Shazam")(EmptyString, "def (i: Int): Shazam", "def (x: String): Shazam") + } + @Test def treePrint(): Unit = { val intp = newIMain() From 3cc07a495562654e22aa5c0367f8522ea6c2dacd Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Thu, 7 Mar 2019 04:23:44 +0000 Subject: [PATCH 1653/2793] Uncurry: avoid using List.flatten The `varargForwarderSym` method was using the `List.flatten` method to reduce two lists of lists of parameters to a single list. 
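(As a standalone illustration of the pattern, here is a minimal sketch with invented names, not code from this patch: visiting corresponding elements of two lists-of-lists in lockstep avoids materialising a flattened copy of either.)

```scala
object FlattenAvoidanceSketch {
  // Visit corresponding elements of two lists-of-lists in lockstep,
  // without flattening either list first.
  def foreachPairs[A, B](xss: List[List[A]], yss: List[List[B]])(f: (A, B) => Unit): Unit = {
    var as = xss
    var bs = yss
    while (as.nonEmpty && bs.nonEmpty) {
      var a = as.head
      var b = bs.head
      while (a.nonEmpty && b.nonEmpty) {
        f(a.head, b.head)
        a = a.tail
        b = b.tail
      }
      as = as.tail
      bs = bs.tail
    }
  }

  def main(args: Array[String]): Unit =
    // prints "1 -> a", "2 -> b", "3 -> c"
    foreachPairs(List(List(1, 2), List(3)), List(List("a", "b"), List("c"))) {
      (i, s) => println(s"$i -> $s")
    }
}
```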
Also, in one of those lists it was using a `list.map`, with extra allocations. However, these lists were immediately paired in a `foreach` loop, and thus discarded immediately. However, since one of the lists is precisely a copy (cloned) from the other, we can instead replace the `flatten` with a double nested for-each pairs loop. --- .../scala/reflect/internal/transform/UnCurry.scala | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala index b86e74e83aa0..f8783e36fd6c 100644 --- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala +++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala @@ -125,8 +125,6 @@ trait UnCurry { // we are using `origSym.info`, which contains the type *before* the transformation // so we still see repeated parameter types (uncurry replaces them with Seq) - val isRepeated = origSym.info.paramss.flatten.map(sym => definitions.isRepeatedParamType(sym.tpe)) - val oldPs = newInfo.paramss.head def toArrayType(tp: Type, newParam: Symbol): Type = { val arg = elementType(SeqClass, tp) val elem = if (arg.typeSymbol.isTypeParameterOrSkolem && !(arg <:< AnyRefTpe)) { @@ -148,11 +146,12 @@ trait UnCurry { arrayType(elem) } - foreach2(forwSym.paramss.flatten, isRepeated)((p, isRep) => - if (isRep) { - p.setInfo(toArrayType(p.info, p)) + foreach2(forwSym.paramss, origSym.info.paramss){ (fsps, origPs) => + foreach2(fsps, origPs){ (p, sym) => + if (definitions.isRepeatedParamType(sym.tpe)) + p.setInfo(toArrayType(p.info, p)) } - ) + } origSym.updateAttachment(VarargsSymbolAttachment(forwSym)) forwSym From fcb8ff7a291bb89bfbb06e452d95e6b5fb390a37 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Sat, 9 Mar 2019 03:08:19 +0000 Subject: [PATCH 1654/2793] copyMethodCompleter: Replace map2 with foreach2 Inside the `copyMethodCompleter` we were using a call to the `map2` method, that was generating a list. However, the result of that call was not being used at all. Only the side-effects are relevant. Thus, we can replace the `map2` with a `foreach2`. --- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index bed4c6a8c3da..f7071b4f941c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -602,14 +602,14 @@ trait Namers extends MethodSynthesis { /* Assign the types of the class parameters to the parameters of the * copy method. See comment in `Unapplies.caseClassCopyMeth` */ - def assignParamTypes(copyDef: DefDef, sym: Symbol) { + def assignParamTypes(copyDef: DefDef, sym: Symbol): Unit = { val clazz = sym.owner val constructorType = clazz.primaryConstructor.tpe val subst = new SubstSymMap(clazz.typeParams, copyDef.tparams map (_.symbol)) val classParamss = constructorType.paramss - map2(copyDef.vparamss, classParamss)((copyParams, classParams) => - map2(copyParams, classParams)((copyP, classP) => + foreach2(copyDef.vparamss, classParamss)((copyParams, classParams) => + foreach2(copyParams, classParams)((copyP, classP) => copyP.tpt setType subst(classP.tpe) ) ) From 524c672ef2edba784e035ecd589f4da0b8353386 Mon Sep 17 00:00:00 2001 From: "Diego E. 
Alonso-Blas" Date: Sat, 9 Mar 2019 05:48:56 +0000 Subject: [PATCH 1655/2793] Avoid using List.zip in the SpecializeTypes unification The code in the unify section of SpecializeTypes had a call to `List.zip`, which was followed by a process to foldLeft that list of pairs (by unifying the elements of each pair). This is allocating a linear number of Cons `::` and Tuple2 objects. We introduce in the Collections class a new method, foldLeft2, to fold over two lists without extra allocations. We use it to teplace the call to `zip`. --- .../scala/tools/nsc/transform/SpecializeTypes.scala | 8 ++++---- .../scala/reflect/internal/util/Collections.scala | 13 +++++++++++++ 2 files changed, 17 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 207a9fcefb8b..857c9430cc79 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1189,13 +1189,13 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { private def unify(tp1: List[Type], tp2: List[Type], env: TypeEnv, strict: Boolean): TypeEnv = { if (tp1.isEmpty || tp2.isEmpty) env - else (tp1 zip tp2).foldLeft(env) { (env, args) => - if (!strict) unify(args._1, args._2, env, strict) + else foldLeft2(tp1, tp2)(env) { (env, arg1, arg2) => + if (!strict) unify(arg1, arg2, env, strict) else { - val nenv = unify(args._1, args._2, emptyEnv, strict) + val nenv = unify(arg1, arg2, emptyEnv, strict) if (env.keySet.intersect(nenv.keySet).isEmpty) env ++ nenv else { - debuglog(s"could not unify: u(${args._1}, ${args._2}) yields $nenv, env: $env") + debuglog(s"could not unify: u($arg1, $arg2) yields $nenv, env: $env") unifyError(tp1, tp2) } } diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index 544f66c8db0c..c75c44a10873 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -158,6 +158,19 @@ trait Collections { if (lb eq null) Nil else lb.result } + // compare to foldLeft[A, B](xs) + final def foldLeft2[A1, A2, B](xs1: List[A1], xs2: List[A2])(z0: B)(f: (B, A1, A2) => B): B = { + var ys1 = xs1 + var ys2 = xs2 + var res = z0 + while (!ys1.isEmpty && !ys2.isEmpty) { + res = f(res, ys1.head, ys2.head) + ys1 = ys1.tail + ys2 = ys2.tail + } + res + } + final def flatCollect[A, B](elems: List[A])(pf: PartialFunction[A, Traversable[B]]): List[B] = { val lb = new ListBuffer[B] for (x <- elems ; if pf isDefinedAt x) From 57c4888f084bd61404b5bdc3c7bfd77f551b6a1a Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Fri, 8 Mar 2019 20:52:51 +0000 Subject: [PATCH 1656/2793] Remove use of List.map in the "varianceInTypes" function. The function `varianceInTypes`, used to calculate the variance of a certain type parameter inside a type expression, used several calls to `List.map`, which creates a new list that was immediately folded into a single Variance result. Using fold fusion, we remove the intermediate mapped lists. We replace the several uses of `List.map` and the `fold` method from the `Variance` companion objects by tail-recursive loops. - By putting the `map` functiona and the fold together, it is not allocating any extra List. - We add an "Extractor" trait in the Variance object, for functions that alwas return `Variance`. 
Because the return type is not polymorphic, unlike the `Function1`, this saves boxing/unboxing. - We add a couple of polymorphic functions, `foldExtract` and `foldExtract2`, to get the variance intersection of a list, given the extractor for the given type. --- .../scala/reflect/internal/Variance.scala | 23 +++++++++++++++---- .../scala/reflect/internal/Variances.scala | 14 +++++++---- 2 files changed, 28 insertions(+), 9 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Variance.scala b/src/reflect/scala/reflect/internal/Variance.scala index fee270d6c50f..be1f79ecf2d3 100644 --- a/src/reflect/scala/reflect/internal/Variance.scala +++ b/src/reflect/scala/reflect/internal/Variance.scala @@ -14,6 +14,7 @@ package scala package reflect package internal +import scala.annotation.tailrec import Variance._ /** Variances form a lattice: @@ -86,12 +87,26 @@ object Variance { def > (other: Int) = v.flags > other } - def fold(variances: List[Variance]): Variance = ( - if (variances.isEmpty) Bivariant - else variances reduceLeft (_ & _) - ) val Bivariant = new Variance(2) val Covariant = new Variance(1) val Contravariant = new Variance(-1) val Invariant = new Variance(0) + + trait Extractor[A] { def apply(x: A): Variance } + trait Extractor2[A, B] { def apply(x: A, y: B): Variance } + + def foldExtract[A](as: List[A])(f: Extractor[A]): Variance = { + @tailrec def loop(xs: List[A], acc: Variance): Variance = + if (acc.isInvariant || xs.isEmpty) acc + else loop(xs.tail, acc & f(xs.head)) + loop(as, Bivariant) + } + + def foldExtract2[A, B](as: List[A], bs: List[B])(f: Extractor2[A, B]): Variance = { + @tailrec def loop(xs: List[A], ys: List[B], acc: Variance): Variance = + if (acc.isInvariant || xs.isEmpty || ys.isEmpty) acc + else loop(xs.tail, ys.tail, acc & f(xs.head, ys.head)) + loop(as, bs, Bivariant) + } + } diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala index bbdb8d28a98e..f5139e45ba9d 100644 --- a/src/reflect/scala/reflect/internal/Variances.scala +++ b/src/reflect/scala/reflect/internal/Variances.scala @@ -206,13 +206,17 @@ trait Variances { /** Compute variance of type parameter `tparam` in all types `tps`. */ def varianceInTypes(tps: List[Type])(tparam: Symbol): Variance = - fold(tps map (tp => varianceInType(tp)(tparam))) + Variance.foldExtract(tps)(t => varianceInType(t)(tparam)) /** Compute variance of type parameter `tparam` in type `tp`. 
*/ def varianceInType(tp: Type)(tparam: Symbol): Variance = { - def inArgs(sym: Symbol, args: List[Type]): Variance = fold(map2(args, sym.typeParams)((a, p) => inType(a) * p.variance)) - def inSyms(syms: List[Symbol]): Variance = fold(syms map inSym) - def inTypes(tps: List[Type]): Variance = fold(tps map inType) + def inArgs(sym: Symbol, args: List[Type]): Variance = + Variance.foldExtract2(args, sym.typeParams)( (a, b) => inType(a)*b.variance ) + def inSyms(syms: List[Symbol]): Variance = + Variance.foldExtract(syms)(s => inSym(s)) + def inTypes(tps: List[Type]): Variance = Variance.foldExtract(tps)(t => inType(t)) + + def inAnnots(anns: List[AnnotationInfo]): Variance = Variance.foldExtract(anns)(a => inType(a.atp)) def inSym(sym: Symbol): Variance = if (sym.isAliasType) inType(sym.info).cut else inType(sym.info) def inType(tp: Type): Variance = tp match { @@ -229,7 +233,7 @@ trait Variances { case MethodType(params, restpe) => inSyms(params).flip & inType(restpe) case PolyType(tparams, restpe) => inSyms(tparams).flip & inType(restpe) case ExistentialType(tparams, restpe) => inSyms(tparams) & inType(restpe) - case AnnotatedType(annots, tp) => inTypes(annots map (_.atp)) & inType(tp) + case AnnotatedType(annots, tp) => inAnnots(annots) & inType(tp) } inType(tp) From 150e0fe350cc9afd8e31d5beb7e5119317c08a89 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 11 Mar 2019 11:08:24 +1000 Subject: [PATCH 1657/2793] Reduce SAM lambda allocation within varianceInTypes --- .../scala/reflect/internal/Variances.scala | 48 +++++++++++-------- 1 file changed, 28 insertions(+), 20 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala index f5139e45ba9d..09042e426ad0 100644 --- a/src/reflect/scala/reflect/internal/Variances.scala +++ b/src/reflect/scala/reflect/internal/Variances.scala @@ -205,37 +205,45 @@ trait Variances { } /** Compute variance of type parameter `tparam` in all types `tps`. */ - def varianceInTypes(tps: List[Type])(tparam: Symbol): Variance = + final def varianceInTypes(tps: List[Type])(tparam: Symbol): Variance = Variance.foldExtract(tps)(t => varianceInType(t)(tparam)) /** Compute variance of type parameter `tparam` in type `tp`. */ - def varianceInType(tp: Type)(tparam: Symbol): Variance = { - def inArgs(sym: Symbol, args: List[Type]): Variance = - Variance.foldExtract2(args, sym.typeParams)( (a, b) => inType(a)*b.variance ) - def inSyms(syms: List[Symbol]): Variance = - Variance.foldExtract(syms)(s => inSym(s)) - def inTypes(tps: List[Type]): Variance = Variance.foldExtract(tps)(t => inType(t)) - - def inAnnots(anns: List[AnnotationInfo]): Variance = Variance.foldExtract(anns)(a => inType(a.atp)) + final def varianceInType(tp: Type)(tparam: Symbol): Variance = { + new varianceInType(tp, tparam).apply() + } - def inSym(sym: Symbol): Variance = if (sym.isAliasType) inType(sym.info).cut else inType(sym.info) - def inType(tp: Type): Variance = tp match { + private final class varianceInType(tp: Type, tparam: Symbol) { + import Variance._ + private def inArgs(sym: Symbol, args: List[Type]): Variance = foldExtract2(args, sym.typeParams)(inArgParam) + private def inSyms(syms: List[Symbol]): Variance = foldExtract(syms)(inSym) + private def inTypes(tps: List[Type]): Variance = foldExtract(tps)(inType) + private def inAnnots(anns: List[AnnotationInfo]): Variance = foldExtract(anns)(inAnnotationAtp) + + // OPT these extractors are hoisted to fields to reduce allocation. 
We're also avoiding Function1[_, Variance] to + // avoid value class boxing. + private[this] lazy val inAnnotationAtp: Extractor[AnnotationInfo] = (a: AnnotationInfo) => inType(a.atp) + private[this] lazy val inArgParam: Extractor2[Type, Symbol] = (a, b) => inType(a) * b.variance + private[this] lazy val inSym: Extractor[Symbol] = (sym: Symbol) => if (sym.isAliasType) inType(sym.info).cut else inType(sym.info) + private[this] val inType: Extractor[Type] = { case ErrorType | WildcardType | NoType | NoPrefix => Bivariant case ThisType(_) | ConstantType(_) => Bivariant case TypeRef(_, `tparam`, _) => Covariant case BoundedWildcardType(bounds) => inType(bounds) case NullaryMethodType(restpe) => inType(restpe) case SingleType(pre, sym) => inType(pre) - case TypeRef(pre, _, _) if tp.isHigherKinded => inType(pre) // a type constructor cannot occur in tp's args - case TypeRef(pre, sym, args) => inType(pre) & inArgs(sym, args) - case TypeBounds(lo, hi) => inType(lo).flip & inType(hi) - case RefinedType(parents, defs) => inTypes(parents) & inSyms(defs.toList) - case MethodType(params, restpe) => inSyms(params).flip & inType(restpe) - case PolyType(tparams, restpe) => inSyms(tparams).flip & inType(restpe) - case ExistentialType(tparams, restpe) => inSyms(tparams) & inType(restpe) - case AnnotatedType(annots, tp) => inAnnots(annots) & inType(tp) + case TypeRef(pre, _, _) if tp.isHigherKinded => inType(pre) // a type constructor cannot occur in tp's args + case TypeRef(pre, sym, args) => inType(pre) & inArgs(sym, args) + case TypeBounds(lo, hi) => inType(lo).flip & inType(hi) + case RefinedType(parents, defs) => inTypes(parents) & inSyms(defs.toList) + case MethodType(params, restpe) => inSyms(params).flip & inType(restpe) + case PolyType(tparams, restpe) => inSyms(tparams).flip & inType(restpe) + case ExistentialType(tparams, restpe) => inSyms(tparams) & inType(restpe) + case AnnotatedType(annots, tp) => inAnnots(annots) & inType(tp) } - inType(tp) + def apply(): Variance = { + inType(tp) + } } } From e6cc02db5dfbc0929e4bea12aeb394830b45f2ee Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 11 Mar 2019 11:26:19 +1000 Subject: [PATCH 1658/2793] Make varianceInType allocation free with a reusable instance --- .../scala/reflect/internal/Variances.scala | 47 ++++++++++++------- 1 file changed, 29 insertions(+), 18 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Variances.scala b/src/reflect/scala/reflect/internal/Variances.scala index 09042e426ad0..a1294ae7a5a8 100644 --- a/src/reflect/scala/reflect/internal/Variances.scala +++ b/src/reflect/scala/reflect/internal/Variances.scala @@ -17,6 +17,7 @@ package internal import Variance._ import scala.collection.mutable import scala.annotation.tailrec +import scala.reflect.internal.util.ReusableInstance /** See comments at scala.reflect.internal.Variance. */ @@ -210,10 +211,14 @@ trait Variances { /** Compute variance of type parameter `tparam` in type `tp`. 
*/ final def varianceInType(tp: Type)(tparam: Symbol): Variance = { - new varianceInType(tp, tparam).apply() + varianceInTypeCache.using(_.apply(tp, tparam)) } + private[this] val varianceInTypeCache = new ReusableInstance[varianceInType](() => new varianceInType) + + private final class varianceInType { + private[this] var tp: Type = _ + private[this] var tparam: Symbol = _ - private final class varianceInType(tp: Type, tparam: Symbol) { import Variance._ private def inArgs(sym: Symbol, args: List[Type]): Variance = foldExtract2(args, sym.typeParams)(inArgParam) private def inSyms(syms: List[Symbol]): Variance = foldExtract(syms)(inSym) @@ -226,24 +231,30 @@ trait Variances { private[this] lazy val inArgParam: Extractor2[Type, Symbol] = (a, b) => inType(a) * b.variance private[this] lazy val inSym: Extractor[Symbol] = (sym: Symbol) => if (sym.isAliasType) inType(sym.info).cut else inType(sym.info) private[this] val inType: Extractor[Type] = { - case ErrorType | WildcardType | NoType | NoPrefix => Bivariant - case ThisType(_) | ConstantType(_) => Bivariant - case TypeRef(_, `tparam`, _) => Covariant - case BoundedWildcardType(bounds) => inType(bounds) - case NullaryMethodType(restpe) => inType(restpe) - case SingleType(pre, sym) => inType(pre) - case TypeRef(pre, _, _) if tp.isHigherKinded => inType(pre) // a type constructor cannot occur in tp's args - case TypeRef(pre, sym, args) => inType(pre) & inArgs(sym, args) - case TypeBounds(lo, hi) => inType(lo).flip & inType(hi) - case RefinedType(parents, defs) => inTypes(parents) & inSyms(defs.toList) - case MethodType(params, restpe) => inSyms(params).flip & inType(restpe) - case PolyType(tparams, restpe) => inSyms(tparams).flip & inType(restpe) - case ExistentialType(tparams, restpe) => inSyms(tparams) & inType(restpe) - case AnnotatedType(annots, tp) => inAnnots(annots) & inType(tp) + case ErrorType | WildcardType | NoType | NoPrefix => Bivariant + case ThisType(_) | ConstantType(_) => Bivariant + case TypeRef(_, tparam, _) if tparam eq this.tparam => Covariant + case BoundedWildcardType(bounds) => inType(bounds) + case NullaryMethodType(restpe) => inType(restpe) + case SingleType(pre, sym) => inType(pre) + case TypeRef(pre, _, _) if tp.isHigherKinded => inType(pre) // a type constructor cannot occur in tp's args + case TypeRef(pre, sym, args) => inType(pre) & inArgs(sym, args) + case TypeBounds(lo, hi) => inType(lo).flip & inType(hi) + case RefinedType(parents, defs) => inTypes(parents) & inSyms(defs.toList) + case MethodType(params, restpe) => inSyms(params).flip & inType(restpe) + case PolyType(tparams, restpe) => inSyms(tparams).flip & inType(restpe) + case ExistentialType(tparams, restpe) => inSyms(tparams) & inType(restpe) + case AnnotatedType(annots, tp) => inAnnots(annots) & inType(tp) } - def apply(): Variance = { - inType(tp) + def apply(tp: Type, tparam: Symbol): Variance = { + this.tp = tp + this.tparam = tparam + try inType(tp) + finally { + this.tp = null + this.tparam = null + } } } } From a18111133829d9935bc32308159b984b2cb2c70b Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Sun, 3 Mar 2019 15:39:32 +0000 Subject: [PATCH 1659/2793] Avoid the call to BaseTypeSeq.toList in type Unification. The method `toList` of the `BaseTypeSeq` class creates a list from the elements already in an array. In the Types unification, this method was called to create a list of types that was immediately filtered out and folded with an `exists`. 
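(Illustration only: a hedged sketch with invented names rather than the real `BaseTypeSeq` API, showing the fused, iterator-based shape of such a check.)

```scala
object IteratorInsteadOfToListSketch {
  // Stand-in for a sequence backed by an array, like BaseTypeSeq.
  final class ArrayBacked(private val elems: Array[Int]) {
    def toList: List[Int] = elems.toList         // allocates a List
    def iterator: Iterator[Int] = elems.iterator // walks the array, no List
  }

  // Before: s.toList.tail filterNot skip exists p  (two intermediate lists).
  // After: one fused traversal over the backing array.
  def existsBeyondHead(s: ArrayBacked, skip: Set[Int], p: Int => Boolean): Boolean =
    s.iterator.drop(1).exists(x => !skip(x) && p(x))

  def main(args: Array[String]): Unit = {
    val s = new ArrayBacked(Array(1, 2, 3, 4))
    println(existsBeyondHead(s, Set(2), _ % 2 == 0)) // true (because of 4)
  }
}
```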
To avoid the call to `toList`, we do the following: - Add a `toIterator` method in the `BaseTypeSeq` class. This is a simple iterator that walks the underlying array in one pass. - Use the `iterator.exists` method instead. --- src/reflect/scala/reflect/internal/BaseTypeSeqs.scala | 3 +++ src/reflect/scala/reflect/internal/Types.scala | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala index 288f4e4ca1f7..6f92ef99d44c 100644 --- a/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/reflect/scala/reflect/internal/BaseTypeSeqs.scala @@ -130,6 +130,9 @@ trait BaseTypeSeqs { /** Return all evaluated types in this sequence as a list */ def toList: List[Type] = elems.toList + /** Return an iterator over all evaluated types in this sequence */ + def toIterator: Iterator[Type] = elems.iterator + def copy(head: Type, offset: Int): BaseTypeSeq = { val arr = new Array[Type](elems.length + offset) java.lang.System.arraycopy(elems, 0, arr, offset, elems.length) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 1c20dd98df94..fa49e8d87a60 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -3326,7 +3326,7 @@ trait Types (tp.parents exists unifyFull) || ( // @PP: Is it going to be faster to filter out the parents we just checked? // That's what's done here but I'm not sure it matters. - tp.baseTypeSeq.toList.tail filterNot (tp.parents contains _) exists unifyFull + tp.baseTypeSeq.toIterator.drop(1).exists(bt => !tp.parents.contains(bt) && unifyFull(bt)) ) ) ) From 33d9f6f3031f55e817e759147c3dae90b17430c1 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Sat, 9 Mar 2019 15:29:00 +0000 Subject: [PATCH 1660/2793] Typers: by-pass the unzipped list of pairs. The modified code in the Typers file was creating a list of pairs, by using a `map2` function, which was immediately _unzipped_. Thus, the first list amounted to a linear number of unneeded allocations. To avoid that intermediate list, we change the algorithm to generate the two lists directly in a single pass. To do this: - For the args1 list of trees, since the function can often return the input tree unmodified, we use the `map2Conserve` function. - For the argument types list, we use a mutable ListBuffer and append to it from within the function passed to `map2Conserve`.
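(The single-pass idea can be sketched outside the compiler as follows; the names are illustrative only, and the real code works on typed trees via `map2Conserve`.)

```scala
import scala.collection.mutable.ListBuffer

object SinglePassInsteadOfUnzipSketch {
  // Before: build a List of pairs only to unzip it into two lists.
  def withUnzip(args: List[Int]): (List[String], List[Int]) =
    args.map(a => (a.toString, a * 2)).unzip

  // After: one traversal; the second result is accumulated in a buffer
  // as a side effect while the first list is being mapped.
  def singlePass(args: List[Int]): (List[String], List[Int]) = {
    val doubled = ListBuffer.empty[Int]
    val rendered = args.map { a =>
      doubled += a * 2
      a.toString
    }
    (rendered, doubled.toList)
  }

  def main(args: Array[String]): Unit = {
    assert(withUnzip(List(1, 2, 3)) == singlePass(List(1, 2, 3)))
    println(singlePass(List(1, 2, 3))) // (List(1, 2, 3),List(2, 4, 6))
  }
}
```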
Co-authored-by: Jason Zaugg --- .../scala/tools/nsc/typechecker/Typers.scala | 31 ++++++++++--------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index a59a87a140d0..5cacbf53da9f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3516,11 +3516,11 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper catch { case _: IllegalArgumentException => args.map(_ => Nil) } // fail safe in case formalTypes fails to align to argslen else args.map(_ => Nil) // will type under argPt == WildcardType - val (args1, argTpes) = context.savingUndeterminedTypeParams() { + val argTpes: ListBuffer[Type] = ListBuffer.empty[Type] + val args1: List[Tree] = context.savingUndeterminedTypeParams() { val amode = forArgMode(fun, mode) - - map2(args, altArgPts) { (arg, argPtAlts) => - def typedArg0(tree: Tree) = { + map2Conserve(args, altArgPts) { (arg, argPtAlts) => + def typedArg0(tree: Tree): Tree = { // if we have an overloaded HOF such as `(f: Int => Int)Int (f: Char => Char)Char`, // and we're typing a function like `x => x` for the argument, try to collapse // the overloaded type into a single function type from which `typedFunction` @@ -3529,8 +3529,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (argPtAlts.nonEmpty && treeInfo.isFunctionMissingParamType(tree)) functionProto(argPtAlts) else WildcardType - val argTyped = typedArg(tree, amode, BYVALmode, argPt) - (argTyped, argTyped.tpe.deconst) + typedArg(tree, amode, BYVALmode, argPt) } arg match { @@ -3541,22 +3540,24 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper case AssignOrNamedArg(lhs@Ident(name), rhs) => // named args: only type the righthand sides ("unknown identifier" errors otherwise) // the assign is untyped; that's ok because we call doTypedApply - typedArg0(rhs) match { - case (rhsTyped, tp) => (treeCopy.AssignOrNamedArg(arg, lhs, rhsTyped), NamedType(name, tp)) - } + val rhsTyped = typedArg0(rhs) + argTpes += NamedType(name, rhsTyped.tpe.deconst) + treeCopy.AssignOrNamedArg(arg, lhs, rhsTyped) case treeInfo.WildcardStarArg(_) => - typedArg0(arg) match { - case (argTyped, tp) => (argTyped, RepeatedType(tp)) - } + val argTyped = typedArg0(arg) + argTpes += RepeatedType(argTyped.tpe.deconst) + argTyped case _ => - typedArg0(arg) + val argTyped = typedArg0(arg) + argTpes += argTyped.tpe.deconst + argTyped } - }.unzip + } } if (context.reporter.hasErrors) setError(tree) else { - inferMethodAlternative(fun, undetparams, argTpes, pt) + inferMethodAlternative(fun, undetparams, argTpes.toList, pt) doTypedApply(tree, adaptAfterOverloadResolution(fun, mode.forFunMode, WildcardType), args1, mode, pt) } } From 0afbd5d17934a3eae7bb4f83dd5c8d6892710b4b Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Tue, 12 Mar 2019 05:51:13 +0000 Subject: [PATCH 1661/2793] Backports Regression tests from 2.13.x to 2.12.x These test files are regression known to work in the 2.13.x branch, and which we have checked are also working in the current 2.12.x branch. We add them to regressions, as a further check on the development of the 2.12.x branch. 
--- test/files/neg/t1472.check | 7 +++++++ test/files/neg/t1472.scala | 16 ++++++++++++++++ test/files/neg/t2509-2.check | 7 +++++++ test/files/neg/t2509-2.flags | 1 + test/files/neg/t2509-2.scala | 28 ++++++++++++++++++++++++++++ test/files/neg/t4612.check | 6 ++++++ test/files/neg/t4612.scala | 16 ++++++++++++++++ test/files/neg/t6528.check | 4 ++++ test/files/neg/t6528.scala | 13 +++++++++++++ test/files/neg/xml-doctype.check | 10 ++++++++++ test/files/neg/xml-doctype.scala | 7 +++++++ test/files/neg/xml-entitydecl.check | 10 ++++++++++ test/files/neg/xml-entitydecl.scala | 9 +++++++++ test/files/pos/sd465.scala | 7 +++++++ test/files/pos/t10708.scala | 24 ++++++++++++++++++++++++ test/files/pos/t2030.scala | 9 +++++++++ test/files/pos/t5651.scala | 15 +++++++++++++++ test/files/pos/t6217.scala | 16 ++++++++++++++++ test/files/pos/t6317.scala | 18 ++++++++++++++++++ test/files/pos/t7662.scala | 7 +++++++ test/files/pos/t8093.scala | 4 ++++ test/files/pos/t8277.scala | 7 +++++++ test/files/pos/t9291.scala | 10 ++++++++++ test/files/pos/t9345.scala | 17 +++++++++++++++++ test/files/pos/t9371.scala | 21 +++++++++++++++++++++ test/files/pos/t9628.scala | 9 +++++++++ test/files/pos/t9818.scala | 17 +++++++++++++++++ 27 files changed, 315 insertions(+) create mode 100644 test/files/neg/t1472.check create mode 100644 test/files/neg/t1472.scala create mode 100644 test/files/neg/t2509-2.check create mode 100644 test/files/neg/t2509-2.flags create mode 100644 test/files/neg/t2509-2.scala create mode 100644 test/files/neg/t4612.check create mode 100644 test/files/neg/t4612.scala create mode 100644 test/files/neg/t6528.check create mode 100644 test/files/neg/t6528.scala create mode 100644 test/files/neg/xml-doctype.check create mode 100644 test/files/neg/xml-doctype.scala create mode 100644 test/files/neg/xml-entitydecl.check create mode 100644 test/files/neg/xml-entitydecl.scala create mode 100644 test/files/pos/sd465.scala create mode 100644 test/files/pos/t10708.scala create mode 100644 test/files/pos/t2030.scala create mode 100644 test/files/pos/t5651.scala create mode 100644 test/files/pos/t6217.scala create mode 100644 test/files/pos/t6317.scala create mode 100644 test/files/pos/t7662.scala create mode 100644 test/files/pos/t8093.scala create mode 100644 test/files/pos/t8277.scala create mode 100644 test/files/pos/t9291.scala create mode 100644 test/files/pos/t9345.scala create mode 100644 test/files/pos/t9371.scala create mode 100644 test/files/pos/t9628.scala create mode 100644 test/files/pos/t9818.scala diff --git a/test/files/neg/t1472.check b/test/files/neg/t1472.check new file mode 100644 index 000000000000..91b56004a516 --- /dev/null +++ b/test/files/neg/t1472.check @@ -0,0 +1,7 @@ +t1472.scala:7: error: illegal cyclic reference involving type Utmp + val a : (SA { type U = Utmp }) + ^ +t1472.scala:12: error: illegal cyclic reference involving type U + type Ttmp = this.a.type#T + ^ +two errors found diff --git a/test/files/neg/t1472.scala b/test/files/neg/t1472.scala new file mode 100644 index 000000000000..0caec037d017 --- /dev/null +++ b/test/files/neg/t1472.scala @@ -0,0 +1,16 @@ +object Test extends App { + type SA = { type U; type T; val f : T => (U, T) } + type SB = { type U; type T; val g : T => (U, T) } + + type S = { type Utmp = this.b.type#U + type Ttmp = this.a.type#T + val a : (SA { type U = Utmp }) + val b : (SB { type T = Ttmp }) } + + val AB : S = new { self => + type Utmp = this.b.type#U + type Ttmp = this.a.type#T + val a : (SA { type U = self.type#Utmp }) = null + val 
b : (SB { type T = self.type#Ttmp }) = null + } +} diff --git a/test/files/neg/t2509-2.check b/test/files/neg/t2509-2.check new file mode 100644 index 000000000000..f87a7e6bb52d --- /dev/null +++ b/test/files/neg/t2509-2.check @@ -0,0 +1,7 @@ +t2509-2.scala:26: error: ambiguous implicit values: + both value xb in object Test of type => X[B,Int] + and value xa in object Test of type => X[A,Boolean] + match expected type X[B,U] + val fb = f(new B) + ^ +one error found diff --git a/test/files/neg/t2509-2.flags b/test/files/neg/t2509-2.flags new file mode 100644 index 000000000000..cab9e99af3a9 --- /dev/null +++ b/test/files/neg/t2509-2.flags @@ -0,0 +1 @@ +-Xsource:3.0 diff --git a/test/files/neg/t2509-2.scala b/test/files/neg/t2509-2.scala new file mode 100644 index 000000000000..609bd8785f8a --- /dev/null +++ b/test/files/neg/t2509-2.scala @@ -0,0 +1,28 @@ +class A +class B extends A +class C extends B + +trait X[-T, U] { + val u: U +} + +object XA extends X[A, Boolean] { + val u = true +} + +object XB extends X[B, Int] { + val u = 23 +} + +object Test { + implicit def f[T, U](t: T)(implicit x: X[T, U]): U = x.u + implicit val xa: X[A, Boolean] = XA + implicit val xb: X[B, Int] = XB + + val fa = f(new A) + val ffa: Boolean = fa + + // Should be ambiguous + val fb = f(new B) + val ffb: Int = fb +} diff --git a/test/files/neg/t4612.check b/test/files/neg/t4612.check new file mode 100644 index 000000000000..2b6201b57408 --- /dev/null +++ b/test/files/neg/t4612.check @@ -0,0 +1,6 @@ +t4612.scala:13: error: type mismatch; + found : t4612.this.Bob + required: _1 + def foo = new Bob + ^ +one error found diff --git a/test/files/neg/t4612.scala b/test/files/neg/t4612.scala new file mode 100644 index 000000000000..a38fdde631ea --- /dev/null +++ b/test/files/neg/t4612.scala @@ -0,0 +1,16 @@ +class t4612 { + + trait Ann[A] { + def foo: A + } + + class Bob extends Ann[Bob] { + def foo = new Bob + + trait Cris extends Ann[Cris] { + self: Bob => + + def foo = new Bob + } + } +} diff --git a/test/files/neg/t6528.check b/test/files/neg/t6528.check new file mode 100644 index 000000000000..7820504f35f0 --- /dev/null +++ b/test/files/neg/t6528.check @@ -0,0 +1,4 @@ +t6528.scala:6: error: could not find implicit value for parameter e: CoSet[U,Any] + implicitly[CoSet[U, Any]] + ^ +one error found diff --git a/test/files/neg/t6528.scala b/test/files/neg/t6528.scala new file mode 100644 index 000000000000..3c58faed3c4d --- /dev/null +++ b/test/files/neg/t6528.scala @@ -0,0 +1,13 @@ +trait CoSet[U, +A <: U] + extends CoSetLike[U, A, ({type S[A1 <: U] = CoSet[U, A1]})#S] + +trait CoSetLike[U, +A <: U, +This[X] <: CoSetLike[U, A, This] with CoSet[U, A]] { + + implicitly[CoSet[U, Any]] + // should report "implicit not found" + // was triggering a StackOverflow as getClassParts looped over + // the steam of types: + // CoSet#6940[U#6966,A1#22868] + // CoSet#6940[U#6966,A1#22876] + // CoSet#6940[U#6966,A1#...] 
+} diff --git a/test/files/neg/xml-doctype.check b/test/files/neg/xml-doctype.check new file mode 100644 index 000000000000..0612cef5aac9 --- /dev/null +++ b/test/files/neg/xml-doctype.check @@ -0,0 +1,10 @@ +xml-doctype.scala:4: error: in XML literal: '-' expected instead of 'D' + + ^ +xml-doctype.scala:4: error: in XML literal: '-' expected instead of 'O' + + ^ +xml-doctype.scala:7: error: input ended while parsing XML +} + ^ +three errors found diff --git a/test/files/neg/xml-doctype.scala b/test/files/neg/xml-doctype.scala new file mode 100644 index 000000000000..df5406688270 --- /dev/null +++ b/test/files/neg/xml-doctype.scala @@ -0,0 +1,7 @@ +object foo { + val html = + + + + +} diff --git a/test/files/neg/xml-entitydecl.check b/test/files/neg/xml-entitydecl.check new file mode 100644 index 000000000000..71f1292b8eea --- /dev/null +++ b/test/files/neg/xml-entitydecl.check @@ -0,0 +1,10 @@ +xml-entitydecl.scala:4: error: in XML literal: '-' expected instead of 'D' + + + ]> + + +} diff --git a/test/files/pos/sd465.scala b/test/files/pos/sd465.scala new file mode 100644 index 000000000000..9b9900162113 --- /dev/null +++ b/test/files/pos/sd465.scala @@ -0,0 +1,7 @@ +object Test { + 0: Byte + 0: Int + + (+0): Byte + (+0): Int +} diff --git a/test/files/pos/t10708.scala b/test/files/pos/t10708.scala new file mode 100644 index 000000000000..19c928d66a50 --- /dev/null +++ b/test/files/pos/t10708.scala @@ -0,0 +1,24 @@ +trait BaseStream[T, S <: BaseStream[T, S]] +trait Stream[T] extends BaseStream[T, Stream[T]] +trait IntStream extends BaseStream[Integer, IntStream] + +sealed trait SS[T, S <: BaseStream[_, S]] +object SSImplicits extends Low { + implicit val IntValue: SS[Int, IntStream] = null +} +trait Low { + implicit def anyStreamShape[T]: SS[T, Stream[T]] = null +} + +import SSImplicits.{IntValue, anyStreamShape} + +class Test { + implicit def f[A, S <: BaseStream[_, S], CC](a: A)(implicit ss: SS[A, S]): S = ??? 
+ + y + x + + def x = f(0): IntStream + def y = f[String, Stream[String], Vector[String]]("") + +} diff --git a/test/files/pos/t2030.scala b/test/files/pos/t2030.scala new file mode 100644 index 000000000000..4a70cf662821 --- /dev/null +++ b/test/files/pos/t2030.scala @@ -0,0 +1,9 @@ +// scalac: -Xsource:3.0 +import scala.collection.immutable._ + +object Test extends App { + val res0 = TreeSet(1, 2, 3, 4, 5, 6) + val res1 = res0.map(x => x) + println(res0.toList == res1.toList) + println(res1.getClass) +} diff --git a/test/files/pos/t5651.scala b/test/files/pos/t5651.scala new file mode 100644 index 000000000000..a3fa657fdb4e --- /dev/null +++ b/test/files/pos/t5651.scala @@ -0,0 +1,15 @@ +object Test { + trait Exp[+T] + case class Const[T](t: T) extends Exp[T] + implicit def pure[T](t: T): Exp[T] = Const(t) + case class LiftTuple2[A1, A2](t1: Exp[A1], t2: Exp[A2]) extends Exp[(A1, A2)] + implicit def tuple2ToTuple2ExpPrime[ArgFOO1, A2, E1 <% Exp[ArgFOO1], E2 <% Exp[A2]](tuple: (E1, E2)): LiftTuple2[ArgFOO1, A2] = LiftTuple2[ArgFOO1, A2](tuple._1, tuple._2) + + val a = pure(1) + val b = pure("") + val c = pure(2) + def asExp[T](t: Exp[T]) = t //an evaluation context triggering implicit conversions + tuple2ToTuple2ExpPrime(((a, b), c)) + asExp(tuple2ToTuple2ExpPrime( ((a, b), c) )) + asExp(((a, b), c)) //does not compile +} diff --git a/test/files/pos/t6217.scala b/test/files/pos/t6217.scala new file mode 100644 index 000000000000..45b19c6138c9 --- /dev/null +++ b/test/files/pos/t6217.scala @@ -0,0 +1,16 @@ +// scalac: -Xfatal-warnings +package p { + package _root_ { + package scala { + object Option { + def apply(b: Boolean) = if (b) "true" else "false" + } + } + } +} +package p { + object Test { + import p._root_.scala.Option + def f = Option(true) + } +} diff --git a/test/files/pos/t6317.scala b/test/files/pos/t6317.scala new file mode 100644 index 000000000000..b96ad7e8a5e1 --- /dev/null +++ b/test/files/pos/t6317.scala @@ -0,0 +1,18 @@ +abstract class C { + def overloaded(foo: String, bar: String): String + def overloaded(foo: String, bar: String, baz: String): Unit +} + +class ScalaCompilerKiller { + implicit def CWrapper(c: C) = new { + def overloaded(request: Any): Unit = {} + } + + val sps = List[(String, String)]() + + // to repro, need: implicit conversion, overloading, pair in synthetic scrutinee in function passed to higher-order method + (null: C).overloaded(sps.map(/* _ match */ { case (r, _) => r })) + + // workaround ... 
+ (null: C).overloaded(sps.map(_ match { case (r, _) => r })) +} diff --git a/test/files/pos/t7662.scala b/test/files/pos/t7662.scala new file mode 100644 index 000000000000..82d48afb47ef --- /dev/null +++ b/test/files/pos/t7662.scala @@ -0,0 +1,7 @@ +abstract class Dist[@specialized(AnyRef) A, @specialized(Int) B] { + def apply(a: A): A + def iterateUntil(): Dist[A, B] = new Dist[A, B] { + def loop(a: A): A = a + def apply(a: A): A = loop(a) + } +} diff --git a/test/files/pos/t8093.scala b/test/files/pos/t8093.scala new file mode 100644 index 000000000000..e416c97aff77 --- /dev/null +++ b/test/files/pos/t8093.scala @@ -0,0 +1,4 @@ +package java +package lang + +object String diff --git a/test/files/pos/t8277.scala b/test/files/pos/t8277.scala new file mode 100644 index 000000000000..bb8c82ca7e19 --- /dev/null +++ b/test/files/pos/t8277.scala @@ -0,0 +1,7 @@ +class A{ + def p() = { + lazy val s = 1 + lazy val d = () + s + } +} diff --git a/test/files/pos/t9291.scala b/test/files/pos/t9291.scala new file mode 100644 index 000000000000..c1e206bd24e4 --- /dev/null +++ b/test/files/pos/t9291.scala @@ -0,0 +1,10 @@ +// more than one field is required to trigger crash +// there must be a default value for one of the parameters +case class OuterObject(field: Int = 1, anotherField: Int = 2) + +object Test { + OuterObject().copy(field = OuterObject().field) + + // declaring something without explicit type, with the same name as OuterObject.field + def field = "anything" +} diff --git a/test/files/pos/t9345.scala b/test/files/pos/t9345.scala new file mode 100644 index 000000000000..1038557b2801 --- /dev/null +++ b/test/files/pos/t9345.scala @@ -0,0 +1,17 @@ +trait Matcher[AA] +case object MatchOne extends Matcher[Int] + +object CollectIssue { + def apply[A](m: Matcher[A]): A = m match { + case MatchOne => + // This seems to break GADT refinement of A to Int. + // Comment it out and the program typechecks. + // Expanding the pattern matching anon partial function manually + // also allows compilation. + { case _ => 0 }: PartialFunction[Any, Int] + + // should conform to A, but doesn't. + + 1 + } +} diff --git a/test/files/pos/t9371.scala b/test/files/pos/t9371.scala new file mode 100644 index 000000000000..8448989a4d28 --- /dev/null +++ b/test/files/pos/t9371.scala @@ -0,0 +1,21 @@ +import scala.annotation.tailrec + +object TestCase { + + sealed trait Result[+A] + + type Operation[A] = Int => Result[A] + + case class Terminate[A](state: Int, value: A) extends Result[A] + case class Continue[A](state: Int, cont: Operation[A]) extends Result[A] + + @tailrec + def runConversion[A](state: Int, op: Operation[A]): (Int, A) = { + op(state) match { + case Continue(s, c) => + runConversion(s, c) + case Terminate(s, v) => + (s, v) + } + } +} diff --git a/test/files/pos/t9628.scala b/test/files/pos/t9628.scala new file mode 100644 index 000000000000..e418c9283b5f --- /dev/null +++ b/test/files/pos/t9628.scala @@ -0,0 +1,9 @@ +case class Foo(bar: String, foo: String) +case class Bar(bar: String) + +object FooBar { + def crash(): Unit = { + val foo = Foo("foo", "bar").copy(foo = "foo") + val bar = Bar(foo.bar) + } +} diff --git a/test/files/pos/t9818.scala b/test/files/pos/t9818.scala new file mode 100644 index 000000000000..2bdd0c385f44 --- /dev/null +++ b/test/files/pos/t9818.scala @@ -0,0 +1,17 @@ +trait A { + def g(x: Int = 0, y: Int = 1) = x + y + + def x: Int = ??? 
+ + def ref: A +} + +trait B { + def f(a: Int, b: Int = 0) = a + b + + def foo(in: A): Unit = { + import in._ + + ref.g(x = f(0)) + } +} From 6e42ccadda96b834b42628370e9aff24895429af Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Tue, 12 Mar 2019 07:42:57 +0000 Subject: [PATCH 1662/2793] Replace the uses of map for substInfo The substInfo method from the `Symbol` class is a side-effectful mutation: it modifies the internal data of the object that receives it. As such, using a `map` to perform an operation on all elements only creates an extra list that is not needed. Thus, we replace the `map` with a `foreach`. --- .../scala/tools/nsc/transform/SpecializeTypes.scala | 3 ++- src/reflect/scala/reflect/internal/Symbols.scala | 6 ++++-- src/reflect/scala/reflect/internal/Types.scala | 3 ++- 3 files changed, 8 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 857c9430cc79..e35caa8d38a2 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -568,7 +568,8 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { if (env.contains(orig)) cln modifyInfo (info => TypeBounds(info.lowerBound, AnyRefTpe)) } - cloned map (_ substInfo (syms, cloned)) + cloned.foreach(_.substInfo(syms, cloned)) + cloned } /** Maps AnyRef bindings from a raw environment (holding AnyRefs) into type parameters from diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 17e651a78de0..9cffce4ea312 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -3616,7 +3616,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ def deriveSymbols(syms: List[Symbol], symFn: Symbol => Symbol): List[Symbol] = { val syms1 = mapList(syms)(symFn) - mapList(syms1)(_ substInfo (syms, syms1)) + syms1.foreach(_.substInfo(syms, syms1)) + syms1 } /** Derives a new list of symbols from the given list by mapping the given @@ -3631,7 +3632,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ def deriveSymbols2[A](syms: List[Symbol], as: List[A], symFn: (Symbol, A) => Symbol): List[Symbol] = { val syms1 = map2(syms, as)(symFn) - mapList(syms1)(_ substInfo (syms, syms1)) + syms1.foreach(_.substInfo(syms, syms1)) + syms1 } /** Derives a new Type by first deriving new symbols as in deriveSymbols, diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index a7c10732f65c..6a653d1c5140 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -3959,7 +3959,8 @@ trait Types val eparams = tparams map (tparam => clazz.newExistential(tparam.name.toTypeName, clazz.pos) setInfo tparam.info.bounds) - eparams map (_ substInfo (tparams, eparams)) + eparams foreach (_.substInfo(tparams, eparams)) + eparams } def typeParamsToExistentials(clazz: Symbol): List[Symbol] = typeParamsToExistentials(clazz, clazz.typeParams) From b2bee0c095fbe2ec680e55cad2ba8aae523f9438 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Tue, 12 Mar 2019 09:31:28 +0000 Subject: [PATCH 1663/2793] Optimisation: replace mapParamss with a foreachParamss We complement the `mapParamss` function of the Symbols cake slice with a foreachParamss method, that performs a side-effectful action. 
We replace several uses of mapParamss with the `foreachParamss`, to avoid the extra allocations. We also do other optimisations, such as merging a map followed by a foldLeft into the fold, and a map followed by a foreach into the foreach (which is another fold). --- .../scala/tools/nsc/transform/SpecializeTypes.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 4 ++-- .../scala/tools/nsc/typechecker/SuperAccessors.scala | 7 ++++--- src/reflect/scala/reflect/internal/Symbols.scala | 2 ++ 4 files changed, 9 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 857c9430cc79..4c6037c7e601 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -696,7 +696,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { // resolved by the type checker. Later on, erasure re-typechecks everything and // chokes if it finds default parameters for specialized members, even though // they are never needed. - mapParamss(sym)(_ resetFlag DEFAULTPARAM) + foreachParamss(sym)(_ resetFlag DEFAULTPARAM) decls1 enter subst(fullEnv)(sym) } diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index e3e3bf7737fe..0ff03b937947 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1357,9 +1357,9 @@ abstract class RefChecks extends Transform { } // types of the value parameters - mapParamss(member)(p => checkAccessibilityOfType(p.tpe)) + foreachParamss(member)(p => checkAccessibilityOfType(p.tpe)) // upper bounds of type parameters - member.typeParams.map(_.info.upperBound.widen) foreach checkAccessibilityOfType + member.typeParams.foreach(tp => checkAccessibilityOfType(tp.info.upperBound.widen)) } private def checkByNameRightAssociativeDef(tree: DefDef) { diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index 68ee0eb86416..7e23c53c90dc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -481,9 +481,10 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT val code = DefDef(newAcc, { val (receiver :: _) :: tail = newAcc.paramss val base: Tree = Select(Ident(receiver), sym) - val allParamTypes = mapParamss(sym)(_.tpe) - val args = map2(tail, allParamTypes)((params, tpes) => map2(params, tpes)(makeArg(_, receiver, _))) - args.foldLeft(base)(Apply(_, _)) + foldLeft2(tail, sym.info.paramss)(base){ (acc, params, pps) => + val y = map2(params, pps)( (param, pp) => makeArg(param, receiver, pp.tpe)) + Apply(acc, y) + } }) debuglog("created protected accessor: " + code) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 17e651a78de0..8f871b42078d 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -3707,6 +3707,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ def mapParamss[T](sym: Symbol)(f: Symbol => T): List[List[T]] = mmap(sym.info.paramss)(f) + def foreachParamss(sym: Symbol)(f: Symbol => Unit): Unit = mforeach(sym.info.paramss)(f) + def existingSymbols(syms: 
List[Symbol]): List[Symbol] = syms filter (s => (s ne null) && (s ne NoSymbol)) From 5319a9e1f5182ba6cbcb94c13020a40c733d22b4 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Tue, 12 Mar 2019 10:12:49 +0000 Subject: [PATCH 1664/2793] Superaccessors: merge several List method. The modified code was performing a `map`, followed by a `find`, followed by a `foreach`. The first `map` was allocating an intermediate list that was immediately consumed. We use fusion to join those three operations in a single foreach, to traverse without extra list allocations. --- .../scala/tools/nsc/typechecker/SuperAccessors.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index 68ee0eb86416..add6b948d3b0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -157,8 +157,9 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT if (mix == tpnme.EMPTY && !owner.isTrait) { // scala/bug#4989 Check if an intermediate class between `clazz` and `owner` redeclares the method as abstract. val intermediateClasses = clazz.info.baseClasses.tail.takeWhile(_ != owner) - intermediateClasses.map(sym.overridingSymbol).find(s => s.isDeferred && !s.isAbstractOverride && !s.owner.isTrait).foreach { - absSym => + intermediateClasses.foreach { icls => + val absSym = sym.overridingSymbol(icls) + if (absSym.isDeferred && !absSym.isAbstractOverride && !absSym.owner.isTrait) reporter.error(sel.pos, s"${sym.fullLocationString} cannot be directly accessed from $clazz because ${absSym.owner} redeclares it as abstract") } } From 288d49a068e816d4702c8f5f6b5f14df8a6cdb59 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Tue, 12 Mar 2019 16:02:05 +0000 Subject: [PATCH 1665/2793] Definitions - SymbolSet class: deforest middle list. The code to compute the `commonOwner` was using a List.map, that allocates a new list, followed by a `distinct` operation, that may generate a smaller one, followed by a singleton-list check. We can achieve the same using a comparison between the head and all of the elements in the tail, at no extra List allocation cost. --- src/reflect/scala/reflect/internal/Definitions.scala | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 74bbed336cc9..2828db3e01d4 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -162,10 +162,11 @@ trait Definitions extends api.StandardDefinitions { lazy val ScalaNumericValueClassesSet: SymbolSet = new SymbolSet(ScalaNumericValueClasses) final class SymbolSet(syms: List[Symbol]) { private[this] val ids: Array[Symbol] = syms.toArray - private[this] val commonOwner = syms.map(_.rawowner).distinct match { - case common :: Nil => common - case _ => null - } + private[this] val commonOwner = + if (syms.isEmpty) null else { + val hhOwner = syms.head.rawowner + if (syms.tail.forall(_.rawowner == hhOwner)) hhOwner else null + } final def contains(sym: Symbol): Boolean = { if (commonOwner != null && (commonOwner ne sym.rawowner)) return false From 9a41c4cd6f63b56c9c5fb946900ee09072f89e95 Mon Sep 17 00:00:00 2001 From: "Diego E. 
Alonso-Blas" Date: Wed, 13 Mar 2019 21:36:57 +0000 Subject: [PATCH 1666/2793] Remove List Allocations in the Checkable Within the Checkable file, we add some changes to avoid some List allocations. - `propagateKnownTypes`: we merge the filter of base classes into the foreach block, as an if condition - `typeArgsInTopLevel`: we replace the recursive List creation and flatMapping with a ListBuffer and a recursive loop, we merge a List `map` into a `foreach`, and we replace the `filterNot` by a guard condition when adding into the buffer. - `allChildrenAreIrreconcilable`: we do not need to transform `toList` just to iterate (which forall does). However, we store the result of the `sealedChildren` since that can be expensive to compute. - In the `isNeverSubArg`, we merge the `map` of variance into the `exists3` fold. This may also have the side benefit of avoiding allocations of the `Variance` value class. --- .../tools/nsc/typechecker/Checkable.scala | 51 +++++++++++-------- 1 file changed, 30 insertions(+), 21 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala index 3a3485e20ad7..3466c716c9bc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Checkable.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Checkable.scala @@ -15,6 +15,7 @@ package typechecker import Checkability._ import scala.language.postfixOps +import scala.collection.mutable.ListBuffer /** On pattern matcher checkability: * @@ -85,9 +86,8 @@ trait Checkable { def tparams = to.typeParams val tvars = tparams map (p => TypeVar(p)) val tvarType = appliedType(to, tvars) - val bases = from.baseClasses filter (to.baseClasses contains _) - bases foreach { bc => + from.baseClasses foreach { bc => if (to.baseClasses.contains(bc)){ val tps1 = (from baseType bc).typeArgs val tps2 = (tvarType baseType bc).typeArgs devWarningIf(!sameLength(tps1, tps2)) { @@ -106,9 +106,9 @@ trait Checkable { // else if (tparam.isContravariant) tp2 <:< tp1 // else tp1 =:= tp2 // ) - } + }} - val resArgs = tparams zip tvars map { + val resArgs = map2(tparams, tvars){ case (_, tvar) if tvar.instValid => tvar.constr.inst case (tparam, _) => tparam.tpeHK } @@ -127,14 +127,23 @@ trait Checkable { private def uncheckedOk(tp: Type) = tp hasAnnotation UncheckedClass private def typeArgsInTopLevelType(tp: Type): List[Type] = { - val tps = tp match { - case RefinedType(parents, _) => parents flatMap typeArgsInTopLevelType - case TypeRef(_, ArrayClass, arg :: Nil) => if (arg.typeSymbol.isAbstractType) arg :: Nil else typeArgsInTopLevelType(arg) - case TypeRef(pre, sym, args) => typeArgsInTopLevelType(pre) ++ args - case ExistentialType(tparams, underlying) => tparams.map(_.tpe) ++ typeArgsInTopLevelType(underlying) - case _ => Nil + val res: ListBuffer[Type] = ListBuffer.empty[Type] + def add(t: Type) = if (!isUnwarnableTypeArg(t)) res += t + def loop(tp: Type): Unit = tp match { + case RefinedType(parents, _) => + parents foreach loop + case TypeRef(_, ArrayClass, arg :: Nil) => + if (arg.typeSymbol.isAbstractType) add(arg) else loop(arg) + case TypeRef(pre, sym, args) => + loop(pre) + args.foreach(add) + case ExistentialType(tparams, underlying) => + tparams.foreach(tp => add(tp.tpe)) + loop(underlying) + case _ => () } - tps filterNot isUnwarnableTypeArg + loop(tp) + res.toList } private def scrutConformsToPatternType(scrut: Type, pattTp: Type): Boolean = { @@ -219,13 +228,12 @@ trait Checkable { && !(sym2 isSubClass sym1) ) /** Are all children of these symbols 
pairwise irreconcilable? */ - def allChildrenAreIrreconcilable(sym1: Symbol, sym2: Symbol) = ( - sym1.sealedChildren.toList forall (c1 => - sym2.sealedChildren.toList forall (c2 => - areIrreconcilableAsParents(c1, c2) - ) - ) - ) + def allChildrenAreIrreconcilable(sym1: Symbol, sym2: Symbol) = { + val sc1 = sym1.sealedChildren + val sc2 = sym2.sealedChildren + sc1.forall(c1 => sc2.forall(c2 => areIrreconcilableAsParents(c1, c2))) + } + /** Is it impossible for the given symbols to be parents in the same class? * This means given A and B, can there be an instance of A with B? This is the * case if neither A nor B is a subclass of the other, and one of the following @@ -255,13 +263,14 @@ trait Checkable { def isNeverSubClass(sym1: Symbol, sym2: Symbol) = areIrreconcilableAsParents(sym1, sym2) private def isNeverSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol]): Boolean = /*logResult(s"isNeverSubArgs($tps1, $tps2, $tparams)")*/ { - def isNeverSubArg(t1: Type, t2: Type, variance: Variance) = ( + def isNeverSubArg(t1: Type, t2: Type, tparam: Symbol) = { + val variance = tparam.variance if (variance.isInvariant) isNeverSameType(t1, t2) else if (variance.isCovariant) isNeverSubType(t2, t1) else if (variance.isContravariant) isNeverSubType(t1, t2) else false - ) - exists3(tps1, tps2, tparams map (_.variance))(isNeverSubArg) + } + exists3(tps1, tps2, tparams)(isNeverSubArg) } private def isNeverSameType(tp1: Type, tp2: Type): Boolean = (tp1, tp2) match { case (TypeRef(_, sym1, args1), TypeRef(_, sym2, args2)) => From 6a58cce5a0dcd4db42f6f44ee775ef351ff426b4 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Wed, 13 Mar 2019 14:27:28 +0000 Subject: [PATCH 1667/2793] Optimisations: avoid List allocations in the RefChecks. We add some small optimisations to the code in the RefChecks - We replace the `map length` calls with the utility methods in the Collections traits. - We replace a combined use of flatten, map, zip, and filter with the use of an iterator and a special iterator function. - We add to the Collections utils a function to create a special iterator, which combines zip, filter, and collect functions. 
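For illustration, here is a minimal self-contained sketch of the fused zip/filter/collect traversal the message describes. The object and value names below are illustrative only, not the compiler's actual `Collections` code; they just show how two sequences can be walked in lockstep while producing only the interesting pairs, with no intermediate list.

```
// Hedged sketch: a fused zip + filter + collect over two iterators.
object MapFilter2Sketch {
  def mapFilter2[A, B, C](as: Iterator[A], bs: Iterator[B])(f: (A, B) => Option[C]): Iterator[C] =
    new Iterator[C] {
      private var lookahead: Option[C] = None
      private def advance(): Unit =
        while (lookahead.isEmpty && as.hasNext && bs.hasNext)
          lookahead = f(as.next(), bs.next())
      def hasNext: Boolean = { advance(); lookahead.isDefined }
      def next(): C = {
        advance()
        val c = lookahead.getOrElse(throw new NoSuchElementException("next on empty iterator"))
        lookahead = None
        c
      }
    }

  def main(args: Array[String]): Unit = {
    val expected = List("Int", "String", "Int")
    val actual   = List("Int", "Long", "Double")
    // Keep only mismatching pairs, and stop after the first two,
    // in the spirit of the `.take(2).toList` used by the arity check.
    val mismatches = mapFilter2(expected.iterator, actual.iterator) {
      (e, a) => if (e == a) None else Some(e -> a)
    }.take(2).toList
    println(mismatches) // List((String,Long), (Int,Double))
  }
}
```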
--- .../tools/nsc/typechecker/RefChecks.scala | 16 ++++++++----- .../reflect/internal/util/Collections.scala | 24 +++++++++++++++++++ 2 files changed, 34 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 0ff03b937947..0316cfcbbf4a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -686,16 +686,20 @@ abstract class RefChecks extends Transform { val matchingArity = matchingName filter { m => !m.isDeferred && (m.name == underlying.name) && - (m.paramLists.length == abstractParamLists.length) && - (m.paramLists.map(_.length).sum == abstractParamLists.map(_.length).sum) && - (m.tpe.typeParams.size == underlying.tpe.typeParams.size) + sameLength(m.paramLists, abstractParamLists) && + sumSize(m.paramLists, 0) == sumSize(abstractParamLists, 0) && + sameLength(m.tpe.typeParams, underlying.tpe.typeParams) } matchingArity match { // So far so good: only one candidate method case Scope(concrete) => - val mismatches = abstractParamLists.flatten.map(_.tpe) zip concrete.paramLists.flatten.map(_.tpe) filterNot { case (x, y) => x =:= y } - mismatches match { + val aplIter = abstractParamLists .iterator.flatten + val cplIter = concrete.paramLists.iterator.flatten + def mismatch(apl: Symbol, cpl: Symbol): Option[(Type, Type)] = + if (apl.tpe =:= cpl.tpe) None else Some(apl.tpe -> cpl.tpe) + + mapFilter2(aplIter, cplIter)(mismatch).take(2).toList match { // Only one mismatched parameter: say something useful. case (pa, pc) :: Nil => val abstractSym = pa.typeSymbol @@ -724,7 +728,7 @@ abstract class RefChecks extends Transform { ) undefined("\n(Note that %s does not match %s%s)".format(pa, pc, addendum)) - case xs => + case _ => undefined("") } case _ => diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index c75c44a10873..7adc294112e1 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -16,6 +16,7 @@ package reflect.internal.util import scala.collection.{ mutable, immutable } import scala.annotation.tailrec import mutable.ListBuffer +import java.util.NoSuchElementException /** Profiler driven changes. * TODO - inlining doesn't work from here because of the bug that @@ -308,6 +309,29 @@ trait Collections { true } + final def mapFilter2[A, B, C](itA: Iterator[A], itB: Iterator[B])(f: (A, B) => Option[C]): Iterator[C] = + new Iterator[C] { + private[this] var head: Option[C] = None + private[this] def advanceHead(): Unit = + while (head.isEmpty && itA.hasNext && itB.hasNext) { + val x = itA.next + val y = itB.next + head = f(x, y) + } + + def hasNext: Boolean = { + advanceHead() + ! 
head.isEmpty + } + + def next(): C = { + advanceHead() + val res = head getOrElse (throw new NoSuchElementException("next on empty Iterator")) + head = None + res + } + } + // "Opt" suffix or traverse clashes with the various traversers' traverses final def sequenceOpt[A](as: List[Option[A]]): Option[List[A]] = traverseOpt(as)(identity) final def traverseOpt[A, B](as: List[A])(f: A => Option[B]): Option[List[B]] = From afe6d85ea97655fa61d92c6a6fcf7f43677d40d7 Mon Sep 17 00:00:00 2001 From: Enno Runne <458526+ennru@users.noreply.github.com> Date: Wed, 13 Mar 2019 21:15:35 +0100 Subject: [PATCH 1668/2793] [backport] Prefer HTTPS over HTTP for pom.xml data --- build.sbt | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/build.sbt b/build.sbt index fca522798fa2..96bc3fd0cc46 100644 --- a/build.sbt +++ b/build.sbt @@ -207,7 +207,7 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings + "-doc-source-url", s"https://github.com/scala/scala/tree/${versionProperties.value.githubTree}€{FILE_PATH_EXT}#L€{FILE_LINE}" ), incOptions := (incOptions in LocalProject("root")).value, - apiURL := Some(url("http://www.scala-lang.org/api/" + versionProperties.value.mavenVersion + "/")), + apiURL := Some(url("https://www.scala-lang.org/api/" + versionProperties.value.mavenVersion + "/")), pomIncludeRepository := { _ => false }, pomExtra := { val base = @@ -276,9 +276,9 @@ def fixPom(extra: (String, scala.xml.Node)*): Setting[_] = { "/project/organization" -> LAMP/EPFL - http://lamp.epfl.ch/ + https://lamp.epfl.ch/ , - "/project/url" -> http://www.scala-lang.org/ + "/project/url" -> https://www.scala-lang.org/ ) ++ extra) } } From 0bd07f9f8c24d88aba2daa29e5791d5204887f84 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Sat, 16 Mar 2019 18:46:13 +0000 Subject: [PATCH 1669/2793] Replace "zipped" by combinators without list allocations. The `zipped` function performs a linear number of list allocations. In some places, it was used to feed the result into a `map` or a `foreach`. This commit replaces those usages with the `map2`, or `foreach2` functions from the collections utility. 
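To make the pattern concrete, here is a small standalone sketch of pairwise combinators in the spirit of `foreach2`/`map2` (names and bodies are illustrative, not the compiler's `Collections` implementation): both lists are traversed directly, so no intermediate list of tuples is allocated the way `(xs, ys).zipped` would.

```
// Hedged sketch: allocation-light pairwise traversal of two lists.
object PairwiseSketch {
  def foreach2[A, B](as: List[A], bs: List[B])(f: (A, B) => Unit): Unit = {
    var a = as; var b = bs
    while (a.nonEmpty && b.nonEmpty) {
      f(a.head, b.head)          // visit the pair without building a Tuple2 list
      a = a.tail; b = b.tail
    }
  }

  def map2[A, B, C](as: List[A], bs: List[B])(f: (A, B) => C): List[C] = {
    val buf = List.newBuilder[C]
    var a = as; var b = bs
    while (a.nonEmpty && b.nonEmpty) {
      buf += f(a.head, b.head)
      a = a.tail; b = b.tail
    }
    buf.result()
  }

  def main(args: Array[String]): Unit = {
    foreach2(List("lo", "hi"), List(1, 2))((name, bound) => println(s"$name -> $bound"))
    println(map2(List(1, 2, 3), List(10, 20, 30))(_ + _)) // List(11, 22, 33)
  }
}
```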
--- .../scala/tools/nsc/transform/patmat/MatchAnalysis.scala | 5 ++++- src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 5 ++--- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 5 ++--- .../scala/tools/nsc/typechecker/TypeDiagnostics.scala | 4 ++-- 5 files changed, 11 insertions(+), 10 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala index 2d311e5e6d85..71432b8ed6f1 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchAnalysis.scala @@ -337,7 +337,10 @@ trait MatchApproximation extends TreeAndTypeAnalysis with ScalaLogic with MatchT // debug.patmat ("normalize subst: "+ normalize) val okSubst = Substitution(unboundFrom.toList, unboundTo.toList) // it's important substitution does not duplicate trees here -- it helps to keep hash consing simple, anyway - pointsToBound ++= ((okSubst.from, okSubst.to).zipped filter { (f, t) => pointsToBound exists (sym => t.exists(_.symbol == sym)) })._1 + foreach2(okSubst.from, okSubst.to){(f, t) => + if (pointsToBound exists (sym => t.exists(_.symbol == sym))) + pointsToBound += f + } // debug.patmat("pointsToBound: "+ pointsToBound) accumSubst >>= okSubst diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index b62ec028b0b8..81be8539d468 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -1045,9 +1045,8 @@ trait ContextErrors { private[scala] def NotWithinBoundsErrorMessage(prefix: String, targs: List[Type], tparams: List[Symbol], explaintypes: Boolean) = { if (explaintypes) { val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, targs).bounds) - (targs, bounds).zipped foreach ((targ, bound) => explainTypes(bound.lo, targ)) - (targs, bounds).zipped foreach ((targ, bound) => explainTypes(targ, bound.hi)) - () + foreach2(targs, bounds)((targ, bound) => explainTypes(bound.lo, targ)) + foreach2(targs, bounds)((targ, bound) => explainTypes(targ, bound.hi)) } prefix + "type arguments " + targs.mkString("[", ",", "]") + diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 2e66eff3c82d..87fe2ba17acb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -143,7 +143,7 @@ trait Infer extends Checkable { */ def solvedTypes(tvars: List[TypeVar], tparams: List[Symbol], variances: List[Variance], upper: Boolean, depth: Depth): List[Type] = { if (tvars.isEmpty) Nil else { - printTyping("solving for " + parentheses((tparams, tvars).zipped map ((p, tv) => s"${p.name}: $tv"))) + printTyping("solving for " + parentheses(map2(tparams, tvars)((p, tv) => s"${p.name}: $tv"))) // !!! What should be done with the return value of "solve", which is at present ignored? 
// The historical commentary says "no panic, it's good enough to just guess a solution, // we'll find out later whether it works", meaning don't issue an error here when types diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 0316cfcbbf4a..95b1c25a7afd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1239,9 +1239,8 @@ abstract class RefChecks extends Transform { reporter.error(tree0.pos, ex.getMessage()) if (settings.explaintypes) { val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, argtps).bounds) - (argtps, bounds).zipped map ((targ, bound) => explainTypes(bound.lo, targ)) - (argtps, bounds).zipped map ((targ, bound) => explainTypes(targ, bound.hi)) - () + foreach2(argtps, bounds)((targ, bound) => explainTypes(bound.lo, targ)) + foreach2(argtps, bounds)((targ, bound) => explainTypes(targ, bound.hi)) } } private def isIrrefutable(pat: Tree, seltpe: Type): Boolean = pat match { diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 125c747c438d..6a7e527f9ad8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -224,8 +224,8 @@ trait TypeDiagnostics { val params = req.typeConstructor.typeParams if (foundArgs.nonEmpty && foundArgs.length == reqArgs.length) { - val relationships = (foundArgs, reqArgs, params).zipped map { - case (arg, reqArg, param) => + val relationships = map3(foundArgs, reqArgs, params){ + (arg, reqArg, param) => def mkMsg(isSubtype: Boolean) = { val op = if (isSubtype) "<:" else ">:" val suggest = if (isSubtype) "+" else "-" From 98e59b41c96a9b03d7030de1eec6a418c67a84a0 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 19 Mar 2019 17:06:21 +1000 Subject: [PATCH 1670/2793] Recycle the hashset used for implicit shadowing --- .../tools/nsc/typechecker/Implicits.scala | 30 ++++++++++++------- 1 file changed, 20 insertions(+), 10 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index e5e77f9f26cf..f7544e6c18bf 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -21,10 +21,10 @@ package typechecker import scala.annotation.tailrec import scala.collection.mutable -import mutable.{ LinkedHashMap, ListBuffer } +import mutable.{LinkedHashMap, ListBuffer} import scala.util.matching.Regex import symtab.Flags._ -import scala.reflect.internal.util.{TriState, Statistics, StatisticsStatics} +import scala.reflect.internal.util.{ReusableInstance, Statistics, StatisticsStatics, TriState} import scala.reflect.internal.TypesStats import scala.language.implicitConversions @@ -919,8 +919,6 @@ trait Implicits { * enclosing scope, and so on. */ class ImplicitComputation(iss: Infoss, isLocalToCallsite: Boolean) { - private val shadower: Shadower = if (isLocalToCallsite) new LocalShadower else NoShadower - private var best: SearchResult = SearchFailure private def isIneligible(info: ImplicitInfo) = ( @@ -931,7 +929,7 @@ trait Implicits { /** True if a given ImplicitInfo (already known isValid) is eligible. 
*/ - def survives(info: ImplicitInfo) = ( + def survives(info: ImplicitInfo, shadower: Shadower) = ( !isIneligible(info) // cyclic, erroneous, shadowed, or specially excluded && isPlausiblyCompatible(info.tpe, wildPt) // optimization to avoid matchesPt && !shadower.isShadowed(info.name) // OPT rare, only check for plausible candidates @@ -987,9 +985,9 @@ trait Implicits { /** Sorted list of eligible implicits. */ - val eligible = { + val eligible = Shadower.using(isLocalToCallsite){ shadower => val matches = iss flatMap { is => - val result = is filter (info => checkValid(info.sym) && survives(info)) + val result = is filter (info => checkValid(info.sym) && survives(info, shadower)) shadower addInfos is result } @@ -1655,14 +1653,26 @@ trait Implicits { def addInfos(infos: Infos): Unit def isShadowed(name: Name): Boolean } + object Shadower { + private[this] val localShadowerCache = new ReusableInstance[LocalShadower](() => new LocalShadower) + + def using[T](local: Boolean)(f: Shadower => T): T = + if (local) localShadowerCache.using { shadower => + shadower.clear() + f(shadower) + } + else f(NoShadower) + } /** Used for exclude implicits from outer scopes that are shadowed by same-named implicits */ private final class LocalShadower extends Shadower { - val shadowed = util.HashSet[Name](512) + // OPT: using j.l.HashSet as that retains the internal array on clear(), which makes it worth caching. + val shadowed = new java.util.HashSet[Name](512) def addInfos(infos: Infos): Unit = { - infos.foreach(i => shadowed.addEntry(i.name)) + infos.foreach(i => shadowed.add(i.name)) } - def isShadowed(name: Name) = shadowed(name) + def isShadowed(name: Name) = shadowed.contains(name) + def clear(): Unit = shadowed.clear() } /** Used for the implicits of expected type, when no shadowing checks are needed. */ private object NoShadower extends Shadower { From c5a69fdb50a0a3da63dae13ec614fe7397314b8f Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Tue, 19 Mar 2019 19:39:14 -0400 Subject: [PATCH 1671/2793] Don't give unhelpful unemptiness advice. As we've recently learned, sometimes `!isEmpty` is simply better than `nonEmpty`, performance-wise. Thus, let IntelliJ not bring the yellow highlight of shame down upon us for using it. --- src/intellij/scala.ipr.SAMPLE | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index 10985a2de98e..b5f03d96d7e8 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -18,7 +18,8 @@
- From b87e2c5b3b691b232a2ff269028fdefbbfa5e3b2 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Tue, 19 Mar 2019 19:42:48 -0400 Subject: [PATCH 1672/2793] mailmap me! --- .mailmap | 1 + 1 file changed, 1 insertion(+) diff --git a/.mailmap b/.mailmap index 7cab5ed019c7..393f40e46445 100644 --- a/.mailmap +++ b/.mailmap @@ -30,6 +30,7 @@ Eugene Burmako Eugene Vigdorchik François Garillot Geoff Reedy +Harrison Houghton Ilya Sergei Ingo Maier Ingo Maier From cad96e244b7743b6a3fea6b446ea0722850e836a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 20 Mar 2019 11:58:06 +1000 Subject: [PATCH 1673/2793] Refactor adjustTypeArgs, giving its result record a name --- .../tools/nsc/typechecker/Implicits.scala | 9 +- .../scala/tools/nsc/typechecker/Infer.scala | 91 ++++++++----------- 2 files changed, 41 insertions(+), 59 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index e5e77f9f26cf..5bd412336781 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -599,9 +599,9 @@ trait Implicits { false } else { val targs = solvedTypes(tvars, allUndetparams, allUndetparams map varianceInType(wildPt), upper = false, lubDepth(tpInstantiated :: wildPt :: Nil)) - val AdjustedTypeArgs(okParams, okArgs) = adjustTypeArgs(allUndetparams, tvars, targs) - val remainingUndet = allUndetparams diff okParams - val tpSubst = deriveTypeWithWildcards(remainingUndet)(tp.instantiateTypeParams(okParams, okArgs)) + val adjusted = adjustTypeArgs(allUndetparams, tvars, targs) + val remainingUndet = allUndetparams diff adjusted.okParams + val tpSubst = deriveTypeWithWildcards(remainingUndet)(tp.instantiateTypeParams(adjusted.okParams, adjusted.okArgs)) if(!matchesPt(tpSubst, wildPt, remainingUndet)) { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch2) false @@ -820,7 +820,8 @@ trait Implicits { // filter out failures from type inference, don't want to remove them from undetParams! 
// we must be conservative in leaving type params in undetparams // prototype == WildcardType: want to remove all inferred Nothings - val AdjustedTypeArgs(okParams, okArgs) = adjustTypeArgs(undetParams, tvars, targs) + val adjusted = adjustTypeArgs(undetParams, tvars, targs) + import adjusted.{okParams, okArgs} val subst: TreeTypeSubstituter = if (okParams.isEmpty) EmptyTreeTypeSubstituter diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 87fe2ba17acb..d445c077d89c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -13,7 +13,8 @@ package scala.tools.nsc package typechecker -import scala.collection.{ mutable, immutable } +import scala.collection.mutable.ListBuffer +import scala.collection.{immutable, mutable} import scala.util.control.ControlThrowable import symtab.Flags._ import scala.reflect.internal.Depth @@ -447,8 +448,11 @@ trait Infer extends Checkable { * @return map from tparams to inferred arg, if inference was successful, tparams that map to None are considered left undetermined * type parameters that are inferred as `scala.Nothing` and that are not covariant in `restpe` are taken to be undetermined */ - def adjustTypeArgs(tparams: List[Symbol], tvars: List[TypeVar], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs.Result = { - val buf = AdjustedTypeArgs.Result.newBuilder[Symbol, Option[Type]] + def adjustTypeArgs(tparams: List[Symbol], tvars: List[TypeVar], targs: List[Type], restpe: Type = WildcardType): AdjustedTypeArgs = { + val okParams = ListBuffer[Symbol]() + val okArgs = ListBuffer[Type]() + val undetParams = ListBuffer[Symbol]() + val allArgs = ListBuffer[Type]() foreach3(tparams, tvars, targs) { (tparam, tvar, targ) => val retract = ( @@ -456,18 +460,23 @@ trait Infer extends Checkable { && (restpe.isWildcard || !varianceInType(restpe)(tparam).isPositive) // don't retract covariant occurrences ) - buf += ((tparam, - if (retract) None - else Some( - if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass) + if (retract) { + undetParams += tparam + allArgs += NothingTpe + } else { + val arg = + if (targ.typeSymbol == RepeatedParamClass) targ.baseType(SeqClass) else if (targ.typeSymbol == JavaRepeatedParamClass) targ.baseType(ArrayClass) // this infers Foo.type instead of "object Foo" (see also widenIfNecessary) else if (targ.typeSymbol.isModuleClass || tvar.constr.avoidWiden) targ else targ.widen - ) - )) + okParams += tparam + okArgs += arg + allArgs += arg + } } - buf.result() + + new AdjustedTypeArgs(tparams, okParams.toList, okArgs.toList, undetParams.toList, allArgs.toList) } /** Return inferred type arguments, given type parameters, formal parameters, @@ -487,7 +496,7 @@ trait Infer extends Checkable { * @throws NoInstance */ def methTypeArgs(fn: Tree, tparams: List[Symbol], formals: List[Type], restpe: Type, - argtpes: List[Type], pt: Type): AdjustedTypeArgs.Result = { + argtpes: List[Type], pt: Type): AdjustedTypeArgs = { val tvars = tparams map freshVar if (!sameLength(formals, argtpes)) throw new NoInstance("parameter lists differ in length") @@ -703,12 +712,13 @@ trait Infer extends Checkable { ) def tryInstantiating(args: List[Type]) = falseIfNoInstance { val restpe = mt resultType args - val AdjustedTypeArgs.Undets(okparams, okargs, leftUndet) = methTypeArgs(EmptyTree, undetparams, formals, restpe, args, pt) - val restpeInst = restpe.instantiateTypeParams(okparams, okargs) + val 
adjusted = methTypeArgs(EmptyTree, undetparams, formals, restpe, args, pt) + import adjusted.{okParams, okArgs, undetParams} + val restpeInst = restpe.instantiateTypeParams(okParams, okArgs) // #2665: must use weak conformance, not regular one (follow the monomorphic case above) - exprTypeArgs(leftUndet, restpeInst, pt, useWeaklyCompatible = true) match { + exprTypeArgs(undetParams, restpeInst, pt, useWeaklyCompatible = true) match { case null => false - case _ => isWithinBounds(NoPrefix, NoSymbol, okparams, okargs) + case _ => isWithinBounds(NoPrefix, NoSymbol, okParams, okArgs) } } def typesCompatible(args: List[Type]) = undetparams match { @@ -911,15 +921,16 @@ trait Infer extends Checkable { substExpr(tree, tparams, targsStrict, pt) List() } else { - val AdjustedTypeArgs.Undets(okParams, okArgs, leftUndet) = adjustTypeArgs(tparams, tvars, targsStrict) + val adjusted = adjustTypeArgs(tparams, tvars, targsStrict) + import adjusted.{okParams, okArgs, undetParams} def solved_s = map2(okParams, okArgs)((p, a) => s"$p=$a") mkString "," - def undet_s = leftUndet match { + def undet_s = undetParams match { case Nil => "" case ps => ps.mkString(", undet=", ",", "") } printTyping(tree, s"infer solved $solved_s$undet_s") substExpr(tree, okParams, okArgs, pt) - leftUndet + undetParams } } @@ -956,15 +967,15 @@ trait Infer extends Checkable { val argtpes = tupleIfNecessary(formals, args map (x => elimAnonymousClass(x.tpe.deconst))) val restpe = fn.tpe.resultType(argtpes) - val AdjustedTypeArgs.AllArgsAndUndets(okparams, okargs, allargs, leftUndet) = - methTypeArgs(fn, undetparams, formals, restpe, argtpes, pt) + val adjusted = methTypeArgs(fn, undetparams, formals, restpe, argtpes, pt) + import adjusted.{okParams, okArgs, allArgs, undetParams} - if (checkBounds(fn, NoPrefix, NoSymbol, undetparams, allargs, "inferred ")) { - val treeSubst = new TreeTypeSubstituter(okparams, okargs) + if (checkBounds(fn, NoPrefix, NoSymbol, undetparams, allArgs, "inferred ")) { + val treeSubst = new TreeTypeSubstituter(okParams, okArgs) treeSubst traverseTrees fn :: args - notifyUndetparamsInferred(okparams, okargs) + notifyUndetparamsInferred(okParams, okArgs) - leftUndet match { + undetParams match { case Nil => Nil case xs => // #3890 @@ -1427,35 +1438,5 @@ trait Infer extends Checkable { } } - /** [Martin] Can someone comment this please? I have no idea what it's for - * and the code is not exactly readable. 
- */ - object AdjustedTypeArgs { - val Result = mutable.LinkedHashMap - type Result = mutable.LinkedHashMap[Symbol, Option[Type]] - - def unapply(m: Result): Some[(List[Symbol], List[Type])] = Some(toLists( - (m collect {case (p, Some(a)) => (p, a)}).unzip )) - - object Undets { - def unapply(m: Result): Some[(List[Symbol], List[Type], List[Symbol])] = Some(toLists{ - val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null) - val (okArgs, okTparams) = ok.unzip - (okArgs, okTparams, nok.keys) - }) - } - - object AllArgsAndUndets { - def unapply(m: Result): Some[(List[Symbol], List[Type], List[Type], List[Symbol])] = Some(toLists{ - val (ok, nok) = m.map{case (p, a) => (p, a.getOrElse(null))}.partition(_._2 ne null) - val (okArgs, okTparams) = ok.unzip - (okArgs, okTparams, m.values.map(_.getOrElse(NothingTpe)), nok.keys) - }) - } - - private def toLists[A1, A2](pxs: (Iterable[A1], Iterable[A2])) = (pxs._1.toList, pxs._2.toList) - private def toLists[A1, A2, A3](pxs: (Iterable[A1], Iterable[A2], Iterable[A3])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList) - private def toLists[A1, A2, A3, A4](pxs: (Iterable[A1], Iterable[A2], Iterable[A3], Iterable[A4])) = (pxs._1.toList, pxs._2.toList, pxs._3.toList, pxs._4.toList) - } - + case class AdjustedTypeArgs(tparams: List[Symbol], okParams: List[Symbol], okArgs: List[Type], undetParams: List[Symbol], allArgs: List[Type]) } From 5ba6776250e1bdbb020abd75dc2b9a482ad2ce1b Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 22 Mar 2019 07:59:07 +1000 Subject: [PATCH 1674/2793] Avoid redundant computation of undetParams / refacator to avoid confusing similarly named idents --- .../scala/tools/nsc/typechecker/Implicits.scala | 5 ++--- .../scala/tools/nsc/typechecker/Infer.scala | 16 ++++++++-------- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 5bd412336781..725a4aeb7d3f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -600,9 +600,8 @@ trait Implicits { } else { val targs = solvedTypes(tvars, allUndetparams, allUndetparams map varianceInType(wildPt), upper = false, lubDepth(tpInstantiated :: wildPt :: Nil)) val adjusted = adjustTypeArgs(allUndetparams, tvars, targs) - val remainingUndet = allUndetparams diff adjusted.okParams - val tpSubst = deriveTypeWithWildcards(remainingUndet)(tp.instantiateTypeParams(adjusted.okParams, adjusted.okArgs)) - if(!matchesPt(tpSubst, wildPt, remainingUndet)) { + val tpSubst = deriveTypeWithWildcards(adjusted.undetParams)(tp.instantiateTypeParams(adjusted.okParams, adjusted.okArgs)) + if(!matchesPt(tpSubst, wildPt, adjusted.undetParams)) { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch2) false } else true diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index d445c077d89c..c7ca067d75e1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -476,7 +476,7 @@ trait Infer extends Checkable { } } - new AdjustedTypeArgs(tparams, okParams.toList, okArgs.toList, undetParams.toList, allArgs.toList) + AdjustedTypeArgs(okParams.toList, okArgs.toList, undetParams.toList, allArgs.toList) } /** Return inferred type arguments, given type parameters, formal parameters, @@ -952,13 +952,13 @@ trait 
Infer extends Checkable { * `fn(args)`, given prototype `pt`. * * @param fn fn: the function that needs to be instantiated. - * @param undetparams the parameters that need to be determined + * @param undetParams the parameters that need to be determined * @param args the actual arguments supplied in the call. * @param pt0 the expected type of the function application * @return The type parameters that remain uninstantiated, * and that thus have not been substituted. */ - def inferMethodInstance(fn: Tree, undetparams: List[Symbol], + def inferMethodInstance(fn: Tree, undetParams: List[Symbol], args: List[Tree], pt0: Type): List[Symbol] = fn.tpe match { case mt @ MethodType(params0, _) => try { @@ -967,15 +967,15 @@ trait Infer extends Checkable { val argtpes = tupleIfNecessary(formals, args map (x => elimAnonymousClass(x.tpe.deconst))) val restpe = fn.tpe.resultType(argtpes) - val adjusted = methTypeArgs(fn, undetparams, formals, restpe, argtpes, pt) - import adjusted.{okParams, okArgs, allArgs, undetParams} + val adjusted = methTypeArgs(fn, undetParams, formals, restpe, argtpes, pt) + import adjusted.{okParams, okArgs, allArgs} - if (checkBounds(fn, NoPrefix, NoSymbol, undetparams, allArgs, "inferred ")) { + if (checkBounds(fn, NoPrefix, NoSymbol, undetParams, allArgs, "inferred ")) { val treeSubst = new TreeTypeSubstituter(okParams, okArgs) treeSubst traverseTrees fn :: args notifyUndetparamsInferred(okParams, okArgs) - undetParams match { + adjusted.undetParams match { case Nil => Nil case xs => // #3890 @@ -1438,5 +1438,5 @@ trait Infer extends Checkable { } } - case class AdjustedTypeArgs(tparams: List[Symbol], okParams: List[Symbol], okArgs: List[Type], undetParams: List[Symbol], allArgs: List[Type]) + final case class AdjustedTypeArgs(okParams: List[Symbol], okArgs: List[Type], undetParams: List[Symbol], allArgs: List[Type]) } From 78cf068dc24978eabffa996a21cdecb548eb8b99 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Thu, 14 Mar 2019 20:41:18 +0000 Subject: [PATCH 1675/2793] Submerge the Variance extractor function into the solve functions. In the `solvedTypes` and the `solve` functions, there is a third parameter to give the specific variances, in the context of the resolution, of each parameter which goes in the second parameter. In fact, this `variances` list is always a `map` of a function, which is different in each call, on the second list of symbols. We replace the third parameter from being the list of variances to being the function that is used to get that third list, and thus merge the application of that list in each step of the foreach. This has these benefits: - We avoid allocating the list of variances, particularly for the case in which we are just using a constant function to Invariant, before the call. - Since the only relevant information is whether or not a type parameter is contravariant, which is one bit (boolean), we use a BitSet to store that information. - To use a BitSet, we need indices, so in the solve we replace the use of map and foreach, by the utility foreachWithIndex. - By using a Variance.Extractor instead of a Function1, as required by the List.map function, we can avoid allocations of Variance objects, and use instead the underlying integer value. There could be a small performance prejudice: the double-nested loop of the solve method could compute the variances up to N times. 
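As a rough standalone illustration of this refactoring (the `Param` type and method names below are stand-ins, not the real `Symbol`/`Variance` machinery): instead of materialising a `List` of variances up front, the caller passes an extractor function, and the only bit that matters per parameter, whether it is contravariant, is recorded by index in a `BitSet`.

```
import scala.collection.mutable.BitSet

// Hedged sketch: record contravariant positions in a BitSet via an extractor.
object VarianceSketch {
  final case class Param(name: String, variance: Int) // -1, 0, +1 as a stand-in for Variance

  def contravariantPositions(params: List[Param], getVariance: Param => Int): BitSet = {
    val bits = BitSet.empty
    var ps = params
    var i  = 0
    while (ps.nonEmpty) {
      if (getVariance(ps.head) < 0) bits += i  // one bit per parameter, no List[Variance]
      ps = ps.tail
      i += 1
    }
    bits
  }

  def main(args: Array[String]): Unit = {
    val ps = List(Param("A", 1), Param("B", -1), Param("C", 0))
    println(contravariantPositions(ps, _.variance)) // BitSet(1)
  }
}
```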
--- .../reflect/macros/compiler/Validators.scala | 2 +- .../tools/nsc/typechecker/Implicits.scala | 4 +-- .../scala/tools/nsc/typechecker/Infer.scala | 19 +++++------ .../scala/tools/nsc/typechecker/Typers.scala | 4 +-- .../scala/reflect/internal/Types.scala | 2 +- .../internal/tpe/TypeConstraints.scala | 33 ++++++++++++------- .../reflect/internal/util/Collections.scala | 2 +- 7 files changed, 36 insertions(+), 30 deletions(-) diff --git a/src/compiler/scala/reflect/macros/compiler/Validators.scala b/src/compiler/scala/reflect/macros/compiler/Validators.scala index cb8cf79640bf..510061159f3e 100644 --- a/src/compiler/scala/reflect/macros/compiler/Validators.scala +++ b/src/compiler/scala/reflect/macros/compiler/Validators.scala @@ -72,7 +72,7 @@ trait Validators { checkMacroImplResultTypeMismatch(atpeToRtpe(aret), rret) val maxLubDepth = lubDepth(aparamss.flatten map (_.tpe)) max lubDepth(rparamss.flatten map (_.tpe)) - val atargs = solvedTypes(atvars, atparams, atparams map varianceInType(aret), upper = false, maxLubDepth) + val atargs = solvedTypes(atvars, atparams, varianceInType(aret), upper = false, maxLubDepth) val boundsOk = typer.silent(_.infer.checkBounds(macroDdef, NoPrefix, NoSymbol, atparams, atargs, "")) boundsOk match { case SilentResultValue(true) => // do nothing, success diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 14a0dc918021..e340e45516e2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -598,7 +598,7 @@ trait Implicits { if (StatisticsStatics.areSomeColdStatsEnabled) statistics.incCounter(matchesPtInstMismatch1) false } else { - val targs = solvedTypes(tvars, allUndetparams, allUndetparams map varianceInType(wildPt), upper = false, lubDepth(tpInstantiated :: wildPt :: Nil)) + val targs = solvedTypes(tvars, allUndetparams, varianceInType(wildPt), upper = false, lubDepth(tpInstantiated :: wildPt :: Nil)) val adjusted = adjustTypeArgs(allUndetparams, tvars, targs) val tpSubst = deriveTypeWithWildcards(adjusted.undetParams)(tp.instantiateTypeParams(adjusted.okParams, adjusted.okArgs)) if(!matchesPt(tpSubst, wildPt, adjusted.undetParams)) { @@ -796,7 +796,7 @@ trait Implicits { if (tvars.nonEmpty) typingLog("solve", ptLine("tvars" -> tvars, "tvars.constr" -> tvars.map(_.constr))) - val targs = solvedTypes(tvars, undetParams, undetParams map varianceInType(pt), upper = false, lubDepth(itree3.tpe :: pt :: Nil)) + val targs = solvedTypes(tvars, undetParams, varianceInType(pt), upper = false, lubDepth(itree3.tpe :: pt :: Nil)) // #2421: check that we correctly instantiated type parameters outside of the implicit tree: checkBounds(itree3, NoPrefix, NoSymbol, undetParams, targs, "inferred ") diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index c7ca067d75e1..6ae668dd16b0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -137,12 +137,12 @@ trait Infer extends Checkable { * * @param tvars All type variables to be instantiated. * @param tparams The type parameters corresponding to `tvars` - * @param variances The variances of type parameters; need to reverse + * @param getVariance Function to extract variances of type parameters; we need to reverse * solution direction for all contravariant variables. * @param upper When `true` search for max solution else min. 
* @throws NoInstance */ - def solvedTypes(tvars: List[TypeVar], tparams: List[Symbol], variances: List[Variance], upper: Boolean, depth: Depth): List[Type] = { + def solvedTypes(tvars: List[TypeVar], tparams: List[Symbol], getVariance: Variance.Extractor[Symbol], upper: Boolean, depth: Depth): List[Type] = { if (tvars.isEmpty) Nil else { printTyping("solving for " + parentheses(map2(tparams, tvars)((p, tv) => s"${p.name}: $tv"))) // !!! What should be done with the return value of "solve", which is at present ignored? @@ -150,7 +150,7 @@ trait Infer extends Checkable { // we'll find out later whether it works", meaning don't issue an error here when types // don't conform to bounds. That means you can never trust the results of implicit search. // For an example where this was not being heeded, scala/bug#2421. - solve(tvars, tparams, variances, upper, depth) + solve(tvars, tparams, getVariance, upper, depth) tvars map instantiate } } @@ -377,7 +377,7 @@ trait Infer extends Checkable { case mt: MethodType if mt.isImplicit && isFullyDefined(pt) => MethodType(mt.params, AnyTpe) case _ => restpe } - def solve() = solvedTypes(tvars, tparams, tparams map varianceInType(variance), upper = false, lubDepth(restpe :: pt :: Nil)) + def solve() = solvedTypes(tvars, tparams, varianceInType(variance), upper = false, lubDepth(restpe :: pt :: Nil)) if (conforms) try solve() catch { case _: NoInstance => null } @@ -535,7 +535,7 @@ trait Infer extends Checkable { "argument expression's type is not compatible with formal parameter type" + foundReqMsg(tp1, pt1)) } } - val targs = solvedTypes(tvars, tparams, tparams map varianceInTypes(formals), upper = false, lubDepth(formals) max lubDepth(argtpes)) + val targs = solvedTypes(tvars, tparams, varianceInTypes(formals), upper = false, lubDepth(formals) max lubDepth(argtpes)) // Can warn about inferring Any/AnyVal as long as they don't appear // explicitly anywhere amongst the formal, argument, result, or expected type. // ...or lower bound of a type param, since they're asking for it. @@ -1016,13 +1016,12 @@ trait Infer extends Checkable { try { // debuglog("TVARS "+ (tvars map (_.constr))) // look at the argument types of the primary constructor corresponding to the pattern - val variances = - if (ctorTp.paramTypes.isEmpty) undetparams map varianceInType(ctorTp) - else undetparams map varianceInTypes(ctorTp.paramTypes) + val varianceFun: Variance.Extractor[Symbol] = + if (ctorTp.paramTypes.isEmpty) varianceInType(ctorTp) else varianceInTypes(ctorTp.paramTypes) // Note: this is the only place where solvedTypes (or, indirectly, solve) is called // with upper = true. - val targs = solvedTypes(tvars, undetparams, variances, upper = true, lubDepth(resTp :: pt :: Nil)) + val targs = solvedTypes(tvars, undetparams, varianceFun, upper = true, lubDepth(resTp :: pt :: Nil)) // checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ") // no checkBounds here. If we enable it, test bug602 fails. // TODO: reinstate checkBounds, return params that fail to meet their bounds to undetparams @@ -1091,7 +1090,7 @@ trait Infer extends Checkable { val tvars1 = tvars map (_.cloneInternal) // Note: right now it's not clear that solving is complete, or how it can be made complete! // So we should come back to this and investigate. 
- solve(tvars1, tvars1 map (_.origin.typeSymbol), tvars1 map (_ => Variance.Covariant), upper = false, Depth.AnyDepth) + solve(tvars1, tvars1.map(_.origin.typeSymbol), (_ => Variance.Covariant), upper = false, Depth.AnyDepth) } // this is quite nasty: it destructively changes the info of the syms of e.g., method type params diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 5cacbf53da9f..1ddf41bc2a34 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2930,10 +2930,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // use function type subtyping, not method type subtyping (the latter is invariant in argument types) fun.tpe <:< functionType(samInfoWithTVars.paramTypes, samInfoWithTVars.finalResultType) - val variances = tparams map varianceInType(sam.info) - // solve constraints tracked by tvars - val targs = solvedTypes(tvars, tparams, variances, upper = false, lubDepth(sam.info :: Nil)) + val targs = solvedTypes(tvars, tparams, varianceInType(sam.info), upper = false, lubDepth(sam.info :: Nil)) debuglog(s"sam infer: $pt --> ${appliedType(samTyCon, targs)} by ${fun.tpe} <:< $samInfoWithTVars --> $targs for $tparams") diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 6a653d1c5140..e08ad231eb5b 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -2871,7 +2871,7 @@ trait Types val tvars = quantifiedFresh map (tparam => TypeVar(tparam)) val underlying1 = underlying.instantiateTypeParams(quantified, tvars) // fuse subst quantified -> quantifiedFresh -> tvars op(underlying1) && { - solve(tvars, quantifiedFresh, quantifiedFresh map (_ => Invariant), upper = false, depth) && + solve(tvars, quantifiedFresh, (_ => Invariant), upper = false, depth) && isWithinBounds(NoPrefix, NoSymbol, quantifiedFresh, tvars map (_.inst)) } } diff --git a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala index bc3d9794a37b..258785371848 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeConstraints.scala @@ -17,6 +17,7 @@ package tpe import scala.collection.{ generic } import generic.Clearable +import scala.collection.mutable.BitSet private[internal] trait TypeConstraints { self: SymbolTable => @@ -195,22 +196,30 @@ private[internal] trait TypeConstraints { /** Solve constraint collected in types `tvars`. * - * @param tvars All type variables to be instantiated. - * @param tparams The type parameters corresponding to `tvars` - * @param variances The variances of type parameters; need to reverse + * @param tvars All type variables to be instantiated. + * @param tparams The type parameters corresponding to `tvars` + * @param getVariance Function to extract variances of type parameters; we need to reverse * solution direction for all contravariant variables. - * @param upper When `true` search for max solution else min. + * @param upper When `true` search for max solution else min. 
*/ - def solve(tvars: List[TypeVar], tparams: List[Symbol], variances: List[Variance], upper: Boolean, depth: Depth): Boolean = { + def solve(tvars: List[TypeVar], tparams: List[Symbol], getVariance: Variance.Extractor[Symbol], upper: Boolean, depth: Depth): Boolean = { + assert(tvars.corresponds(tparams)((tvar, tparam) => tvar.origin.typeSymbol eq tparam), (tparams, tvars.map(_.origin.typeSymbol))) + val areContravariant: BitSet = BitSet.empty + foreachWithIndex(tparams){(tparam, ix) => + if (getVariance(tparam).isContravariant) areContravariant += ix + } - def solveOne(tvar: TypeVar, tparam: Symbol, variance: Variance) { + def solveOne(tvar: TypeVar, ix: Int): Unit = { + val tparam = tvar.origin.typeSymbol + val isContravariant = areContravariant(ix) if (tvar.constr.inst == NoType) { - val up = if (variance.isContravariant) !upper else upper + val up = if (isContravariant) !upper else upper tvar.constr.inst = null val bound: Type = if (up) tparam.info.upperBound else tparam.info.lowerBound //Console.println("solveOne0(tv, tp, v, b)="+(tvar, tparam, variance, bound)) var cyclic = bound contains tparam - foreach3(tvars, tparams, variances)((tvar2, tparam2, variance2) => { + foreachWithIndex(tvars){ (tvar2, jx) => + val tparam2 = tvar2.origin.typeSymbol val ok = (tparam2 != tparam) && ( (bound contains tparam2) || up && (tparam2.info.lowerBound =:= tparam.tpeHK) @@ -218,9 +227,9 @@ private[internal] trait TypeConstraints { ) if (ok) { if (tvar2.constr.inst eq null) cyclic = true - solveOne(tvar2, tparam2, variance2) + solveOne(tvar2, jx) } - }) + } if (!cyclic) { if (up) { if (bound.typeSymbol != AnyClass) { @@ -260,7 +269,7 @@ private[internal] trait TypeConstraints { if (depth.isAnyDepth) lub(tvar.constr.loBounds) else lub(tvar.constr.loBounds, depth) } - ) + ) debuglog(s"$tvar setInst $newInst") tvar setInst newInst @@ -269,7 +278,7 @@ private[internal] trait TypeConstraints { } // println("solving "+tvars+"/"+tparams+"/"+(tparams map (_.info))) - foreach3(tvars, tparams, variances)(solveOne) + foreachWithIndex(tvars)(solveOne) def logBounds(tv: TypeVar) = log { val what = if (!tv.instValid) "is invalid" else s"does not conform to bounds: ${tv.constr}" diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index 7adc294112e1..bf5b86a1c539 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -197,7 +197,7 @@ trait Collections { xss.isEmpty || xss.head.isEmpty && flattensToEmpty(xss.tail) } - final def foreachWithIndex[A, B](xs: List[A])(f: (A, Int) => Unit) { + final def foreachWithIndex[A](xs: List[A])(f: (A, Int) => Unit) { var index = 0 var ys = xs while (!ys.isEmpty) { From 152350892c5ded9c636cddc723b6815150b5e927 Mon Sep 17 00:00:00 2001 From: Diego Alonso Date: Tue, 19 Mar 2019 02:23:06 +0000 Subject: [PATCH 1676/2793] Infer: replace List reverse by ListBuffer In the `makeFullyDefined` method of the Infer file, instead of building a List by appending each element and then reversing the list, which builds (allocates) 2 lists, we now use a ListBuffer and append in the right order, so we only allocate one list. For convenience, and if the frequent case is to have no tparams at all, we start `tparams` as `null`, and only allocate unless called at least once. 
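A minimal sketch of the buffer pattern this commit applies (illustrative names, simplified to plain `Int`s): results are appended to a `ListBuffer` that is allocated lazily, so the common empty case allocates nothing and no trailing `reverse` pass is needed.

```
import scala.collection.mutable.ListBuffer

// Hedged sketch: lazily allocated ListBuffer instead of prepend-then-reverse.
object LazyBufferSketch {
  def collectPositives(xs: List[Int]): List[Int] = {
    var buf: ListBuffer[Int] = null                 // allocated only if something is added
    xs.foreach { x =>
      if (x > 0) {
        if (buf == null) buf = ListBuffer.empty[Int]
        buf += x                                    // append keeps insertion order
      }
    }
    if (buf == null) Nil else buf.toList
  }

  def main(args: Array[String]): Unit = {
    println(collectPositives(List(-1, 2, -3, 4))) // List(2, 4)
    println(collectPositives(List(-1, -2)))       // List() -- no buffer was allocated
  }
}
```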
--- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index c7ca067d75e1..6e47fd09fd3a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -346,10 +346,12 @@ trait Infer extends Checkable { * by existentially bound variables. */ def makeFullyDefined(tp: Type): Type = { - var tparams: List[Symbol] = Nil + var tparams_ : ListBuffer[Symbol] = null + def tparams: ListBuffer[Symbol] = { if (tparams_ == null) tparams_ = ListBuffer.empty ; tparams_ } + def tparamsList: List[Symbol] = if (tparams_ == null) Nil else tparams_.toList def addTypeParam(bounds: TypeBounds): Type = { val tparam = context.owner.newExistential(newTypeName("_"+tparams.size), context.tree.pos.focus) setInfo bounds - tparams ::= tparam + tparams += tparam tparam.tpe } val tp1 = tp map { @@ -358,7 +360,7 @@ trait Infer extends Checkable { case t => t } if (tp eq tp1) tp - else existentialAbstraction(tparams.reverse, tp1) + else existentialAbstraction(tparamsList, tp1) } def ensureFullyDefined(tp: Type): Type = if (isFullyDefined(tp)) tp else makeFullyDefined(tp) From 6a088fa72b765aac62781bc6c1883a75f0c61f4d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 20 Mar 2019 13:17:06 +1000 Subject: [PATCH 1677/2793] Further refactor makeFullyDefined into a full TypeMap We were already paying for the allocation of the capturing lambda passed to `Type.map`, let's just make the `TypeMap` apparent and use it to store the lazily contructed buffer of existential type params. While we're cleaning this up, move the name logic out to `nme.existentialName`, and have that cache the names for small values of `i`. --- .../scala/tools/nsc/typechecker/Infer.scala | 32 +++++++++++-------- .../scala/reflect/internal/StdNames.scala | 4 +++ 2 files changed, 23 insertions(+), 13 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 6e47fd09fd3a..20bd554a7ea7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -346,21 +346,27 @@ trait Infer extends Checkable { * by existentially bound variables. 
*/ def makeFullyDefined(tp: Type): Type = { - var tparams_ : ListBuffer[Symbol] = null - def tparams: ListBuffer[Symbol] = { if (tparams_ == null) tparams_ = ListBuffer.empty ; tparams_ } - def tparamsList: List[Symbol] = if (tparams_ == null) Nil else tparams_.toList - def addTypeParam(bounds: TypeBounds): Type = { - val tparam = context.owner.newExistential(newTypeName("_"+tparams.size), context.tree.pos.focus) setInfo bounds - tparams += tparam - tparam.tpe - } - val tp1 = tp map { - case WildcardType => addTypeParam(TypeBounds.empty) - case BoundedWildcardType(bounds) => addTypeParam(bounds) - case t => t + object typeMap extends TypeMap { + def tparamsList: List[Symbol] = if (tparams_ == null) Nil else tparams_.toList + private var tparams_ : ListBuffer[Symbol] = null + private var i = 0 + private def nextI(): Int = try i finally i += 1 + private def addTypeParam(bounds: TypeBounds): Type = { + val tparam = context.owner.newExistential(nme.existentialName(nextI()), context.tree.pos.focus) setInfo bounds + if (tparams_ == null) tparams_ = ListBuffer.empty + tparams_ += tparam + tparam.tpe + } + + override def apply(tp: Type): Type = mapOver(tp) match { + case WildcardType => addTypeParam(TypeBounds.empty) + case BoundedWildcardType(bounds) => addTypeParam(bounds) + case tp => tp + } } + val tp1 = typeMap(tp) if (tp eq tp1) tp - else existentialAbstraction(tparamsList, tp1) + else existentialAbstraction(typeMap.tparamsList, tp1) } def ensureFullyDefined(tp: Type): Type = if (isFullyDefined(tp)) tp else makeFullyDefined(tp) diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 38b64f63dc56..6428d83cdf18 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -512,6 +512,10 @@ trait StdNames { /** The name of a setter for protected symbols. Used for inherited Java fields. */ def protSetterName(name: Name): TermName = newTermName(PROTECTED_SET_PREFIX + name) + private[this] val existentialNames = (0 to 22).map(existentialName0) + private def existentialName0(i: Int) = newTypeName("_" + i) + final def existentialName(i: Int): TypeName = if (i < existentialNames.length) existentialNames(i) else existentialName0(i) + final val Nil: NameType = "Nil" final val Predef: NameType = "Predef" From 8dd313d3c615ca957cad640ff101b4e1fb967385 Mon Sep 17 00:00:00 2001 From: Diego Alonso Date: Wed, 19 Sep 2018 17:34:57 +0100 Subject: [PATCH 1678/2793] Type depth: avoid List.map when computing maximum over list. The existing method to compute a `typeDepth` was using for some cases a `map (_.info)` method, which creates a list that is then dropped. We change the implementation of `maxDepth`: - We add a `maximumBy` method to `Depth` companion object, - We use lambda functions that bypass the `_.info`. - Use a separate trait DepthFunction, to avoid boxing-unboxing. - Use while loop to avoid boxing Using the generic `List.foldLeft` was incurring the boxing penalty. Using a while loop should prevent that. 
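For illustration, a self-contained sketch of the `maximumBy` idea with a plain `Int` measure (the trait and object names are hypothetical, not the `Depth` value class itself): a dedicated single-abstract-method trait returns the measure directly, and a while loop folds over the list, avoiding both the intermediate mapped list and `Function1` boxing.

```
// Hedged sketch: maximum of a measure over a list, via a dedicated SAM trait and a while loop.
object MaxBySketch {
  trait IntMeasure[A] { def apply(a: A): Int }

  def maximumBy[A](xs: List[A])(f: IntMeasure[A]): Int = {
    var max = 0            // the maximum over an empty list is taken to be 0 here
    var ys  = xs
    while (ys.nonEmpty) {
      val m = f(ys.head)
      if (m > max) max = m
      ys = ys.tail
    }
    max
  }

  def main(args: Array[String]): Unit = {
    val depth = new IntMeasure[String] { def apply(s: String): Int = s.length }
    println(maximumBy(List("a", "abc", "ab"))(depth)) // 3
  }
}
```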
--- .../scala/reflect/internal/Depth.scala | 12 +++++++ .../scala/reflect/internal/Types.scala | 33 +++++-------------- 2 files changed, 20 insertions(+), 25 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Depth.scala b/src/reflect/scala/reflect/internal/Depth.scala index b6e4a1ef64aa..36690cae6d43 100644 --- a/src/reflect/scala/reflect/internal/Depth.scala +++ b/src/reflect/scala/reflect/internal/Depth.scala @@ -31,6 +31,8 @@ final class Depth private (val depth: Int) extends AnyVal with Ordered[Depth] { override def toString = s"Depth($depth)" } +trait DepthFunction[A] { def apply(a: A): Depth } + object Depth { // A don't care value for the depth parameter in lubs/glbs and related operations. // When passed this value, the recursion budget will be inferred from the shape of @@ -49,4 +51,14 @@ object Depth { if (depth < AnyDepthValue) AnyDepth else new Depth(depth) } + + def maximumBy[A](xs: List[A])(ff: DepthFunction[A]): Depth = { + var ys: List[A] = xs + var mm: Depth = Zero + while (!ys.isEmpty){ + mm = mm max ff(ys.head) + ys = ys.tail + } + mm + } } diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 6a653d1c5140..6b401b82cf65 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -4038,7 +4038,7 @@ trait Types /** The maximum allowable depth of lubs or glbs over types `ts`. */ def lubDepth(ts: List[Type]): Depth = { - val td = typeDepth(ts) + val td = maxDepth(ts) val bd = baseTypeSeqDepth(ts) lubDepthAdjust(td, td max bd) } @@ -4055,9 +4055,9 @@ trait Types else td.decr max (bd decr 3) ) - private def symTypeDepth(syms: List[Symbol]): Depth = typeDepth(syms map (_.info)) - private def typeDepth(tps: List[Type]): Depth = maxDepth(tps) - private def baseTypeSeqDepth(tps: List[Type]): Depth = maxbaseTypeSeqDepth(tps) + private def infoTypeDepth(sym: Symbol): Depth = typeDepth(sym.info) + private def symTypeDepth(syms: List[Symbol]): Depth = Depth.maximumBy(syms)(infoTypeDepth) + private def baseTypeSeqDepth(tps: List[Type]): Depth = Depth.maximumBy(tps)((t: Type) => t.baseTypeSeqDepth) /** Is intersection of given types populated? That is, * for all types tp1, tp2 in intersection @@ -4803,8 +4803,8 @@ trait Types /** The maximum depth of type `tp` */ def typeDepth(tp: Type): Depth = tp match { - case TypeRef(pre, sym, args) => typeDepth(pre) max typeDepth(args).incr - case RefinedType(parents, decls) => typeDepth(parents) max symTypeDepth(decls.toList).incr + case TypeRef(pre, sym, args) => typeDepth(pre) max maxDepth(args).incr + case RefinedType(parents, decls) => maxDepth(parents) max symTypeDepth(decls.toList).incr case TypeBounds(lo, hi) => typeDepth(lo) max typeDepth(hi) case MethodType(paramtypes, result) => typeDepth(result) case NullaryMethodType(result) => typeDepth(result) @@ -4813,25 +4813,8 @@ trait Types case _ => Depth(1) } - //OPT replaced with tail recursive function to save on #closures - // was: - // var d = 0 - // for (tp <- tps) d = d max by(tp) //!!!OPT!!! 
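The shape of that save/flip/restore pattern, in a reduced standalone form (the `Cell` class and helper names are illustrative, not the compiler's `TypeVar` code): the prior state of each flag is captured in a single `mutable.BitSet` keyed by position rather than in a freshly allocated `List[Boolean]`.

```
import scala.collection.mutable

// Hedged sketch: save boolean flags in a BitSet, mutate, then restore.
object BitSetSaveRestoreSketch {
  final class Cell(var suspended: Boolean)

  def bitSetByPredicate[A](xs: List[A])(pred: A => Boolean): mutable.BitSet = {
    val bs = new mutable.BitSet()
    var ys = xs
    var i  = 0
    while (ys.nonEmpty) {
      if (pred(ys.head)) bs += i
      ys = ys.tail
      i += 1
    }
    bs
  }

  def suspending[T](cells: List[Cell])(op: => T): T = {
    val saved = bitSetByPredicate(cells)(_.suspended) // remember which flags were already set
    cells.foreach(_.suspended = true)
    try op
    finally {
      var ys = cells
      var i  = 0
      while (ys.nonEmpty) {
        ys.head.suspended = saved(i)                  // restore each original flag
        ys = ys.tail
        i += 1
      }
    }
  }

  def main(args: Array[String]): Unit = {
    val cells = List(new Cell(false), new Cell(true), new Cell(false))
    suspending(cells)(println(cells.map(_.suspended))) // List(true, true, true)
    println(cells.map(_.suspended))                    // List(false, true, false)
  }
}
```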
- // d - private[scala] def maxDepth(tps: List[Type]): Depth = { - @tailrec def loop(tps: List[Type], acc: Depth): Depth = tps match { - case tp :: rest => loop(rest, acc max typeDepth(tp)) - case _ => acc - } - loop(tps, Depth.Zero) - } - private[scala] def maxbaseTypeSeqDepth(tps: List[Type]): Depth = { - @tailrec def loop(tps: List[Type], acc: Depth): Depth = tps match { - case tp :: rest => loop(rest, acc max tp.baseTypeSeqDepth) - case _ => acc - } - loop(tps, Depth.Zero) - } + private[scala] def maxDepth(tps: List[Type]): Depth = + Depth.maximumBy(tps)(typeDepth) @tailrec private def areTrivialTypes(tps: List[Type]): Boolean = tps match { case tp :: rest => tp.isTrivial && areTrivialTypes(rest) From c78902a7d89994cf13255355b5fc2faeb0488bfe Mon Sep 17 00:00:00 2001 From: Diego Alonso Date: Tue, 18 Sep 2018 16:47:07 +0100 Subject: [PATCH 1679/2793] Replace a list of booleans by a mutable BitSet. The modified code saves, for each element of a list, a boolean state variable that is modified before an operation and restored afterwards. The code was using a `List.map`, which creates a linked list with the same length as the input list, with as many Boolean objects. We change the code to use a BitSet instead, which needs less memory. --- src/reflect/scala/reflect/internal/Types.scala | 13 +++++++++++-- .../reflect/internal/util/Collections.scala | 18 ++++++++++++++++++ 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 6b401b82cf65..e238d03669e0 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -4519,11 +4519,20 @@ trait Types // sides of a subtyping/equality judgement, which can lead to recursive types // being constructed. See pos/t0851 for a situation where this happens. @inline final def suspendingTypeVars[T](tvs: List[TypeVar])(op: => T): T = { - val saved = tvs map (_.suspended) + val saved = bitSetByPredicate(tvs)(_.suspended) tvs foreach (_.suspended = true) try op - finally foreach2(tvs, saved)(_.suspended = _) + finally { + var index = 0 + var sss = tvs + while (sss != Nil) { + val tv = sss.head + tv.suspended = saved(index) + index += 1 + sss = sss.tail + } + } } final def stripExistentialsAndTypeVars(ts: List[Type], expandLazyBaseType: Boolean = false): (List[Type], List[Symbol]) = { diff --git a/src/reflect/scala/reflect/internal/util/Collections.scala b/src/reflect/scala/reflect/internal/util/Collections.scala index 7adc294112e1..93e7519e52d1 100644 --- a/src/reflect/scala/reflect/internal/util/Collections.scala +++ b/src/reflect/scala/reflect/internal/util/Collections.scala @@ -350,6 +350,24 @@ trait Collections { Some(result.toList) } + final def bitSetByPredicate[A](xs: List[A])(pred: A => Boolean): mutable.BitSet = { + val bs = new mutable.BitSet() + var ys = xs + var i: Int = 0 + while (! ys.isEmpty){ + if (pred(ys.head)) + bs.add(i) + ys = ys.tail + i += 1 + } + bs + } + + final def sequence[A](as: List[Option[A]]): Option[List[A]] = { + if (as.exists (_.isEmpty)) None + else Some(as.flatten) + } + final def transposeSafe[A](ass: List[List[A]]): Option[List[List[A]]] = try { Some(ass.transpose) } catch { From f82e7f5a0ef24157cf1a5d9c210e02c460881972 Mon Sep 17 00:00:00 2001 From: "Diego E. 
Alonso-Blas" Date: Wed, 12 Sep 2018 15:56:17 +0100 Subject: [PATCH 1680/2793] Inline WeakReference get method The `get` method from the `WeakReference` class uses the `Option.apply` method to avoid null references. This was allocating a "Some" object which is immediately read, and then no longer needed. This commit inlines the code from `WeakReference`, and avoids creating that Some object. --- src/compiler/scala/tools/nsc/SubComponent.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/SubComponent.scala b/src/compiler/scala/tools/nsc/SubComponent.scala index 6489eed3347a..9ecb41f81fe1 100644 --- a/src/compiler/scala/tools/nsc/SubComponent.scala +++ b/src/compiler/scala/tools/nsc/SubComponent.scala @@ -70,10 +70,10 @@ abstract class SubComponent { /** The phase corresponding to this subcomponent in the current compiler run */ def ownPhase: Phase = { - ownPhaseCache.get match { - case Some(phase) if ownPhaseRunId == global.currentRunId => - phase - case _ => + val cache = ownPhaseCache.underlying.get + if (cache != null && ownPhaseRunId == global.currentRunId) + cache + else { val phase = global.currentRun.phaseNamed(phaseName) ownPhaseCache = new WeakReference(phase) ownPhaseRunId = global.currentRunId From 157bf11a323072efddc6163f34aa1d220a13424d Mon Sep 17 00:00:00 2001 From: Diego Alonso Date: Mon, 24 Sep 2018 04:48:31 +0100 Subject: [PATCH 1681/2793] Small improvement: bring test forward This code calls two expensive operations: `dropSingletonType`, and an `exists` loop that calls the `Type.contains` method. We reorder the code to bring forward a quick boolean check, so that we may sometimes avoid those expensive computations. --- src/reflect/scala/reflect/internal/tpe/TypeMaps.scala | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala index dd6ab0081f9f..a2f96d5e1f7b 100644 --- a/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala +++ b/src/reflect/scala/reflect/internal/tpe/TypeMaps.scala @@ -408,15 +408,13 @@ private[internal] trait TypeMaps { val tp1 = mapOver(tp) if (variance.isInvariant) tp1 else tp1 match { - case TypeRef(pre, sym, args) if tparams contains sym => + case TypeRef(pre, sym, args) if tparams.contains(sym) && occurCount(sym) == 1 => val repl = if (variance.isPositive) dropSingletonType(tp1.upperBound) else tp1.lowerBound - val count = occurCount(sym) - val containsTypeParam = tparams exists (repl contains _) def msg = { val word = if (variance.isPositive) "upper" else "lower" s"Widened lone occurrence of $tp1 inside existential to $word bound" } - if (!repl.typeSymbol.isBottomClass && count == 1 && !containsTypeParam) + if (!repl.typeSymbol.isBottomClass && !tparams.exists(repl.contains)) debuglogResult(msg)(repl) else tp1 From eee658d14297804364668e0af0edef7bddd7e8bc Mon Sep 17 00:00:00 2001 From: Diego Alonso Date: Sun, 30 Sep 2018 23:23:17 +0100 Subject: [PATCH 1682/2793] mergePrefixAndArgs: Small performance hack The code here was creating a temporary extra list of heads, which is then used in three traversals (a forall, an exists, and a map). We can avoid that list by submerging the (_.head) into each traversal.
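A minimal before/after sketch of the pattern, with `List[List[Int]]` and `==` standing in for the actual `argss: List[List[Type]]` and the `=:=` test used in `mergePrefixAndArgs`:

```
object HeadsSketch {
  // Before: first materialize the list of heads, then traverse it.
  def sameHeadsBefore(argss: List[List[Int]]): Boolean = {
    val args = argss map (_.head)      // throwaway list of heads
    args.tail forall (_ == args.head)
  }

  // After: push the `.head` projection into the traversal itself,
  // so no intermediate list is allocated.
  def sameHeadsAfter(argss: List[List[Int]]): Boolean = {
    val argH = argss.head.head
    argss.tail forall (_.head == argH)
  }

  def main(args: Array[String]): Unit = {
    val argss = List(List(1, 9), List(1, 8), List(1, 7))
    println(sameHeadsBefore(argss)) // true
    println(sameHeadsAfter(argss))  // true
  }
}
```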
--- src/reflect/scala/reflect/internal/Types.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index e238d03669e0..3223367c3adb 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -4591,10 +4591,10 @@ trait Types NoType // something is wrong: an array without a type arg. } else { - val args = argss map (_.head) - if (args.tail forall (_ =:= args.head)) typeRef(pre, sym, List(args.head)) - else if (args exists (arg => isPrimitiveValueClass(arg.typeSymbol))) ObjectTpe - else typeRef(pre, sym, List(lub(args))) + val argH = argss.head.head + if (argss.tail forall (_.head =:= argH)) typeRef(pre, sym, List(argH)) + else if (argss exists (args => isPrimitiveValueClass(args.head.typeSymbol))) ObjectTpe + else typeRef(pre, sym, List(lub(argss.map(_.head)))) } } else transposeSafe(argss) match { From 06c861dd81c38e7cda50aec1de224c987f046b7c Mon Sep 17 00:00:00 2001 From: Diego Alonso Date: Sat, 12 Jan 2019 22:56:31 +0000 Subject: [PATCH 1683/2793] Small tweak to "isHotForTs" In `isHotForTs`, we avoid the call to `map` which creates a list that is immediately discarded. We replace it with a call to `corresponds`, essentially merging the map into the fold of the forall. --- src/reflect/scala/reflect/internal/tpe/GlbLubs.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index 55566c67325a..37de4674e9d4 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -113,7 +113,8 @@ private[internal] trait GlbLubs { var lubListDepth = Depth.Zero // This catches some recursive situations which would otherwise // befuddle us, e.g. pos/hklub0.scala - def isHotForTs(xs: List[Type]) = ts exists (_.typeParams == xs.map(_.typeSymbol)) + def isHotForT(tyPar: Symbol, x: Type): Boolean = tyPar eq x.typeSymbol + def isHotForTs(xs: List[Type]) = ts.exists(_.typeParams.corresponds(xs)(isHotForT(_,_))) def elimHigherOrderTypeParam(tp: Type) = tp match { case TypeRef(_, _, args) if args.nonEmpty && isHotForTs(args) => From ad6b90678cc2a3338507f05735c6e50922abd284 Mon Sep 17 00:00:00 2001 From: Diego Alonso Date: Wed, 16 Jan 2019 00:00:00 +0000 Subject: [PATCH 1684/2793] Small tweak: Avoid list allocation A `map`, followed by a `distinct`, followed by a uniqueness check, is equivalent to a `forall` that compares all elements of the tail to the head. This allows us to avoid allocating lists here. --- src/reflect/scala/reflect/internal/tpe/GlbLubs.scala | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index 37de4674e9d4..3a4a07d0d6fe 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -277,12 +277,12 @@ private[internal] trait GlbLubs { // the type constructor of the calculated lub instead. This // is because lubbing type constructors tends to result in types // which have been applied to dummies or Nothing.
- ts.map(_.typeParams.size).distinct match { - case x :: Nil if res.typeParams.size != x => - logResult(s"Stripping type args from lub because $res is not consistent with $ts")(res.typeConstructor) - case _ => - res - } + val rtps = res.typeParams.size + val hs = ts.head.typeParams.size + if (hs != rtps && ts.forall(_.typeParams.size == hs)) + logResult(s"Stripping type args from lub because $res is not consistent with $ts")(res.typeConstructor) + else + res } finally { lubResults.clear() From 1da8232b99d6c6fbac3fee453272341a569b6a16 Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Mon, 31 Dec 2018 01:55:06 +0200 Subject: [PATCH 1685/2793] Don't zip AppliedTypeVar params and typeArgs Avoid unnecessary allocations. --- .../scala/reflect/internal/Types.scala | 19 ++++++++----------- 1 file changed, 8 insertions(+), 11 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 3223367c3adb..aab28ae0dfb6 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -2989,8 +2989,8 @@ trait Types else new TypeVar(origin, constr) {} } else if (args.size == params.size) { - if (untouchable) new AppliedTypeVar(origin, constr, params zip args) with UntouchableTypeVar - else new AppliedTypeVar(origin, constr, params zip args) + if (untouchable) new AppliedTypeVar(origin, constr, params, args) with UntouchableTypeVar + else new AppliedTypeVar(origin, constr, params, args) } else if (args.isEmpty) { if (untouchable) new HKTypeVar(origin, constr, params) with UntouchableTypeVar @@ -3019,20 +3019,17 @@ trait Types override def isHigherKinded = true } - /** Precondition: zipped params/args nonEmpty. (Size equivalence enforced structurally.) - */ + /** Precondition: `params.length == typeArgs.length > 0` (enforced structurally). */ class AppliedTypeVar( _origin: Type, _constr: TypeConstraint, - zippedArgs: List[(Symbol, Type)] + override val params: List[Symbol], + override val typeArgs: List[Type] ) extends TypeVar(_origin, _constr) { - - require(zippedArgs.nonEmpty, this) - - override def params: List[Symbol] = zippedArgs map (_._1) - override def typeArgs: List[Type] = zippedArgs map (_._2) - + require(params.nonEmpty && sameLength(params, typeArgs), this) override def safeToString: String = super.safeToString + typeArgs.map(_.safeToString).mkString("[", ", ", "]") + override def setInst(tp: Type): this.type = + super.setInst(if (isSubArgs(typeArgs, tp.typeArgs, params, Depth.AnyDepth)) tp.typeConstructor else NoType) } trait UntouchableTypeVar extends TypeVar { From c496e6a26080e3e57937b305c56041e88c4fee0f Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Sun, 17 Mar 2019 21:45:50 +0000 Subject: [PATCH 1686/2793] Uncurry: avoid intermediate lists, remove ParamTransform ADT: The erase method in the Uncurry file was creating several lists: - The input lists of lists `vparamss` and `dd.symbol.info.paramss` were flattened, into a List that was only used in a single loop run, with the `map2` function, to create... - ... `paramTransforms`, a list of objects of the "ParamTransform" ADT, which has the subclasses "Identity" and "Packed". - The `allParams` was mapped from the previous one, by getting only the `param`. This is returned in first element of response tuple. - A list of pairs that is obtained by a `collect` - The lists `packedParams` and `tempVals`, that are obtained by unzipping the collected list above. 
- The `packedParams` was only used to map it into the list of symbols, which is used for a symbol substitution. We rewrite this code to avoid generating so many lists: - We replace the list flattening with flattened iterators. - We use mutable ListBuffers, and use a single pass over those iterators, so in each iteration we add an element to three lists. - We generate three lists: the `allParams`, the symbols of the `packedParams` (without the packed params), and the tempVals. We cannot remove the map of tempVals to symbols, because tempVals can be needed for the output. However... - If we detect that the tree is the Empty tree, we avoid inserting the `packedParams` symbols, or the tempVals. - We remove the `ParamTransform` ADT. Also, since the `Identity` and `Packed` classes were just carriers of their fields from one part of the `erase` method to another, we can remove them and directly add the info in their fields to the lists where we need it. --- .../scala/tools/nsc/transform/UnCurry.scala | 40 ++++++++++++------- 1 file changed, 26 insertions(+), 14 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 4849d85f84cf..bd2baa102f67 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -18,6 +18,7 @@ import scala.annotation.tailrec import symtab.Flags._ import scala.collection.mutable +import scala.collection.mutable.ListBuffer import scala.reflect.internal.util.ListOfNil /* */ @@ -670,11 +671,6 @@ abstract class UnCurry extends InfoTransform * }}} */ private object dependentParamTypeErasure { - sealed abstract class ParamTransform { - def param: ValDef - } - final case class Identity(param: ValDef) extends ParamTransform - final case class Packed(param: ValDef, tempVal: ValDef) extends ParamTransform def isDependent(dd: DefDef): Boolean = enteringUncurry { @@ -687,10 +683,23 @@ abstract class UnCurry extends InfoTransform */ def erase(dd: DefDef): (List[List[ValDef]], Tree) = { import dd.{ vparamss, rhs } - val paramTransforms: List[ParamTransform] = - map2(vparamss.flatten, dd.symbol.info.paramss.flatten) { (p, infoParam) => + val (allParams, packedParamsSyms, tempVals): (List[ValDef], List[Symbol], List[ValDef]) = { + + val allParamsBuf: ListBuffer[ValDef] = ListBuffer.empty + val packedParamsSymsBuf: ListBuffer[Symbol] = ListBuffer.empty + val tempValsBuf: ListBuffer[ValDef] = ListBuffer.empty + + def addPacked(param: ValDef, tempVal: ValDef): Unit = { + allParamsBuf += param + if (rhs != EmptyTree) { + packedParamsSymsBuf += param.symbol + tempValsBuf += tempVal + } + } + + def addParamTransform(p: ValDef, infoParam: Symbol): Unit = { val packedType = infoParam.info - if (packedType =:= p.symbol.info) Identity(p) + if (packedType =:= p.symbol.info) allParamsBuf += p else { // The Uncurry info transformer existentially abstracted over value parameters // from the previous parameter lists.
@@ -746,19 +755,22 @@ abstract class UnCurry extends InfoTransform val newSym = dd.symbol.newTermSymbol(tempValName, p.pos, SYNTHETIC).setInfo(info) atPos(p.pos)(ValDef(newSym, gen.mkAttributedCast(Ident(p.symbol), info))) } - Packed(newParam, tempVal) + addPacked(newParam, tempVal) } } - val allParams = paramTransforms map (_.param) - val (packedParams, tempVals) = paramTransforms.collect { - case Packed(param, tempVal) => (param, tempVal) - }.unzip + val viter = vparamss.iterator.flatten + val piter = dd.symbol.info.paramss.iterator.flatten + while (viter.hasNext && piter.hasNext) + addParamTransform(viter.next, piter.next) + + (allParamsBuf.toList, packedParamsSymsBuf.toList, tempValsBuf.toList) + } val rhs1 = if (rhs == EmptyTree || tempVals.isEmpty) rhs else { localTyper.typedPos(rhs.pos) { // Patch the method body to refer to the temp vals - val rhsSubstituted = rhs.substituteSymbols(packedParams map (_.symbol), tempVals map (_.symbol)) + val rhsSubstituted = rhs.substituteSymbols(packedParamsSyms, tempVals.map(_.symbol)) // The new method body: { val p$1 = p.asInstanceOf[]; ...; } Block(tempVals, rhsSubstituted) } From 1d5e5648b90b65395e16548fd4c09f7e01be9bc5 Mon Sep 17 00:00:00 2001 From: Diego Alonso Date: Sat, 23 Mar 2019 03:12:42 +0000 Subject: [PATCH 1687/2793] Avoid the use of modifyInfo in maps The modifyInfo method is a modification of a mutable field in place, so the returned value is the receiver object itself. However, in some places a `map` function is used to apply a modification to all types in a list, and that allocates an entire list which is identical to the input list. In some places we have found this, we replace the map with a foreach. --- .../tools/nsc/transform/ExtensionMethods.scala | 3 ++- .../tools/nsc/typechecker/PatternTypers.scala | 3 ++- .../reflect/internal/ExistentialsAndSkolems.scala | 3 ++- src/reflect/scala/reflect/internal/Symbols.scala | 14 ++++++++++---- 4 files changed, 16 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index 1fcea0128b5d..73766c570fd6 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -175,7 +175,8 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { // need to modify the bounds of the cloned type parameters, but we // don't want to substitute for the cloned type parameters themselves. 
val tparams = tparamsFromMethod ::: tparamsFromClass - GenPolyType(tparams map (_ modifyInfo fixtparam), fixres(resultType)) + tparams foreach (_ modifyInfo fixtparam) + GenPolyType(tparams, fixres(resultType)) // For reference, calling fix on the GenPolyType plays out like this: // error: scala.reflect.internal.Types$TypeError: type arguments [B#7344,A#6966] diff --git a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala index e192bf0aa757..74f39da564fe 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatternTypers.scala @@ -266,7 +266,8 @@ trait PatternTypers { // tree1's remaining type-slack skolems will be deskolemized (to the method type parameter skolems) tree1 modifyType { case MethodType(ctorArgs, restpe) => // ctorArgs are actually in a covariant position, since this is the type of the subpatterns of the pattern represented by this Apply node - copyMethodType(tree1.tpe, ctorArgs map (_ modifyInfo extrapolate), extrapolate(restpe)) // no need to clone ctorArgs, this is OUR method type + ctorArgs foreach (_ modifyInfo extrapolate) + copyMethodType(tree1.tpe, ctorArgs, extrapolate(restpe)) // no need to clone ctorArgs, this is OUR method type case tp => tp } } diff --git a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala index 34db867060a8..5df285887582 100644 --- a/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala +++ b/src/reflect/scala/reflect/internal/ExistentialsAndSkolems.scala @@ -110,7 +110,8 @@ trait ExistentialsAndSkolems { val typeParamTypes = typeParams map (_.tpeHK) def doSubst(info: Type) = info.subst(rawSyms, typeParamTypes) - creator(typeParams map (_ modifyInfo doSubst), doSubst(tp)) + typeParams foreach (_ modifyInfo doSubst) + creator(typeParams, doSubst(tp)) } /** diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index fbe817fb9332..3341cee8aa2f 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -3688,10 +3688,16 @@ trait Symbols extends api.Symbols { self: SymbolTable => * @param infoFn the function to apply to the infos * @return the newly created, info-adjusted symbols */ - def cloneSymbolsAndModify(syms: List[Symbol], infoFn: Type => Type): List[Symbol] = - mapList(cloneSymbols(syms))(_ modifyInfo infoFn) - def cloneSymbolsAtOwnerAndModify(syms: List[Symbol], owner: Symbol, infoFn: Type => Type): List[Symbol] = - mapList(cloneSymbolsAtOwner(syms, owner))(_ modifyInfo infoFn) + def cloneSymbolsAndModify(syms: List[Symbol], infoFn: Type => Type): List[Symbol] = { + val cloned = cloneSymbols(syms) + cloned foreach (_ modifyInfo infoFn) + cloned + } + def cloneSymbolsAtOwnerAndModify(syms: List[Symbol], owner: Symbol, infoFn: Type => Type): List[Symbol] = { + val cloned = cloneSymbolsAtOwner(syms, owner) + cloned foreach (_ modifyInfo infoFn) + cloned + } /** Functions which perform the standard clone/substituting on the given symbols and type, * then call the creator function with the new symbols and type as arguments. 
From 02ea34754bcbc01fcad45bfef457ac75bf05049f Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 8 Mar 2019 16:07:04 +1000 Subject: [PATCH 1688/2793] Refactor super type handling in checkAccessible --- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 246913589723..b7b43f3225b2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -282,11 +282,11 @@ trait Infer extends Checkable { catch { case ex: MalformedType => malformed(ex, pre memberType underlyingSymbol(sym)) } ) tree setSymbol sym1 setType ( - pre match { - case _: SuperType => - if (!sym.isConstructor && !owntype.isInstanceOf[OverloadedType]) owntype // OPT: avoid lambda allocation and Type.map - else owntype map ((tp: Type) => if (tp eq pre) site.symbol.thisType else tp) - case _ => owntype + pre match { + // OPT: avoid lambda allocation and Type.map for super constructor calls + case _: SuperType if !sym.isConstructor && !owntype.isInstanceOf[OverloadedType] => + owntype map ((tp: Type) => if (tp eq pre) site.symbol.thisType else tp) + case _ => owntype } ) } From 74de660e220013d84c4b723b37d29d4df29f28e6 Mon Sep 17 00:00:00 2001 From: Diego Alonso Date: Sat, 23 Mar 2019 19:57:54 +0000 Subject: [PATCH 1689/2793] Merge two calls to List.map into a single call The `pts` variable here was only used for passing it to another map, so we can merge them into a single map composing their operations. This would save the allocation of `pts`. --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 5cacbf53da9f..93812b1a8f0a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3571,9 +3571,8 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper // a fictitious new cloned method symbol for each call site that takes on a signature // governed by a) the argument types and b) the expected type val args1 = typedArgs(args, forArgMode(fun, mode)) - val pts = args1.map(_.tpe.deconst) val clone = fun.symbol.cloneSymbol.withoutAnnotations - val cloneParams = pts map (pt => clone.newValueParameter(freshTermName()).setInfo(pt)) + val cloneParams = args1.map(arg => clone.newValueParameter(freshTermName()).setInfo(arg.tpe.deconst)) val resultType = if (isFullyDefined(pt)) pt else ObjectTpe clone.modifyInfo(mt => copyMethodType(mt, cloneParams, resultType)) val fun1 = fun.setSymbol(clone).setType(clone.info) From bd7c8bb231f1bae416160dc6d30c3d8304a0e6dc Mon Sep 17 00:00:00 2001 From: Diego Alonso Date: Tue, 26 Mar 2019 00:16:32 +0000 Subject: [PATCH 1690/2793] JavaMirrors: avoid the use of List flatten and map In the JavaMirrors trait, in the function "mkMethodMirror", the code was creating several unnecessary lists. - We replace a call to `List.flatten.length` with a call to the `sumSize` method from the collections utils. - We replace a `map` followed by an `exists` by merging the function of the `map` into the `exists`. - We replace a call to `flatten` followed by an `exists` by a call to `mexists`. 
This should, in total, prevent the creation of 5*N allocations, with N being the sum of the length of all the tparams. --- .../scala/reflect/runtime/JavaMirrors.scala | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 0160578c0119..59f6005261e7 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -326,12 +326,13 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive // that's because we want to have decent performance // therefore we move special cases into separate subclasses // rather than have them on a hot path them in a unified implementation of the `apply` method - private def mkMethodMirror[T: ClassTag](receiver: T, symbol: MethodSymbol): MethodMirror = { - def existsParam(pred: Type => Boolean) = symbol.paramss.flatten.map(_.info).exists(pred) - if (isBytecodelessMethod(symbol)) new BytecodelessMethodMirror(receiver, symbol) - else if (existsParam(isByNameParam) || existsParam(isValueClassParam)) new JavaTransformingMethodMirror(receiver, symbol) - else { - symbol.paramss.flatten.length match { + private def mkMethodMirror[T: ClassTag](receiver: T, symbol: MethodSymbol): MethodMirror = + if (isBytecodelessMethod(symbol)) + new BytecodelessMethodMirror(receiver, symbol) + else if (mexists(symbol.paramss)(p => isByNameParam(p.info) || isValueClassParam(p.info))) + new JavaTransformingMethodMirror(receiver, symbol) + else + sumSize(symbol.paramss, 0) match { case 0 => new JavaVanillaMethodMirror0(receiver, symbol) case 1 => new JavaVanillaMethodMirror1(receiver, symbol) case 2 => new JavaVanillaMethodMirror2(receiver, symbol) @@ -339,8 +340,6 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive case 4 => new JavaVanillaMethodMirror4(receiver, symbol) case _ => new JavaVanillaMethodMirror(receiver, symbol) } - } - } private abstract class JavaMethodMirror(val symbol: MethodSymbol, protected val ret: DerivedValueClassMetadata) extends MethodMirror { lazy val jmeth = ensureAccessible(methodToJava(symbol)) From 64b70427ac6a65fdebaff4d41cc3629085b32d07 Mon Sep 17 00:00:00 2001 From: Diego Alonso Date: Tue, 26 Mar 2019 01:29:38 +0000 Subject: [PATCH 1691/2793] Namers: some small optimisations to avoid allocations In the Namers file, we apply some of the optimisations intended to avoid list allocations: - We merge a `map` followed by a contains into an exists, - We replace some calls to `map2` whose result is not used by calls to `foreach`, which avoids allocations. 
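A small self-contained sketch of the two patterns, using a local `foreach2` and a hypothetical `TParam` case class as stand-ins for the compiler's internal `foreach2`/`map2` helpers and the real `TypeDef` trees:

```
object NamersSketch {
  final case class TParam(name: String, info: String)

  // Pattern 1: mapping out the names just to call `contains` allocates a
  // throwaway list; an `exists` over the originals performs one scan, no list.
  def mentionsBefore(defTparams: List[TParam], x: String): Boolean =
    (defTparams map (_.name)) contains x

  def mentionsAfter(defTparams: List[TParam], x: String): Boolean =
    defTparams exists (_.name == x)

  // Pattern 2: when the result of a pairwise map2 is discarded, a pairwise
  // foreach does the same work without building the result list.
  def foreach2[A, B](xs: List[A], ys: List[B])(f: (A, B) => Unit): Unit = {
    var as = xs; var bs = ys
    while (as.nonEmpty && bs.nonEmpty) {
      f(as.head, bs.head)
      as = as.tail; bs = bs.tail
    }
  }

  def main(args: Array[String]): Unit = {
    val tps = List(TParam("A", "Any"), TParam("B", "AnyRef"))
    println(mentionsBefore(tps, "B"))                       // true
    println(mentionsAfter(tps, "B"))                        // true
    foreach2(List(1, 2, 3), List("a", "b", "c"))((i, s) => println(s"$i$s"))
  }
}
```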
--- src/compiler/scala/tools/nsc/typechecker/Namers.scala | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 685f169395a2..74db109014c0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1534,7 +1534,7 @@ trait Namers extends MethodSynthesis { assert(!overrides || vparams.length == baseParamss.head.length, ""+ meth.fullName + ", "+ overridden.fullName) val rvparams = rvparamss(previous.length) var baseParams = if (overrides) baseParamss.head else Nil - map2(vparams, rvparams)((vparam, rvparam) => { + foreach2(vparams, rvparams){ (vparam, rvparam) => val sym = vparam.symbol // true if the corresponding parameter of the base class has a default argument val baseHasDefault = overrides && baseParams.head.hasDefault @@ -1576,8 +1576,7 @@ trait Namers extends MethodSynthesis { // if we use Wildcard as expected, we get "Nothing => Nothing", and the default is not usable. // TODO: this is a very brittle approach; I sincerely hope that Denys's research into hygiene // will open the doors to a much better way of doing this kind of stuff - val tparamNames = defTparams map { case TypeDef(_, name, _, _) => name } - val eraseAllMentionsOfTparams = new TypeTreeSubstituter(tparamNames contains _) + val eraseAllMentionsOfTparams = new TypeTreeSubstituter(x => defTparams.exists(_.name == x)) eraseAllMentionsOfTparams(rvparam.tpt match { // default getter for by-name params case AppliedTypeTree(_, List(arg)) if sym.hasFlag(BYNAMEPARAM) => arg @@ -1607,7 +1606,7 @@ trait Namers extends MethodSynthesis { } posCounter += 1 if (overrides) baseParams = baseParams.tail - }) + } if (overrides) baseParamss = baseParamss.tail previous :+ vparams } @@ -2070,7 +2069,7 @@ trait Namers extends MethodSynthesis { if (defnSym.isTerm) { // for polymorphic DefDefs, create type skolems and assign them to the tparam trees. val skolems = deriveFreshSkolems(tparams map (_.symbol)) - map2(tparams, skolems)(_ setSymbol _) + foreach2(tparams, skolems)(_ setSymbol _) } def completeImpl(sym: Symbol) = { From 2ac1be7abf3e8d81e47e2a5d89252d543b461f87 Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Thu, 28 Jun 2018 11:38:50 +0200 Subject: [PATCH 1692/2793] [backport] Avoid using bootstrap JARs in partest Build speclib/instrumented.jar for partest - Build automatically from sources instead of fetching a pre-built binary which can get out of date and needs to be updated manually - Remove outdated build script (from the ant days) (cherry picked from commit 4070df494d749feb7e796750871b2814c46b4ccd) Modify tests that used JARs to use sources A handful of tests use JARs from the bootstrap repository. I don't believe we have a good reason to do this anymore: - we now have multi-round partest support, so we can exercise the classfile parser, even for test cases that include .java sources - we can and should whittle down minimal test cases instead of using a large third party JAR like jsoup. I've wanted to clean this up for a while. Our recent and ongoing infrastructure problems with this Bintray-hosted repository have spurred me into action.
(cherry picked from commit 01d6b847172b662fe6ad8a8b70948162cc4f1b88) --- build.sbt | 66 +- project/VersionUtil.scala | 25 +- project/plugins.sbt | 3 +- test/files/jvm/annotations.check | 40 +- .../jvm/annotations/SourceAnnotation_1.java | 12 + .../Test_2.scala} | 38 +- .../files/jvm/genericNest/OuterTParams_1.java | 7 + .../Test_2.scala} | 3 +- test/files/jvm/methvsfield.java | 11 - .../Test_2.scala} | 2 +- test/files/jvm/methvsfield/methvsfield.java | 9 + .../jvm/{nest.scala => nest/Test_2.scala} | 13 +- test/files/jvm/nest/nest_1.java | 42 + test/files/jvm/outerEnum/OuterEnum_1.java | 7 + .../Test_2.scala} | 2 +- test/files/jvm/t0014.check | 2 +- test/files/jvm/t0014.scala | 5 - test/files/jvm/t0014/NestedAnnotations_1.java | 24 + test/files/jvm/t0014/Test_2.scala | 5 + .../macro-incompatible-macro-engine-c.check | 4 - .../macro-incompatible-macro-engine-c.scala | 3 - test/files/pos/cycle-jsoup.scala | 5 - test/files/pos/cycle-jsoup/Jsoup_1.java | 24 + test/files/pos/cycle-jsoup/Test_2.scala | 5 + test/instrumented/boxes.patch | 83 +- .../library/scala/runtime/BoxesRunTime.java | 843 ------------------ .../library/scala/runtime/ScalaRunTime.scala | 270 ------ test/instrumented/mkinstrumented.sh | 51 -- test/instrumented/srt.patch | 32 +- 29 files changed, 313 insertions(+), 1323 deletions(-) create mode 100644 test/files/jvm/annotations/SourceAnnotation_1.java rename test/files/jvm/{annotations.scala => annotations/Test_2.scala} (84%) create mode 100644 test/files/jvm/genericNest/OuterTParams_1.java rename test/files/jvm/{genericNest.scala => genericNest/Test_2.scala} (78%) delete mode 100644 test/files/jvm/methvsfield.java rename test/files/jvm/{methvsfield.scala => methvsfield/Test_2.scala} (52%) create mode 100644 test/files/jvm/methvsfield/methvsfield.java rename test/files/jvm/{nest.scala => nest/Test_2.scala} (62%) create mode 100644 test/files/jvm/nest/nest_1.java create mode 100644 test/files/jvm/outerEnum/OuterEnum_1.java rename test/files/jvm/{outerEnum.scala => outerEnum/Test_2.scala} (61%) delete mode 100644 test/files/jvm/t0014.scala create mode 100644 test/files/jvm/t0014/NestedAnnotations_1.java create mode 100644 test/files/jvm/t0014/Test_2.scala delete mode 100644 test/files/neg/macro-incompatible-macro-engine-c.check delete mode 100644 test/files/neg/macro-incompatible-macro-engine-c.scala delete mode 100644 test/files/pos/cycle-jsoup.scala create mode 100644 test/files/pos/cycle-jsoup/Jsoup_1.java create mode 100644 test/files/pos/cycle-jsoup/Test_2.scala delete mode 100644 test/instrumented/library/scala/runtime/BoxesRunTime.java delete mode 100644 test/instrumented/library/scala/runtime/ScalaRunTime.scala delete mode 100755 test/instrumented/mkinstrumented.sh diff --git a/build.sbt b/build.sbt index 96bc3fd0cc46..0651a09e3758 100644 --- a/build.sbt +++ b/build.sbt @@ -55,19 +55,6 @@ val asmDep = "org.scala-lang.modules" % "scala-asm" % versionPr val jlineDep = "jline" % "jline" % versionProps("jline.version") val antDep = "org.apache.ant" % "ant" % "1.9.4" -val partestDependencies = Seq( - "annotations" -> "02fe2ed93766323a13f22c7a7e2ecdcd84259b6c", - "enums" -> "981392dbd1f727b152cd1c908c5fce60ad9d07f7", - "genericNest" -> "b1ec8a095cec4902b3609d74d274c04365c59c04", - "jsoup-1.3.1" -> "346d3dff4088839d6b4d163efa2892124039d216", - "macro210" -> "3794ec22d9b27f2b179bd34e9b46db771b934ec3", - "methvsfield" -> "be8454d5e7751b063ade201c225dcedefd252775", - "nest" -> "cd33e0a0ea249eb42363a2f8ba531186345ff68c" -).map(bootstrapDep("test/files/lib")) ++ Seq( - 
bootstrapDep("test/files/codelib")("code" -> "e737b123d31eede5594ceda07caafed1673ec472") % "test", - bootstrapDep("test/files/speclib")("instrumented" -> "1b11ac773055c1e942c6b5eb4aabdf02292a7194") % "test" -) - /** Publish to ./dists/maven-sbt, similar to the Ant build which publishes to ./dists/maven. This * can be used to compare the output of the sbt and Ant builds during the transition period. Any * real publishing should be done with sbt's standard `publish` task. */ @@ -641,6 +628,48 @@ lazy val partestExtras = Project("partest-extras", file(".") / "src" / "partest- unmanagedSourceDirectories in Compile := List(baseDirectory.value) ) +// An instrumented version of BoxesRunTime and ScalaRunTime for partest's "specialized" test category +lazy val specLib = project.in(file("test") / "instrumented") + .dependsOn(library, reflect, compiler) + .settings(clearSourceAndResourceDirectories) + .settings(commonSettings) + .settings(disableDocs) + .settings(disablePublishing) + .settings( + sourceGenerators in Compile += Def.task { + import scala.collection.JavaConverters._ + val srcBase = (sourceDirectories in Compile in library).value.head / "scala/runtime" + val targetBase = (sourceManaged in Compile).value / "scala/runtime" + def patch(srcFile: String, patchFile: String): File = try { + val patchLines: List[String] = IO.readLines(baseDirectory.value / patchFile) + val origLines: List[String] = IO.readLines(srcBase / srcFile) + import difflib.DiffUtils + val p = DiffUtils.parseUnifiedDiff(patchLines.asJava) + val r = DiffUtils.patch(origLines.asJava, p) + val target = targetBase / srcFile + val patched = r.asScala.toList + IO.writeLines(target, patched) + if (patched == origLines) { + println(p) + println(patchLines.mkString("\n")) + println(origLines.mkString("\n")) + throw new RuntimeException("Patch did not apply any changes! 
" + baseDirectory.value / patchFile + " / " + (srcBase / srcFile)) + } + + target + } catch { case ex: Exception => + streams.value.log.error(s"Error patching $srcFile: $ex") + throw ex + } + IO.createDirectory(targetBase) + Seq( + patch("BoxesRunTime.java", "boxes.patch"), + patch("ScalaRunTime.scala", "srt.patch") + ) + }.taskValue + ) + + lazy val junit = project.in(file("test") / "junit") .dependsOn(library, reflect, compiler, partestExtras, scaladoc) .settings(clearSourceAndResourceDirectories) @@ -760,7 +789,6 @@ lazy val test = project .settings(Defaults.itSettings) .settings( libraryDependencies ++= Seq(asmDep, partestDep, scalaXmlDep), - libraryDependencies ++= partestDependencies, // no main sources sources in Compile := Seq.empty, // test sources are compiled in partest run, not here @@ -773,12 +801,14 @@ lazy val test = project testFrameworks += new TestFramework("scala.tools.partest.sbt.Framework"), testOptions in IntegrationTest += Tests.Argument("-Dpartest.java_opts=-Xmx1024M -Xms64M"), testOptions in IntegrationTest += Tests.Argument("-Dpartest.scalac_opts=" + (scalacOptions in Compile).value.mkString(" ")), - testOptions in IntegrationTest += Tests.Setup { () => + testOptions in IntegrationTest += { val cp = (dependencyClasspath in Test).value val baseDir = (baseDirectory in ThisBuild).value - // Copy code.jar and instrumented.jar (resolved in the otherwise unused scope "test") to the location where partest expects them - copyBootstrapJar(cp, baseDir, "test/files/codelib", "code") - copyBootstrapJar(cp, baseDir, "test/files/speclib", "instrumented") + val instrumentedJar = (packagedArtifact in (LocalProject("specLib"), Compile, packageBin)).value._2 + Tests.Setup { () => + // Copy instrumented.jar (from specLib)to the location where partest expects it. + IO.copyFile(instrumentedJar, baseDir / "test/files/speclib/instrumented.jar") + } }, definedTests in IntegrationTest += new sbt.TestDefinition( "partest", diff --git a/project/VersionUtil.scala b/project/VersionUtil.scala index dd8e18dd8c16..9952961ea971 100644 --- a/project/VersionUtil.scala +++ b/project/VersionUtil.scala @@ -29,10 +29,10 @@ object VersionUtil { lazy val generatePropertiesFileSettings = Seq[Setting[_]]( copyrightString := "Copyright 2002-2019, LAMP/EPFL and Lightbend, Inc.", shellWelcomeString := """ - | ________ ___ / / ___ - | / __/ __// _ | / / / _ | + | ________ ___ / / ___ + | / __/ __// _ | / / / _ | | __\ \/ /__/ __ |/ /__/ __ | - | /____/\___/_/ |_/____/_/ | | + | /____/\___/_/ |_/____/_/ | | | |/ %s""".stripMargin.lines.drop(1).map(s => s"${ "%n" }${ s }").mkString, resourceGenerators in Compile += generateVersionPropertiesFile.map(file => Seq(file)).taskValue, generateVersionPropertiesFile := generateVersionPropertiesFileImpl.value @@ -191,23 +191,4 @@ object VersionUtil { // exclusion of the scala-library transitive dependency avoids eviction warnings during `update`: m2.exclude("org.scala-lang", "*") } - - private def bootstrapOrganization(path: String) = - "org.scala-lang.scala-sha-bootstrap." 
+ path.replace('/', '.') - - /** Build a dependency to a JAR file in the bootstrap repository */ - def bootstrapDep(path: String)(libNameAndSha: (String, String)): ModuleID = - bootstrapOrganization(path) % libNameAndSha._1 % libNameAndSha._2 from - s"https://repo.lightbend.com/typesafe/scala-sha-bootstrap/org/scala-lang/bootstrap/${libNameAndSha._2}/$path/${libNameAndSha._1}.jar" - - /** Copy a bootstrap dependency JAR that is on the classpath to a file */ - def copyBootstrapJar(cp: Seq[Attributed[File]], baseDir: File, path: String, libName: String): Unit = { - val org = bootstrapOrganization(path) - val resolved = cp.find { a => - val mod = a.get(moduleID.key) - mod.map(_.organization) == Some(org) && mod.map(_.name) == Some(libName) - }.map(_.data).get - if(!(baseDir / path).exists()) IO.createDirectory(baseDir / path) - IO.copyFile(resolved, baseDir / path / s"$libName.jar") - } } diff --git a/project/plugins.sbt b/project/plugins.sbt index d4461ac6ce88..96f27899ff8f 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -23,7 +23,8 @@ addSbtPlugin("com.typesafe" % "sbt-mima-plugin" % "0.1.18") libraryDependencies ++= Seq( "org.eclipse.jgit" % "org.eclipse.jgit" % "4.6.0.201612231935-r", - "org.slf4j" % "slf4j-nop" % "1.7.23" + "org.slf4j" % "slf4j-nop" % "1.7.23", + "com.googlecode.java-diff-utils" % "diffutils" % "1.3.0" ) concurrentRestrictions in Global := Seq( diff --git a/test/files/jvm/annotations.check b/test/files/jvm/annotations.check index 43f85ca199cb..d0e36da050ac 100644 --- a/test/files/jvm/annotations.check +++ b/test/files/jvm/annotations.check @@ -1,64 +1,64 @@ -annotations.scala:7: warning: class remote in package scala is deprecated (since 2.12.0): extend java.rmi.Remote instead and add @throws[java.rmi.RemoteException] to public methods +Test_2.scala:7: warning: class remote in package scala is deprecated (since 2.12.0): extend java.rmi.Remote instead and add @throws[java.rmi.RemoteException] to public methods def foo: Unit = () ^ class java.rmi.RemoteException class java.io.IOException @java.lang.Deprecated() -@test.SourceAnnotation(mails={scala@lists.epfl.ch,scala-lounge@lists.epfl.ch}, value=http://scala-lang.org) +@test.SourceAnnotation_1(mails={scala@lists.epfl.ch,scala-lounge@lists.epfl.ch}, value=http://scala-lang.org) class Test4$Foo1 -@test.SourceAnnotation(mails={you@bloodsuckers.com}, value=http://bloodsuckers.com) +@test.SourceAnnotation_1(mails={you@bloodsuckers.com}, value=http://bloodsuckers.com) class Test4$Foo2 -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://bloodsuckers.com) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://bloodsuckers.com) class Test4$Foo3 -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=file:///dev/null) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=file:///dev/null) private final int Test4$Foo4.x -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=file:///dev/zero) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=file:///dev/zero) public int Test4$Foo5.bar() -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=primary constructor) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=primary constructor) public Test4$Foo6(java.lang.String) -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=secondary constructor) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=secondary constructor) public Test4$Foo7() 
-@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=constructor val) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=constructor val) public Test4$Foo8(int) -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) private int Test4$Foo9.z -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) private int Test4$Foo9.z2 -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) private int Test4$Foo9.z3 -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) public int Test4$Foo9.getZ() -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) public int Test4$Foo9.getZ2() -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://eppli.com) public int Test4$Foo9.getZ3() -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://apple.com) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://apple.com) public int Test4$Foo9.x() -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=http://uppla.com) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=http://uppla.com) public void Test4$Foo9.setY(int) -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=on param 1) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=on param 1) public Test4$Foo10(java.lang.String) -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=on param 2) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=on param 2) private final java.lang.String Test4$Foo11.name -@test.SourceAnnotation(mails={bill.gates@bloodsuckers.com}, value=on param 3) +@test.SourceAnnotation_1(mails={bill.gates@bloodsuckers.com}, value=on param 3) public void Test4$Foo12.name_$eq(java.lang.String) 0 diff --git a/test/files/jvm/annotations/SourceAnnotation_1.java b/test/files/jvm/annotations/SourceAnnotation_1.java new file mode 100644 index 000000000000..dc9629a0262e --- /dev/null +++ b/test/files/jvm/annotations/SourceAnnotation_1.java @@ -0,0 +1,12 @@ +package test; + +import java.lang.annotation.Annotation; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; + +@Retention(value=RetentionPolicy.RUNTIME) +public @interface SourceAnnotation_1 { + public String value(); + + public String[] mails() default {"bill.gates@bloodsuckers.com"}; +} diff --git a/test/files/jvm/annotations.scala b/test/files/jvm/annotations/Test_2.scala similarity index 84% rename from test/files/jvm/annotations.scala rename to test/files/jvm/annotations/Test_2.scala index c42eceef4cab..dd3e4fd5f88f 100644 --- a/test/files/jvm/annotations.scala +++ b/test/files/jvm/annotations/Test_2.scala @@ -69,57 +69,57 @@ public class Main { } */ object Test4 { - import test.SourceAnnotation // defined in SourceAnnotation.java - @SourceAnnotation(value = "http://scala-lang.org", + import test.SourceAnnotation_1 + 
@SourceAnnotation_1(value = "http://scala-lang.org", mails = Array("scala@lists.epfl.ch", "scala-lounge@lists.epfl.ch")) class Foo1 - @SourceAnnotation(value = "http://bloodsuckers.com", + @SourceAnnotation_1(value = "http://bloodsuckers.com", mails = Array("you@bloodsuckers.com")) class Foo2 - @SourceAnnotation("http://bloodsuckers.com") + @SourceAnnotation_1("http://bloodsuckers.com") class Foo3 class Foo4 { - @SourceAnnotation("file:///dev/null") + @SourceAnnotation_1("file:///dev/null") val x = 1 } class Foo5 { - @SourceAnnotation("file:///dev/zero") + @SourceAnnotation_1("file:///dev/zero") def bar: Int = 0 } - class Foo6 @SourceAnnotation("primary constructor") (s: String) { + class Foo6 @SourceAnnotation_1("primary constructor") (s: String) { // to guarantee that primary constructor annotations // are not applied to secondary constructors def this() = this("") } class Foo7(s: String) { - @SourceAnnotation("secondary constructor") + @SourceAnnotation_1("secondary constructor") def this() = this("") } - class Foo8(@SourceAnnotation("constructor val") val n: Int) {} + class Foo8(@SourceAnnotation_1("constructor val") val n: Int) {} class Foo9 { import scala.annotation.meta._ import scala.beans.BeanProperty - @(SourceAnnotation @getter)("http://apple.com") val x = 0 - @BeanProperty @(SourceAnnotation @beanSetter)("http://uppla.com") var y = 0 + @(SourceAnnotation_1 @getter)("http://apple.com") val x = 0 + @BeanProperty @(SourceAnnotation_1 @beanSetter)("http://uppla.com") var y = 0 - type myAnn = SourceAnnotation @beanGetter @field + type myAnn = SourceAnnotation_1 @beanGetter @field @BeanProperty @myAnn("http://eppli.com") var z = 0 - type myAnn2[T] = SourceAnnotation @beanGetter @field + type myAnn2[T] = SourceAnnotation_1 @beanGetter @field @BeanProperty @myAnn2[String]("http://eppli.com") var z2 = 0 - type myAnn3[CC[_]] = SourceAnnotation @beanGetter @field + type myAnn3[CC[_]] = SourceAnnotation_1 @beanGetter @field @BeanProperty @myAnn3[List]("http://eppli.com") var z3 = 0 } - class Foo10(@SourceAnnotation("on param 1") val name: String) - class Foo11(@(SourceAnnotation @scala.annotation.meta.field)("on param 2") val name: String) - class Foo12(@(SourceAnnotation @scala.annotation.meta.setter)("on param 3") var name: String) + class Foo10(@SourceAnnotation_1("on param 1") val name: String) + class Foo11(@(SourceAnnotation_1 @scala.annotation.meta.field)("on param 2") val name: String) + class Foo12(@(SourceAnnotation_1 @scala.annotation.meta.setter)("on param 3") var name: String) def run { import java.lang.annotation.Annotation import java.lang.reflect.AnnotatedElement def printSourceAnnotation(a: Annotation) { - val ann = a.asInstanceOf[SourceAnnotation] - println("@test.SourceAnnotation(mails=" + ann.mails.deep.mkString("{", ",", "}") + + val ann = a.asInstanceOf[SourceAnnotation_1] + println("@test.SourceAnnotation_1(mails=" + ann.mails.deep.mkString("{", ",", "}") + ", value=" + ann.value + ")") } def printSourceAnnotations(target: AnnotatedElement) { diff --git a/test/files/jvm/genericNest/OuterTParams_1.java b/test/files/jvm/genericNest/OuterTParams_1.java new file mode 100644 index 000000000000..28eaf6d9ba40 --- /dev/null +++ b/test/files/jvm/genericNest/OuterTParams_1.java @@ -0,0 +1,7 @@ +public class OuterTParams_1 { + class InnerClass { + public A method() { + return null; + } + } +} diff --git a/test/files/jvm/genericNest.scala b/test/files/jvm/genericNest/Test_2.scala similarity index 78% rename from test/files/jvm/genericNest.scala rename to 
test/files/jvm/genericNest/Test_2.scala index f82f198ffd72..5aef15aefe22 100644 --- a/test/files/jvm/genericNest.scala +++ b/test/files/jvm/genericNest/Test_2.scala @@ -1,8 +1,7 @@ /** found in genericNest.jar, compiled from OuterTParams.java */ -import nestpkg._; // bug #695 -object ForceParse extends OuterTParams[AnyRef] { +object ForceParse extends OuterTParams_1[AnyRef] { // Force import of HarderToParse.InnerClass, // which has confusing method signature. var field: InnerClass = null diff --git a/test/files/jvm/methvsfield.java b/test/files/jvm/methvsfield.java deleted file mode 100644 index dadc98669ad5..000000000000 --- a/test/files/jvm/methvsfield.java +++ /dev/null @@ -1,11 +0,0 @@ -// This should be compiled with javac and saved -// in ../lib/methvsfield.jar . -class MethVsField -{ - int three = 3; - - int three() - { - return 3; - } -} diff --git a/test/files/jvm/methvsfield.scala b/test/files/jvm/methvsfield/Test_2.scala similarity index 52% rename from test/files/jvm/methvsfield.scala rename to test/files/jvm/methvsfield/Test_2.scala index 9b7c56591c01..5389836be277 100644 --- a/test/files/jvm/methvsfield.scala +++ b/test/files/jvm/methvsfield/Test_2.scala @@ -1,4 +1,4 @@ // bug #1062 object Test extends App { - println((new MethVsField).three) + println((new MethVsField_1).three) } diff --git a/test/files/jvm/methvsfield/methvsfield.java b/test/files/jvm/methvsfield/methvsfield.java new file mode 100644 index 000000000000..359dbfada1a2 --- /dev/null +++ b/test/files/jvm/methvsfield/methvsfield.java @@ -0,0 +1,9 @@ +class MethVsField_1 +{ + int three = 3; + + int three() + { + return 3; + } +} diff --git a/test/files/jvm/nest.scala b/test/files/jvm/nest/Test_2.scala similarity index 62% rename from test/files/jvm/nest.scala rename to test/files/jvm/nest/Test_2.scala index 45745f570090..ebffba77924b 100644 --- a/test/files/jvm/nest.scala +++ b/test/files/jvm/nest/Test_2.scala @@ -2,20 +2,19 @@ // Test Scala interaction with Java nested classes and static members. 
//############################################################################ -/** found in nest.jar, compiled from nest.java */ import nestpkg._; object Test extends App { - val x = nest.best.rest.test + val x = nest_1.best.rest.test Console.println(x.inc(1)) - val o = new nest.best; - val r = new nest.best.rest; - Console.println(nest.best.rest.test.inc(2)) - Console.println(nest.best.rest.x) + val o = new nest_1.best; + val r = new nest_1.best.rest; + Console.println(nest_1.best.rest.test.inc(2)) + Console.println(nest_1.best.rest.x) print("Instantiating public inner class: ") - val outer = new nest + val outer = new nest_1 val inn = new outer.Inn(42) inn.doSomething } diff --git a/test/files/jvm/nest/nest_1.java b/test/files/jvm/nest/nest_1.java new file mode 100644 index 000000000000..b4d95a2b7272 --- /dev/null +++ b/test/files/jvm/nest/nest_1.java @@ -0,0 +1,42 @@ +package nestpkg; + +import java.io.PrintStream; + +public class nest_1 { + String name = "Outer name"; + + protected class ProtInn { + protected ProtInn() { + } + + public void doSomething() { + System.out.println("ProtInn " + nest_1.this.name); + } + } + + public class Inn { + int x; + + public Inn(int n) { + this.x = n; + } + + public void doSomething() { + System.out.println("Inn " + nest_1.this.name + " x: " + this.x); + } + } + + public static class best { + + public static class rest { + public static rest test = new rest(); + public static int x = 10; + + public int inc(int n) { + return n + 1; + } + } + + } + +} \ No newline at end of file diff --git a/test/files/jvm/outerEnum/OuterEnum_1.java b/test/files/jvm/outerEnum/OuterEnum_1.java new file mode 100644 index 000000000000..a556df22f8bd --- /dev/null +++ b/test/files/jvm/outerEnum/OuterEnum_1.java @@ -0,0 +1,7 @@ +package enums; + +public class OuterEnum_1 { + public static enum Foo { + Bar; + } +} diff --git a/test/files/jvm/outerEnum.scala b/test/files/jvm/outerEnum/Test_2.scala similarity index 61% rename from test/files/jvm/outerEnum.scala rename to test/files/jvm/outerEnum/Test_2.scala index 278cef314122..3649813a3ecc 100644 --- a/test/files/jvm/outerEnum.scala +++ b/test/files/jvm/outerEnum/Test_2.scala @@ -2,7 +2,7 @@ import enums._ object Test extends App { def foo { - val res: OuterEnum.Foo = OuterEnum.Foo.Bar + val res: OuterEnum_1.Foo = OuterEnum_1.Foo.Bar println(res) } foo diff --git a/test/files/jvm/t0014.check b/test/files/jvm/t0014.check index bece7db7af3c..7f60ba66f660 100644 --- a/test/files/jvm/t0014.check +++ b/test/files/jvm/t0014.check @@ -1 +1 @@ -test.NestedAnnotations +test.NestedAnnotations_1 diff --git a/test/files/jvm/t0014.scala b/test/files/jvm/t0014.scala deleted file mode 100644 index a1948702dc0f..000000000000 --- a/test/files/jvm/t0014.scala +++ /dev/null @@ -1,5 +0,0 @@ -object Test { - def main(args: Array[String]) { - println(classOf[test.NestedAnnotations].getName) - } -} diff --git a/test/files/jvm/t0014/NestedAnnotations_1.java b/test/files/jvm/t0014/NestedAnnotations_1.java new file mode 100644 index 000000000000..47e28c507023 --- /dev/null +++ b/test/files/jvm/t0014/NestedAnnotations_1.java @@ -0,0 +1,24 @@ +package test; + +import java.lang.annotation.Annotation; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +public class NestedAnnotations_1 { + @OuterAnno(inner=@InnerAnno(name="inner")) + String field; + + @Target(value={ElementType.FIELD}) + @Retention(value=RetentionPolicy.RUNTIME) + public static 
@interface OuterAnno { + public InnerAnno inner(); + } + + @Target(value={ElementType.FIELD}) + @Retention(value=RetentionPolicy.RUNTIME) + public static @interface InnerAnno { + public String name(); + } +} \ No newline at end of file diff --git a/test/files/jvm/t0014/Test_2.scala b/test/files/jvm/t0014/Test_2.scala new file mode 100644 index 000000000000..1ab68cb6feda --- /dev/null +++ b/test/files/jvm/t0014/Test_2.scala @@ -0,0 +1,5 @@ +object Test { + def main(args: Array[String]) { + println(classOf[test.NestedAnnotations_1].getName) + } +} diff --git a/test/files/neg/macro-incompatible-macro-engine-c.check b/test/files/neg/macro-incompatible-macro-engine-c.check deleted file mode 100644 index fb6c59ab7c7e..000000000000 --- a/test/files/neg/macro-incompatible-macro-engine-c.check +++ /dev/null @@ -1,4 +0,0 @@ -macro-incompatible-macro-engine-c.scala:2: error: can't expand macros compiled by previous versions of Scala - MacroLibCompiledByScala210x.foo - ^ -one error found diff --git a/test/files/neg/macro-incompatible-macro-engine-c.scala b/test/files/neg/macro-incompatible-macro-engine-c.scala deleted file mode 100644 index 037ac5f45605..000000000000 --- a/test/files/neg/macro-incompatible-macro-engine-c.scala +++ /dev/null @@ -1,3 +0,0 @@ -object Test extends App { - MacroLibCompiledByScala210x.foo -} \ No newline at end of file diff --git a/test/files/pos/cycle-jsoup.scala b/test/files/pos/cycle-jsoup.scala deleted file mode 100644 index 879e693537c0..000000000000 --- a/test/files/pos/cycle-jsoup.scala +++ /dev/null @@ -1,5 +0,0 @@ -object Test { - def main(args : Array[String]) { - org.jsoup.Jsoup.parse(null: java.net.URL, 3000) - } -} diff --git a/test/files/pos/cycle-jsoup/Jsoup_1.java b/test/files/pos/cycle-jsoup/Jsoup_1.java new file mode 100644 index 000000000000..2d53f9af58da --- /dev/null +++ b/test/files/pos/cycle-jsoup/Jsoup_1.java @@ -0,0 +1,24 @@ +package org.jsoup; + +import java.net.URL; + +public class Jsoup_1 { + public static Document parse(URL url, int timeoutMillis) { + return null; + } + + public static class Document extends Element { + public class OutputSettings { + } + } + + public static class Element extends Node { + void outerHtmlTail(StringBuilder accum, int depth, Document.OutputSettings out) { + } + } + + public static class Node { + protected void indent(StringBuilder accum, int depth, Document.OutputSettings out) { + } + } +} diff --git a/test/files/pos/cycle-jsoup/Test_2.scala b/test/files/pos/cycle-jsoup/Test_2.scala new file mode 100644 index 000000000000..f60c50f74345 --- /dev/null +++ b/test/files/pos/cycle-jsoup/Test_2.scala @@ -0,0 +1,5 @@ +object Test { + def main(args : Array[String]): Unit = { + org.jsoup.Jsoup_1.parse(null: java.net.URL, 3000) + } +} diff --git a/test/instrumented/boxes.patch b/test/instrumented/boxes.patch index 2bb324322185..9a0d107058d4 100644 --- a/test/instrumented/boxes.patch +++ b/test/instrumented/boxes.patch @@ -1,29 +1,54 @@ -9c9 -< ---- -> /* INSTRUMENTED VERSION */ -51a52,59 -> public static int booleanBoxCount = 0; -> public static int characterBoxCount = 0; -> public static int byteBoxCount = 0; -> public static int shortBoxCount = 0; -> public static int integerBoxCount = 0; -> public static int longBoxCount = 0; -> public static int floatBoxCount = 0; -> public static int doubleBoxCount = 0; -53a62 -> booleanBoxCount += 1; -57a67 -> characterBoxCount += 1; -61a72 -> byteBoxCount += 1; -65a77 -> shortBoxCount += 1; -69a82 -> integerBoxCount += 1; -73a87 -> longBoxCount += 1; -77a92 -> floatBoxCount += 1; 
-83a99 -> doubleBoxCount += 1; +$ diff -U1 /Users/jz/code/scala/src/library/scala/runtime/BoxesRunTime.java /Users/jz/code/scala/target/specLib/src_managed/main/scala/runtime/BoxesRunTime.java +--- /Users/jz/code/scala/src/library/scala/runtime/BoxesRunTime.java 2019-03-27 11:05:19.000000000 +1000 ++++ /Users/jz/code/scala/target/specLib/src_managed/main/scala/runtime/BoxesRunTime.java 2019-03-27 11:40:41.000000000 +1000 +@@ -30,4 +30,14 @@ + * @version 2.0 */ +-public final class BoxesRunTime +-{ ++public final class BoxesRunTime { ++ /* INSTRUMENTED VERSION */ ++ public static int booleanBoxCount = 0; ++ public static int characterBoxCount = 0; ++ public static int byteBoxCount = 0; ++ public static int shortBoxCount = 0; ++ public static int integerBoxCount = 0; ++ public static int longBoxCount = 0; ++ public static int floatBoxCount = 0; ++ public static int doubleBoxCount = 0; ++ ++ + private static final int CHAR = 0, /* BYTE = 1, SHORT = 2, */ INT = 3, LONG = 4, FLOAT = 5, DOUBLE = 6, OTHER = 7; +@@ -50,2 +60,3 @@ + public static java.lang.Boolean boxToBoolean(boolean b) { ++ booleanBoxCount += 1; + return java.lang.Boolean.valueOf(b); +@@ -54,2 +65,3 @@ + public static java.lang.Character boxToCharacter(char c) { ++ characterBoxCount += 1; + return java.lang.Character.valueOf(c); +@@ -57,3 +69,5 @@ + ++ + public static java.lang.Byte boxToByte(byte b) { ++ byteBoxCount += 1; + return java.lang.Byte.valueOf(b); +@@ -62,2 +76,3 @@ + public static java.lang.Short boxToShort(short s) { ++ shortBoxCount += 1; + return java.lang.Short.valueOf(s); +@@ -66,2 +81,3 @@ + public static java.lang.Integer boxToInteger(int i) { ++ integerBoxCount += 1; + return java.lang.Integer.valueOf(i); +@@ -70,2 +86,3 @@ + public static java.lang.Long boxToLong(long l) { ++ longBoxCount += 1; + return java.lang.Long.valueOf(l); +@@ -74,2 +91,3 @@ + public static java.lang.Float boxToFloat(float f) { ++ floatBoxCount += 1; + return java.lang.Float.valueOf(f); +@@ -78,2 +96,3 @@ + public static java.lang.Double boxToDouble(double d) { ++ doubleBoxCount += 1; + // System.out.println("box " + d); \ No newline at end of file diff --git a/test/instrumented/library/scala/runtime/BoxesRunTime.java b/test/instrumented/library/scala/runtime/BoxesRunTime.java deleted file mode 100644 index 05ce2941a8fb..000000000000 --- a/test/instrumented/library/scala/runtime/BoxesRunTime.java +++ /dev/null @@ -1,843 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2006-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -/* INSTRUMENTED VERSION */ - -package scala.runtime; - -import java.io.*; -import scala.math.ScalaNumber; - -/** An object (static class) that defines methods used for creating, - * reverting, and calculating with, boxed values. There are four classes - * of methods in this object: - * - Convenience boxing methods which call the static valueOf method - * on the boxed class, thus utilizing the JVM boxing cache. - * - Convenience unboxing methods returning default value on null. - * - The generalised comparison method to be used when an object may - * be a boxed value. - * - Standard value operators for boxed number and quasi-number values. 
- * - * @author Gilles Dubochet - * @author Martin Odersky - * @contributor Stepan Koltsov - * @version 2.0 */ -public final class BoxesRunTime -{ - private static final int CHAR = 0, BYTE = 1, SHORT = 2, INT = 3, LONG = 4, FLOAT = 5, DOUBLE = 6, OTHER = 7; - - /** We don't need to return BYTE and SHORT, as everything which might - * care widens to INT. - */ - private static int typeCode(Object a) { - if (a instanceof java.lang.Integer) return INT; - if (a instanceof java.lang.Double) return DOUBLE; - if (a instanceof java.lang.Long) return LONG; - if (a instanceof java.lang.Character) return CHAR; - if (a instanceof java.lang.Float) return FLOAT; - if ((a instanceof java.lang.Byte) || (a instanceof java.lang.Short)) return INT; - return OTHER; - } - - private static String boxDescription(Object a) { - return "" + a.getClass().getSimpleName() + "(" + a + ")"; - } - -/* BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING */ - public static int booleanBoxCount = 0; - public static int characterBoxCount = 0; - public static int byteBoxCount = 0; - public static int shortBoxCount = 0; - public static int integerBoxCount = 0; - public static int longBoxCount = 0; - public static int floatBoxCount = 0; - public static int doubleBoxCount = 0; - - public static java.lang.Boolean boxToBoolean(boolean b) { - booleanBoxCount += 1; - return java.lang.Boolean.valueOf(b); - } - - public static java.lang.Character boxToCharacter(char c) { - characterBoxCount += 1; - return java.lang.Character.valueOf(c); - } - - public static java.lang.Byte boxToByte(byte b) { - byteBoxCount += 1; - return java.lang.Byte.valueOf(b); - } - - public static java.lang.Short boxToShort(short s) { - shortBoxCount += 1; - return java.lang.Short.valueOf(s); - } - - public static java.lang.Integer boxToInteger(int i) { - integerBoxCount += 1; - return java.lang.Integer.valueOf(i); - } - - public static java.lang.Long boxToLong(long l) { - longBoxCount += 1; - return java.lang.Long.valueOf(l); - } - - public static java.lang.Float boxToFloat(float f) { - floatBoxCount += 1; - return java.lang.Float.valueOf(f); - } - - public static java.lang.Double boxToDouble(double d) { - // System.out.println("box " + d); - // (new Throwable()).printStackTrace(); - doubleBoxCount += 1; - return java.lang.Double.valueOf(d); - } - -/* UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING */ - - public static boolean unboxToBoolean(Object b) { - return b == null ? false : ((java.lang.Boolean)b).booleanValue(); - } - - public static char unboxToChar(Object c) { - return c == null ? 0 : ((java.lang.Character)c).charValue(); - } - - public static byte unboxToByte(Object b) { - return b == null ? 0 : ((java.lang.Byte)b).byteValue(); - } - - public static short unboxToShort(Object s) { - return s == null ? 0 : ((java.lang.Short)s).shortValue(); - } - - public static int unboxToInt(Object i) { - return i == null ? 0 : ((java.lang.Integer)i).intValue(); - } - - public static long unboxToLong(Object l) { - return l == null ? 0 : ((java.lang.Long)l).longValue(); - } - - public static float unboxToFloat(Object f) { - return f == null ? 0.0f : ((java.lang.Float)f).floatValue(); - } - - public static double unboxToDouble(Object d) { - // System.out.println("unbox " + d); - return d == null ? 0.0d : ((java.lang.Double)d).doubleValue(); - } - - /* COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... 
COMPARISON */ - - public static boolean equals(Object x, Object y) { - if (x == y) return true; - return equals2(x, y); - } - - /** Since all applicable logic has to be present in the equals method of a ScalaNumber - * in any case, we dispatch to it as soon as we spot one on either side. - */ - public static boolean equals2(Object x, Object y) { - if (x instanceof java.lang.Number) - return equalsNumObject((java.lang.Number)x, y); - if (x instanceof java.lang.Character) - return equalsCharObject((java.lang.Character)x, y); - if (x == null) - return y == null; - - return x.equals(y); - } - - public static boolean equalsNumObject(java.lang.Number xn, Object y) { - if (y instanceof java.lang.Number) - return equalsNumNum(xn, (java.lang.Number)y); - if (y instanceof java.lang.Character) - return equalsNumChar(xn, (java.lang.Character)y); - if (xn == null) - return y == null; - - return xn.equals(y); - } - - public static boolean equalsNumNum(java.lang.Number xn, java.lang.Number yn) { - int xcode = typeCode(xn); - int ycode = typeCode(yn); - switch (ycode > xcode ? ycode : xcode) { - case INT: - return xn.intValue() == yn.intValue(); - case LONG: - return xn.longValue() == yn.longValue(); - case FLOAT: - return xn.floatValue() == yn.floatValue(); - case DOUBLE: - return xn.doubleValue() == yn.doubleValue(); - default: - if ((yn instanceof ScalaNumber) && !(xn instanceof ScalaNumber)) - return yn.equals(xn); - } - if (xn == null) - return yn == null; - - return xn.equals(yn); - } - - public static boolean equalsCharObject(java.lang.Character xc, Object y) { - if (y instanceof java.lang.Character) - return xc.charValue() == ((java.lang.Character)y).charValue(); - if (y instanceof java.lang.Number) - return equalsNumChar((java.lang.Number)y, xc); - if (xc == null) - return y == null; - - return xc.equals(y); - } - - private static boolean equalsNumChar(java.lang.Number xn, java.lang.Character yc) { - if (yc == null) - return xn == null; - - char ch = yc.charValue(); - switch (typeCode(xn)) { - case INT: - return xn.intValue() == ch; - case LONG: - return xn.longValue() == ch; - case FLOAT: - return xn.floatValue() == ch; - case DOUBLE: - return xn.doubleValue() == ch; - default: - return xn.equals(yc); - } - } - - /** Hashcode algorithm is driven by the requirements imposed - * by primitive equality semantics, namely that equal objects - * have equal hashCodes. The first priority are the integral/char - * types, which already have the same hashCodes for the same - * values except for Long. So Long's hashCode is altered to - * conform to Int's for all values in Int's range. - * - * Float is problematic because it's far too small to hold - * all the Ints, so for instance Int.MaxValue.toFloat claims - * to be == to each of the largest 64 Ints. There is no way - * to preserve equals/hashCode alignment without compromising - * the hashCode distribution, so Floats are only guaranteed - * to have the same hashCode for whole Floats in the range - * Short.MinValue to Short.MaxValue (2^16 total.) - * - * Double has its hashCode altered to match the entire Int range, - * but is not guaranteed beyond that. (But could/should it be? - * The hashCode is only 32 bits so this is a more tractable - * issue than Float's, but it might be better simply to exclude it.) - * - * Note: BigInt and BigDecimal, being arbitrary precision, could - * be made consistent with all other types for the Int range, but - * as yet have not. - * - * Note: Among primitives, Float.NaN != Float.NaN, but the boxed - * versions are equal. 
This still needs reconciliation. - */ - public static int hashFromLong(java.lang.Long n) { - int iv = n.intValue(); - if (iv == n.longValue()) return iv; - else return n.hashCode(); - } - public static int hashFromDouble(java.lang.Double n) { - int iv = n.intValue(); - double dv = n.doubleValue(); - if (iv == dv) return iv; - - long lv = n.longValue(); - if (lv == dv) return java.lang.Long.valueOf(lv).hashCode(); - - float fv = n.floatValue(); - if (fv == dv) return java.lang.Float.valueOf(fv).hashCode(); - else return n.hashCode(); - } - public static int hashFromFloat(java.lang.Float n) { - int iv = n.intValue(); - float fv = n.floatValue(); - if (iv == fv) return iv; - - long lv = n.longValue(); - if (lv == fv) return java.lang.Long.valueOf(lv).hashCode(); - else return n.hashCode(); - } - public static int hashFromNumber(java.lang.Number n) { - if (n instanceof java.lang.Long) return hashFromLong((java.lang.Long)n); - else if (n instanceof java.lang.Double) return hashFromDouble((java.lang.Double)n); - else if (n instanceof java.lang.Float) return hashFromFloat((java.lang.Float)n); - else return n.hashCode(); - } - - private static int unboxCharOrInt(Object arg1, int code) { - if (code == CHAR) - return ((java.lang.Character) arg1).charValue(); - else - return ((java.lang.Number) arg1).intValue(); - } - private static long unboxCharOrLong(Object arg1, int code) { - if (code == CHAR) - return ((java.lang.Character) arg1).charValue(); - else - return ((java.lang.Number) arg1).longValue(); - } - private static float unboxCharOrFloat(Object arg1, int code) { - if (code == CHAR) - return ((java.lang.Character) arg1).charValue(); - else - return ((java.lang.Number) arg1).floatValue(); - } - private static double unboxCharOrDouble(Object arg1, int code) { - if (code == CHAR) - return ((java.lang.Character) arg1).charValue(); - else - return ((java.lang.Number) arg1).doubleValue(); - } - -/* OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS */ - - /** arg1 + arg2 */ - public static Object add(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - if (maxcode <= INT) { - return boxToInteger(unboxCharOrInt(arg1, code1) + unboxCharOrInt(arg2, code2)); - } - if (maxcode <= LONG) { - return boxToLong(unboxCharOrLong(arg1, code1) + unboxCharOrLong(arg2, code2)); - } - if (maxcode <= FLOAT) { - return boxToFloat(unboxCharOrFloat(arg1, code1) + unboxCharOrFloat(arg2, code2)); - } - if (maxcode <= DOUBLE) { - return boxToDouble(unboxCharOrDouble(arg1, code1) + unboxCharOrDouble(arg2, code2)); - } - throw new NoSuchMethodException(); - } - - /** arg1 - arg2 */ - public static Object subtract(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? 
code2 : code1; - if (maxcode <= INT) { - return boxToInteger(unboxCharOrInt(arg1, code1) - unboxCharOrInt(arg2, code2)); - } - if (maxcode <= LONG) { - return boxToLong(unboxCharOrLong(arg1, code1) - unboxCharOrLong(arg2, code2)); - } - if (maxcode <= FLOAT) { - return boxToFloat(unboxCharOrFloat(arg1, code1) - unboxCharOrFloat(arg2, code2)); - } - if (maxcode <= DOUBLE) { - return boxToDouble(unboxCharOrDouble(arg1, code1) - unboxCharOrDouble(arg2, code2)); - } - throw new NoSuchMethodException(); - } - - /** arg1 * arg2 */ - public static Object multiply(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - if (maxcode <= INT) { - return boxToInteger(unboxCharOrInt(arg1, code1) * unboxCharOrInt(arg2, code2)); - } - if (maxcode <= LONG) { - return boxToLong(unboxCharOrLong(arg1, code1) * unboxCharOrLong(arg2, code2)); - } - if (maxcode <= FLOAT) { - return boxToFloat(unboxCharOrFloat(arg1, code1) * unboxCharOrFloat(arg2, code2)); - } - if (maxcode <= DOUBLE) { - return boxToDouble(unboxCharOrDouble(arg1, code1) * unboxCharOrDouble(arg2, code2)); - } - throw new NoSuchMethodException(); - } - - /** arg1 / arg2 */ - public static Object divide(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - - if (maxcode <= INT) - return boxToInteger(unboxCharOrInt(arg1, code1) / unboxCharOrInt(arg2, code2)); - if (maxcode <= LONG) - return boxToLong(unboxCharOrLong(arg1, code1) / unboxCharOrLong(arg2, code2)); - if (maxcode <= FLOAT) - return boxToFloat(unboxCharOrFloat(arg1, code1) / unboxCharOrFloat(arg2, code2)); - if (maxcode <= DOUBLE) - return boxToDouble(unboxCharOrDouble(arg1, code1) / unboxCharOrDouble(arg2, code2)); - - throw new NoSuchMethodException(); - } - - /** arg1 % arg2 */ - public static Object takeModulo(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? 
code2 : code1; - - if (maxcode <= INT) - return boxToInteger(unboxCharOrInt(arg1, code1) % unboxCharOrInt(arg2, code2)); - if (maxcode <= LONG) - return boxToLong(unboxCharOrLong(arg1, code1) % unboxCharOrLong(arg2, code2)); - if (maxcode <= FLOAT) - return boxToFloat(unboxCharOrFloat(arg1, code1) % unboxCharOrFloat(arg2, code2)); - if (maxcode <= DOUBLE) - return boxToDouble(unboxCharOrDouble(arg1, code1) % unboxCharOrDouble(arg2, code2)); - - throw new NoSuchMethodException(); - } - - /** arg1 >> arg2 */ - public static Object shiftSignedRight(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - if (code1 <= INT) { - int val1 = unboxCharOrInt(arg1, code1); - if (code2 <= INT) { - int val2 = unboxCharOrInt(arg2, code2); - return boxToInteger(val1 >> val2); - } - if (code2 <= LONG) { - long val2 = unboxCharOrLong(arg2, code2); - return boxToInteger(val1 >> val2); - } - } - if (code1 <= LONG) { - long val1 = unboxCharOrLong(arg1, code1); - if (code2 <= INT) { - int val2 = unboxCharOrInt(arg2, code2); - return boxToLong(val1 >> val2); - } - if (code2 <= LONG) { - long val2 = unboxCharOrLong(arg2, code2); - return boxToLong(val1 >> val2); - } - } - throw new NoSuchMethodException(); - } - - /** arg1 << arg2 */ - public static Object shiftSignedLeft(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - if (code1 <= INT) { - int val1 = unboxCharOrInt(arg1, code1); - if (code2 <= INT) { - int val2 = unboxCharOrInt(arg2, code2); - return boxToInteger(val1 << val2); - } - if (code2 <= LONG) { - long val2 = unboxCharOrLong(arg2, code2); - return boxToInteger(val1 << val2); - } - } - if (code1 <= LONG) { - long val1 = unboxCharOrLong(arg1, code1); - if (code2 <= INT) { - int val2 = unboxCharOrInt(arg2, code2); - return boxToLong(val1 << val2); - } - if (code2 <= LONG) { - long val2 = unboxCharOrLong(arg2, code2); - return boxToLong(val1 << val2); - } - } - throw new NoSuchMethodException(); - } - - /** arg1 >>> arg2 */ - public static Object shiftLogicalRight(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - if (code1 <= INT) { - int val1 = unboxCharOrInt(arg1, code1); - if (code2 <= INT) { - int val2 = unboxCharOrInt(arg2, code2); - return boxToInteger(val1 >>> val2); - } - if (code2 <= LONG) { - long val2 = unboxCharOrLong(arg2, code2); - return boxToInteger(val1 >>> val2); - } - } - if (code1 <= LONG) { - long val1 = unboxCharOrLong(arg1, code1); - if (code2 <= INT) { - int val2 = unboxCharOrInt(arg2, code2); - return boxToLong(val1 >>> val2); - } - if (code2 <= LONG) { - long val2 = unboxCharOrLong(arg2, code2); - return boxToLong(val1 >>> val2); - } - } - throw new NoSuchMethodException(); - } - - /** -arg */ - public static Object negate(Object arg) throws NoSuchMethodException { - int code = typeCode(arg); - if (code <= INT) { - int val = unboxCharOrInt(arg, code); - return boxToInteger(-val); - } - if (code <= LONG) { - long val = unboxCharOrLong(arg, code); - return boxToLong(-val); - } - if (code <= FLOAT) { - float val = unboxCharOrFloat(arg, code); - return boxToFloat(-val); - } - if (code <= DOUBLE) { - double val = unboxCharOrDouble(arg, code); - return boxToDouble(-val); - } - throw new NoSuchMethodException(); - } - - /** +arg */ - public static Object positive(Object arg) throws NoSuchMethodException { - int code = typeCode(arg); - if (code <= INT) { - return 
boxToInteger(+unboxCharOrInt(arg, code)); - } - if (code <= LONG) { - return boxToLong(+unboxCharOrLong(arg, code)); - } - if (code <= FLOAT) { - return boxToFloat(+unboxCharOrFloat(arg, code)); - } - if (code <= DOUBLE) { - return boxToDouble(+unboxCharOrDouble(arg, code)); - } - throw new NoSuchMethodException(); - } - - /** arg1 & arg2 */ - public static Object takeAnd(Object arg1, Object arg2) throws NoSuchMethodException { - if ((arg1 instanceof Boolean) || (arg2 instanceof Boolean)) { - if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean)) - return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() & ((java.lang.Boolean) arg2).booleanValue()); - else - throw new NoSuchMethodException(); - } - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - - if (maxcode <= INT) - return boxToInteger(unboxCharOrInt(arg1, code1) & unboxCharOrInt(arg2, code2)); - if (maxcode <= LONG) - return boxToLong(unboxCharOrLong(arg1, code1) & unboxCharOrLong(arg2, code2)); - - throw new NoSuchMethodException(); - } - - /** arg1 | arg2 */ - public static Object takeOr(Object arg1, Object arg2) throws NoSuchMethodException { - if ((arg1 instanceof Boolean) || (arg2 instanceof Boolean)) { - if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean)) - return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() | ((java.lang.Boolean) arg2).booleanValue()); - else - throw new NoSuchMethodException(); - } - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - - if (maxcode <= INT) - return boxToInteger(unboxCharOrInt(arg1, code1) | unboxCharOrInt(arg2, code2)); - if (maxcode <= LONG) - return boxToLong(unboxCharOrLong(arg1, code1) | unboxCharOrLong(arg2, code2)); - - throw new NoSuchMethodException(); - } - - /** arg1 ^ arg2 */ - public static Object takeXor(Object arg1, Object arg2) throws NoSuchMethodException { - if ((arg1 instanceof Boolean) || (arg2 instanceof Boolean)) { - if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean)) - return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() ^ ((java.lang.Boolean) arg2).booleanValue()); - else - throw new NoSuchMethodException(); - } - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? 
code2 : code1; - - if (maxcode <= INT) - return boxToInteger(unboxCharOrInt(arg1, code1) ^ unboxCharOrInt(arg2, code2)); - if (maxcode <= LONG) - return boxToLong(unboxCharOrLong(arg1, code1) ^ unboxCharOrLong(arg2, code2)); - - throw new NoSuchMethodException(); - } - - /** arg1 && arg2 */ - public static Object takeConditionalAnd(Object arg1, Object arg2) throws NoSuchMethodException { - if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean)) { - return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() && ((java.lang.Boolean) arg2).booleanValue()); - } - throw new NoSuchMethodException(); - } - - /** arg1 || arg2 */ - public static Object takeConditionalOr(Object arg1, Object arg2) throws NoSuchMethodException { - if ((arg1 instanceof Boolean) && (arg2 instanceof Boolean)) { - return boxToBoolean(((java.lang.Boolean) arg1).booleanValue() || ((java.lang.Boolean) arg2).booleanValue()); - } - throw new NoSuchMethodException(); - } - - /** ~arg */ - public static Object complement(Object arg) throws NoSuchMethodException { - int code = typeCode(arg); - if (code <= INT) { - return boxToInteger(~unboxCharOrInt(arg, code)); - } - if (code <= LONG) { - return boxToLong(~unboxCharOrLong(arg, code)); - } - throw new NoSuchMethodException(); - } - - /** !arg */ - public static Object takeNot(Object arg) throws NoSuchMethodException { - if (arg instanceof Boolean) { - return boxToBoolean(!((java.lang.Boolean) arg).booleanValue()); - } - throw new NoSuchMethodException(); - } - - public static Object testEqual(Object arg1, Object arg2) throws NoSuchMethodException { - return boxToBoolean(arg1 == arg2); - } - - public static Object testNotEqual(Object arg1, Object arg2) throws NoSuchMethodException { - return boxToBoolean(arg1 != arg2); - } - - public static Object testLessThan(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - if (maxcode <= INT) { - int val1 = unboxCharOrInt(arg1, code1); - int val2 = unboxCharOrInt(arg2, code2); - return boxToBoolean(val1 < val2); - } - if (maxcode <= LONG) { - long val1 = unboxCharOrLong(arg1, code1); - long val2 = unboxCharOrLong(arg2, code2); - return boxToBoolean(val1 < val2); - } - if (maxcode <= FLOAT) { - float val1 = unboxCharOrFloat(arg1, code1); - float val2 = unboxCharOrFloat(arg2, code2); - return boxToBoolean(val1 < val2); - } - if (maxcode <= DOUBLE) { - double val1 = unboxCharOrDouble(arg1, code1); - double val2 = unboxCharOrDouble(arg2, code2); - return boxToBoolean(val1 < val2); - } - throw new NoSuchMethodException(); - } - - public static Object testLessOrEqualThan(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? 
code2 : code1; - if (maxcode <= INT) { - int val1 = unboxCharOrInt(arg1, code1); - int val2 = unboxCharOrInt(arg2, code2); - return boxToBoolean(val1 <= val2); - } - if (maxcode <= LONG) { - long val1 = unboxCharOrLong(arg1, code1); - long val2 = unboxCharOrLong(arg2, code2); - return boxToBoolean(val1 <= val2); - } - if (maxcode <= FLOAT) { - float val1 = unboxCharOrFloat(arg1, code1); - float val2 = unboxCharOrFloat(arg2, code2); - return boxToBoolean(val1 <= val2); - } - if (maxcode <= DOUBLE) { - double val1 = unboxCharOrDouble(arg1, code1); - double val2 = unboxCharOrDouble(arg2, code2); - return boxToBoolean(val1 <= val2); - } - throw new NoSuchMethodException(); - } - - public static Object testGreaterOrEqualThan(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - if (maxcode <= INT) { - int val1 = unboxCharOrInt(arg1, code1); - int val2 = unboxCharOrInt(arg2, code2); - return boxToBoolean(val1 >= val2); - } - if (maxcode <= LONG) { - long val1 = unboxCharOrLong(arg1, code1); - long val2 = unboxCharOrLong(arg2, code2); - return boxToBoolean(val1 >= val2); - } - if (maxcode <= FLOAT) { - float val1 = unboxCharOrFloat(arg1, code1); - float val2 = unboxCharOrFloat(arg2, code2); - return boxToBoolean(val1 >= val2); - } - if (maxcode <= DOUBLE) { - double val1 = unboxCharOrDouble(arg1, code1); - double val2 = unboxCharOrDouble(arg2, code2); - return boxToBoolean(val1 >= val2); - } - throw new NoSuchMethodException(); - } - - public static Object testGreaterThan(Object arg1, Object arg2) throws NoSuchMethodException { - int code1 = typeCode(arg1); - int code2 = typeCode(arg2); - int maxcode = (code1 < code2) ? code2 : code1; - if (maxcode <= INT) { - int val1 = unboxCharOrInt(arg1, code1); - int val2 = unboxCharOrInt(arg2, code2); - return boxToBoolean(val1 > val2); - } - if (maxcode <= LONG) { - long val1 = unboxCharOrLong(arg1, code1); - long val2 = unboxCharOrLong(arg2, code2); - return boxToBoolean(val1 > val2); - } - if (maxcode <= FLOAT) { - float val1 = unboxCharOrFloat(arg1, code1); - float val2 = unboxCharOrFloat(arg2, code2); - return boxToBoolean(val1 > val2); - } - if (maxcode <= DOUBLE) { - double val1 = unboxCharOrDouble(arg1, code1); - double val2 = unboxCharOrDouble(arg2, code2); - return boxToBoolean(val1 > val2); - } - throw new NoSuchMethodException(); - } - - public static boolean isBoxedNumberOrBoolean(Object arg) { - return (arg instanceof java.lang.Boolean) || isBoxedNumber(arg); - } - public static boolean isBoxedNumber(Object arg) { - return ( - (arg instanceof java.lang.Integer) - || (arg instanceof java.lang.Long) - || (arg instanceof java.lang.Double) - || (arg instanceof java.lang.Float) - || (arg instanceof java.lang.Short) - || (arg instanceof java.lang.Character) - || (arg instanceof java.lang.Byte) - ); - } - - /** arg.toChar */ - public static java.lang.Character toCharacter(Object arg) throws NoSuchMethodException { - if (arg instanceof java.lang.Integer) return boxToCharacter((char)unboxToInt(arg)); - if (arg instanceof java.lang.Short) return boxToCharacter((char)unboxToShort(arg)); - if (arg instanceof java.lang.Character) return (java.lang.Character)arg; - if (arg instanceof java.lang.Long) return boxToCharacter((char)unboxToLong(arg)); - if (arg instanceof java.lang.Byte) return boxToCharacter((char)unboxToByte(arg)); - if (arg instanceof java.lang.Float) return boxToCharacter((char)unboxToFloat(arg)); - if (arg instanceof 
java.lang.Double) return boxToCharacter((char)unboxToDouble(arg)); - throw new NoSuchMethodException(); - } - - /** arg.toByte */ - public static java.lang.Byte toByte(Object arg) throws NoSuchMethodException { - if (arg instanceof java.lang.Integer) return boxToByte((byte)unboxToInt(arg)); - if (arg instanceof java.lang.Character) return boxToByte((byte)unboxToChar(arg)); - if (arg instanceof java.lang.Byte) return (java.lang.Byte)arg; - if (arg instanceof java.lang.Long) return boxToByte((byte)unboxToLong(arg)); - if (arg instanceof java.lang.Short) return boxToByte((byte)unboxToShort(arg)); - if (arg instanceof java.lang.Float) return boxToByte((byte)unboxToFloat(arg)); - if (arg instanceof java.lang.Double) return boxToByte((byte)unboxToDouble(arg)); - throw new NoSuchMethodException(); - } - - /** arg.toShort */ - public static java.lang.Short toShort(Object arg) throws NoSuchMethodException { - if (arg instanceof java.lang.Integer) return boxToShort((short)unboxToInt(arg)); - if (arg instanceof java.lang.Long) return boxToShort((short)unboxToLong(arg)); - if (arg instanceof java.lang.Character) return boxToShort((short)unboxToChar(arg)); - if (arg instanceof java.lang.Byte) return boxToShort((short)unboxToByte(arg)); - if (arg instanceof java.lang.Short) return (java.lang.Short)arg; - if (arg instanceof java.lang.Float) return boxToShort((short)unboxToFloat(arg)); - if (arg instanceof java.lang.Double) return boxToShort((short)unboxToDouble(arg)); - throw new NoSuchMethodException(); - } - - /** arg.toInt */ - public static java.lang.Integer toInteger(Object arg) throws NoSuchMethodException { - if (arg instanceof java.lang.Integer) return (java.lang.Integer)arg; - if (arg instanceof java.lang.Long) return boxToInteger((int)unboxToLong(arg)); - if (arg instanceof java.lang.Double) return boxToInteger((int)unboxToDouble(arg)); - if (arg instanceof java.lang.Float) return boxToInteger((int)unboxToFloat(arg)); - if (arg instanceof java.lang.Character) return boxToInteger((int)unboxToChar(arg)); - if (arg instanceof java.lang.Byte) return boxToInteger((int)unboxToByte(arg)); - if (arg instanceof java.lang.Short) return boxToInteger((int)unboxToShort(arg)); - throw new NoSuchMethodException(); - } - - /** arg.toLong */ - public static java.lang.Long toLong(Object arg) throws NoSuchMethodException { - if (arg instanceof java.lang.Integer) return boxToLong((long)unboxToInt(arg)); - if (arg instanceof java.lang.Double) return boxToLong((long)unboxToDouble(arg)); - if (arg instanceof java.lang.Float) return boxToLong((long)unboxToFloat(arg)); - if (arg instanceof java.lang.Long) return (java.lang.Long)arg; - if (arg instanceof java.lang.Character) return boxToLong((long)unboxToChar(arg)); - if (arg instanceof java.lang.Byte) return boxToLong((long)unboxToByte(arg)); - if (arg instanceof java.lang.Short) return boxToLong((long)unboxToShort(arg)); - throw new NoSuchMethodException(); - } - - /** arg.toFloat */ - public static java.lang.Float toFloat(Object arg) throws NoSuchMethodException { - if (arg instanceof java.lang.Integer) return boxToFloat((float)unboxToInt(arg)); - if (arg instanceof java.lang.Long) return boxToFloat((float)unboxToLong(arg)); - if (arg instanceof java.lang.Float) return (java.lang.Float)arg; - if (arg instanceof java.lang.Double) return boxToFloat((float)unboxToDouble(arg)); - if (arg instanceof java.lang.Character) return boxToFloat((float)unboxToChar(arg)); - if (arg instanceof java.lang.Byte) return boxToFloat((float)unboxToByte(arg)); - if (arg instanceof 
java.lang.Short) return boxToFloat((float)unboxToShort(arg)); - throw new NoSuchMethodException(); - } - - /** arg.toDouble */ - public static java.lang.Double toDouble(Object arg) throws NoSuchMethodException { - if (arg instanceof java.lang.Integer) return boxToDouble((double)unboxToInt(arg)); - if (arg instanceof java.lang.Float) return boxToDouble((double)unboxToFloat(arg)); - if (arg instanceof java.lang.Double) return (java.lang.Double)arg; - if (arg instanceof java.lang.Long) return boxToDouble((double)unboxToLong(arg)); - if (arg instanceof java.lang.Character) return boxToDouble((double)unboxToChar(arg)); - if (arg instanceof java.lang.Byte) return boxToDouble((double)unboxToByte(arg)); - if (arg instanceof java.lang.Short) return boxToDouble((double)unboxToShort(arg)); - throw new NoSuchMethodException(); - } - -} diff --git a/test/instrumented/library/scala/runtime/ScalaRunTime.scala b/test/instrumented/library/scala/runtime/ScalaRunTime.scala deleted file mode 100644 index c533ca3127e3..000000000000 --- a/test/instrumented/library/scala/runtime/ScalaRunTime.scala +++ /dev/null @@ -1,270 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2013, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -/* INSTRUMENTED VERSION */ - -package scala -package runtime - -import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator } -import scala.collection.mutable.WrappedArray -import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: } -import scala.collection.generic.{ Sorted } -import scala.reflect.{ ClassTag, classTag } -import scala.util.control.ControlThrowable -import scala.xml.{ Node, MetaData } -import java.lang.{ Class => jClass } - -import java.lang.Double.doubleToLongBits -import java.lang.reflect.{ Modifier, Method => JMethod } - -/** The object ScalaRunTime provides support methods required by - * the scala runtime. All these methods should be considered - * outside the API and subject to change or removal without notice. - */ -object ScalaRunTime { - def isArray(x: AnyRef): Boolean = isArray(x, 1) - def isArray(x: Any, atLevel: Int): Boolean = - x != null && isArrayClass(x.getClass, atLevel) - - private def isArrayClass(clazz: jClass[_], atLevel: Int): Boolean = - clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1)) - - /** Return the class object representing an array with element class `clazz`. - */ - def arrayClass(clazz: jClass[_]): jClass[_] = { - // newInstance throws an exception if the erasure is Void.TYPE. see scala/bug#5680 - if (clazz == java.lang.Void.TYPE) classOf[Array[Unit]] - else java.lang.reflect.Array.newInstance(clazz, 0).getClass - } - - /** Return the class object representing an unboxed value type, - * e.g. classOf[int], not classOf[java.lang.Integer]. The compiler - * rewrites expressions like 5.getClass to come here. 
- */ - def anyValClass[T <: AnyVal : ClassTag](value: T): jClass[T] = - classTag[T].runtimeClass.asInstanceOf[jClass[T]] - - var arrayApplyCount = 0 - - /** Retrieve generic array element */ - def array_apply(xs: AnyRef, idx: Int): Any = { - arrayApplyCount += 1 - xs match { - case x: Array[AnyRef] => x(idx).asInstanceOf[Any] - case x: Array[Int] => x(idx).asInstanceOf[Any] - case x: Array[Double] => x(idx).asInstanceOf[Any] - case x: Array[Long] => x(idx).asInstanceOf[Any] - case x: Array[Float] => x(idx).asInstanceOf[Any] - case x: Array[Char] => x(idx).asInstanceOf[Any] - case x: Array[Byte] => x(idx).asInstanceOf[Any] - case x: Array[Short] => x(idx).asInstanceOf[Any] - case x: Array[Boolean] => x(idx).asInstanceOf[Any] - case x: Array[Unit] => x(idx).asInstanceOf[Any] - case null => throw new NullPointerException - } - } - - /** update generic array element */ - def array_update(xs: AnyRef, idx: Int, value: Any): Unit = { - arrayApplyCount += 1 - xs match { - case x: Array[AnyRef] => x(idx) = value.asInstanceOf[AnyRef] - case x: Array[Int] => x(idx) = value.asInstanceOf[Int] - case x: Array[Double] => x(idx) = value.asInstanceOf[Double] - case x: Array[Long] => x(idx) = value.asInstanceOf[Long] - case x: Array[Float] => x(idx) = value.asInstanceOf[Float] - case x: Array[Char] => x(idx) = value.asInstanceOf[Char] - case x: Array[Byte] => x(idx) = value.asInstanceOf[Byte] - case x: Array[Short] => x(idx) = value.asInstanceOf[Short] - case x: Array[Boolean] => x(idx) = value.asInstanceOf[Boolean] - case x: Array[Unit] => x(idx) = value.asInstanceOf[Unit] - case null => throw new NullPointerException - } - } - - /** Get generic array length */ - def array_length(xs: AnyRef): Int = xs match { - case x: Array[AnyRef] => x.length - case x: Array[Int] => x.length - case x: Array[Double] => x.length - case x: Array[Long] => x.length - case x: Array[Float] => x.length - case x: Array[Char] => x.length - case x: Array[Byte] => x.length - case x: Array[Short] => x.length - case x: Array[Boolean] => x.length - case x: Array[Unit] => x.length - case null => throw new NullPointerException - } - - def array_clone(xs: AnyRef): AnyRef = xs match { - case x: Array[AnyRef] => x.clone() - case x: Array[Int] => x.clone() - case x: Array[Double] => x.clone() - case x: Array[Long] => x.clone() - case x: Array[Float] => x.clone() - case x: Array[Char] => x.clone() - case x: Array[Byte] => x.clone() - case x: Array[Short] => x.clone() - case x: Array[Boolean] => x.clone() - case x: Array[Unit] => x - case null => throw new NullPointerException - } - - /** Convert an array to an object array. - * Needed to deal with vararg arguments of primitive types that are passed - * to a generic Java vararg parameter T ... - */ - def toObjectArray(src: AnyRef): Array[Object] = src match { - case x: Array[AnyRef] => x - case _ => - val length = array_length(src) - val dest = new Array[Object](length) - for (i <- 0 until length) - array_update(dest, i, array_apply(src, i)) - dest - } - - def toArray[T](xs: scala.collection.Seq[T]) = { - val arr = new Array[AnyRef](xs.length) - var i = 0 - for (x <- xs) { - arr(i) = x.asInstanceOf[AnyRef] - i += 1 - } - arr - } - - // Java bug: http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4071957 - // More background at ticket #2318. 
- def ensureAccessible(m: JMethod): JMethod = { - if (!m.isAccessible) { - try m setAccessible true - catch { case _: SecurityException => () } - } - m - } - - def _toString(x: Product): String = - x.productIterator.mkString(x.productPrefix + "(", ",", ")") - - def _hashCode(x: Product): Int = scala.util.hashing.MurmurHash3.productHash(x) - - /** A helper for case classes. */ - def typedProductIterator[T](x: Product): Iterator[T] = { - new AbstractIterator[T] { - private var c: Int = 0 - private val cmax = x.productArity - def hasNext = c < cmax - def next() = { - val result = x.productElement(c) - c += 1 - result.asInstanceOf[T] - } - } - } - - /** Implementation of `##`. */ - def hash(x: Any): Int = - if (x == null) 0 - else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.hashFromNumber(x.asInstanceOf[java.lang.Number]) - else x.hashCode - - /** Given any Scala value, convert it to a String. - * - * The primary motivation for this method is to provide a means for - * correctly obtaining a String representation of a value, while - * avoiding the pitfalls of naively calling toString on said value. - * In particular, it addresses the fact that (a) toString cannot be - * called on null and (b) depending on the apparent type of an - * array, toString may or may not print it in a human-readable form. - * - * @param arg the value to stringify - * @return a string representation of arg. - */ - def stringOf(arg: Any): String = stringOf(arg, scala.Int.MaxValue) - def stringOf(arg: Any, maxElements: Int): String = { - def packageOf(x: AnyRef) = x.getClass.getPackage match { - case null => "" - case p => p.getName - } - def isScalaClass(x: AnyRef) = packageOf(x) startsWith "scala." - def isScalaCompilerClass(x: AnyRef) = packageOf(x) startsWith "scala.tools.nsc." - - // includes specialized subclasses and future proofed against hypothetical TupleN (for N > 22) - def isTuple(x: Any) = x != null && x.getClass.getName.startsWith("scala.Tuple") - - // When doing our own iteration is dangerous - def useOwnToString(x: Any) = x match { - // Node extends NodeSeq extends Seq[Node] and MetaData extends Iterable[MetaData] - case _: Node | _: MetaData => true - // Range/NumericRange have a custom toString to avoid walking a gazillion elements - case _: Range | _: NumericRange[_] => true - // Sorted collections to the wrong thing (for us) on iteration - ticket #3493 - case _: Sorted[_, _] => true - // StringBuilder(a, b, c) and similar not so attractive - case _: StringLike[_] => true - // Don't want to evaluate any elements in a view - case _: TraversableView[_, _] => true - // Don't want to a) traverse infinity or b) be overly helpful with peoples' custom - // collections which may have useful toString methods - ticket #3710 - // or c) print AbstractFiles which are somehow also Iterable[AbstractFile]s. - case x: Traversable[_] => !x.hasDefiniteSize || !isScalaClass(x) || isScalaCompilerClass(x) - // Otherwise, nothing could possibly go wrong - case _ => false - } - - // A variation on inner for maps so they print -> instead of bare tuples - def mapInner(arg: Any): String = arg match { - case (k, v) => inner(k) + " -> " + inner(v) - case _ => inner(arg) - } - - // Special casing Unit arrays, the value class which uses a reference array type. 
- def arrayToString(x: AnyRef) = { - if (x.getClass.getComponentType == classOf[BoxedUnit]) - 0 until (array_length(x) min maxElements) map (_ => "()") mkString ("Array(", ", ", ")") - else - WrappedArray make x take maxElements map inner mkString ("Array(", ", ", ")") - } - - // The recursively applied attempt to prettify Array printing. - // Note that iterator is used if possible and foreach is used as a - // last resort, because the parallel collections "foreach" in a - // random order even on sequences. - def inner(arg: Any): String = arg match { - case null => "null" - case "" => "\"\"" - case x: String => if (x.head.isWhitespace || x.last.isWhitespace) "\"" + x + "\"" else x - case x if useOwnToString(x) => x.toString - case x: AnyRef if isArray(x) => arrayToString(x) - case x: scala.collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")") - case x: Iterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") - case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") - case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma - case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")") - case x => x.toString - } - - // The try/catch is defense against iterables which aren't actually designed - // to be iterated, such as some scala.tools.nsc.io.AbstractFile derived classes. - try inner(arg) - catch { - case _: StackOverflowError | _: UnsupportedOperationException | _: AssertionError => "" + arg - } - } - - /** stringOf formatted for use in a repl result. */ - def replStringOf(arg: Any, maxElements: Int): String = { - val s = stringOf(arg, maxElements) - val nl = if (s contains "\n") "\n" else "" - - nl + s + "\n" - } -} diff --git a/test/instrumented/mkinstrumented.sh b/test/instrumented/mkinstrumented.sh deleted file mode 100755 index d734dd2e00fc..000000000000 --- a/test/instrumented/mkinstrumented.sh +++ /dev/null @@ -1,51 +0,0 @@ -#/bin/sh -# -# Used to compile a jar with instrumented versions of certain classes. -# - -set -e - -run () { - echo "% $@" - "$@" -} - -if [ $# -ne 1 ] -then - echo "Must provide build dir ('target' or 'build')." - exit 1 -fi - -scriptDir=$(cd $(dirname $0) && pwd) - -TOPDIR="$scriptDir/../.." -RUNTIME="$TOPDIR/src/library/scala/runtime" -SOURCES="$RUNTIME/BoxesRunTime.java $RUNTIME/ScalaRunTime.scala" -SCALAC=$TOPDIR/$1/pack/bin/scalac -SRC_DIR="$scriptDir/library/scala/runtime" -SCALALIB=$TOPDIR/$1/pack/lib/scala-library.jar -CLASSDIR="$scriptDir/classes" -ARTIFACT=instrumented.jar -DESTINATION="$TOPDIR/test/files/speclib" - -[[ -x "$SCALAC" ]] || exit 1; - -# compile it -run rm -rf $CLASSDIR && mkdir $CLASSDIR -run cp $SOURCES $SRC_DIR -( cd $SRC_DIR && run patch BoxesRunTime.java $scriptDir/boxes.patch && run patch ScalaRunTime.scala $scriptDir/srt.patch ) - -ORIG=$(find $SRC_DIR -name '*.orig') -[[ -z "$ORIG" ]] || rm -f $ORIG - -JSOURCES=$(find $SRC_DIR -name "*.java" -print) -SOURCES=$(find $SRC_DIR -type f -print) -# echo $SOURCES -run $SCALAC -d $CLASSDIR $SOURCES -run javac -cp $SCALALIB -d $CLASSDIR $JSOURCES - -# jar it up -run cd $CLASSDIR -run jar cf $ARTIFACT . -run mv -f $ARTIFACT "$DESTINATION" -echo "$(cd "$DESTINATION" && pwd)/$ARTIFACT has been created." 
\ No newline at end of file diff --git a/test/instrumented/srt.patch b/test/instrumented/srt.patch index ee619b2ecb6a..7c57c4c608f8 100644 --- a/test/instrumented/srt.patch +++ b/test/instrumented/srt.patch @@ -1,10 +1,22 @@ -8a9,10 -> /* INSTRUMENTED VERSION */ -> -68a71,72 -> var arrayApplyCount = 0 -> -70a75 -> arrayApplyCount += 1 -87a93 -> arrayApplyCount += 1 +--- /Users/jz/code/scala/src/library/scala/runtime/ScalaRunTime.scala 2019-03-27 11:05:28.000000000 +1000 ++++ /Users/jz/code/scala/target/specLib/src_managed/main/scala/runtime/ScalaRunTime.scala 2019-03-27 11:38:17.000000000 +1000 +@@ -28,2 +28,4 @@ + */ ++/* INSTRUMENTED VERSION */ ++ + object ScalaRunTime { +@@ -56,2 +58,3 @@ + def array_apply(xs: AnyRef, idx: Int): Any = { ++ arrayApplyCount += 1 + xs match { +@@ -70,2 +73,3 @@ + } ++ var arrayApplyCount = 0 + +@@ -73,2 +77,3 @@ + def array_update(xs: AnyRef, idx: Int, value: Any): Unit = { ++ arrayUpdateCount += 1 + xs match { +@@ -87,2 +92,3 @@ + } ++ var arrayUpdateCount = 0 \ No newline at end of file From 75deed255ed54e53e3e29b4601feee3f4197aefd Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Sat, 30 Mar 2019 13:43:01 +0000 Subject: [PATCH 1693/2793] Cleanup Transform: reduce allocations. - We replace the use of `.flatten.size` with the function `sumSize`, which does not need any extra allocations. - We unify `List.filter` and a `List.map` into `List.collect`. --- src/compiler/scala/tools/nsc/transform/CleanUp.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/transform/CleanUp.scala b/src/compiler/scala/tools/nsc/transform/CleanUp.scala index 7a298f591cb8..92c6b74cd6e4 100644 --- a/src/compiler/scala/tools/nsc/transform/CleanUp.scala +++ b/src/compiler/scala/tools/nsc/transform/CleanUp.scala @@ -339,7 +339,8 @@ abstract class CleanUp extends Statics with Transform with ast.TreeDSL { (mparams, resType) case tpe @ OverloadedType(pre, alts) => reporter.warning(ad.pos, s"Overloaded type reached the backend! This is a bug in scalac.\n Symbol: ${ad.symbol}\n Overloads: $tpe\n Arguments: " + ad.args.map(_.tpe)) - alts filter (_.paramss.flatten.size == params.length) map (_.tpe) match { + val fittingAlts = alts collect { case alt if sumSize(alt.paramss, 0) == params.length => alt.tpe } + fittingAlts match { case mt @ MethodType(mparams, resType) :: Nil => reporter.warning(NoPosition, "Only one overload has the right arity, proceeding with overload " + mt) (mparams, resType) From f66d0d8f6f908f44ff1602678a8aaaad45e1dedd Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso Blas" Date: Sat, 30 Mar 2019 15:46:01 +0000 Subject: [PATCH 1694/2793] Avoid changes that change semantics The override of the `setInst` method may change semantics.
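The allocation-saving rewrite described in the CleanUp commit above (fusing `filter`+`map` into `collect`, and replacing `.flatten.size` with a size-summing fold) can be shown as a minimal, self-contained sketch. The `Alt` case class, the local `sumSize` helper, and the sample data are illustrative stand-ins, not the compiler's actual types or utilities:

```
// Standalone illustration (not compiler code): fuse filter+map into collect,
// and sum nested sizes without materialising an intermediate flattened list.
object CollectVsFilterMap {
  final case class Alt(paramss: List[List[String]], tpe: String)

  // Counts all elements of the nested lists without allocating `paramss.flatten`.
  def sumSize(paramss: List[List[String]], acc: Int): Int =
    paramss.foldLeft(acc)(_ + _.length)

  def main(args: Array[String]): Unit = {
    val alts  = List(Alt(List(List("a", "b")), "T1"), Alt(List(List("a")), "T2"))
    val arity = 2

    // Before: two traversals plus an intermediate flattened list per element.
    val before = alts.filter(alt => alt.paramss.flatten.size == arity).map(_.tpe)
    // After: a single traversal with collect, no intermediate lists.
    val after  = alts.collect { case alt if sumSize(alt.paramss, 0) == arity => alt.tpe }

    assert(before == after)
    println(after) // List(T1)
  }
}
```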
--- src/reflect/scala/reflect/internal/Types.scala | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index aab28ae0dfb6..943e4cc1e9c0 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -3028,8 +3028,6 @@ trait Types ) extends TypeVar(_origin, _constr) { require(params.nonEmpty && sameLength(params, typeArgs), this) override def safeToString: String = super.safeToString + typeArgs.map(_.safeToString).mkString("[", ", ", "]") - override def setInst(tp: Type): this.type = - super.setInst(if (isSubArgs(typeArgs, tp.typeArgs, params, Depth.AnyDepth)) tp.typeConstructor else NoType) } trait UntouchableTypeVar extends TypeVar { From f483112dbfe4b1f0757b4d05c0dffd8da734b0a6 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Sat, 30 Mar 2019 20:17:10 +0000 Subject: [PATCH 1695/2793] Duplicators: avoid allocations in call to invalidateAll The method `invalidateAll` is just a foreach loop. Rather than flattening the `vparamss` and prepending the `tparams`, which allocates a list that is immediately consumed, we run the call on `tparams` and then foreach over the `vparamss`. --- src/compiler/scala/tools/nsc/typechecker/Duplicators.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index 213ae2785261..577cb04f2b38 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -174,7 +174,8 @@ abstract class Duplicators extends Analyzer { case DefDef(_, name, tparams, vparamss, _, rhs) => // invalidate parameters - invalidateAll(tparams ::: vparamss.flatten) + invalidateAll(tparams) + vparamss foreach (x => invalidateAll(x)) tree.symbol = NoSymbol case Function(vparams, _) => From 5319278716828fe85b0aa76d30b56bc521c67f60 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Sun, 31 Mar 2019 02:17:10 +0100 Subject: [PATCH 1696/2793] SpecializeTypes/Unify: avoid allocations For the `MethodType` and `PolyType` cases, the `unify` method used two calls to `List.map` to generate a list that was then fed into a `foldLeft2` method. We merge the maps into the `foldLeft2` loop to avoid those allocations.
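The `unify` optimisation above follows the same pattern: fold over the original lists and apply the projection inside the loop instead of materialising two mapped lists first. A minimal sketch, assuming a simplified stand-in for the compiler's `foldLeft2` utility and toy `Param`/`Env` types:

```
// Standalone illustration (not compiler code): instead of building two mapped
// lists and folding over them, fold over the original lists and map on the fly.
object FusedFold {
  type Env = Map[String, Int]

  // Simplified stand-in for the compiler's foldLeft2 utility.
  def foldLeft2[A, B, Acc](as: List[A], bs: List[B])(acc: Acc)(f: (Acc, A, B) => Acc): Acc =
    as.zip(bs).foldLeft(acc) { case (z, (a, b)) => f(z, a, b) }

  final case class Param(name: String, size: Int)

  def unifyAux(env: Env, name: String, size: Int): Env = env.updated(name, size)

  def main(args: Array[String]): Unit = {
    val params1 = List(Param("a", 1), Param("b", 2))
    val params2 = List(Param("x", 10), Param("y", 20))

    // Before: two throw-away lists are allocated just to feed the fold.
    val before = foldLeft2(params1.map(_.name), params2.map(_.size))(Map.empty[String, Int])(unifyAux)
    // After: fold the original lists and project inside the loop.
    val after  = foldLeft2(params1, params2)(Map.empty[String, Int]) { (e, p1, p2) => unifyAux(e, p1.name, p2.size) }

    assert(before == after)
    println(after) // Map(a -> 10, b -> 20)
  }
}
```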
--- .../tools/nsc/transform/SpecializeTypes.scala | 37 ++++++++++++------- 1 file changed, 24 insertions(+), 13 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index ffdcd2b151dd..6cb15fdf2e28 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1164,15 +1164,25 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { case (MethodType(params1, res1), MethodType(params2, res2)) => if (strict && params1.length != params2.length) unifyError(tp1, tp2) debuglog(s"Unify methods $tp1 and $tp2") - unify(res1 :: (params1 map (_.tpe)), res2 :: (params2 map (_.tpe)), env, strict) + val env1 = unifyAux(res1, res2, env, strict) + if (params1.isEmpty) env1 + else + foldLeft2(params1, params2)(env1){ (e, p1, p2) => unifyAux(p1.tpe, p2.tpe, e, strict) } case (PolyType(tparams1, res1), PolyType(tparams2, res2)) => debuglog(s"Unify polytypes $tp1 and $tp2") if (strict && tparams1.length != tparams2.length) unifyError(tp1, tp2) - else if (tparams && tparams1.length == tparams2.length) - unify(res1 :: tparams1.map(_.info), res2 :: tparams2.map(_.info), env, strict) + else if (tparams && tparams1.length == tparams2.length) { + val env1 = unifyAux(res1, res2, env, strict) + if (tparams1.isEmpty) env1 + else + foldLeft2(tparams1, tparams2)(env1){ (e, tp1, tp2) => unifyAux(tp1.info, tp2.info, e, strict) } + } else unify(res1, res2, env, strict) + case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) => + val env1 = unifyAux(lo1, lo2, env, strict) + unifyAux(hi1, hi2, env1, strict) case (PolyType(_, res), other) => unify(res, other, env, strict) case (ThisType(_), ThisType(_)) => env case (_, SingleType(_, _)) => unify(tp1, tp2.underlying, env, strict) @@ -1182,26 +1192,27 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { case (RefinedType(_, _), RefinedType(_, _)) => env case (AnnotatedType(_, tp1), tp2) => unify(tp2, tp1, env, strict) case (ExistentialType(_, res1), _) => unify(tp2, res1, env, strict) - case (TypeBounds(lo1, hi1), TypeBounds(lo2, hi2)) => unify(List(lo1, hi1), List(lo2, hi2), env, strict) case _ => debuglog(s"don't know how to unify $tp1 [${tp1.getClass}] with $tp2 [${tp2.getClass}]") env } - private def unify(tp1: List[Type], tp2: List[Type], env: TypeEnv, strict: Boolean): TypeEnv = { + private def unify(tp1: List[Type], tp2: List[Type], env: TypeEnv, strict: Boolean): TypeEnv = if (tp1.isEmpty || tp2.isEmpty) env else foldLeft2(tp1, tp2)(env) { (env, arg1, arg2) => - if (!strict) unify(arg1, arg2, env, strict) + unifyAux(arg1, arg2, env, strict) + } + + private def unifyAux(arg1: Type, arg2: Type, env: TypeEnv, strict: Boolean): TypeEnv = + if (!strict) unify(arg1, arg2, env, strict) + else { + val nenv = unify(arg1, arg2, emptyEnv, strict) + if (env.keySet.intersect(nenv.keySet).isEmpty) env ++ nenv else { - val nenv = unify(arg1, arg2, emptyEnv, strict) - if (env.keySet.intersect(nenv.keySet).isEmpty) env ++ nenv - else { - debuglog(s"could not unify: u($arg1, $arg2) yields $nenv, env: $env") - unifyError(tp1, tp2) - } + debuglog(s"could not unify: u($arg1, $arg2) yields $nenv, env: $env") + unifyError(arg1, arg2) } } - } /** Apply the type environment 'env' to the given type. All type * bindings are supposed to be to primitive types. A type variable From 38c6439cc6defa79fad63ac4354fde3f0aa35bda Mon Sep 17 00:00:00 2001 From: "Diego E. 
Alonso-Blas" Date: Sat, 30 Mar 2019 23:08:00 +0000 Subject: [PATCH 1697/2793] Types - isWithinBounds: avoid middle list The `isWithinBounds` method creates an auxiliary list `bounds`, as a result of a `List.map`, to instantiate the type parameters of each type. This list is passed to a `corresponds` method that consumes it. We merge the map function into the `corresponds` loop. Also, for code readability, we make `instantiatedBound` a local helper. However, we have to exclude the case in which the list of `targs` contains any type with annotations, since that needs more processing. --- src/reflect/scala/reflect/internal/Types.scala | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index e08ad231eb5b..79ecc9031dad 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -4486,15 +4486,19 @@ trait Types /** Do type arguments `targs` conform to formal parameters `tparams`? */ def isWithinBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): Boolean = { - var bounds = instantiatedBounds(pre, owner, tparams, targs) - if (targs exists typeHasAnnotations) + def instantiatedBound(tparam: Symbol): TypeBounds = + tparam.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds + + if (targs exists typeHasAnnotations){ + var bounds = mapList(tparams)(instantiatedBound) bounds = adaptBoundsToAnnotations(bounds, tparams, targs) - (bounds corresponds targs)(boundsContainType) + (bounds corresponds targs)(boundsContainType) + } else + (tparams corresponds targs){ (tparam, targ) => + boundsContainType(instantiatedBound(tparam), targ) + } } - def instantiatedBounds(pre: Type, owner: Symbol, tparams: List[Symbol], targs: List[Type]): List[TypeBounds] = - mapList(tparams)(_.info.asSeenFrom(pre, owner).instantiateTypeParams(tparams, targs).bounds) - def elimAnonymousClass(t: Type) = t match { case TypeRef(pre, clazz, Nil) if clazz.isAnonymousClass => clazz.classBound.asSeenFrom(pre, clazz.owner) From 9c57e78be5609275d752e47b796dc5790e4391f7 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 9 Apr 2019 04:47:28 +0000 Subject: [PATCH 1698/2793] [backport] Fix interop between Java and generic inner Scala classes Remove the synthetic outer parameter from the generic signature, in line with what `javac` expects and would do itself. Ignore the result type and the outer param in the fast path check in NeedsSigCollector. This has the effect of omitting Java generic signatures for the constructors of anonymous classes. Such signatures were inconsistent before this change anyway.
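The constructor-signature filtering this backport describes can be sketched in isolation. The `Param` type and `isOuterParam` flag below are illustrative assumptions standing in for compiler symbols; the real logic lives in `Erasure`'s generic-signature generation:

```
// Standalone illustration (not the real Erasure code): for a class constructor,
// drop the synthetic $outer parameter before emitting the generic signature,
// matching what javac would produce for an inner class constructor.
object OuterParamFilter {
  final case class Param(name: String, isOuterParam: Boolean)

  def signatureParams(params: List[Param], isClassConstructor: Boolean): List[Param] =
    params match {
      case head :: tail if isClassConstructor && head.isOuterParam => tail
      case _ => params
    }

  def main(args: Array[String]): Unit = {
    val ctorParams = List(Param("$outer", isOuterParam = true), Param("i", isOuterParam = false))
    println(signatureParams(ctorParams, isClassConstructor = true).map(_.name))  // List(i)
    println(signatureParams(ctorParams, isClassConstructor = false).map(_.name)) // List($outer, i)
  }
}
```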
--- .../scala/tools/nsc/transform/Erasure.scala | 45 ++++++++++++------- test/files/jvm/t10880.check | 4 +- test/files/run/t10889.check | 1 + test/files/run/t10889/O.scala | 6 +++ test/files/run/t10889/Test.java | 6 +++ 5 files changed, 44 insertions(+), 18 deletions(-) create mode 100644 test/files/run/t10889.check create mode 100644 test/files/run/t10889/O.scala create mode 100644 test/files/run/t10889/Test.java diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index ff428cc156b4..e088d071c4a7 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -50,8 +50,8 @@ abstract class Erasure extends InfoTransform atPos(tree.pos)(Apply(Select(tree, conversion), Nil)) } - private object NeedsSigCollector extends TypeCollector(false) { - def traverse(tp: Type) { + private class NeedsSigCollector(sym: Symbol) extends TypeCollector(false) { + def traverse(tp: Type): Unit = if (!result) { tp match { case st: SubType => @@ -69,16 +69,26 @@ abstract class Erasure extends InfoTransform parents foreach traverse case AnnotatedType(_, atp) => traverse(atp) + case MethodType(params, resultType) => + if (sym.isClassConstructor) { + val sigParams = params match { + case head :: tail if head.isOuterParam => tail + case _ => params + } + mapOver(sigParams) + // skip the result type, it is Void in the signature. + } else { + mapOver(tp) + } case _ => mapOver(tp) } } - } } override protected def verifyJavaErasure = settings.Xverify || settings.debug - private def needsJavaSig(tp: Type, throwsArgs: List[Type]) = !settings.Ynogenericsig && { - def needs(tp: Type) = NeedsSigCollector.collect(tp) + private def needsJavaSig(sym: Symbol, tp: Type, throwsArgs: List[Type]) = !settings.Ynogenericsig && { + def needs(tp: Type) = new NeedsSigCollector(sym).collect(tp) needs(tp) || throwsArgs.exists(needs) } @@ -282,7 +292,7 @@ abstract class Erasure extends InfoTransform def classSig: Unit = { markClassUsed(sym) val preRebound = pre.baseType(sym.owner) // #2585 - if (needsJavaSig(preRebound, Nil)) { + if (needsJavaSig(sym, preRebound, Nil)) { val i = builder.length() jsig(preRebound, existentiallyBound) if (builder.charAt(i) == 'L') { @@ -359,16 +369,19 @@ abstract class Erasure extends InfoTransform case MethodType(params, restpe) => builder.append('(') params foreach (p => { - val tp = p.attachments.get[TypeParamVarargsAttachment] match { - case Some(att) => - // For @varargs forwarders, a T* parameter has type Array[Object] in the forwarder - // instead of Array[T], as the latter would erase to Object (instead of Array[Object]). - // To make the generic signature correct ("[T", not "[Object"), an attachment on the - // parameter symbol stores the type T that was replaced by Object. - builder.append('['); att.typeParamRef - case _ => p.tpe + val isClassOuterParam = sym0.isClassConstructor && p.isOuterParam + if (!isClassOuterParam) { + val tp = p.attachments.get[TypeParamVarargsAttachment] match { + case Some(att) => + // For @varargs forwarders, a T* parameter has type Array[Object] in the forwarder + // instead of Array[T], as the latter would erase to Object (instead of Array[Object]). + // To make the generic signature correct ("[T", not "[Object"), an attachment on the + // parameter symbol stores the type T that was replaced by Object. 
+ builder.append('['); att.typeParamRef + case _ => p.tpe + } + jsig(tp) } - jsig(tp) }) builder.append(')') if (restpe.typeSymbol == UnitClass || sym0.isConstructor) builder.append(VOID_TAG) else jsig(restpe) @@ -389,7 +402,7 @@ abstract class Erasure extends InfoTransform } } val throwsArgs = sym0.annotations flatMap ThrownException.unapply - if (needsJavaSig(info, throwsArgs)) { + if (needsJavaSig(sym0, info, throwsArgs)) { try { jsig(info, toplevel = true) throwsArgs.foreach { t => diff --git a/test/files/jvm/t10880.check b/test/files/jvm/t10880.check index 87f09c43838a..acbc3b9ebab8 100644 --- a/test/files/jvm/t10880.check +++ b/test/files/jvm/t10880.check @@ -1,2 +1,2 @@ -List(class Provides, Provides) -List(Provides) +List(class Provides, class java.lang.Object) +List(class Provides, class java.lang.Object) diff --git a/test/files/run/t10889.check b/test/files/run/t10889.check new file mode 100644 index 000000000000..dbf57fb20162 --- /dev/null +++ b/test/files/run/t10889.check @@ -0,0 +1 @@ +new O(o).I[](i) diff --git a/test/files/run/t10889/O.scala b/test/files/run/t10889/O.scala new file mode 100644 index 000000000000..d054780f6db8 --- /dev/null +++ b/test/files/run/t10889/O.scala @@ -0,0 +1,6 @@ +package p +class O(val o: String) { + class I[T](val i: String) { + println(s"new O($o).I[]($i)") + } +} diff --git a/test/files/run/t10889/Test.java b/test/files/run/t10889/Test.java new file mode 100644 index 000000000000..ae4237986bdd --- /dev/null +++ b/test/files/run/t10889/Test.java @@ -0,0 +1,6 @@ +public class Test { + public static void main(String[] args) { + p.O l = new p.O("o"); + p.O.I s = l.new I(/*l,*/ "i"); + } +} From 01e73f0abca4b7fc0fbc09d7895a92ebbc970ce1 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 9 Apr 2019 09:46:43 +0000 Subject: [PATCH 1699/2793] [backport] Reduce allocations of NeedSigCollector --- src/compiler/scala/tools/nsc/transform/Erasure.scala | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index e088d071c4a7..0501dfd91126 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -50,7 +50,12 @@ abstract class Erasure extends InfoTransform atPos(tree.pos)(Apply(Select(tree, conversion), Nil)) } - private class NeedsSigCollector(sym: Symbol) extends TypeCollector(false) { + private object NeedsSigCollector { + private val NeedsSigCollector_true = new NeedsSigCollector(true) + private val NeedsSigCollector_false = new NeedsSigCollector(false) + def apply(isClassConstructor: Boolean) = if (isClassConstructor) NeedsSigCollector_true else NeedsSigCollector_false + } + private class NeedsSigCollector(isClassConstructor: Boolean) extends TypeCollector(false) { def traverse(tp: Type): Unit = if (!result) { tp match { @@ -70,7 +75,7 @@ abstract class Erasure extends InfoTransform case AnnotatedType(_, atp) => traverse(atp) case MethodType(params, resultType) => - if (sym.isClassConstructor) { + if (isClassConstructor) { val sigParams = params match { case head :: tail if head.isOuterParam => tail case _ => params @@ -88,7 +93,7 @@ abstract class Erasure extends InfoTransform override protected def verifyJavaErasure = settings.Xverify || settings.debug private def needsJavaSig(sym: Symbol, tp: Type, throwsArgs: List[Type]) = !settings.Ynogenericsig && { - def needs(tp: Type) = new NeedsSigCollector(sym).collect(tp) + def needs(tp: Type) = 
NeedsSigCollector(sym.isClassConstructor).collect(tp) needs(tp) || throwsArgs.exists(needs) } From 2d2b8959c3d93bdcf421094183cd322f7a038f11 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 21 Oct 2016 21:22:04 +1100 Subject: [PATCH 1700/2793] Improve hash code of Names The old approach of using the first, last, and middle characters only lays a trap for generated names that have little or no entropy at these locations. For instance, fresh existential names generated in "as seen from" operations are one such case, and when compiling large batches of files the name table can become imbalanced. This seems to be the bottleneck when compiling the enormous (generated) test suite for ScalaTest itself: https://github.com/scala/scala-dev/issues/246#issuecomment-255338925 This commit uses all characters to compute the hashCode. It improves the compilation time of ScalaTest tests from 487s to 349s (0.71x). It would still be useful to avoid generating these fresh names with a global counter, as this represents a steady name leak in long-lived Globals (e.g. the presentation compiler). --- .../scala/reflect/internal/Names.scala | 20 +++++++++---------- 1 file changed, 9 insertions(+), 11 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index b4cde7b6a3bf..eb5bf07734ac 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -47,17 +47,15 @@ trait Names extends api.Names { /** Hashtable for finding type names quickly. */ private val typeHashtable = new Array[TypeName](HASH_SIZE) - /** - * The hashcode of a name depends on the first, the last and the middle character, - * and the length of the name. - */ - private def hashValue(cs: Array[Char], offset: Int, len: Int): Int = - if (len > 0) - (len * (41 * 41 * 41) + - cs(offset) * (41 * 41) + - cs(offset + len - 1) * 41 + - cs(offset + (len >> 1))) - else 0 + private def hashValue(cs: Array[Char], offset: Int, len: Int): Int = { + var h = 0 + var i = 0 + while (i < len) { + h = 31 * h + cs(i + offset) + i += 1 + } + h + } /** Is (the ASCII representation of) name at given index equal to * cs[offset..offset+len-1]? From aa47977672993d2f26337caad72a80235a67376d Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 16 Apr 2019 14:18:33 -0700 Subject: [PATCH 1701/2793] add lampepfl org to CoC --- CODE_OF_CONDUCT.md | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 8bef56b65522..0511f2126d92 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1 +1,7 @@ -all repositories in the [scala](https://github.com/scala) and [scalacenter](https://github.com/scalacenter) organizations are covered by the Scala Code of Conduct: https://scala-lang.org/conduct/ +all repositories in these organizations: + +* [scala](https://github.com/scala) +* [scalacenter](https://github.com/scalacenter) +* [lampepfl](https://github.com/lampepfl) + +are covered by the Scala Code of Conduct: https://scala-lang.org/conduct/ From 50ff82195d3867ea9a004342f9e2c9fde8875830 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 24 Apr 2019 09:26:27 +1000 Subject: [PATCH 1702/2793] Limit string interpolation intrinsic to avoid compiler SOE Fall back to the old style when more than 64 varargs are provided.
Backport of a limit introduced in 2.13.x in #7678 --- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 3 ++- test/files/pos/t10870.scala | 6 ++++++ 2 files changed, 8 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t10870.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 95b1c25a7afd..09d1115e9dc1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1604,7 +1604,8 @@ abstract class RefChecks extends Transform { if qual1.symbol == rd.StringContext_apply && treeInfo.isQualifierSafeToElide(qual) && lits.forall(lit => treeInfo.isLiteralString(lit)) && - lits.length == (args.length + 1) => + lits.length == (args.length + 1) && + args.lengthCompare(64) <= 0 => // TODO make more robust to large input so that we can drop this condition, chunk the concatenations in manageable batches val isRaw = sym == rd.StringContext_raw if (isRaw) Some((lits, args)) else { diff --git a/test/files/pos/t10870.scala b/test/files/pos/t10870.scala new file mode 100644 index 000000000000..9836821f1288 --- /dev/null +++ b/test/files/pos/t10870.scala @@ -0,0 +1,6 @@ +package example + +object Test { + val a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24, a25, a26, a27, a28, a29, a30, a31, a32, a33, a34, a35, a36, a37, a38, a39, a40, a41, a42, a43, a44, a45, a46, a47, a48, a49, a50, a51, a52, a53, a54, a55, a56, a57, a58, a59, a60, a61, a62, a63, a64, a65, a66, a67, a68, a69, a70, a71, a72, a73, a74, a75, a76, a77, a78, a79, a80, a81, a82, a83, a84, a85, a86, a87, a88, a89, a90, a91, a92, a93, a94, a95, a96, a97, a98, a99, a100, a101, a102, a103, a104, a105, a106, a107, a108, a109, a110, a111, a112, a113, a114, a115, a116, a117, a118, a119, a120, a121, a122, a123, a124, a125, a126, a127, a128, a129, a130, a131, a132, a133, a134, a135, a136, a137, a138, a139, a140, a141, a142, a143, a144, a145, a146, a147, a148, a149, a150, a151, a152, a153, a154, a155, a156, a157, a158, a159, a160, a161, a162, a163, a164, a165, a166, a167, a168, a169, a170, a171, a172, a173, a174, a175, a176, a177, a178, a179, a180, a181, a182, a183, a184, a185, a186, a187, a188, a189, a190, a191, a192, a193, a194, a195, a196, a197, a198, a199, a200, a201, a202, a203, a204, a205, a206, a207, a208, a209, a210, a211, a212, a213, a214, a215, a216, a217, a218, a219, a220, a221, a222, a223, a224, a225, a226, a227, a228, a229, a230, a231, a232, a233, a234, a235, a236, a237, a238, a239, a240, a241, a242, a243, a244, a245, a246, a247, a248, a249, a250, a251, a252, a253, a254, a255, a256 = " " + val foo = s"""$a1 $a2 $a3 $a4 $a5 $a6 $a7 $a8 $a9 $a10 $a11 $a12 $a13 $a14 $a15 $a16 $a17 $a18 $a19 $a20 $a21 $a22 $a23 $a24 $a25 $a26 $a27 $a28 $a29 $a30 $a31 $a32 $a33 $a34 $a35 $a36 $a37 $a38 $a39 $a40 $a41 $a42 $a43 $a44 $a45 $a46 $a47 $a48 $a49 $a50 $a51 $a52 $a53 $a54 $a55 $a56 $a57 $a58 $a59 $a60 $a61 $a62 $a63 $a64 $a65 $a66 $a67 $a68 $a69 $a70 $a71 $a72 $a73 $a74 $a75 $a76 $a77 $a78 $a79 $a80 $a81 $a82 $a83 $a84 $a85 $a86 $a87 $a88 $a89 $a90 $a91 $a92 $a93 $a94 $a95 $a96 $a97 $a98 $a99 $a100 $a101 $a102 $a103 $a104 $a105 $a106 $a107 $a108 $a109 $a110 $a111 $a112 $a113 $a114 $a115 $a116 $a117 $a118 $a119 $a120 $a121 $a122 $a123 $a124 $a125 $a126 $a127 $a128 $a129 $a130 $a131 $a132 $a133 $a134 $a135 $a136 $a137 $a138 $a139 $a140 $a141 $a142 $a143 $a144 $a145 $a146 $a147 $a148 $a149 $a150 $a151 $a152 $a153 
$a154 $a155 $a156 $a157 $a158 $a159 $a160 $a161 $a162 $a163 $a164 $a165 $a166 $a167 $a168 $a169 $a170 $a171 $a172 $a173 $a174 $a175 $a176 $a177 $a178 $a179 $a180 $a181 $a182 $a183 $a184 $a185 $a186 $a187 $a188 $a189 $a190 $a191 $a192 $a193 $a194 $a195 $a196 $a197 $a198 $a199 $a200 $a201 $a202 $a203 $a204 $a205 $a206 $a207 $a208 $a209 $a210 $a211 $a212 $a213 $a214 $a215 $a216 $a217 $a218 $a219 $a220 $a221 $a222 $a223 $a224 $a225 $a226 $a227 $a228 $a229 $a230 $a231 $a232 $a233 $a234 $a235 $a236 $a237 $a238 $a239 $a240 $a241 $a242 $a243 $a244 $a245 $a246 $a247 $a248 $a249 $a250 $a251 $a252 $a253 $a254 $a255 $a256""" +} From 86c5a0385a305491d4267847de0be711811049bf Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Thu, 25 Apr 2019 15:42:53 -0700 Subject: [PATCH 1703/2793] correct jansi version in intellij setup --- src/intellij/scala.ipr.SAMPLE | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index b5f03d96d7e8..ed483d019c86 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -363,7 +363,7 @@ - + From 8f01615260d6b8bf8a15bef62a5b1fb0bc8af4ee Mon Sep 17 00:00:00 2001 From: Enno Runne <458526+ennru@users.noreply.github.com> Date: Fri, 8 Mar 2019 10:49:56 +0100 Subject: [PATCH 1704/2793] [backport] Scaladoc: support setting canonical URLs MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Introduces a new command line flag for Scaladoc to configure a base URL for generation of canonical URLs on all pages. Canonical URLs intend to help search engines to identify the most relevant/recent version of a page when several versions are available. References [Google Support: Consolidate duplicate URLs](https://support.google.com/webmasters/answer/139066?hl=en) [Blog: Google’s Algorithms Can Ignore Rel Canonical When URLs Contain Different Content.](https://www.gsqi.com/marketing-blog/google-ignore-rel-canonical-different-content/) Fixes https://github.com/scala/bug/issues/10640 --- .../scala/tools/nsc/doc/Settings.scala | 7 +++ .../tools/nsc/doc/html/page/Entity.scala | 8 ++++ test/scaladoc/resources/canonical.scala | 15 +++++++ test/scaladoc/run/canonical-unset.check | 2 + test/scaladoc/run/canonical-unset.scala | 43 ++++++++++++++++++ test/scaladoc/run/canonical.check | 2 + test/scaladoc/run/canonical.scala | 44 +++++++++++++++++++ 7 files changed, 121 insertions(+) create mode 100644 test/scaladoc/resources/canonical.scala create mode 100644 test/scaladoc/run/canonical-unset.check create mode 100644 test/scaladoc/run/canonical-unset.scala create mode 100644 test/scaladoc/run/canonical.check create mode 100644 test/scaladoc/run/canonical.scala diff --git a/src/scaladoc/scala/tools/nsc/doc/Settings.scala b/src/scaladoc/scala/tools/nsc/doc/Settings.scala index 59e5088ef64c..fb7c30b51fbe 100644 --- a/src/scaladoc/scala/tools/nsc/doc/Settings.scala +++ b/src/scaladoc/scala/tools/nsc/doc/Settings.scala @@ -225,6 +225,13 @@ class Settings(error: String => Unit, val printMsg: String => Unit = println(_)) "Prevents parsing and inclusion of comments from java sources." ) + val docCanonicalBaseUrl = StringSetting ( + "-doc-canonical-base-url", + "url", + s"A base URL to use as prefix and add `canonical` URLs to all pages. The canonical URL may be used by search engines to choose the URL that you want people to see in search results. If unset no canonical URLs are generated.", + "" + ) + // For improved help output. 
def scaladocSpecific = Set[Settings#Setting]( docformat, doctitle, docfooter, docversion, docUncompilable, docsourceurl, docgenerator, docRootContent, useStupidTypes, diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala index d3c27057d46a..42a88f537498 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala @@ -48,6 +48,14 @@ trait EntityPage extends HtmlPage { def headers = + { val canonicalSetting = universe.settings.docCanonicalBaseUrl + if (canonicalSetting.isSetByUser) { + val canonicalUrl = + if (canonicalSetting.value.endsWith("/")) canonicalSetting.value + else canonicalSetting.value + "/" + + } else NodeSeq.Empty + } diff --git a/test/scaladoc/resources/canonical.scala b/test/scaladoc/resources/canonical.scala new file mode 100644 index 000000000000..9ec6cb715e9e --- /dev/null +++ b/test/scaladoc/resources/canonical.scala @@ -0,0 +1,15 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package p + +class Canonical diff --git a/test/scaladoc/run/canonical-unset.check b/test/scaladoc/run/canonical-unset.check new file mode 100644 index 000000000000..bf0743043e43 --- /dev/null +++ b/test/scaladoc/run/canonical-unset.check @@ -0,0 +1,2 @@ +As expected, no canonical URL found. +Done. diff --git a/test/scaladoc/run/canonical-unset.scala b/test/scaladoc/run/canonical-unset.scala new file mode 100644 index 000000000000..8cbceebf5ef3 --- /dev/null +++ b/test/scaladoc/run/canonical-unset.scala @@ -0,0 +1,43 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +import scala.tools.nsc.{ScalaDocReporter, doc, io} +import scala.tools.nsc.doc.DocFactory +import scala.tools.nsc.doc.model._ +import scala.tools.nsc.reporters.ConsoleReporter +import scala.tools.partest.ScaladocModelTest + +object Test extends ScaladocModelTest { + + override def resourceFile: String = "test/scaladoc/resources/canonical.scala" + + def destinationDir = "target/canonical-unset" + + override def scaladocSettings = + s"-d ${destinationDir}" + + override def code = "" + + def testModel(rootPackage: Package): Unit = { + val dir = new java.io.File(destinationDir) + dir.mkdirs() + newDocFactory.document(List(resourceFile)) + val Pattern = """""".r + val s = io.File(s"${dir.getAbsolutePath}/p/Canonical.html").slurp() + Pattern.findFirstIn(s) match { + case Some(s) => + println(s) + case _ => + println("As expected, no canonical URL found.") + } + } +} diff --git a/test/scaladoc/run/canonical.check b/test/scaladoc/run/canonical.check new file mode 100644 index 000000000000..ee4467883e7e --- /dev/null +++ b/test/scaladoc/run/canonical.check @@ -0,0 +1,2 @@ + +Done. diff --git a/test/scaladoc/run/canonical.scala b/test/scaladoc/run/canonical.scala new file mode 100644 index 000000000000..bd987023e50a --- /dev/null +++ b/test/scaladoc/run/canonical.scala @@ -0,0 +1,44 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. 
+ * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +import scala.tools.nsc.{ScalaDocReporter, doc, io} +import scala.tools.nsc.doc.DocFactory +import scala.tools.nsc.doc.model._ +import scala.tools.nsc.reporters.ConsoleReporter +import scala.tools.partest.ScaladocModelTest + +object Test extends ScaladocModelTest { + + override def resourceFile: String = "test/scaladoc/resources/canonical.scala" + + def destinationDir = "target/canonical" + + override def scaladocSettings = + s"-doc-canonical-base-url https://www.scala-lang.org/files/archive/nightly/2.13.x/api/2.13.x/ -d $destinationDir" + + override def code = "" + + def testModel(rootPackage: Package): Unit = { + val dir = new java.io.File(destinationDir) + dir.mkdirs() + newDocFactory.document(List(resourceFile)) + val Pattern = """""".r + val s = io.File(s"${dir.getAbsolutePath}/p/Canonical.html").slurp() + Pattern.findFirstIn(s) match { + case Some(s) => + println(s) + case _ => + println("No canonical URL found.") + println(s.substring(0, Math.min(1000, s.length))) + } + } +} From d7374d656a74815807e21e143c293b6fcb7af424 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Tue, 30 Apr 2019 21:47:37 -0400 Subject: [PATCH 1705/2793] Show attachments in tree browser. Remember swing? Hopefully I'm not lowering people's opinions of me for every once in a while using a graphical interface. --- .../scala/tools/nsc/ast/TreeBrowsers.scala | 20 +++++++++++++++---- 1 file changed, 16 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala index 44380a32e065..b71989721f75 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala @@ -305,6 +305,7 @@ abstract class TreeBrowsers { case _ => str.append("tree.id: ").append(t.id) str.append("\ntree.pos: ").append(t.pos) + str.append(TreeInfo.attachments(t, "tree")) str.append("\nSymbol: ").append(TreeInfo.symbolText(t)) str.append("\nSymbol owner: ").append( if ((t.symbol ne null) && t.symbol != NoSymbol) @@ -523,13 +524,24 @@ abstract class TreeBrowsers { val s = t.symbol if ((s ne null) && (s != NoSymbol)) { - var str = s.flagString - if (s.isStaticMember) str = str + " isStatic " - (str + " annotations: " + s.annotations.mkString("", " ", "") - + (if (s.isTypeSkolem) "\ndeSkolemized annotations: " + s.deSkolemize.annotations.mkString("", " ", "") else "")) + val str = new StringBuilder(s.flagString) + if (s.isStaticMember) str ++= " isStatic " + str ++= " annotations: " + str ++= s.annotations.mkString("", " ", "") + if (s.isTypeSkolem) { + str ++= "\ndeSkolemized annotations: " + str ++= s.deSkolemize.annotations.mkString("", " ", "") + } + str ++= attachments(s, "") + str.toString } else "" } + + def attachments(t: Attachable, pre: String): String = { + if (t.attachments.isEmpty) "" + else t.attachments.all.mkString(s"\n$pre attachments:\n ","\n ","") + } } object TypePrinter { From f98649c7678b49e0c0cb13cbc0c041fb10644222 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Sun, 14 Apr 2019 17:27:32 -0400 Subject: [PATCH 1706/2793] [nomerge] Package LICENSE/NOTICE with jars Technically, all but the most conscientious redistributors of scala jars were violating the license (unless they personally attached the required copy of the Apache v2 license), and our `NOTICE` file was 
of little effect, as people only need to keep it with the distribution if they got it to begin with. --- project/License.scala | 18 ++++++++++++++++++ project/Osgi.scala | 7 ++++--- 2 files changed, 22 insertions(+), 3 deletions(-) create mode 100644 project/License.scala diff --git a/project/License.scala b/project/License.scala new file mode 100644 index 000000000000..13c8a2778544 --- /dev/null +++ b/project/License.scala @@ -0,0 +1,18 @@ +package scala.build + +import sbt._, Keys._, plugins._ + +object License extends AutoPlugin { + val licenseMapping = settingKey[Seq[(File, String)]]("LICENSE/NOTICE file mappings") + + override val requires = JvmPlugin + override val trigger = AllRequirements + + override def projectSettings: Seq[Def.Setting[_]] = + List(packageSrc, packageBin, packageDoc) + .map(pkg => mappings in (Compile, pkg) ++= licenseMapping.value) + + override def buildSettings: Seq[Def.Setting[_]] = Seq( + licenseMapping := List("LICENSE", "NOTICE").map(fn => (baseDirectory.value / fn) -> fn) + ) +} \ No newline at end of file diff --git a/project/Osgi.scala b/project/Osgi.scala index f8d43d8310d3..596e33c8642f 100644 --- a/project/Osgi.scala +++ b/project/Osgi.scala @@ -40,8 +40,9 @@ object Osgi { jarlist := false, bundle := Def.task { val cp = (products in Compile in packageBin).value + val licenseFiles = License.licenseMapping.value.map(_._1) bundleTask(headers.value.toMap, jarlist.value, cp, - (artifactPath in (Compile, packageBin)).value, cp, streams.value) + (artifactPath in (Compile, packageBin)).value, cp ++ licenseFiles, streams.value) }.value, packagedArtifact in (Compile, packageBin) := (((artifact in (Compile, packageBin)).value, bundle.value)), // Also create OSGi source bundles: @@ -63,10 +64,10 @@ object Osgi { // https://github.com/scala/scala-dev/issues/254 // Must be careful not to include scala-asm.jar within scala-compiler.jar! - def resourceDirectoryRef(f: File) = (if (f.isDirectory) "" else "@") + f.getAbsolutePath + def resourceDirectoryRef(f: File) = (if (f.getName endsWith ".jar") "@" else "") + f.getAbsolutePath val includeRes = resourceDirectories.filter(_.exists).map(resourceDirectoryRef).mkString(",") - if(!includeRes.isEmpty) builder.setProperty(INCLUDERESOURCE, includeRes) + if (!includeRes.isEmpty) builder.setProperty(INCLUDERESOURCE, includeRes) builder.getProperties.asScala.foreach { case (k, v) => log.debug(s"bnd: $k: $v") } // builder.build is not thread-safe because it uses a static SimpleDateFormat. This ensures // that all calls to builder.build are serialized. 
From ee2719585e40cb4e9e523e20061a6a2075f4d49d Mon Sep 17 00:00:00 2001 From: Michael Pollmeier Date: Tue, 30 Apr 2019 14:23:00 +1200 Subject: [PATCH 1707/2793] fix XSS vulnerability in scaladoc search to trigger XSS vuln, simply paste this into the search bar: ``` "\>{{7*7}} ``` all credit for finding the vulnerability goes to *Yeasir Arafat* --- src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js | 1 + 1 file changed, 1 insertion(+) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js index 087c975aedd1..e899f06b5c0f 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js @@ -532,6 +532,7 @@ function searchAll() { scheduler.clear("search"); // clear previous search maxJobs = 1; // clear previous max var searchStr = $("#textfilter input").attr("value").trim() || ''; + searchStr = escape(searchStr); if (searchStr === '') { $("div#search-results").hide(); From fa355d04444a32e2b7769ba8868d594b24daf3da Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 30 Apr 2019 15:59:26 +1000 Subject: [PATCH 1708/2793] Optimize importedSymbol Call TypeName.toTermName less frequently. --- src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index 3b1d75567f02..c2a49d19c1b1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1532,10 +1532,13 @@ trait Contexts { self: Analyzer => var selectors = tree.selectors @inline def current = selectors.head while ((selectors ne Nil) && result == NoSymbol) { - if (current.rename == name.toTermName) + def sameName(name: Name, other: Name) = { + (name eq other) || (name ne null) && name.start == other.start + } + if (sameName(current.rename, name)) result = qual.tpe.nonLocalMember( // new to address #2733: consider only non-local members for imports if (name.isTypeName) current.name.toTypeName else current.name) - else if (current.name == name.toTermName) + else if (sameName(current.name, name)) renamed = true else if (current.name == nme.WILDCARD && !renamed && !requireExplicit) result = qual.tpe.nonLocalMember(name) From 5e8355a621b1e34203cca4e02d3e371e7ef2e400 Mon Sep 17 00:00:00 2001 From: Michael Pollmeier Date: Fri, 3 May 2019 10:43:49 +1200 Subject: [PATCH 1709/2793] fix xss by writing the input parameter properly to the dom rather than escaping the search string, which breaks the search for e.g. 
`:+` solution contributed by NthPortal in https://github.com/scala/scala/pull/8018#issuecomment-488546695 --- .../scala/tools/nsc/doc/html/resource/lib/index.js | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js index e899f06b5c0f..379cb701b471 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js @@ -532,7 +532,6 @@ function searchAll() { scheduler.clear("search"); // clear previous search maxJobs = 1; // clear previous max var searchStr = $("#textfilter input").attr("value").trim() || ''; - searchStr = escape(searchStr); if (searchStr === '') { $("div#search-results").hide(); @@ -563,9 +562,12 @@ function searchAll() { entityResults.appendChild(entityH1); $("div#results-content") - .prepend("" - +" Showing results for \"" + searchStr + "\"" - +""); + .prepend( + $("") + .addClass("search-text") + .append(document.createTextNode(" Showing results for ")) + .append($("").addClass("query-str").text(searchStr)) + ); var regExp = compilePattern(searchStr); From 1ad22f1e77cb274844a9ce369201f3ec10b9cb0b Mon Sep 17 00:00:00 2001 From: Michael Pollmeier Date: Fri, 3 May 2019 10:51:18 +1200 Subject: [PATCH 1710/2793] four space indentation --- .../scala/tools/nsc/doc/html/resource/lib/index.js | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js index 379cb701b471..33b49b6d76f1 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js +++ b/src/scaladoc/scala/tools/nsc/doc/html/resource/lib/index.js @@ -561,13 +561,12 @@ function searchAll() { entityH1.innerHTML = "Entity results"; entityResults.appendChild(entityH1); - $("div#results-content") - .prepend( - $("") - .addClass("search-text") - .append(document.createTextNode(" Showing results for ")) - .append($("").addClass("query-str").text(searchStr)) - ); + $("div#results-content").prepend( + $("") + .addClass("search-text") + .append(document.createTextNode(" Showing results for ")) + .append($("").addClass("query-str").text(searchStr)) + ); var regExp = compilePattern(searchStr); From 3347caa572d39efe474fed416f056f465f1e4123 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 29 Apr 2019 14:29:25 +1000 Subject: [PATCH 1711/2793] Remove unused, duplicated copy of findMacroClassLoader --- .../scala/tools/nsc/plugins/Plugins.scala | 34 ------------------- .../nsc/GlobalCustomizeClassloaderTest.scala | 1 - 2 files changed, 35 deletions(-) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index d30cf712f8ac..8d47bfa329dc 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -167,38 +167,4 @@ trait Plugins { global: Global => (for (plug <- roughPluginsList ; help <- plug.optionsHelp) yield { "\nOptions for plugin '%s':\n%s\n".format(plug.name, help) }).mkString - - /** Obtains a `ClassLoader` instance used for macro expansion. - * - * By default a new `ScalaClassLoader` is created using the classpath - * from global and the classloader of self as parent. - * - * Mirrors with runtime definitions (e.g. Repl) need to adjust this method. 
- */ - protected def findMacroClassLoader(): ClassLoader = { - val classpath: Seq[URL] = if (settings.YmacroClasspath.isSetByUser) { - for { - file <- scala.tools.nsc.util.ClassPath.expandPath(settings.YmacroClasspath.value, true) - af <- Option(nsc.io.AbstractFile getDirectory file) - } yield af.file.toURI.toURL - } else global.classPath.asURLs - def newLoader: () => ScalaClassLoader.URLClassLoader = () => { - analyzer.macroLogVerbose("macro classloader: initializing from -cp: %s".format(classpath)) - ScalaClassLoader.fromURLs(classpath, getClass.getClassLoader) - } - - val policy = settings.YcacheMacroClassLoader.value - val cache = Macros.macroClassLoadersCache - val disableCache = policy == settings.CachePolicy.None.name - val checkStamps = policy == settings.CachePolicy.LastModified.name - cache.checkCacheability(classpath, checkStamps, disableCache) match { - case Left(msg) => - analyzer.macroLogVerbose(s"macro classloader: $msg.") - val loader = newLoader() - closeableRegistry.registerClosable(loader) - loader - case Right(paths) => - cache.getOrCreate(paths, newLoader, closeableRegistry, checkStamps) - } - } } diff --git a/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala b/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala index 500379706090..9f93c6acaa76 100644 --- a/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala +++ b/test/junit/scala/tools/nsc/GlobalCustomizeClassloaderTest.scala @@ -17,7 +17,6 @@ class GlobalCustomizeClassloaderTest { // that properly closes them before one of the elements needs to be overwritten. @Test def test(): Unit = { val g = new Global(new Settings) { - override protected[scala] def findMacroClassLoader(): ClassLoader = getClass.getClassLoader override protected def findPluginClassLoader(classpath: Seq[Path]): ClassLoader = { val d = new VirtualDirectory("", None) val xml = d.fileNamed("scalac-plugin.xml") From e4406b94c16a0799741607235c42f6f06965e538 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 29 Apr 2019 14:33:55 +1000 Subject: [PATCH 1712/2793] Improve timer-based eviction of classloader caches Cancel in-progress timer task on a cache hit. This avoids reducing the effective deferred close delay when the old timer task fires and sees a ref count of zero, even though the ref count has since been positive. 
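In isolation, the pattern described above looks roughly like this (a simplified, standalone sketch with invented names; the real implementation is in `FileBasedCache` below):

```scala
// Sketch only: a reference-counted handle whose underlying resource is closed
// after a delay once the count drops to zero, and whose pending close is
// cancelled if the entry is acquired again in the meantime (the "cache hit").
import java.util.{Timer, TimerTask}
import java.util.concurrent.atomic.AtomicInteger

final class DeferredCloseHandle(resource: AutoCloseable, timer: Timer, delayMs: Long) {
  private val refCount = new AtomicInteger(0)
  private var pendingClose: TimerTask = null

  def acquire(): Unit = synchronized {
    if (pendingClose != null) { pendingClose.cancel(); pendingClose = null } // keep the resource alive
    refCount.incrementAndGet()
  }

  def release(): Unit = synchronized {
    if (refCount.decrementAndGet() == 0) {
      pendingClose = new TimerTask {
        def run(): Unit = DeferredCloseHandle.this.synchronized {
          // nobody re-acquired within the delay: really close.
          // (A fuller version would also evict the entry from its owning cache
          // at this point, as FileBasedCache does.)
          if (refCount.compareAndSet(0, -1)) resource.close()
        }
      }
      timer.schedule(pendingClose, delayMs)
    }
  }
}
```

Cancelling the pending task on acquire is what restores the full close delay after a cache hit, instead of letting the old task's deadline keep ticking.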
--- .../ZipAndJarFileLookupFactory.scala | 60 ++++++++++++------- 1 file changed, 37 insertions(+), 23 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala index 2321f0ff80f0..c8c759f07cd4 100644 --- a/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala +++ b/src/compiler/scala/tools/nsc/classpath/ZipAndJarFileLookupFactory.scala @@ -197,35 +197,49 @@ final class FileBasedCache[T] { private case class Stamp(lastModified: FileTime, size: Long, fileKey: Object) private case class Entry(stamps: Seq[Stamp], t: T) { val referenceCount: AtomicInteger = new AtomicInteger(1) + var timerTask: TimerTask = null + def cancelTimer(): Unit = { + timerTask match { + case null => + case t => t.cancel() + } + } } private val cache = collection.mutable.Map.empty[Seq[Path], Entry] - private def referenceCountDecrementer(e: Entry, paths: Seq[Path]): Closeable = new Closeable { - var closed = false - override def close(): Unit = { - if (!closed) { - closed = true - val count = e.referenceCount.decrementAndGet() - if (count == 0) { - e.t match { - case cl: Closeable => - FileBasedCache.timer match { - case Some(timer) => - val task = new TimerTask { - override def run(): Unit = { - cache.synchronized { - if (e.referenceCount.compareAndSet(0, -1)) { - cache.remove(paths) - cl.close() + private def referenceCountDecrementer(e: Entry, paths: Seq[Path]): Closeable = { + // Cancel the deferred close timer (if any) that was started when the reference count + // last dropped to zero. + e.cancelTimer() + + new Closeable { + var closed = false + override def close(): Unit = { + if (!closed) { + closed = true + val count = e.referenceCount.decrementAndGet() + if (count == 0) { + e.t match { + case cl: Closeable => + FileBasedCache.timer match { + case Some(timer) => + val task = new TimerTask { + override def run(): Unit = { + cache.synchronized { + if (e.referenceCount.compareAndSet(0, -1)) { + cache.remove(paths) + cl.close() + } } } } - } - timer.schedule(task, FileBasedCache.deferCloseMs.toLong) - case None => - cl.close() - } - case _ => + e.timerTask = task + timer.schedule(task, FileBasedCache.deferCloseMs.toLong) + case None => + cl.close() + } + case _ => + } } } } From aaedc0f14f2e49b557224c20d469f5206f9022a1 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 24 Apr 2019 10:17:48 +1000 Subject: [PATCH 1713/2793] Avoid deep ASTs in StringContext rewrite. 
Add parens to the string concatenation reduce AST depth to max(32, N/32), rather than N (where N is the number of parts in the string concatenation) --- .../nsc/backend/jvm/BCodeBodyBuilder.scala | 25 ++++++++----- .../tools/nsc/typechecker/RefChecks.scala | 35 ++++++++++++++----- test/files/run/t10870.check | 1 + test/files/{pos => run}/t10870.scala | 12 +++++-- 4 files changed, 52 insertions(+), 21 deletions(-) create mode 100644 test/files/run/t10870.check rename test/files/{pos => run}/t10870.scala (94%) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index e744b5ce19f1..4c92b7bc7327 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -14,6 +14,7 @@ package scala.tools.nsc package backend.jvm import scala.annotation.switch +import scala.collection.mutable.ListBuffer import scala.reflect.internal.Flags import scala.tools.asm import scala.tools.asm.Opcodes @@ -1122,15 +1123,21 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { * Returns a list of trees that each should be concatenated, from left to right. * It turns a chained call like "a".+("b").+("c") into a list of arguments. */ - def liftStringConcat(tree: Tree): List[Tree] = tree match { - case Apply(fun @ Select(larg, method), rarg) => - if (isPrimitive(fun.symbol) && - scalaPrimitives.getPrimitive(fun.symbol) == scalaPrimitives.CONCAT) - liftStringConcat(larg) ::: rarg - else - tree :: Nil - case _ => - tree :: Nil + def liftStringConcat(tree: Tree): List[Tree] = { + val result = ListBuffer[Tree]() + def loop(tree: Tree): Unit = { + tree match { + case Apply(fun@Select(larg, method), rarg :: Nil) + if (isPrimitive(fun.symbol) && scalaPrimitives.getPrimitive(fun.symbol) == scalaPrimitives.CONCAT) => + + loop(larg) + loop(rarg) + case _ => + result += tree + } + } + loop(tree) + result.toList } /* Emit code to compare the two top-most stack values using the 'op' operator. 
*/ diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 09d1115e9dc1..84015cb02311 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1563,21 +1563,39 @@ abstract class RefChecks extends Transform { transform(qual) case StringContextIntrinsic(treated, args) => - var result: Tree = treated.head - def concat(t: Tree): Unit = { - result = atPos(t.pos)(gen.mkMethodCall(gen.mkAttributedSelect(result, definitions.String_+), t :: Nil)).setType(StringTpe) - } + val argsIndexed = args.toVector + var concatArgs = ListBuffer[Tree]() val numLits = treated.length foreachWithIndex(treated.tail) { (lit, i) => val treatedContents = lit.asInstanceOf[Literal].value.stringValue val emptyLit = treatedContents.isEmpty if (i < numLits - 1) { - concat(args(i)) - if (!emptyLit) concat(lit) + concatArgs += argsIndexed(i) + if (!emptyLit) concatArgs += lit } else if (!emptyLit) { - concat(lit) + concatArgs += lit } } + def mkConcat(pos: Position, lhs: Tree, rhs: Tree): Tree = + atPos(pos)(gen.mkMethodCall(gen.mkAttributedSelect(lhs, definitions.String_+), rhs :: Nil)).setType(StringTpe) + + var result: Tree = treated.head + val chunkSize = 32 + if (concatArgs.lengthCompare(chunkSize) <= 0) { + concatArgs.foreach { t => + result = mkConcat(t.pos, result, t) + } + } else { + concatArgs.toList.grouped(chunkSize).foreach { + case group => + var chunkResult: Tree = group.head + group.tail.foreach { t => + chunkResult = mkConcat(t.pos, chunkResult, t) + } + result = mkConcat(chunkResult.pos, result, chunkResult) + } + } + result match { case ap: Apply => transformApply(ap) case _ => result @@ -1604,8 +1622,7 @@ abstract class RefChecks extends Transform { if qual1.symbol == rd.StringContext_apply && treeInfo.isQualifierSafeToElide(qual) && lits.forall(lit => treeInfo.isLiteralString(lit)) && - lits.length == (args.length + 1) && - args.lengthCompare(64) <= 0 => // TODO make more robust to large input so that we can drop this condition, chunk the concatenations in manageable batches + lits.length == (args.length + 1) => val isRaw = sym == rd.StringContext_raw if (isRaw) Some((lits, args)) else { diff --git a/test/files/run/t10870.check b/test/files/run/t10870.check new file mode 100644 index 000000000000..0628b7f34085 --- /dev/null +++ b/test/files/run/t10870.check @@ -0,0 +1 @@ +a1 a2 a3 a4 a5 a6 a7 a8 a9 a10 a11 a12 a13 a14 a15 a16 a17 a18 a19 a20 a21 a22 a23 a24 a25 a26 a27 a28 a29 a30 a31 a32 a33 a34 a35 a36 a37 a38 a39 a40 a41 a42 a43 a44 a45 a46 a47 a48 a49 a50 a51 a52 a53 a54 a55 a56 a57 a58 a59 a60 a61 a62 a63 a64 a65 a66 a67 a68 a69 a70 a71 a72 a73 a74 a75 a76 a77 a78 a79 a80 a81 a82 a83 a84 a85 a86 a87 a88 a89 a90 a91 a92 a93 a94 a95 a96 a97 a98 a99 a100 a101 a102 a103 a104 a105 a106 a107 a108 a109 a110 a111 a112 a113 a114 a115 a116 a117 a118 a119 a120 a121 a122 a123 a124 a125 a126 a127 a128 a129 a130 a131 a132 a133 a134 a135 a136 a137 a138 a139 a140 a141 a142 a143 a144 a145 a146 a147 a148 a149 a150 a151 a152 a153 a154 a155 a156 a157 a158 a159 a160 a161 a162 a163 a164 a165 a166 a167 a168 a169 a170 a171 a172 a173 a174 a175 a176 a177 a178 a179 a180 a181 a182 a183 a184 a185 a186 a187 a188 a189 a190 a191 a192 a193 a194 a195 a196 a197 a198 a199 a200 a201 a202 a203 a204 a205 a206 a207 a208 a209 a210 a211 a212 a213 a214 a215 a216 a217 a218 a219 a220 a221 a222 a223 a224 a225 a226 a227 a228 a229 a230 a231 a232 a233 a234 a235 a236 a237 a238 a239 a240 a241 
a242 a243 a244 a245 a246 a247 a248 a249 a250 a251 a252 a253 a254 a255 a256 diff --git a/test/files/pos/t10870.scala b/test/files/run/t10870.scala similarity index 94% rename from test/files/pos/t10870.scala rename to test/files/run/t10870.scala index 9836821f1288..d1e9c800691a 100644 --- a/test/files/pos/t10870.scala +++ b/test/files/run/t10870.scala @@ -1,6 +1,12 @@ -package example - object Test { - val a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24, a25, a26, a27, a28, a29, a30, a31, a32, a33, a34, a35, a36, a37, a38, a39, a40, a41, a42, a43, a44, a45, a46, a47, a48, a49, a50, a51, a52, a53, a54, a55, a56, a57, a58, a59, a60, a61, a62, a63, a64, a65, a66, a67, a68, a69, a70, a71, a72, a73, a74, a75, a76, a77, a78, a79, a80, a81, a82, a83, a84, a85, a86, a87, a88, a89, a90, a91, a92, a93, a94, a95, a96, a97, a98, a99, a100, a101, a102, a103, a104, a105, a106, a107, a108, a109, a110, a111, a112, a113, a114, a115, a116, a117, a118, a119, a120, a121, a122, a123, a124, a125, a126, a127, a128, a129, a130, a131, a132, a133, a134, a135, a136, a137, a138, a139, a140, a141, a142, a143, a144, a145, a146, a147, a148, a149, a150, a151, a152, a153, a154, a155, a156, a157, a158, a159, a160, a161, a162, a163, a164, a165, a166, a167, a168, a169, a170, a171, a172, a173, a174, a175, a176, a177, a178, a179, a180, a181, a182, a183, a184, a185, a186, a187, a188, a189, a190, a191, a192, a193, a194, a195, a196, a197, a198, a199, a200, a201, a202, a203, a204, a205, a206, a207, a208, a209, a210, a211, a212, a213, a214, a215, a216, a217, a218, a219, a220, a221, a222, a223, a224, a225, a226, a227, a228, a229, a230, a231, a232, a233, a234, a235, a236, a237, a238, a239, a240, a241, a242, a243, a244, a245, a246, a247, a248, a249, a250, a251, a252, a253, a254, a255, a256 = " " + var i = 0 + def next(): Int = { + i += 1 + i + } + val a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24, a25, a26, a27, a28, a29, a30, a31, a32, a33, a34, a35, a36, a37, a38, a39, a40, a41, a42, a43, a44, a45, a46, a47, a48, a49, a50, a51, a52, a53, a54, a55, a56, a57, a58, a59, a60, a61, a62, a63, a64, a65, a66, a67, a68, a69, a70, a71, a72, a73, a74, a75, a76, a77, a78, a79, a80, a81, a82, a83, a84, a85, a86, a87, a88, a89, a90, a91, a92, a93, a94, a95, a96, a97, a98, a99, a100, a101, a102, a103, a104, a105, a106, a107, a108, a109, a110, a111, a112, a113, a114, a115, a116, a117, a118, a119, a120, a121, a122, a123, a124, a125, a126, a127, a128, a129, a130, a131, a132, a133, a134, a135, a136, a137, a138, a139, a140, a141, a142, a143, a144, a145, a146, a147, a148, a149, a150, a151, a152, a153, a154, a155, a156, a157, a158, a159, a160, a161, a162, a163, a164, a165, a166, a167, a168, a169, a170, a171, a172, a173, a174, a175, a176, a177, a178, a179, a180, a181, a182, a183, a184, a185, a186, a187, a188, a189, a190, a191, a192, a193, a194, a195, a196, a197, a198, a199, a200, a201, a202, a203, a204, a205, a206, a207, a208, a209, a210, a211, a212, a213, a214, a215, a216, a217, a218, a219, a220, a221, a222, a223, a224, a225, a226, a227, a228, a229, a230, a231, a232, a233, a234, a235, a236, a237, a238, a239, a240, a241, a242, a243, a244, a245, a246, a247, a248, a249, a250, a251, a252, a253, a254, a255, a256 = "a" + next() val foo = s"""$a1 $a2 $a3 $a4 $a5 $a6 $a7 $a8 $a9 $a10 $a11 $a12 $a13 $a14 $a15 $a16 $a17 $a18 $a19 $a20 $a21 $a22 $a23 $a24 $a25 $a26 $a27 $a28 $a29 $a30 $a31 $a32 $a33 $a34 $a35 $a36 $a37 $a38 $a39 $a40 $a41 
$a42 $a43 $a44 $a45 $a46 $a47 $a48 $a49 $a50 $a51 $a52 $a53 $a54 $a55 $a56 $a57 $a58 $a59 $a60 $a61 $a62 $a63 $a64 $a65 $a66 $a67 $a68 $a69 $a70 $a71 $a72 $a73 $a74 $a75 $a76 $a77 $a78 $a79 $a80 $a81 $a82 $a83 $a84 $a85 $a86 $a87 $a88 $a89 $a90 $a91 $a92 $a93 $a94 $a95 $a96 $a97 $a98 $a99 $a100 $a101 $a102 $a103 $a104 $a105 $a106 $a107 $a108 $a109 $a110 $a111 $a112 $a113 $a114 $a115 $a116 $a117 $a118 $a119 $a120 $a121 $a122 $a123 $a124 $a125 $a126 $a127 $a128 $a129 $a130 $a131 $a132 $a133 $a134 $a135 $a136 $a137 $a138 $a139 $a140 $a141 $a142 $a143 $a144 $a145 $a146 $a147 $a148 $a149 $a150 $a151 $a152 $a153 $a154 $a155 $a156 $a157 $a158 $a159 $a160 $a161 $a162 $a163 $a164 $a165 $a166 $a167 $a168 $a169 $a170 $a171 $a172 $a173 $a174 $a175 $a176 $a177 $a178 $a179 $a180 $a181 $a182 $a183 $a184 $a185 $a186 $a187 $a188 $a189 $a190 $a191 $a192 $a193 $a194 $a195 $a196 $a197 $a198 $a199 $a200 $a201 $a202 $a203 $a204 $a205 $a206 $a207 $a208 $a209 $a210 $a211 $a212 $a213 $a214 $a215 $a216 $a217 $a218 $a219 $a220 $a221 $a222 $a223 $a224 $a225 $a226 $a227 $a228 $a229 $a230 $a231 $a232 $a233 $a234 $a235 $a236 $a237 $a238 $a239 $a240 $a241 $a242 $a243 $a244 $a245 $a246 $a247 $a248 $a249 $a250 $a251 $a252 $a253 $a254 $a255 $a256""" + def main(args: Array[String]): Unit = { + println(foo) + } } From e5dab49ca9efcb242b9878df464aa12c74309a7e Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 8 May 2019 16:44:59 +1000 Subject: [PATCH 1714/2793] Rationalize subclasses of Name Due to alignment, TermName_R (which doesn't cache the provided string for toString) takes up just as much space as TermName_S. The code ends up somewhat easier to read with by just encoding the difference with the a nullable field. --- .../scala/reflect/internal/Names.scala | 35 +++++-------------- test/files/run/reflection-names.check | 6 ++-- 2 files changed, 11 insertions(+), 30 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index eb5bf07734ac..51f891dc9124 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -121,9 +121,7 @@ trait Names extends api.Names { enterChars(cs, offset, len) } val next = termHashtable(h) - val termName = - if (cachedString ne null) new TermName_S(startIndex, len, next, cachedString) - else new TermName_R(startIndex, len, next) + val termName = new TermName(startIndex, len, next, cachedString) // Add the new termName to the hashtable only after it's been fully constructed termHashtable(h) = termName termName @@ -187,7 +185,7 @@ trait Names extends api.Names { * or Strings as Names. Give names the key functions the absence of which * make people want Strings all the time. */ - sealed abstract class Name(protected val index: Int, protected val len: Int) extends NameApi with CharSequence { + sealed abstract class Name(protected val index: Int, protected val len: Int, cachedString: String) extends NameApi with CharSequence { type ThisNameType >: Null <: Name protected[this] def thisName: ThisNameType @@ -470,6 +468,9 @@ trait Names extends api.Names { def isOperatorName: Boolean = decode != toString // used by ide def longString: String = nameKind + " " + decode def debugString = { val s = decode ; if (isTypeName) s + "!" 
else s } + + override final def toString: String = if (cachedString == null) new String(chrs, index, len) else cachedString + } implicit def AnyNameOps(name: Name): NameOps[Name] = new NameOps(name) @@ -515,28 +516,9 @@ trait Names extends api.Names { // final override def isOperatorName = false // } - /** TermName_S and TypeName_S have fields containing the string version of the name. - * TermName_R and TypeName_R recreate it each time toString is called. - */ - private final class TermName_S(index0: Int, len0: Int, next0: TermName, override val toString: String) extends TermName(index0, len0, next0) { - protected def createCompanionName(next: TypeName): TypeName = new TypeName_S(index, len, next, toString) - override def newName(str: String): TermName = newTermNameCached(str) - } - private final class TypeName_S(index0: Int, len0: Int, next0: TypeName, override val toString: String) extends TypeName(index0, len0, next0) { - override def newName(str: String): TypeName = newTypeNameCached(str) - } - - private final class TermName_R(index0: Int, len0: Int, next0: TermName) extends TermName(index0, len0, next0) { - protected def createCompanionName(next: TypeName): TypeName = new TypeName_R(index, len, next) - override def toString = new String(chrs, index, len) - } - - private final class TypeName_R(index0: Int, len0: Int, next0: TypeName) extends TypeName(index0, len0, next0) { - override def toString = new String(chrs, index, len) - } // SYNCNOTE: caller to constructor must synchronize if `synchronizeNames` is enabled - sealed abstract class TermName(index0: Int, len0: Int, val next: TermName) extends Name(index0, len0) with TermNameApi { + final class TermName(index0: Int, len0: Int, val next: TermName, cachedString: String) extends Name(index0, len0, cachedString) with TermNameApi { type ThisNameType = TermName protected[this] def thisName: TermName = this @@ -568,8 +550,7 @@ trait Names extends api.Names { newTermName(chrs, start + from, to - from) def nameKind = "term" - /** SYNCNOTE: caller must synchronize if `synchronizeNames` is enabled */ - protected def createCompanionName(next: TypeName): TypeName + private def createCompanionName(next: TypeName): TypeName = new TypeName(index, len, next, cachedString) } implicit val TermNameTag = ClassTag[TermName](classOf[TermName]) @@ -579,7 +560,7 @@ trait Names extends api.Names { def unapply(name: TermName): Option[String] = Some(name.toString) } - sealed abstract class TypeName(index0: Int, len0: Int, val next: TypeName) extends Name(index0, len0) with TypeNameApi { + final class TypeName(index0: Int, len0: Int, val next: TypeName, cachedString: String) extends Name(index0, len0, cachedString) with TypeNameApi { type ThisNameType = TypeName protected[this] def thisName: TypeName = this diff --git a/test/files/run/reflection-names.check b/test/files/run/reflection-names.check index f8cb78cc67b4..52748e20c5db 100644 --- a/test/files/run/reflection-names.check +++ b/test/files/run/reflection-names.check @@ -1,4 +1,4 @@ (java.lang.String,bc) -(scala.reflect.internal.Names$TermName_R,bc) -(scala.reflect.internal.Names$TypeName_R,bc) -(scala.reflect.internal.Names$TypeName_R,bc) +(scala.reflect.internal.Names$TermName,bc) +(scala.reflect.internal.Names$TypeName,bc) +(scala.reflect.internal.Names$TypeName,bc) From 3b57788ba394631dd023d4a3493b75177e4d6914 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 9 May 2019 08:35:58 +1000 Subject: [PATCH 1715/2793] Avoid direct use of Names.chrs from Symbols --- 
src/reflect/scala/reflect/internal/Names.scala | 6 ++++-- src/reflect/scala/reflect/internal/Symbols.scala | 4 ++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index 51f891dc9124..6fe21ad426ab 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -38,7 +38,7 @@ trait Names extends api.Names { private val nameLock: Object = new Object /** Memory to store all names sequentially. */ - var chrs: Array[Char] = new Array[Char](NAME_SIZE) + var chrs: Array[Char] = new Array[Char](NAME_SIZE) // TODO this ought to be private private var nc = 0 /** Hashtable for finding term names quickly. */ @@ -470,7 +470,9 @@ trait Names extends api.Names { def debugString = { val s = decode ; if (isTypeName) s + "!" else s } override final def toString: String = if (cachedString == null) new String(chrs, index, len) else cachedString - + final def appendTo(buffer: java.lang.StringBuffer, start: Int, length: Int): Unit = { + buffer.append(chrs, this.start + start, length) + } } implicit def AnyNameOps(name: Name): NameOps[Name] = new NameOps(name) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 3341cee8aa2f..7982e71000c9 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -1310,11 +1310,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (sym.isRoot || sym.isRootPackage || sym == NoSymbol || sym.owner.isEffectiveRoot) { val capacity = size + nSize b = new java.lang.StringBuffer(capacity) - b.append(chrs, symName.start, nSize) + symName.appendTo(b, 0, nSize) } else { loop(size + nSize + 1, sym.effectiveOwner.enclClass) b.append(separator) - b.append(chrs, symName.start, nSize) + symName.appendTo(b, 0, nSize) } } loop(suffix.length(), this) From 22f67798ef116e848a888c06ddeab8f3746460e0 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 9 May 2019 08:41:17 +1000 Subject: [PATCH 1716/2793] Deprecate external access to Names.chrs --- .../scala/reflect/internal/Names.scala | 53 ++++++++++--------- 1 file changed, 29 insertions(+), 24 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index 6fe21ad426ab..7e19e72e9ea7 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -37,8 +37,13 @@ trait Names extends api.Names { protected def synchronizeNames: Boolean = false private val nameLock: Object = new Object + /** Memory to store all names sequentially. */ - var chrs: Array[Char] = new Array[Char](NAME_SIZE) // TODO this ought to be private + private[this] var _chrs: Array[Char] = new Array[Char](NAME_SIZE) // TODO this ought to be private + @deprecated("Don't access name table contents directly.", "2.12.9") + def chrs: Array[Char] = _chrs + @deprecated("Don't access name table contents directly.", "2.12.9") + def chrs_=(cs: Array[Char]) = _chrs = cs private var nc = 0 /** Hashtable for finding term names quickly. 
*/ @@ -62,7 +67,7 @@ trait Names extends api.Names { */ private def equals(index: Int, cs: Array[Char], offset: Int, len: Int): Boolean = { var i = 0 - while ((i < len) && (chrs(index + i) == cs(offset + i))) + while ((i < len) && (_chrs(index + i) == cs(offset + i))) i += 1 i == len } @@ -71,12 +76,12 @@ trait Names extends api.Names { private def enterChars(cs: Array[Char], offset: Int, len: Int) { var i = 0 while (i < len) { - if (nc + i == chrs.length) { - val newchrs = new Array[Char](chrs.length * 2) - java.lang.System.arraycopy(chrs, 0, newchrs, 0, chrs.length) - chrs = newchrs + if (nc + i == _chrs.length) { + val newchrs = new Array[Char](_chrs.length * 2) + java.lang.System.arraycopy(_chrs, 0, newchrs, 0, chrs.length) + _chrs = newchrs } - chrs(nc + i) = cs(offset + i) + _chrs(nc + i) = cs(offset + i) i += 1 } if (len == 0) nc += 1 @@ -113,7 +118,7 @@ trait Names extends api.Names { // that name.toString will become an eager val, in which case the call // to enterChars cannot follow the construction of the TermName. var startIndex = 0 - if (cs == chrs) { + if (cs == _chrs) { // Optimize for subName, the new name is already stored in chrs startIndex = offset } else { @@ -225,7 +230,7 @@ trait Names extends api.Names { /** Copy bytes of this name to buffer cs, starting at position `offset`. */ final def copyChars(cs: Array[Char], offset: Int) = - java.lang.System.arraycopy(chrs, index, cs, offset, len) + java.lang.System.arraycopy(_chrs, index, cs, offset, len) /** @return the ascii representation of this name */ final def toChars: Array[Char] = { // used by ide @@ -271,7 +276,7 @@ trait Names extends api.Names { ****/ /** @return the i'th Char of this name */ - final def charAt(i: Int): Char = chrs(index + i) + final def charAt(i: Int): Char = _chrs(index + i) /** @return the index of first occurrence of char c in this name, length if not found */ final def pos(c: Char): Int = pos(c, 0) @@ -288,7 +293,7 @@ trait Names extends api.Names { */ final def pos(c: Char, start: Int): Int = { var i = start - while (i < len && chrs(index + i) != c) i += 1 + while (i < len && _chrs(index + i) != c) i += 1 i } @@ -305,7 +310,7 @@ trait Names extends api.Names { if (sLen == 1) return i while (i + sLen <= len) { var j = 1 - while (s.charAt(j) == chrs(index + i + j)) { + while (s.charAt(j) == _chrs(index + i + j)) { j += 1 if (j == sLen) return i } @@ -331,7 +336,7 @@ trait Names extends api.Names { */ final def lastPos(c: Char, start: Int): Int = { var i = start - while (i >= 0 && chrs(index + i) != c) i -= 1 + while (i >= 0 && _chrs(index + i) != c) i -= 1 i } @@ -342,14 +347,14 @@ trait Names extends api.Names { final def startsWith(prefix: Name, start: Int): Boolean = { var i = 0 while (i < prefix.length && start + i < len && - chrs(index + start + i) == chrs(prefix.start + i)) + _chrs(index + start + i) == _chrs(prefix.start + i)) i += 1 i == prefix.length } final def startsWith(prefix: String, start: Int): Boolean = { var i = 0 while (i < prefix.length && start + i < len && - chrs(index + start + i) == prefix.charAt(i)) + _chrs(index + start + i) == prefix.charAt(i)) i += 1 i == prefix.length } @@ -361,14 +366,14 @@ trait Names extends api.Names { final def endsWith(suffix: Name, end: Int): Boolean = { var i = 1 while (i <= suffix.length && i <= end && - chrs(index + end - i) == chrs(suffix.start + suffix.length - i)) + _chrs(index + end - i) == _chrs(suffix.start + suffix.length - i)) i += 1 i > suffix.length } final def endsWith(suffix: String, end: Int): Boolean = { var i = 1 while (i 
<= suffix.length && i <= end && - chrs(index + end - i) == suffix.charAt(suffix.length - i)) + _chrs(index + end - i) == suffix.charAt(suffix.length - i)) i += 1 i > suffix.length } @@ -384,7 +389,7 @@ trait Names extends api.Names { var i = index val max = index + len while (i < max) { - if (chrs(i) == ch) + if (_chrs(i) == ch) return true i += 1 } @@ -469,9 +474,9 @@ trait Names extends api.Names { def longString: String = nameKind + " " + decode def debugString = { val s = decode ; if (isTypeName) s + "!" else s } - override final def toString: String = if (cachedString == null) new String(chrs, index, len) else cachedString + override final def toString: String = if (cachedString == null) new String(_chrs, index, len) else cachedString final def appendTo(buffer: java.lang.StringBuffer, start: Int, length: Int): Unit = { - buffer.append(chrs, this.start + start, length) + buffer.append(_chrs, this.start + start, length) } } @@ -530,7 +535,7 @@ trait Names extends api.Names { def toTypeName: TypeName = { def body = { // Re-computing the hash saves a field for storing it in the TermName - val h = hashValue(chrs, index, len) & HASH_MASK + val h = hashValue(_chrs, index, len) & HASH_MASK var n = typeHashtable(h) while ((n ne null) && n.start != index) n = n.next @@ -549,7 +554,7 @@ trait Names extends api.Names { def newName(str: String): TermName = newTermName(str) def companionName: TypeName = toTypeName def subName(from: Int, to: Int): TermName = - newTermName(chrs, start + from, to - from) + newTermName(_chrs, start + from, to - from) def nameKind = "term" private def createCompanionName(next: TypeName): TypeName = new TypeName(index, len, next, cachedString) @@ -571,7 +576,7 @@ trait Names extends api.Names { def toTermName: TermName = { def body = { // Re-computing the hash saves a field for storing it in the TypeName - val h = hashValue(chrs, index, len) & HASH_MASK + val h = hashValue(_chrs, index, len) & HASH_MASK var n = termHashtable(h) while ((n ne null) && n.start != index) n = n.next @@ -585,7 +590,7 @@ trait Names extends api.Names { def newName(str: String): TypeName = newTypeName(str) def companionName: TermName = toTermName def subName(from: Int, to: Int): TypeName = - newTypeName(chrs, start + from, to - from) + newTypeName(_chrs, start + from, to - from) def nameKind = "type" override def decode = if (nameDebug) super.decode + "!" else super.decode From f3901f0b9ec353fdd542cf6812bb7b9e63198ad5 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 12 May 2019 09:36:32 +1000 Subject: [PATCH 1717/2793] Fix regression in import name comparison ``` scala> :power Power mode enabled. :phase is at typer. 
import scala.tools.nsc._, intp.global._, definitions._ Try :help or completions for vals._ and power._ scala> val t = TermName("abcdefghijklmnopqrstuvwxyz") t: $r.intp.global.TermName = abcdefghijklmnopqrstuvwxyz scala> t.subName(0, 25) res0: $r.intp.global.TermName = abcdefghijklmnopqrstuvwxy scala> res0.start res1: Int = 474232 scala> t.start res2: Int = 474232 ``` --- src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index c2a49d19c1b1..c23c57f1024a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1533,7 +1533,7 @@ trait Contexts { self: Analyzer => @inline def current = selectors.head while ((selectors ne Nil) && result == NoSymbol) { def sameName(name: Name, other: Name) = { - (name eq other) || (name ne null) && name.start == other.start + (name eq other) || (name ne null) && name.start == other.start && name.length == other.length } if (sameName(current.rename, name)) result = qual.tpe.nonLocalMember( // new to address #2733: consider only non-local members for imports From b4475d0a0822ee98c5ef6ea254d944455c3ac013 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Tue, 14 May 2019 20:21:18 -0400 Subject: [PATCH 1718/2793] Deprecate overriding deprecated APIs intended for overriding I assume the hope was to discourage plugin writers from implementing those methods, not to admonish ourselves for continuing to support them. (Although perhaps we deserve it.) --- src/compiler/scala/tools/nsc/plugins/Plugin.scala | 2 +- .../scala/reflect/internal/AnnotationCheckers.scala | 11 ++++++----- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index 9c0f2db89446..a38cbf8a504e 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -72,7 +72,7 @@ abstract class Plugin { true } - @deprecated("use Plugin#init instead", since="2.11.0") + @deprecatedOverriding("use Plugin#init instead", since="2.11.0") def processOptions(options: List[String], error: String => Unit): Unit = { if (!options.isEmpty) error(s"Error: $name takes no options") } diff --git a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala index 3076b3be02ed..1284f7f331e4 100644 --- a/src/reflect/scala/reflect/internal/AnnotationCheckers.scala +++ b/src/reflect/scala/reflect/internal/AnnotationCheckers.scala @@ -52,14 +52,14 @@ trait AnnotationCheckers { * Modify the type that has thus far been inferred for a tree. All this should * do is add annotations. */ - @deprecated("create an AnalyzerPlugin and use pluginsTyped", "2.10.1") + @deprecatedOverriding("create an AnalyzerPlugin and use pluginsTyped", "2.10.1") def addAnnotations(tree: Tree, tpe: Type): Type = tpe /** * Decide whether this analyzer plugin can adapt a tree that has an annotated type to the * given type tp, taking into account the given mode (see method adapt in trait Typers). 
*/ - @deprecated("create an AnalyzerPlugin and use canAdaptAnnotations", "2.10.1") + @deprecatedOverriding("create an AnalyzerPlugin and use canAdaptAnnotations", "2.10.1") def canAdaptAnnotations(tree: Tree, mode: Mode, pt: Type): Boolean = false /** @@ -69,7 +69,7 @@ trait AnnotationCheckers { * An implementation cannot rely on canAdaptAnnotations being called before. If the implementing * class cannot do the adapting, it should return the tree unchanged. */ - @deprecated("create an AnalyzerPlugin and use adaptAnnotations", "2.10.1") + @deprecatedOverriding("create an AnalyzerPlugin and use adaptAnnotations", "2.10.1") def adaptAnnotations(tree: Tree, mode: Mode, pt: Type): Tree = tree /** @@ -79,8 +79,9 @@ trait AnnotationCheckers { * * By default, this method simply returns the passed `default` type. */ - @deprecated("Create an AnalyzerPlugin and use pluginsTypedReturn. Note: the 'tree' argument here is\n"+ - "the 'expr' of a Return tree; 'pluginsTypedReturn' takes the Return tree itself as argument", "2.10.1") + @deprecatedOverriding( + "Create an AnalyzerPlugin and use pluginsTypedReturn. Note: the 'tree' argument here is\n"+ + "the 'expr' of a Return tree; 'pluginsTypedReturn' takes the Return tree itself as argument", "2.10.1") def adaptTypeOfReturn(tree: Tree, pt: Type, default: => Type): Type = default } From 0b28d2fb9e84e21ea3744a3f28a258661859de07 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 17 May 2019 12:57:50 +1000 Subject: [PATCH 1719/2793] Fix scalap parsing/printing of enum and class constant types --- .../scalap/scalax/rules/scalasig/ScalaSig.scala | 4 +++- .../scalax/rules/scalasig/ScalaSigPrinter.scala | 2 ++ test/files/scalap/constants.check | 16 ++++++++++++++++ test/files/scalap/constants.scala | 17 +++++++++++++++++ 4 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 test/files/scalap/constants.check create mode 100644 test/files/scalap/constants.scala diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala index b268bd99c9de..b8ef18306815 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSig.scala @@ -246,7 +246,9 @@ object ScalaSigEntryParsers extends RulesWithState with MemoisableRules { 32 -~ longValue ^^ (java.lang.Double.longBitsToDouble), 33 -~ nameRef, 34 -^ null, - 35 -~ typeRef) + 35 -~ typeRef, + 36 -~ symbolRef + ) lazy val attributeInfo = 40 -~ symbolRef ~ typeRef ~ (constantRef?) 
~ (nameRef ~ constantRef *) ^~~~^ AttributeInfo // sym_Ref info_Ref {constant_Ref} {nameRef constantRef} lazy val children = 41 -~ (nat*) ^^ Children //sym_Ref {sym_Ref} diff --git a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala index 29b38c6c1baa..2b18f9a6c392 100644 --- a/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala +++ b/src/scalap/scala/tools/scalap/scalax/rules/scalasig/ScalaSigPrinter.scala @@ -342,6 +342,8 @@ class ScalaSigPrinter(stream: PrintStream, printPrivates: Boolean) { case _: Double => "scala.Double" case _: String => "java.lang.String" case c: Class[_] => "java.lang.Class[" + c.getComponentType.getCanonicalName.replace("$", ".") + "]" + case e: ExternalSymbol => e.parent.get.path + case tp: Type => "java.lang.Class[" + toString(tp, sep) + "]" }) case TypeRefType(prefix, symbol, typeArgs) => sep + (symbol.path match { case "scala." => flags match { diff --git a/test/files/scalap/constants.check b/test/files/scalap/constants.check new file mode 100644 index 000000000000..705cb8c44ade --- /dev/null +++ b/test/files/scalap/constants.check @@ -0,0 +1,16 @@ +class Constants extends scala.AnyRef { + def this() = { /* compiled code */ } + final val UnitConstant: scala.Unit = { /* compiled code */ } + final val ByteConstant: scala.Boolean = { /* compiled code */ } + final val CharConstant: scala.Char = { /* compiled code */ } + final val ShortConstant: scala.Short = { /* compiled code */ } + final val IntConstant: scala.Int = { /* compiled code */ } + final val LongConstant: scala.Long = { /* compiled code */ } + final val FloatConstant: scala.Float = { /* compiled code */ } + final val DoubleConstant: scala.Double = { /* compiled code */ } + final val NullConstant: scala.Null = { /* compiled code */ } + final val ClassConstant: java.lang.Class[scala.Predef.String] = { /* compiled code */ } + final val ClassConstant2: java.lang.Class[scala.Some[_]] = { /* compiled code */ } + final val EnumConstant: java.util.concurrent.TimeUnit = { /* compiled code */ } + final val StringConstant: java.lang.Class[scala.Predef.String] = { /* compiled code */ } +} diff --git a/test/files/scalap/constants.scala b/test/files/scalap/constants.scala new file mode 100644 index 000000000000..0a01a9f37809 --- /dev/null +++ b/test/files/scalap/constants.scala @@ -0,0 +1,17 @@ +class Constants { + final val UnitConstant = () + final val ByteConstant = false + final val CharConstant = 'a' + final val ShortConstant = 1.toShort + final val IntConstant = 1 + final val LongConstant = 1L + final val FloatConstant = 1f + final val DoubleConstant = 1d + + final val NullConstant = null + + final val ClassConstant = classOf[String] + final val ClassConstant2 = classOf[Some[_]] + final val EnumConstant = java.util.concurrent.TimeUnit.DAYS + final val StringConstant = classOf[String] +} From 78066506f746e7b69f0e76608b84ef60aed2cfe4 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 29 Oct 2018 11:02:53 +1000 Subject: [PATCH 1720/2793] Avoid typechecking val and def tpt-s twice Demo: ``` $ cat sandbox/test.scala && (scalac-ref 2.12.x -Ytyper-debug sandbox/test.scala 2>&1) > /tmp/old.log && (qscalac -Ytyper-debug sandbox/test.scala 2>&1) > /tmp/new.log && diff -U1000 /tmp/{old,new}.log ``` ```scala trait C { type X def foo: X } ``` ```diff --- /tmp/old.log 2019-05-20 13:56:45.000000000 +1000 +++ /tmp/new.log 2019-05-20 13:56:47.000000000 +1000 @@ -1,12 +1,10 @@ |-- 
EXPRmode-POLYmode-QUALmode (site: package ) +|-- EXPRmode-POLYmode-QUALmode (site: package ) | \-> .type |-- class C BYVALmode-EXPRmode (site: package ) | |-- X BYVALmode-EXPRmode (site: trait C) | | \-> [type X] C.this.X | |-- def foo BYVALmode-EXPRmode (site: trait C) | | |-- X TYPEmode (site: method foo in C) | | | \-> C.this.X -| | |-- X TYPEmode (site: method foo in C) -| | | \-> C.this.X | | \-> [def foo] => C.this.X | \-> [trait C] C ``` --- .../scala/tools/nsc/typechecker/Namers.scala | 12 ++++++-- .../scala/tools/nsc/typechecker/Typers.scala | 4 +-- test/files/neg/t2918.check | 5 +--- test/files/neg/t5093.check | 5 +--- test/files/run/analyzerPlugins.check | 28 +++++++++---------- .../scala/reflect/internal/PrintersTest.scala | 2 +- 6 files changed, 29 insertions(+), 27 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 74db109014c0..53bf0b655671 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -1311,7 +1311,11 @@ trait Namers extends MethodSynthesis { val resTpGiven = if (tpt.isEmpty) WildcardType - else typer.typedType(tpt).tpe + else { + val tptTyped = typer.typedType(tpt) + context.unit.transformed(tpt) = tptTyped + tptTyped.tpe + } // ignore missing types unless we can look to overridden method to recover the missing information @@ -1723,7 +1727,11 @@ trait Namers extends MethodSynthesis { tptFromRhsUnderPt } - } else typer.typedType(tpt).tpe + } else { + val tptTyped = typer.typedType(tpt) + context.unit.transformed(tpt) = tptTyped + tptTyped.tpe + } // println(s"val: $result / ${vdef.tpt.tpe} / ") diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 45d118b5fab2..0a3002d04c80 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2087,7 +2087,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } else typedModifiers(vdef.mods) sym.annotations.map(_.completeInfo()) - val tpt1 = checkNoEscaping.privates(this, sym, typedType(vdef.tpt)) + val tpt1 = checkNoEscaping.privates(this, sym, transformedOr(vdef.tpt, typedType(vdef.tpt))) checkNonCyclic(vdef, tpt1) // allow trait accessors: it's the only vehicle we have to hang on to annotations that must be passed down to @@ -2315,7 +2315,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper if (isRepeatedParamType(vparam1.symbol.tpe)) StarParamNotLastError(vparam1) - val tpt1 = checkNoEscaping.privates(this, meth, typedType(ddef.tpt)) + val tpt1 = checkNoEscaping.privates(this, meth, transformedOr(ddef.tpt, typedType(ddef.tpt))) checkNonCyclic(ddef, tpt1) ddef.tpt.setType(tpt1.tpe) val typedMods = typedModifiers(ddef.mods) diff --git a/test/files/neg/t2918.check b/test/files/neg/t2918.check index aae3045e8af7..f45494d78131 100644 --- a/test/files/neg/t2918.check +++ b/test/files/neg/t2918.check @@ -4,7 +4,4 @@ t2918.scala:2: error: illegal cyclic reference involving type A t2918.scala:2: error: cyclic aliasing or subtyping involving type A def g[X, A[X] <: A[X]](x: A[X]) = x ^ -t2918.scala:2: error: A does not take type parameters - def g[X, A[X] <: A[X]](x: A[X]) = x - ^ -three errors found +two errors found diff --git a/test/files/neg/t5093.check b/test/files/neg/t5093.check index daba46001153..b794f023e56d 100644 --- a/test/files/neg/t5093.check +++ 
b/test/files/neg/t5093.check @@ -4,7 +4,4 @@ t5093.scala:2: error: illegal cyclic reference involving type C t5093.scala:2: error: cyclic aliasing or subtyping involving type C def f[C[X] <: C[X]](l: C[_]) = l.x ^ -t5093.scala:2: error: C does not take type parameters - def f[C[X] <: C[X]](l: C[_]) = l.x - ^ -three errors found +two errors found diff --git a/test/files/run/analyzerPlugins.check b/test/files/run/analyzerPlugins.check index 64b68db242eb..3cfbda651639 100644 --- a/test/files/run/analyzerPlugins.check +++ b/test/files/run/analyzerPlugins.check @@ -14,18 +14,18 @@ canAdaptAnnotations(Trees$Select, ?) [1] canAdaptAnnotations(Trees$Select, Boolean @testAnn) [1] canAdaptAnnotations(Trees$Select, Boolean) [1] canAdaptAnnotations(Trees$Select, String @testAnn) [1] -canAdaptAnnotations(Trees$TypeTree, ?) [8] +canAdaptAnnotations(Trees$TypeTree, ?) [7] canAdaptAnnotations(Trees$Typed, ?) [3] canAdaptAnnotations(Trees$Typed, Any) [1] canAdaptAnnotations(Trees$Typed, Int) [1] lub(List(Int @testAnn, Int)) [1] -pluginsPt(?, Trees$Annotated) [7] +pluginsPt(?, Trees$Annotated) [6] pluginsPt(?, Trees$Apply) [11] pluginsPt(?, Trees$ApplyImplicitView) [2] pluginsPt(?, Trees$Block) [4] pluginsPt(?, Trees$ClassDef) [2] pluginsPt(?, Trees$DefDef) [14] -pluginsPt(?, Trees$Ident) [51] +pluginsPt(?, Trees$Ident) [43] pluginsPt(?, Trees$If) [2] pluginsPt(?, Trees$Literal) [16] pluginsPt(?, Trees$New) [6] @@ -37,7 +37,7 @@ pluginsPt(?, Trees$This) [13] pluginsPt(?, Trees$TypeApply) [3] pluginsPt(?, Trees$TypeBoundsTree) [2] pluginsPt(?, Trees$TypeDef) [1] -pluginsPt(?, Trees$TypeTree) [32] +pluginsPt(?, Trees$TypeTree) [25] pluginsPt(?, Trees$Typed) [1] pluginsPt(?, Trees$ValDef) [13] pluginsPt(Any, Trees$Literal) [2] @@ -118,20 +118,20 @@ pluginsTyped(=> String @testAnn, Trees$Select) [1] pluginsTyped(A, Trees$Apply) [1] pluginsTyped(A, Trees$Ident) [2] pluginsTyped(A, Trees$This) [1] -pluginsTyped(A, Trees$TypeTree) [4] +pluginsTyped(A, Trees$TypeTree) [2] pluginsTyped(A.super.type, Trees$Super) [1] pluginsTyped(A.this.type, Trees$This) [11] pluginsTyped(Any, Trees$TypeTree) [1] pluginsTyped(AnyRef, Trees$Select) [4] pluginsTyped(Array[Any], Trees$ArrayValue) [1] pluginsTyped(Boolean @testAnn, Trees$Select) [1] -pluginsTyped(Boolean @testAnn, Trees$TypeTree) [3] +pluginsTyped(Boolean @testAnn, Trees$TypeTree) [2] pluginsTyped(Boolean(false), Trees$Literal) [1] pluginsTyped(Boolean, Trees$Apply) [1] -pluginsTyped(Boolean, Trees$Select) [3] +pluginsTyped(Boolean, Trees$Select) [2] pluginsTyped(Char('c'), Trees$Literal) [2] pluginsTyped(Double, Trees$Apply) [3] -pluginsTyped(Double, Trees$Select) [6] +pluginsTyped(Double, Trees$Select) [4] pluginsTyped(Int @testAnn, Trees$TypeTree) [2] pluginsTyped(Int @testAnn, Trees$Typed) [2] pluginsTyped(Int(0), Trees$Literal) [2] @@ -141,8 +141,8 @@ pluginsTyped(Int(2), Trees$Literal) [1] pluginsTyped(Int, Trees$Apply) [1] pluginsTyped(Int, Trees$Ident) [1] pluginsTyped(Int, Trees$If) [1] -pluginsTyped(Int, Trees$Select) [12] -pluginsTyped(Int, Trees$TypeTree) [10] +pluginsTyped(Int, Trees$Select) [10] +pluginsTyped(Int, Trees$TypeTree) [8] pluginsTyped(List[Any], Trees$Apply) [1] pluginsTyped(List[Any], Trees$Select) [1] pluginsTyped(List[Any], Trees$TypeTree) [2] @@ -158,14 +158,14 @@ pluginsTyped(String("str"), Trees$Literal) [1] pluginsTyped(String("two"), Trees$Literal) [2] pluginsTyped(String, Trees$Apply) [2] pluginsTyped(String, Trees$Block) [2] -pluginsTyped(String, Trees$Select) [7] -pluginsTyped(String, Trees$TypeTree) [6] +pluginsTyped(String, 
Trees$Select) [4] +pluginsTyped(String, Trees$TypeTree) [5] pluginsTyped(Unit, Trees$Apply) [2] pluginsTyped(Unit, Trees$Assign) [1] pluginsTyped(Unit, Trees$Block) [4] pluginsTyped(Unit, Trees$If) [1] pluginsTyped(Unit, Trees$Literal) [5] -pluginsTyped(Unit, Trees$TypeTree) [2] +pluginsTyped(Unit, Trees$TypeTree) [1] pluginsTyped([A](xs: A*)List[A], Trees$Select) [1] pluginsTyped([T <: Int]=> Int, Trees$Select) [1] pluginsTyped([T0]()T0, Trees$Select) [1] @@ -183,7 +183,7 @@ pluginsTyped(testAnn, Trees$Apply) [6] pluginsTyped(testAnn, Trees$Ident) [6] pluginsTyped(testAnn, Trees$New) [6] pluginsTyped(testAnn, Trees$This) [1] -pluginsTyped(testAnn, Trees$TypeTree) [2] +pluginsTyped(testAnn, Trees$TypeTree) [1] pluginsTyped(testAnn.super.type, Trees$Super) [1] pluginsTyped(type, Trees$Select) [1] pluginsTypedReturn(return f, String) [1] diff --git a/test/junit/scala/reflect/internal/PrintersTest.scala b/test/junit/scala/reflect/internal/PrintersTest.scala index c7cfe0dfbb72..55aa82cceb36 100644 --- a/test/junit/scala/reflect/internal/PrintersTest.scala +++ b/test/junit/scala/reflect/internal/PrintersTest.scala @@ -332,7 +332,7 @@ class BasePrintTest { @Test def testFunc2 = assertResultCode( code = "val sum: Seq[Int] => Int = _ reduceLeft (_+_)")( parsedCode = "val sum: _root_.scala.Function1[Seq[Int], Int] = ((x$1) => x$1.reduceLeft(((x$2, x$3) => x$2.+(x$3))))", - typedCode = "val sum: _root_.scala.Function1[scala.`package`.Seq[scala.Int], scala.Int] = ((x$1: Seq[Int]) => x$1.reduceLeft[Int](((x$2: Int, x$3: Int) => x$2.+(x$3))))") + typedCode = "val sum: scala.Function1[scala.`package`.Seq[scala.Int], scala.Int] = ((x$1: Seq[Int]) => x$1.reduceLeft[Int](((x$2: Int, x$3: Int) => x$2.+(x$3))))") @Test def testFunc3 = assertResultCode( code = "List(1, 2, 3) map (_ - 1)")( From 039b118eb98d6458467c8b8260830f12a51f0ee6 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 22 May 2019 14:04:43 +1000 Subject: [PATCH 1721/2793] PipelineMain: add test, make it more testable and less buggy. - Allow a user-specified reporter - funnel javac errors through it - funnel PipelineMain's logging through it, too. - Use a separate FileManager for each javac invocation to avoid an apparent race condition. - Expose config knobs programmatically rather than only through system properties.
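For illustration, here is a minimal sketch of driving a build programmatically with the new settings object and a user-specified reporter. The `PipelineSettings`, `defaultSettings`, `PipelineMainClass` and `process` names are the ones introduced in the diff below; the project layout and args-file paths are hypothetical.

```scala
import java.nio.file.{Path, Paths}

import scala.tools.nsc.{PipelineMain, PipelineMainClass}
import scala.tools.nsc.PipelineMain.Pipeline
import scala.tools.nsc.reporters.StoreReporter

object PipelineDemo {
  def main(args: Array[String]): Unit = {
    // Hypothetical args files, one per sub-project, each listing that project's scalac arguments.
    val argFiles: Seq[Path] = Seq(
      Paths.get("core/target/compile.args"),
      Paths.get("app/target/compile.args"))

    // Configure through the settings object instead of system properties, and
    // collect all diagnostics (scalac and javac alike) in a StoreReporter.
    val reporter = new StoreReporter
    val settings = PipelineMain.defaultSettings.copy(
      strategy = Pipeline,
      parallelism = java.lang.Runtime.getRuntime.availableProcessors,
      createReporter = _ => reporter)

    val ok = new PipelineMainClass(argFiles, settings).process()
    if (!ok) reporter.infos.foreach(println)
  }
}
```

Using a `StoreReporter` keeps the build silent while still letting the caller (here, the new JUnit test) inspect any errors after `process()` returns.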
--- .../scala/tools/nsc/PipelineMain.scala | 204 ++++++++------ .../scala/tools/nsc/DeterminismTest.scala | 36 +-- test/junit/scala/tools/nsc/FileUtils.scala | 39 +++ .../scala/tools/nsc/PipelineMainTest.scala | 260 ++++++++++++++++++ 4 files changed, 428 insertions(+), 111 deletions(-) create mode 100644 test/junit/scala/tools/nsc/FileUtils.scala create mode 100644 test/junit/scala/tools/nsc/PipelineMainTest.scala diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 24f8f8881771..e2d3f109ee44 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -17,32 +17,28 @@ import java.lang.Thread.UncaughtExceptionHandler import java.nio.file.attribute.FileTime import java.nio.file.{Files, Path, Paths} import java.time.Instant -import java.util.Collections -import java.util.concurrent.atomic.AtomicInteger +import java.util.{Collections, Locale} +import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger} -import javax.tools.ToolProvider +import javax.tools.Diagnostic.Kind +import javax.tools.{Diagnostic, DiagnosticListener, JavaFileObject, ToolProvider} -import scala.collection.JavaConverters.asScalaIteratorConverter +import scala.collection.JavaConverters._ import scala.collection.{immutable, mutable, parallel} import scala.concurrent._ import scala.concurrent.duration.Duration import scala.reflect.internal.pickling.PickleBuffer -import scala.reflect.internal.util.FakePos -import scala.reflect.io.RootPath +import scala.reflect.internal.util.{BatchSourceFile, FakePos, Position} +import scala.reflect.io.{PlainNioFile, RootPath} import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.util.ClassPath import scala.util.{Failure, Success, Try} -import PipelineMain.{BuildStrategy, Pipeline, Traditional} - -class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy, argFiles: Seq[Path], useJars: Boolean) { - private val pickleCacheConfigured = System.getProperty("scala.pipeline.picklecache") - private val pickleCache: Path = { - if (pickleCacheConfigured == null) Files.createTempDirectory("scala.picklecache") - else { - Paths.get(pickleCacheConfigured) - } - } +import PipelineMain.{Pipeline, Traditional} + +class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.PipelineSettings) { + import pipelineSettings._ + private val pickleCache: Path = configuredPickleCache.getOrElse(Files.createTempDirectory("scala.picklecache")) private def cachePath(file: Path): Path = { val newExtension = if (useJars) ".jar" else "" changeExtension(pickleCache.resolve("./" + file).normalize(), newExtension) @@ -120,7 +116,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } - def writeDotFile(dependsOn: mutable.LinkedHashMap[Task, List[Dependency]]): Unit = { + def writeDotFile(logDir: Path, dependsOn: mutable.LinkedHashMap[Task, List[Dependency]]): Unit = { val builder = new java.lang.StringBuilder() builder.append("digraph projects {\n") for ((p, deps) <- dependsOn) { @@ -133,17 +129,16 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } } builder.append("}\n") - val path = Paths.get("projects.dot") + val path = logDir.resolve("projects.dot") Files.write(path, builder.toString.getBytes(java.nio.charset.StandardCharsets.UTF_8)) - println("Wrote project dependency graph to: " + path.toAbsolutePath) + reporter.echo("Wrote 
project dependency graph to: " + path.toAbsolutePath) } private case class Dependency(t: Task, isMacro: Boolean, isPlugin: Boolean) def process(): Boolean = { - println(s"parallelism = $parallelism, strategy = $strategy") - - reporter = new ConsoleReporter(new Settings(scalacError)) + reporter = createReporter(new Settings(scalacError)) + reporter.echo(s"parallelism = $parallelism, strategy = $strategy") def commandFor(argFileArg: Path): Task = { val ss = new Settings(scalacError) @@ -152,6 +147,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } val projects: List[Task] = argFiles.toList.map(commandFor) + if (reporter.hasErrors) return false + val numProjects = projects.size val produces = mutable.LinkedHashMap[Path, Task]() for (p <- projects) { @@ -168,27 +165,27 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val externalClassPath = projects.iterator.flatMap(_.classPath).filter(p => !produces.contains(p) && Files.exists(p)).toSet if (strategy != Traditional) { - val exportTimer = new Timer - exportTimer.start() - for (entry <- externalClassPath) { - val extracted = cachePath(entry) - val sourceTimeStamp = Files.getLastModifiedTime(entry) - if (Files.exists(extracted) && Files.getLastModifiedTime(extracted) == sourceTimeStamp) { - // println(s"Skipped export of pickles from $entry to $extracted (up to date)") - } else { - PickleExtractor.process(entry, extracted) - Files.setLastModifiedTime(extracted, sourceTimeStamp) - println(s"Exported pickles from $entry to $extracted") - Files.setLastModifiedTime(extracted, sourceTimeStamp) + if (stripExternalClassPath) { + val exportTimer = new Timer + exportTimer.start() + for (entry <- externalClassPath) { + val extracted = cachePath(entry) + val sourceTimeStamp = Files.getLastModifiedTime(entry) + if (Files.exists(extracted) && Files.getLastModifiedTime(extracted) == sourceTimeStamp) { + // println(s"Skipped export of pickles from $entry to $extracted (up to date)") + } else { + PickleExtractor.process(entry, extracted) + Files.setLastModifiedTime(extracted, sourceTimeStamp) + reporter.echo(s"Exported pickles from $entry to $extracted") + Files.setLastModifiedTime(extracted, sourceTimeStamp) + } + strippedAndExportedClassPath(entry) = extracted } - strippedAndExportedClassPath(entry) = extracted + exportTimer.stop() + reporter.echo(f"Exported external classpath in ${exportTimer.durationMs}%.0f ms") } - exportTimer.stop() - println(f"Exported external classpath in ${exportTimer.durationMs}%.0f ms") } - writeDotFile(dependsOn) - val timer = new Timer timer.start() @@ -197,9 +194,12 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val allFutures = projects.flatMap(_.futures) val count = allFutures.size val counter = new AtomicInteger(count) + val failed = new AtomicBoolean(false) val handler = (a: Try[_]) => a match { case f @ Failure(_) => - done.complete(f) + if (failed.compareAndSet(false, true)) { + done.complete(f) + } case Success(_) => val remaining = counter.decrementAndGet() if (remaining == 0) done.success(()) @@ -213,28 +213,28 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy val allFutures: immutable.Seq[Future[_]] = projects.flatMap(_.futures) val numAllFutures = allFutures.size val awaitAllFutures: Future[_] = awaitAll(allFutures) - val numTasks = awaitAllFutures var lastNumCompleted = allFutures.count(_.isCompleted) while (true) try { Await.result(awaitAllFutures, Duration(60, "s")) 
timer.stop() val numCompleted = allFutures.count(_.isCompleted) - println(s"PROGRESS: $numCompleted / $numAllFutures") + reporter.echo(s"PROGRESS: $numCompleted / $numAllFutures") return } catch { case _: TimeoutException => val numCompleted = allFutures.count(_.isCompleted) if (numCompleted == lastNumCompleted) { - println(s"STALLED: $numCompleted / $numAllFutures") - println("Outline/Scala/Javac") + reporter.echo(s"STALLED: $numCompleted / $numAllFutures") + reporter.echo("Outline/Scala/Javac") projects.map { p => def toX(b: Future[_]): String = b.value match { case None => "-"; case Some(Success(_)) => "x"; case Some(Failure(_)) => "!" } val s = List(p.outlineDoneFuture, p.groupsDoneFuture, p.javaDoneFuture).map(toX).mkString(" ") - println(s + " " + p.label) + reporter.echo(s + " " + p.label) } } else { - println(s"PROGRESS: $numCompleted / $numAllFutures") + reporter.echo(s"PROGRESS: $numCompleted / $numAllFutures") + lastNumCompleted = numCompleted } } } @@ -246,7 +246,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy _ <- depsReady _ <- { val isLeaf = !dependedOn.contains(p) - if (isLeaf) { + if (isLeaf && useTraditionalForLeaf) { p.outlineDone.complete(Success(())) p.fullCompile() } else @@ -274,16 +274,17 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy if (parallelism == 1) { val criticalPath = projects.maxBy(_.regularCriticalPathMs) - println(f"Critical path: ${criticalPath.regularCriticalPathMs}%.0f ms. Wall Clock: ${timer.durationMs}%.0f ms") + reporter.echo(f"Critical path: ${criticalPath.regularCriticalPathMs}%.0f ms. Wall Clock: ${timer.durationMs}%.0f ms") } else - println(f" Wall Clock: ${timer.durationMs}%.0f ms") + reporter.echo(f" Wall Clock: ${timer.durationMs}%.0f ms") case Traditional => projects.foreach { p => val f1 = Future.traverse(dependsOn.getOrElse(p, Nil))(_.t.javaDone.future) val f2 = f1.flatMap { _ => p.outlineDone.complete(Success(())) p.fullCompile() - Future.traverse(p.groups)(_.done.future).map(_ => p.javaCompile()) + val eventualUnits: Future[List[Unit]] = Future.traverse(p.groups)(_.done.future) + eventualUnits.map(_ => p.javaCompile()) } f2.onComplete { _ => p.compiler.close() } } @@ -298,24 +299,28 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } if (parallelism == 1) { val maxFullCriticalPath: Double = projects.map(_.fullCriticalPathMs).max - println(f"Critical path: $maxFullCriticalPath%.0f ms. Wall Clock: ${timer.durationMs}%.0f ms") + reporter.echo(f"Critical path: $maxFullCriticalPath%.0f ms. 
Wall Clock: ${timer.durationMs}%.0f ms") } else { - println(f"Wall Clock: ${timer.durationMs}%.0f ms") + reporter.echo(f"Wall Clock: ${timer.durationMs}%.0f ms") } } - writeChromeTrace(projects) + logDir.foreach { dir => + Files.createDirectories(dir) + writeDotFile(dir, dependsOn) + writeChromeTrace(dir, projects) + } deleteTempPickleCache() true } private def deleteTempPickleCache(): Unit = { - if (pickleCacheConfigured == null) { + if (configuredPickleCache.isEmpty) { AbstractFile.getDirectory(pickleCache.toFile).delete() } } - private def writeChromeTrace(projects: List[Task]) = { + private def writeChromeTrace(logDir: Path, projects: List[Task]) = { val trace = new java.lang.StringBuilder() trace.append("""{"traceEvents": [""") val sb = new mutable.StringBuilder(trace) @@ -344,9 +349,9 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy projects.iterator.flatMap(projectEvents).addString(sb, ",\n") trace.append("]}") - val traceFile = Paths.get(s"build-${label}.trace") + val traceFile = logDir.resolve(s"build-${label}.trace") Files.write(traceFile, trace.toString.getBytes()) - println("Chrome trace written to " + traceFile.toAbsolutePath) + reporter.echo("Chrome trace written to " + traceFile.toAbsolutePath) } case class Group(files: List[String]) { @@ -355,7 +360,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } private case class Task(argsFile: Path, command: CompilerCommand, files: List[String]) { - val label = argsFile.toString.replaceAll("target/", "").replaceAll("""(.*)/(.*).args""", "$1:$2") + val label = argsFile.toString.replaceAll(".*/target/", "").replaceAll("""(.*)/(.*).args""", "$1:$2") override def toString: String = argsFile.toString def outputDir: Path = command.settings.outputDirs.getSingleOutput.get.file.toPath.toAbsolutePath.normalize() private def expand(s: command.settings.PathSetting): List[Path] = { @@ -380,8 +385,6 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy dependency.t.outlineDone.future - val cacheMacro = java.lang.Boolean.getBoolean("scala.pipeline.cache.macro.classloader") - val cachePlugin = java.lang.Boolean.getBoolean("scala.pipeline.cache.plugin.classloader") if (cacheMacro) command.settings.YcacheMacroClassLoader.value = "always" if (cachePlugin) @@ -391,6 +394,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy command.settings.YpickleJava.value = true } + val groupSize = Integer.getInteger("scala.pipeline.group.size", 128) + val groups: List[Group] = { val isScalaLibrary = files.exists(_.endsWith("Predef.scala")) if (isScalaLibrary) { @@ -398,7 +403,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } else { command.settings.classpath.value = command.settings.outputDirs.getSingleOutput.get.toString + File.pathSeparator + command.settings.classpath.value val length = files.length - val groups = (length.toDouble / 128).toInt.max(1) + val groups = (length.toDouble / groupSize).toInt.max(1) files.grouped((length.toDouble / groups).ceil.toInt.max(1)).toList.map(Group(_)) } } @@ -438,8 +443,8 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy throw t } - def fullCompile(): Unit = { + command.settings.Youtline.value = false command.settings.stopAfter.value = Nil command.settings.Ymacroexpand.value = command.settings.MacroExpand.Normal @@ -451,9 +456,14 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy 
group.timer.start() val compiler2 = newCompiler(command.settings) try { - val run2 = new compiler2.Run() - run2 compile group.files - compiler2.reporter.finish() + try { + val run2 = new compiler2.Run() + run2 compile group.files + compiler2.reporter.finish() + } finally { + group.timer.stop() + log(f"scalac (${ix + 1}/$groupCount): done ${group.timer.durationMs}%.0f ms") + } if (compiler2.reporter.hasErrors) { group.done.complete(Failure(new RuntimeException(label + ": compile failed: "))) } else { @@ -461,9 +471,7 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy } } finally { compiler2.close() - group.timer.stop() } - log(f"scalac (${ix + 1}/$groupCount): done ${group.timer.durationMs}%.0f ms") } } } @@ -521,19 +529,40 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy log("javac: start") javaTimer.start() javaDone.completeWith(Future { - val opts = java.util.Arrays.asList("-d", command.settings.outdir.value, "-cp", command.settings.outdir.value + File.pathSeparator + originalClassPath) - val compileTask = ToolProvider.getSystemJavaCompiler.getTask(null, null, null, opts, null, fileManager.getJavaFileObjects(javaSources.toArray: _*)) + val opts: java.util.List[String] = java.util.Arrays.asList("-d", command.settings.outdir.value, "-cp", command.settings.outdir.value + File.pathSeparator + originalClassPath) + val compiler = ToolProvider.getSystemJavaCompiler + val listener = new DiagnosticListener[JavaFileObject] { + override def report(diagnostic: Diagnostic[_ <: JavaFileObject]): Unit = { + val msg = diagnostic.getMessage(Locale.getDefault) + val source: JavaFileObject = diagnostic.getSource + val path = Paths.get(source.toUri) + val sourceFile = new BatchSourceFile(new PlainNioFile(path)) + val position = Position.range(sourceFile, diagnostic.getStartPosition.toInt, diagnostic.getPosition.toInt, diagnostic.getEndPosition.toInt) + diagnostic.getKind match { + case Kind.ERROR => reporter.error(position, msg) + case Kind.WARNING | Kind.MANDATORY_WARNING => reporter.warning(position, msg) + case Kind.NOTE => reporter.info(position, msg, true) + case Kind.OTHER => reporter.echo(position, msg) + } + } + } + val fileManager = ToolProvider.getSystemJavaCompiler.getStandardFileManager(null, null, null) + val compileTask = compiler.getTask(null, fileManager, listener, opts, null, fileManager.getJavaFileObjects(javaSources.toArray: _*)) compileTask.setProcessors(Collections.emptyList()) - compileTask.call() - javaTimer.stop() - log(f"javac: done ${javaTimer.durationMs}%.0f ms") + if (compileTask.call()) { + javaTimer.stop() + log(f"javac: done ${javaTimer.durationMs}%.0f ms ") + } else { + javaTimer.stop() + log(f"javac: error ${javaTimer.durationMs}%.0f ms ") + } () }) } else { javaDone.complete(Success(())) } } - def log(msg: String): Unit = println(this.label + ": " + msg) + def log(msg: String): Unit = reporter.echo(this.label + ": " + msg) } final class Timer() { @@ -579,24 +608,39 @@ class PipelineMainClass(label: String, parallelism: Int, strategy: BuildStrategy object PipelineMain { sealed abstract class BuildStrategy - /** Begin compilation as soon as the pickler phase is complete on all dependencies. */ + /** Transport pickles as an input to downstream compilation. 
*/ case object Pipeline extends BuildStrategy /** Emit class files before triggering downstream compilation */ case object Traditional extends BuildStrategy - def main(args: Array[String]): Unit = { + case class PipelineSettings(label: String, parallelism: Int, strategy: BuildStrategy, useJars: Boolean, + configuredPickleCache: Option[Path], cacheMacro: Boolean, cachePlugin: Boolean, + stripExternalClassPath: Boolean, useTraditionalForLeaf: Boolean, logDir: Option[Path], + createReporter: (Settings => Reporter)) + def defaultSettings: PipelineSettings = { val strategies = List(Pipeline, Traditional) val strategy = strategies.find(_.productPrefix.equalsIgnoreCase(System.getProperty("scala.pipeline.strategy", "pipeline"))).get val parallelism = java.lang.Integer.getInteger("scala.pipeline.parallelism", parallel.availableProcessors) val useJars = java.lang.Boolean.getBoolean("scala.pipeline.use.jar") + val cacheMacro = java.lang.Boolean.getBoolean("scala.pipeline.cache.macro.classloader") + val cachePlugin = java.lang.Boolean.getBoolean("scala.pipeline.cache.plugin.classloader") + val stripExternalClassPath = java.lang.Boolean.getBoolean("scala.pipeline.strip.external.classpath") + val useTraditionalForLeaf = java.lang.Boolean.getBoolean("scala.pipeline.use.traditional.for.leaf") + val configuredPickleCache = Option(System.getProperty("scala.pipeline.picklecache")).map(Paths.get(_)) + val logDir = Paths.get(".") + new PipelineSettings("1", parallelism, strategy, useJars, configuredPickleCache, + cacheMacro, cachePlugin, stripExternalClassPath, useTraditionalForLeaf, Some(logDir), new ConsoleReporter(_)) + } + + def main(args: Array[String]): Unit = { val argFiles: Seq[Path] = args match { case Array(path) if Files.isDirectory(Paths.get(path)) => Files.walk(Paths.get(path)).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList case _ => args.map(Paths.get(_)) } - val main = new PipelineMainClass("1", parallelism, strategy, argFiles, useJars) + val main = new PipelineMainClass(argFiles, defaultSettings) val result = main.process() if (!result) System.exit(1) @@ -608,10 +652,12 @@ object PipelineMain { //object PipelineMainTest { // def main(args: Array[String]): Unit = { // var i = 0 -// val argsFiles = Files.walk(Paths.get("/code/guardian-frontend")).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList -// for (_ <- 1 to 2; n <- List(parallel.availableProcessors); strat <- List(Pipeline)) { +//// val argsFiles = Files.walk(Paths.get("/code/guardian-frontend")).iterator().asScala.filter(_.getFileName.toString.endsWith(".args")).toList +// val argsFiles = List(Paths.get("/Users/jz/code/guardian-frontend/common/target/compile.args")) +// val useJars = java.lang.Boolean.getBoolean("scala.pipeline.use.jar") +// for (_ <- 1 to 20; n <- List(parallel.availableProcessors); strat <- List(OutlineTypePipeline)) { // i += 1 -// val main = new PipelineMainClass(strat + "-" + i, n, strat, argsFiles, useJars = false) +// val main = new PipelineMainClass(strat + "-" + i, n, strat, argsFiles, useJars) // println(s"====== ITERATION $i=======") // val result = main.process() // if (!result) diff --git a/test/junit/scala/tools/nsc/DeterminismTest.scala b/test/junit/scala/tools/nsc/DeterminismTest.scala index 9f79709cca58..deadd7fa218d 100644 --- a/test/junit/scala/tools/nsc/DeterminismTest.scala +++ b/test/junit/scala/tools/nsc/DeterminismTest.scala @@ -1,20 +1,18 @@ package scala.tools.nsc -import java.io.{File, OutputStreamWriter} +import java.io.OutputStreamWriter 
import java.nio.charset.Charset import java.nio.file.attribute.BasicFileAttributes import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor} -import java.util import javax.tools.ToolProvider import org.junit.Test -import scala.collection.JavaConverters.{asScalaIteratorConverter, seqAsJavaListConverter} -import scala.collection.immutable +import scala.collection.JavaConverters.seqAsJavaListConverter import scala.language.implicitConversions import scala.reflect.internal.util.{BatchSourceFile, SourceFile} -import scala.reflect.io.PlainNioFile import scala.tools.nsc.reporters.StoreReporter +import FileUtils._ class DeterminismTest { @Test def testLambdaLift(): Unit = { @@ -328,7 +326,7 @@ class DeterminismTest { val recompileOutput = Files.createTempDirectory("recompileOutput") copyRecursive(referenceOutput, recompileOutput) compile(recompileOutput, permutation) - assert(diff(referenceOutput, recompileOutput), s"Difference detected between recompiling $permutation Run:\njardiff -r $referenceOutput $recompileOutput\n") + assertDirectorySame(referenceOutput, recompileOutput, permutation.toString) deleteRecursive(recompileOutput) } deleteRecursive(referenceOutput) @@ -336,30 +334,4 @@ class DeterminismTest { } def permutationsWithSubsets[A](as: List[A]): List[List[A]] = as.permutations.toList.flatMap(_.inits.filter(_.nonEmpty)).distinct - - private def diff(dir1: Path, dir2: Path): Boolean = { - def allFiles(dir: Path) = Files.walk(dir).iterator().asScala.map(x => (dir.relativize(x), x)).toList.filter(_._2.getFileName.toString.endsWith(".class")).sortBy(_._1.toString) - - val dir1Files = allFiles(dir1) - val dir2Files = allFiles(dir2) - val identical = dir1Files.corresponds(dir2Files) { - case ((rel1, file1), (rel2, file2)) => - rel1 == rel2 && java.util.Arrays.equals(Files.readAllBytes(file1), Files.readAllBytes(file2)) - } - identical - } - private def deleteRecursive(f: Path) = new PlainNioFile(f).delete() - private def copyRecursive(src: Path, dest: Path): Unit = { - class CopyVisitor(src: Path, dest: Path) extends SimpleFileVisitor[Path] { - override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = { - Files.createDirectories(dest.resolve(src.relativize(dir))) - super.preVisitDirectory(dir, attrs) - } - override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = { - Files.copy(file, dest.resolve(src.relativize(file))) - super.visitFile(file, attrs) - } - } - Files.walkFileTree(src, new CopyVisitor(src, dest)) - } } diff --git a/test/junit/scala/tools/nsc/FileUtils.scala b/test/junit/scala/tools/nsc/FileUtils.scala new file mode 100644 index 000000000000..03befd661cab --- /dev/null +++ b/test/junit/scala/tools/nsc/FileUtils.scala @@ -0,0 +1,39 @@ +package scala.tools.nsc + +import java.nio.file.attribute.BasicFileAttributes +import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor} + +import scala.collection.JavaConverters.asScalaIteratorConverter +import scala.reflect.io.PlainNioFile + +object FileUtils { + def assertDirectorySame(dir1: Path, dir2: Path, dir2Label: String): Unit = { + assert(FileUtils.diff(dir1, dir2), s"Difference detected between recompiling $dir2Label Run:\njardiff -r $dir1 $dir2\n") + } + def diff(dir1: Path, dir2: Path): Boolean = { + def allFiles(dir: Path) = Files.walk(dir).iterator().asScala.map(x => (dir.relativize(x), x)).toList.filter(_._2.getFileName.toString.endsWith(".class")).sortBy(_._1.toString) + + val dir1Files = allFiles(dir1) + val dir2Files = allFiles(dir2) + val identical = 
dir1Files.corresponds(dir2Files) { + case ((rel1, file1), (rel2, file2)) => + rel1 == rel2 && java.util.Arrays.equals(Files.readAllBytes(file1), Files.readAllBytes(file2)) + } + identical + } + + def deleteRecursive(f: Path) = new PlainNioFile(f).delete() + def copyRecursive(src: Path, dest: Path): Unit = { + class CopyVisitor(src: Path, dest: Path) extends SimpleFileVisitor[Path] { + override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = { + Files.createDirectories(dest.resolve(src.relativize(dir))) + super.preVisitDirectory(dir, attrs) + } + override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = { + Files.copy(file, dest.resolve(src.relativize(file))) + super.visitFile(file, attrs) + } + } + Files.walkFileTree(src, new CopyVisitor(src, dest)) + } +} diff --git a/test/junit/scala/tools/nsc/PipelineMainTest.scala b/test/junit/scala/tools/nsc/PipelineMainTest.scala new file mode 100644 index 000000000000..48e27aaac98a --- /dev/null +++ b/test/junit/scala/tools/nsc/PipelineMainTest.scala @@ -0,0 +1,260 @@ +package scala.tools.nsc + +import java.io.{File, IOException} +import java.nio.charset.Charset +import java.nio.file.attribute.BasicFileAttributes +import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor} + +import org.junit.{After, Before, Test} + +import scala.collection.JavaConverters._ +import scala.collection.mutable +import FileUtils._ +import scala.tools.nsc.PipelineMain._ +import scala.tools.nsc.reporters.{ConsoleReporter, StoreReporter} + +class PipelineMainTest { + private var base: Path = _ + + // Enables verbose output to console to help understand what the test is doing. + private val debug = false + private var deleteBaseAfterTest = true + + @Before def before(): Unit = { + base = Files.createTempDirectory("pipelineBase") + } + + @After def after(): Unit = { + if (base != null && !debug && deleteBaseAfterTest) { + deleteRecursive(base) + } + } + + private def projectsBase = createDir(base, "projects") + + @Test def pipelineMainBuildsSeparate(): Unit = { + check(allBuilds.map(_.projects)) + } + + @Test def pipelineMainBuildsCombined(): Unit = { + check(List(allBuilds.flatMap(_.projects))) + } + + private val pipelineSettings = PipelineMain.defaultSettings.copy( + useJars = true, + parallelism = java.lang.Runtime.getRuntime.availableProcessors, + cacheMacro = true, + cachePlugin = true, + stripExternalClassPath = true, + useTraditionalForLeaf = true, + createReporter = ((s: Settings) => if (debug) new ConsoleReporter(s) else new StoreReporter()) + ) + + private def check(projectss: List[List[Build#Project]], altStrategies: List[BuildStrategy] = List(Pipeline)): Unit = { + def build(strategy: BuildStrategy): Unit = { + for (projects <- projectss) { + val argsFiles = projects.map(_.argsFile(Nil)) + val main = new PipelineMainClass(argsFiles, pipelineSettings.copy(strategy = strategy, logDir = Some(base.resolve(strategy.toString)))) + assert(main.process()) + } + } + build(Traditional) + + val reference = snapshotClasses(Traditional) + clean() + for (strategy <- altStrategies) { + build(strategy) + val recompiled = snapshotClasses(strategy) + // Bytecode should be identical regardless of compilation strategy. 
+ deleteBaseAfterTest = false + assertDirectorySame(reference, recompiled, strategy.toString) + deleteBaseAfterTest = true + } + } + + private lazy val allBuilds = List(m1, b2, b3, b4) + + private lazy val m1: Build = { + val build = new Build(projectsBase, "m1") + val macroProject = build.project("p1") + macroProject.withSource("m1/p1/Macro.scala")( + """ + |package m1.p1 + |import reflect.macros.blackbox.Context, language.experimental._ + |object Macro { + | def m: Unit = macro impl + | def impl(c: Context): c.Tree = { + | import c.universe._ + | q"()" + | } + |} + """.stripMargin) + val internalMacroClient = build.project("internalMacroClient") + internalMacroClient.scalacOptions ++= List("-Ymacro-classpath", macroProject.out.toString) + internalMacroClient.classpath += macroProject.out + internalMacroClient.withSource("m2/p2/InternalClient.scala")( + """ + |package m1.p2 + |class InternalClient { m1.p1.Macro.m } + """.stripMargin) + build + } + + private lazy val b2: Build = { + val build = new Build(projectsBase, "b1") + val p1 = build.project("p1") + val m1P1 = m1.project("p1") + p1.classpath += m1P1.out + p1.scalacOptions ++= List("-Ymacro-classpath", m1P1.out.toString) + p1.withSource("b1/p1/ExternalClient.scala")( + """ + |package b2.p2 + |class ExternalClient { m1.p1.Macro.m } + """.stripMargin) + build + } + + private lazy val b3: Build = { + val build = new Build(projectsBase, "b3") + val p1 = build.project("p1") + p1.withSource("b3/p1/JavaDefined.java")( + """ + |package b3.p1; + |public class JavaDefined { + | ScalaJoint id(T t) { return new ScalaJoint(); } + |} + """.stripMargin) + p1.withSource("b3/p1/ScalaJoint.scala")( + """ + |package b3.p1 + |class ScalaJoint[T] { + | def foo: Unit = new JavaDefined[String] + |} + """.stripMargin) + val p2 = build.project("p2") + p2.classpath += p1.out + p2.withSource("b3/p2/JavaClient.java")( + """ + |package b3.p2; + |public class JavaClient { + | b3.p1.JavaDefined test() { return null; } + |} + """.stripMargin) + p2.withSource("b3/p2/ScalaClient.scala")( + """ + |package b3.p2 + |class ScalaClient { + | def test(): b3.p1.JavaDefined[String] = null; + |} + """.stripMargin) + build + } + + private lazy val b4: Build = { + val build = new Build(projectsBase, "b4") + val b3P1 = b3.project("p1") + val p2 = build.project("p2") + p2.classpath += b3P1.out + p2.withSource("b4/p2/JavaClient.java")( + """ + |package b4.p2; + |public class JavaClient { + | b3.p1.JavaDefined test() { return null; } + |} + """.stripMargin) + p2.withSource("b4/p2/ScalaClient.scala")( + """ + |package b4.p2 + |class ScalaClient { + | def test(): b3.p1.JavaDefined[String] = null; + |} + """.stripMargin) + build + } + + final class Build(base: Path, name: String) { + + val buildBase = createDir(base, name) + val scalacOptions = mutable.ListBuffer[String]() + final class Project(val name: String) { + def fullName: String = Build.this.name + "." 
+ name + val base = createDir(buildBase, name) + val out = createDir(base, "target") + val src = createDir(base, "src") + val scalacOptions = mutable.ListBuffer[String]() + scalacOptions += "-usejavacp" + val classpath = mutable.ListBuffer[Path]() + val sources = mutable.ListBuffer[Path]() + def withSource(relativePath: String)(code: String): this.type = { + val srcFile = src.resolve(relativePath) + Files.createDirectories(srcFile.getParent) + Files.write(srcFile, code.getBytes(Charset.defaultCharset())) + sources += srcFile + this + } + def argsFile(extraOpts: List[String]): Path = { + val cp = if (classpath.isEmpty) Nil else List("-cp", classpath.mkString(File.pathSeparator)) + val printArgs = if (debug) List("-Xprint-args", "-") else Nil + val entries = List( + Build.this.scalacOptions.toList, + scalacOptions.toList, + extraOpts, + printArgs, + List("-d", out.toString) ::: cp ::: sources.toList.map(_.toString) + ).flatten + Files.write(out.resolve(fullName + ".args"), entries.asJava) + } + } + private val projectsMap = mutable.LinkedHashMap[String, Project]() + def projects: List[Project] = projectsMap.valuesIterator.toList + def project(name: String): Project = { + projectsMap.getOrElseUpdate(name, new Project(name)) + } + } + + private def clean(): Unit = { + class CleanVisitor() extends SimpleFileVisitor[Path] { + override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = { + if (dir.getFileName.toString == "target") { + deleteRecursive(dir) + Files.createDirectories(dir) + FileVisitResult.SKIP_SUBTREE + } else super.preVisitDirectory(dir, attrs) + } + } + Files.walkFileTree(projectsBase, new CleanVisitor()) + } + private def snapshotClasses(strategy: BuildStrategy): Path = { + val src = projectsBase + val dest = createDir(base, strategy.toString + "/classes") + class CopyVisitor(src: Path, dest: Path) extends SimpleFileVisitor[Path] { + override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = { + Files.createDirectories(dest.resolve(src.relativize(dir))) + super.preVisitDirectory(dir, attrs) + } + + override def postVisitDirectory(dir: Path, exc: IOException): FileVisitResult = { + val destDir = dest.resolve(src.relativize(dir)) + val listing = Files.list(destDir) + try { + if (!listing.iterator().hasNext) + Files.delete(destDir) + } finally { + listing.close() + } + super.postVisitDirectory(dir, exc) + } + override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = { + Files.copy(file, dest.resolve(src.relativize(file))) + super.visitFile(file, attrs) + } + } + Files.walkFileTree(src, new CopyVisitor(src, dest)) + dest + } + + private def createDir(dir: Path, s: String): Path = { + val subDir = dir.resolve(s) + Files.createDirectories(subDir) + } +} From 79ace8ae5a4a387fcf6784cabe964a16ad597ff0 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 22 May 2019 14:04:56 +1000 Subject: [PATCH 1722/2793] Add experimental support for outline typing In this new mode, the RHS of definitions is only typechecked if the definition lacks an explicit type ascription, or or it may contain a super call that is compiled to a trait super accessor. Refer to the new test case for a motivating example. 
--- .../scala/tools/nsc/PipelineMain.scala | 73 ++++++++++++++++++- .../tools/nsc/typechecker/Analyzer.scala | 12 +-- .../scala/tools/nsc/typechecker/Typers.scala | 12 ++- .../scala/tools/nsc/PipelineMainTest.scala | 43 ++++++++++- 4 files changed, 127 insertions(+), 13 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index e2d3f109ee44..edb385da6d08 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -34,7 +34,7 @@ import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.util.ClassPath import scala.util.{Failure, Success, Try} -import PipelineMain.{Pipeline, Traditional} +import PipelineMain.{OutlineTypePipeline, Pipeline, Traditional} class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.PipelineSettings) { import pipelineSettings._ @@ -239,6 +239,43 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe } } strategy match { + case OutlineTypePipeline => + projects.foreach { p: Task => + val depsReady = Future.traverse(dependsOn.getOrElse(p, Nil))(task => p.dependencyReadyFuture(task)) + val f = for { + _ <- depsReady + _ <- { + p.outlineCompile() + p.outlineDone.future + } + _ <- { + p.fullCompile() + Future.traverse(p.groups)(_.done.future) + } + } yield { + p.javaCompile() + } + f.onComplete { _ => p.compiler.close() } + } + + awaitDone() + + for (p <- projects) { + val dependencies = dependsOn(p).map(_.t) + + def maxByOrZero[A](as: List[A])(f: A => Double): Double = if (as.isEmpty) 0d else as.map(f).max + + val maxOutlineCriticalPathMs = maxByOrZero(dependencies)(_.outlineCriticalPathMs) + p.outlineCriticalPathMs = maxOutlineCriticalPathMs + p.outlineTimer.durationMs + p.regularCriticalPathMs = maxOutlineCriticalPathMs + maxByOrZero(p.groups)(_.timer.durationMs) + p.fullCriticalPathMs = maxByOrZero(dependencies)(_.fullCriticalPathMs) + p.groups.map(_.timer.durationMs).sum + } + + if (parallelism == 1) { + val criticalPath = projects.maxBy(_.regularCriticalPathMs) + reporter.echo(f"Critical path: ${criticalPath.regularCriticalPathMs}%.0f ms. 
Wall Clock: ${timer.durationMs}%.0f ms") + } else + reporter.echo(f" Wall Clock: ${timer.durationMs}%.0f ms") case Pipeline => projects.foreach { p => val depsReady = Future.traverse(dependsOn.getOrElse(p, Nil))(task => p.dependencyReadyFuture(task)) @@ -332,7 +369,7 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe def projectEvents(p: Task): List[String] = { val events = List.newBuilder[String] if (p.outlineTimer.durationMicros > 0d) { - val desc = "parser-to-pickler" + val desc = if (strategy == OutlineTypePipeline) "outline-type" else "parser-to-pickler" events += durationEvent(p.label, desc, p.outlineTimer) events += durationEvent(p.label, "pickle-export", p.pickleExportTimer) } @@ -398,7 +435,7 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe val groups: List[Group] = { val isScalaLibrary = files.exists(_.endsWith("Predef.scala")) - if (isScalaLibrary) { + if (strategy != OutlineTypePipeline || isScalaLibrary) { Group(files) :: Nil } else { command.settings.classpath.value = command.settings.outputDirs.getSingleOutput.get.toString + File.pathSeparator + command.settings.classpath.value @@ -443,6 +480,32 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe throw t } + def outlineCompile(): Unit = { + outlineTimer.start() + try { + log("scalac outline: start") + command.settings.Youtline.value = true + command.settings.stopAfter.value = List("pickler") + command.settings.Ymacroexpand.value = command.settings.MacroExpand.None + val run1 = new compiler.Run() + run1 compile files + registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, run1.symData) + outlineTimer.stop() + reporter.finish() + if (reporter.hasErrors) { + log("scalac outline: failed") + outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) + } else { + log(f"scalac outline: done ${outlineTimer.durationMs}%.0f ms") + outlineDone.complete(Success(())) + } + } catch { + case t: Throwable => + t.printStackTrace() + outlineDone.complete(Failure(new RuntimeException(label + ": compile failed: "))) + } + } + def fullCompile(): Unit = { command.settings.Youtline.value = false command.settings.stopAfter.value = Nil @@ -608,6 +671,8 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe object PipelineMain { sealed abstract class BuildStrategy + /** Outline type check sources to compute type signatures an input to downstream compilation. Compile sources (optionally */ + case object OutlineTypePipeline extends BuildStrategy /** Transport pickles as an input to downstream compilation. 
*/ case object Pipeline extends BuildStrategy @@ -619,7 +684,7 @@ object PipelineMain { stripExternalClassPath: Boolean, useTraditionalForLeaf: Boolean, logDir: Option[Path], createReporter: (Settings => Reporter)) def defaultSettings: PipelineSettings = { - val strategies = List(Pipeline, Traditional) + val strategies = List(OutlineTypePipeline, Pipeline, Traditional) val strategy = strategies.find(_.productPrefix.equalsIgnoreCase(System.getProperty("scala.pipeline.strategy", "pipeline"))).get val parallelism = java.lang.Integer.getInteger("scala.pipeline.parallelism", parallel.availableProcessors) val useJars = java.lang.Boolean.getBoolean("scala.pipeline.use.jar") diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index b068e43d1ad4..bc5ffd0ccd7c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -112,11 +112,13 @@ trait Analyzer extends AnyRef try { val typer = newTyper(rootContext(unit)) unit.body = typer.typed(unit.body) - for (workItem <- unit.toCheck) workItem() - if (settings.warnUnusedImport) - warnUnusedImports(unit) - if (settings.warnUnused.isSetByUser) - new checkUnused(typer).apply(unit) + if (!settings.Youtline.value) { + for (workItem <- unit.toCheck) workItem() + if (settings.warnUnusedImport) + warnUnusedImports(unit) + if (settings.warnUnused.isSetByUser) + new checkUnused(typer).apply(unit) + } } finally { unit.toCheck.clear() diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 0a3002d04c80..79086ab03bb9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5945,14 +5945,21 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper final def transformedOrTyped(tree: Tree, mode: Mode, pt: Type): Tree = { lookupTransformed(tree) match { case Some(tree1) => tree1 - case _ => typed(tree, mode, pt) + case _ => if (canSkipRhs(tree)) EmptyTree else typed(tree, mode, pt) } } final def lookupTransformed(tree: Tree): Option[Tree] = if (phase.erasedTypes) None // OPT save the hashmap lookup in erasure type and beyond else transformed remove tree - } + private final def canSkipRhs(tree: Tree) = settings.Youtline.value && !tree.exists { + case Super(qual, mix) => + // conservative approximation of method bodies that may give rise to super accessors which must be + // stored in pickle. 
+ context.owner.enclClass.isTrait || mix != tpnme.EMPTY + case _ => false + } + } /** Finish computation of param aliases after typechecking is completed */ final def finishComputeParamAlias(): Unit = { @@ -5981,6 +5988,7 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper } superConstructorCalls.clear() } + } trait TypersStats { diff --git a/test/junit/scala/tools/nsc/PipelineMainTest.scala b/test/junit/scala/tools/nsc/PipelineMainTest.scala index 48e27aaac98a..e3e6a81fc921 100644 --- a/test/junit/scala/tools/nsc/PipelineMainTest.scala +++ b/test/junit/scala/tools/nsc/PipelineMainTest.scala @@ -40,6 +40,12 @@ class PipelineMainTest { check(List(allBuilds.flatMap(_.projects))) } + @Test def pipelineMainBuildsJavaAccessor(): Unit = { + // Tests the special case in Typer:::canSkipRhs to make outline typing descend into method bodies might + // give rise to super accssors + check(List(b5SuperAccessor.projects), altStrategies = List(OutlineTypePipeline)) + } + private val pipelineSettings = PipelineMain.defaultSettings.copy( useJars = true, parallelism = java.lang.Runtime.getRuntime.availableProcessors, @@ -50,7 +56,7 @@ class PipelineMainTest { createReporter = ((s: Settings) => if (debug) new ConsoleReporter(s) else new StoreReporter()) ) - private def check(projectss: List[List[Build#Project]], altStrategies: List[BuildStrategy] = List(Pipeline)): Unit = { + private def check(projectss: List[List[Build#Project]], altStrategies: List[BuildStrategy] = List(Pipeline, OutlineTypePipeline)): Unit = { def build(strategy: BuildStrategy): Unit = { for (projects <- projectss) { val argsFiles = projects.map(_.argsFile(Nil)) @@ -72,7 +78,7 @@ class PipelineMainTest { } } - private lazy val allBuilds = List(m1, b2, b3, b4) + private lazy val allBuilds = List(m1, b2, b3, b4, b5SuperAccessor) private lazy val m1: Build = { val build = new Build(projectsBase, "m1") @@ -172,6 +178,39 @@ class PipelineMainTest { build } + private lazy val b5SuperAccessor: Build = { + val build = new Build(projectsBase, "b5") + val p1 = build.project("p1") + p1.withSource("b5/p1/JavaProtectedMethod.java")( + """ + |package b5.p1; + |public class JavaProtectedMethod { + | protected String foo() { return "JavaProtectedMethod.foo"; } + |} + """.stripMargin) + p1.withSource("b5/p1/NeedSuperAccessor.scala")( + """ + |package b5.p1 + |trait NeedSuperAccessor extends JavaProtectedMethod { + | protected override def foo = "NeedSuperAccessor.foo" + | class Inner { + | def test: Any = { + | NeedSuperAccessor.super[JavaProtectedMethod].foo + | } + | } + |} + """.stripMargin) + val p2 = build.project("p2") + p2.classpath += p1.out + p2.withSource("b5/p2/ScalaSub.scala")( + """ + |package b5.p2 + |class ScalaSub extends b5.p1.NeedSuperAccessor { + |} + """.stripMargin) + build + } + final class Build(base: Path, name: String) { val buildBase = createDir(base, name) From a669e91b86c6037d4420c5be5f583b63dbe2a9f1 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 22 May 2019 16:13:29 +1000 Subject: [PATCH 1723/2793] Deal with unpositioned Javac diagnostic messages --- src/compiler/scala/tools/nsc/PipelineMain.scala | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index edb385da6d08..c55fdfec4b53 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -28,7 +28,7 @@ import scala.collection.{immutable, mutable, parallel} 
import scala.concurrent._ import scala.concurrent.duration.Duration import scala.reflect.internal.pickling.PickleBuffer -import scala.reflect.internal.util.{BatchSourceFile, FakePos, Position} +import scala.reflect.internal.util.{BatchSourceFile, FakePos, NoPosition, Position} import scala.reflect.io.{PlainNioFile, RootPath} import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} @@ -599,8 +599,10 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe val msg = diagnostic.getMessage(Locale.getDefault) val source: JavaFileObject = diagnostic.getSource val path = Paths.get(source.toUri) - val sourceFile = new BatchSourceFile(new PlainNioFile(path)) - val position = Position.range(sourceFile, diagnostic.getStartPosition.toInt, diagnostic.getPosition.toInt, diagnostic.getEndPosition.toInt) + val position = if (diagnostic.getPosition == Diagnostic.NOPOS) NoPosition else { + val sourceFile = new BatchSourceFile(new PlainNioFile(path)) + Position.range(sourceFile, diagnostic.getStartPosition.toInt, diagnostic.getPosition.toInt, diagnostic.getEndPosition.toInt) + } diagnostic.getKind match { case Kind.ERROR => reporter.error(position, msg) case Kind.WARNING | Kind.MANDATORY_WARNING => reporter.warning(position, msg) From 1a1651447a86a79052d940acf911212b15f704cc Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 21 Mar 2019 11:46:55 +0100 Subject: [PATCH 1724/2793] [backport] Drop leaky encoding from JavaParser It's subsumed by Jason's improvements to name resolution in #7671. The leak yielded spurious errors in mixed Scala/Java compilation (akka-http). (cherry picked from commit 8529be781349c464694229a5b2a95cc79c55ae85) --- .../scala/tools/nsc/javac/JavaParsers.scala | 13 +------------ 1 file changed, 1 insertion(+), 12 deletions(-) diff --git a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala index d87fa7e8da81..9f3d66dda17d 100644 --- a/src/compiler/scala/tools/nsc/javac/JavaParsers.scala +++ b/src/compiler/scala/tools/nsc/javac/JavaParsers.scala @@ -768,18 +768,7 @@ trait JavaParsers extends ast.parser.ParsersCommon with JavaScanners { members) ++= decls } } - def forwarders(sdef: Tree): List[Tree] = sdef match { - case ClassDef(mods, name, tparams, _) if (parentToken == INTERFACE) => - val tparams1: List[TypeDef] = tparams map (_.duplicate) - var rhs: Tree = Select(Ident(parentName.toTermName), name) - if (!tparams1.isEmpty) rhs = AppliedTypeTree(rhs, tparams1 map (tp => Ident(tp.name))) - List(TypeDef(Modifiers(Flags.PROTECTED), name, tparams1, rhs)) - case _ => - List() - } - val sdefs = statics.toList - val idefs = members.toList ::: (sdefs flatMap forwarders) - (sdefs, idefs) + (statics.toList, members.toList) } def annotationParents = List( gen.scalaAnnotationDot(tpnme.Annotation), From 5e83e4a4fd8bf40a6a5f260e15d1d5146df41ddd Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 22 May 2019 17:20:05 +1000 Subject: [PATCH 1725/2793] Test for backported fix in prior commit (pls merge forward) --- .../java-inherited-type-protobuf/Test.java | 39 +++++++++++++++++++ .../java-inherited-type-protobuf/client.scala | 5 +++ 2 files changed, 44 insertions(+) create mode 100644 test/files/pos/java-inherited-type-protobuf/Test.java create mode 100644 test/files/pos/java-inherited-type-protobuf/client.scala diff --git a/test/files/pos/java-inherited-type-protobuf/Test.java b/test/files/pos/java-inherited-type-protobuf/Test.java new file mode 100644 index 
000000000000..d76bf21e6f70 --- /dev/null +++ b/test/files/pos/java-inherited-type-protobuf/Test.java @@ -0,0 +1,39 @@ +package example; + +public class Test { + +} + +class GeneratedMessage extends AbstractMessage { + GeneratedMessage(Builder builder) { + } + + public abstract static class Builder + extends AbstractMessage.Builder {} +} + +class AbstractMessage extends AbstractMessageLite + implements Message { + public static abstract class Builder + extends AbstractMessageLite.Builder + implements Message.Builder {} +} + +class AbstractMessageLite implements MessageLite { + public static abstract class Builder + implements MessageLite.Builder { + } + +} + +interface Message extends MessageLite, MessageOrBuilder { + static interface Builder extends MessageLite.Builder, MessageOrBuilder {} +} + +interface MessageLite extends MessageLiteOrBuilder { + interface Builder extends MessageLiteOrBuilder, Cloneable {} +} + +interface MessageLiteOrBuilder {} + +interface MessageOrBuilder extends MessageLiteOrBuilder {} \ No newline at end of file diff --git a/test/files/pos/java-inherited-type-protobuf/client.scala b/test/files/pos/java-inherited-type-protobuf/client.scala new file mode 100644 index 000000000000..3a73336f46fd --- /dev/null +++ b/test/files/pos/java-inherited-type-protobuf/client.scala @@ -0,0 +1,5 @@ +package example + +object Client { + new GeneratedMessage(null) +} From da7bce3af2cc2b5565ab88c11db3886fb44709ab Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 23 May 2019 14:04:57 +1000 Subject: [PATCH 1726/2793] Fix chrome trace output for outline typing strategy --- src/compiler/scala/tools/nsc/PipelineMain.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index c55fdfec4b53..2e5d6d0d0530 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -489,8 +489,12 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe command.settings.Ymacroexpand.value = command.settings.MacroExpand.None val run1 = new compiler.Run() run1 compile files - registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, run1.symData) outlineTimer.stop() + log(f"scalac outline: done ${outlineTimer.durationMs}%.0f ms") + pickleExportTimer.start() + registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, run1.symData) + pickleExportTimer.stop() + log(f"scalac: exported pickles ${pickleExportTimer.durationMs}%.0f ms") reporter.finish() if (reporter.hasErrors) { log("scalac outline: failed") From 0a8e7c3cde655c0ebc60f4526f9069840a1a8f82 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Mon, 23 Apr 2018 11:26:07 +0200 Subject: [PATCH 1727/2793] [backport] Update test case to changed JDK behavior cherry-picked from b0b684e578863a0ff15ee0638431c30a9c00a965 --- test/files/run/t2873.check | 1 - test/files/run/t2873.scala | 4 +++- 2 files changed, 3 insertions(+), 2 deletions(-) delete mode 100644 test/files/run/t2873.check diff --git a/test/files/run/t2873.check b/test/files/run/t2873.check deleted file mode 100644 index 209b679c0719..000000000000 --- a/test/files/run/t2873.check +++ /dev/null @@ -1 +0,0 @@ -RedBlack.Empty$ diff --git a/test/files/run/t2873.scala b/test/files/run/t2873.scala index 3a3cc59b465c..d8cf21e75303 100644 --- a/test/files/run/t2873.scala +++ b/test/files/run/t2873.scala @@ -5,6 +5,8 @@ abstract class RedBlack[A] extends 
Serializable { object Test { def main(args: Array[String]): Unit = { - println(classOf[RedBlack[_]].getMethod("Empty").getGenericReturnType) + val r = classOf[RedBlack[_]].getMethod("Empty").getGenericReturnType.toString + // Output changed in JDK 1.8.0_172: https://github.com/scala/bug/issues/10835 + assert(r == "RedBlack.Empty$" || r == "RedBlack$Empty$", r) } } From 8967f68b086146563ac1a63b341bdc7ea4ddac13 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 23 May 2019 14:06:16 +0200 Subject: [PATCH 1728/2793] run/classfile-format-52.scala, run on java8, needs -target:jvm-1.8 --- test/files/run/classfile-format-52.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/files/run/classfile-format-52.scala b/test/files/run/classfile-format-52.scala index 453f61ac8481..a641b67f820a 100644 --- a/test/files/run/classfile-format-52.scala +++ b/test/files/run/classfile-format-52.scala @@ -14,7 +14,7 @@ import Opcodes._ // By its nature the test can only work on JDK 8+ because under JDK 7- the // interface won't verify. object Test extends DirectTest { - override def extraSettings: String = "-optimise -usejavacp -d " + testOutput.path + " -cp " + testOutput.path + override def extraSettings: String = "-target:jvm-1.8 -optimise -usejavacp -d " + testOutput.path + " -cp " + testOutput.path def generateInterface() { val interfaceName = "HasDefaultMethod" From e877349687d93cac55a2d57b7a8a5de7626ea464 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 23 May 2019 14:06:29 +0200 Subject: [PATCH 1729/2793] bump straight to sbt 0.13.18 --- project/build.properties | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/build.properties b/project/build.properties index 35c88bab7dd9..8e682c526d5d 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=0.13.12 +sbt.version=0.13.18 From 6b883e15264ca0c1dfe3c1b7a8e2177ebd7e6523 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 9 Aug 2016 15:42:44 +1000 Subject: [PATCH 1730/2793] [backport] Deterministically enter classes from directory into package scope On Linux, the directory listing is not automatically sorted, as it is on Mac. This leads to non-deterministic ids of Symbols of the classes in a directory, which in turn leads to instability of the ordering of parents within inferred refinement types. Notably, with this patch, we will stably infer: ``` scala> case class C(); case class D(); List(C(), D()).head defined class C defined class D res0: Product with Serializable = C() ``` rather than sometimes getting `Serializable with Product` on Linux. As such, I've removed the workarounds for this instability in two test cases.
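For reference, the core of the change (shown in full in the diff below) is to impose a stable, name-based order on the JDK's directory listing before class entries are entered into the package scope. A minimal sketch of the idea; `sortedListing` is a hypothetical helper name standing in for the new `listDir` logic:

```
// Sketch only: sort the raw directory listing by file name so that class symbols
// are created in the same order regardless of platform or filesystem.
def sortedListing(dir: java.io.File): Array[java.io.File] = {
  val listing = dir.listFiles()
  java.util.Arrays.sort(listing, new java.util.Comparator[java.io.File] {
    def compare(a: java.io.File, b: java.io.File): Int = a.getName.compareTo(b.getName)
  })
  listing
}
```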
Backported from c141254 --- .../classpath/DirectoryFlatClassPath.scala | 27 ++++++++++++++++--- .../presentation/callcc-interpreter.check | 4 +-- test/files/run/t7747-repl.check | 2 +- .../classpath/FlatClassPathResolverTest.scala | 4 +-- 4 files changed, 29 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala index 81d2f7320f97..43e5ace6c5f8 100644 --- a/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/DirectoryFlatClassPath.scala @@ -41,7 +41,7 @@ trait DirectoryFileLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatC val dirForPackage = getDirectory(inPackage) val nestedDirs: Array[File] = dirForPackage match { case None => Array.empty - case Some(directory) => directory.listFiles(DirectoryFileLookup.packageDirectoryFileFilter) + case Some(directory) => listDir(directory, Some(DirectoryFileLookup.packageDirectoryFileFilter)) } val prefix = PackageNameUtils.packagePrefix(inPackage) val entries = nestedDirs map { file => @@ -54,7 +54,7 @@ trait DirectoryFileLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatC val dirForPackage = getDirectory(inPackage) val files: Array[File] = dirForPackage match { case None => Array.empty - case Some(directory) => directory.listFiles(fileFilter) + case Some(directory) => listDir(directory, Some(fileFilter)) } val entries = files map { file => val wrappedFile = new scala.reflect.io.File(file) @@ -67,7 +67,7 @@ trait DirectoryFileLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatC val dirForPackage = getDirectory(inPackage) val files: Array[File] = dirForPackage match { case None => Array.empty - case Some(directory) => directory.listFiles() + case Some(directory) => listDir(directory, None) } val packagePrefix = PackageNameUtils.packagePrefix(inPackage) val packageBuf = collection.mutable.ArrayBuffer.empty[PackageEntry] @@ -85,6 +85,27 @@ trait DirectoryFileLookup[FileEntryType <: ClassRepClassPathEntry] extends FlatC FlatClassPathEntries(packageBuf, fileBuf) } + private def listDir(dir: File, filter: Option[FileFilter]): Array[File] = { + val listing = filter match { + case Some(f) => dir.listFiles(f) + case None => dir.listFiles() + } + + // Sort by file name for stable order of directory .class entries in package scope. + // This gives stable results ordering of base type sequences for unrelated classes + // with the same base type depth. + // + // Notably, this will stably infer`Product with Serializable` + // as the type of `case class C(); case class D(); List(C(), D()).head`, rather than the opposite order. + // On Mac, the HFS performs this sorting transparently, but on Linux the order is unspecified. + // + // Note this behaviour can be enabled in javac with `javac -XDsortfiles`, but that's only + // intended to improve determinism of the compiler for compiler hackers. 
+ java.util.Arrays.sort(listing, new java.util.Comparator[File] { def compare(o1: File, o2: File) = o1.getName.compareTo(o2.getName) } ) + listing + } + + protected def createFileEntry(file: AbstractFile): FileEntryType protected def fileFilter: FileFilter } diff --git a/test/files/presentation/callcc-interpreter.check b/test/files/presentation/callcc-interpreter.check index 94a3d64d68dc..62d1db11e605 100644 --- a/test/files/presentation/callcc-interpreter.check +++ b/test/files/presentation/callcc-interpreter.check @@ -18,7 +18,7 @@ case class Var extends callccInterpreter.Term with Product with Serializable case object Wrong def +(other: String): String def ->[B](y: B): (callccInterpreter.type, B) -def add(a: callccInterpreter.Value,b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with Serializable with callccInterpreter.Value] +def add(a: callccInterpreter.Value,b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Serializable with Product with callccInterpreter.Value] def apply(a: callccInterpreter.Value,b: callccInterpreter.Value): callccInterpreter.M[callccInterpreter.Value] def callCC[A](h: (A => callccInterpreter.M[A]) => callccInterpreter.M[A]): callccInterpreter.M[A] def ensuring(cond: Boolean): callccInterpreter.type @@ -90,7 +90,7 @@ def showM(m: callccInterpreter.M[callccInterpreter.Value]): String = m.in.apply( askType at CallccInterpreter.scala(50,30) ================================================================================ [response] askTypeAt (50,30) -def add(a: callccInterpreter.Value, b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with Serializable with callccInterpreter.Value] = scala.Tuple2.apply[callccInterpreter.Value, callccInterpreter.Value](a, b) match { +def add(a: callccInterpreter.Value, b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Serializable with Product with callccInterpreter.Value] = scala.Tuple2.apply[callccInterpreter.Value, callccInterpreter.Value](a, b) match { case (_1: callccInterpreter.Value, _2: callccInterpreter.Value)(callccInterpreter.Value, callccInterpreter.Value)((n: Int)callccInterpreter.Num((m @ _)), (n: Int)callccInterpreter.Num((n @ _))) => this.unitM[callccInterpreter.Num](callccInterpreter.this.Num.apply(m.+(n))) case _ => callccInterpreter.this.unitM[callccInterpreter.Wrong.type](callccInterpreter.this.Wrong) } diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check index d698ea668d5a..687d432ea008 100644 --- a/test/files/run/t7747-repl.check +++ b/test/files/run/t7747-repl.check @@ -280,7 +280,7 @@ object $read extends scala.AnyRef { }; val INSTANCE = new $read. 
} -res3: List[Product with Serializable] = List(BippyBups(), PuppyPups(), Bingo()) +res3: List[Serializable with Product] = List(BippyBups(), PuppyPups(), Bingo()) scala> case class Sum(exp: String, exp2: String) defined class Sum diff --git a/test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala b/test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala index 5dee488285c0..b5436b873232 100644 --- a/test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala +++ b/test/junit/scala/tools/nsc/classpath/FlatClassPathResolverTest.scala @@ -117,9 +117,9 @@ class FlatClassPathResolverTest { val packageNameParts = if (inPackage == FlatClassPath.RootPackage) Nil else inPackage.split('.').toList val recursiveClassPathInPackage = traverseToPackage(packageNameParts, recursiveClassPath) - val flatCpPackages = flatClassPath.packages(inPackage).map(_.name) + val flatCpPackages = flatClassPath.packages(inPackage).map(_.name).sorted val pkgPrefix = PackageNameUtils.packagePrefix(inPackage) - val recursiveCpPackages = recursiveClassPathInPackage.packages.map(pkgPrefix + _.name) + val recursiveCpPackages = recursiveClassPathInPackage.packages.map(pkgPrefix + _.name).sorted assertEquals(s"Packages in package '$inPackage' on flat cp should be the same as on the recursive cp", recursiveCpPackages, flatCpPackages) From bf79ccd2e6f8dae2bf43b84ec5935bdabe7fd31a Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 23 May 2019 16:22:25 +0200 Subject: [PATCH 1731/2793] bump versions: starr 2.11.12, jline 2.14.6 --- versions.properties | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/versions.properties b/versions.properties index b1d884356274..e5bd96b9d0ab 100644 --- a/versions.properties +++ b/versions.properties @@ -8,7 +8,7 @@ # The scala version used for bootstrapping. This has no impact on the final classfiles: # there are two stages (locker and quick), so compiler and library are always built # with themselves. Stability is ensured by building a third stage (strap). -starr.version=2.11.11 +starr.version=2.11.12 # These are the versions of the modules that go with this release. # These properties are used during PR validation and in dbuild builds. @@ -22,7 +22,7 @@ starr.version=2.11.11 scala.binary.version=2.11 # e.g. 2.11.0-RC1, 2.11.0, 2.11.1-RC1, 2.11.1 # this defines the dependency on scala-continuations-plugin in scala-dist's pom -scala.full.version=2.11.11 +scala.full.version=2.11.12 # external modules shipped with distribution, as specified by scala-library-all's pom scala-xml.version.number=1.0.5 @@ -32,7 +32,7 @@ scala-continuations-library.version.number=1.0.2 scala-swing.version.number=1.0.2 akka-actor.version.number=2.3.16 actors-migration.version.number=1.1.0 -jline.version=2.14.3 +jline.version=2.14.6 scala-asm.version=6.0.0-scala-1 # external modules, used internally (not shipped) From eece60ef53868ea227afc89146dd13184ad7a930 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 20 Jun 2018 13:21:05 +1000 Subject: [PATCH 1732/2793] Backport ASM 6.2 upgrade to 2.11.x via 2.12.x (#6733) Avoid performance problem after ASM upgrade in prod/cons analysis ASM 6.2 now creates a new Frame inside the loop in which `newExceptionValue` is called. We were including this frame in the case-class equality of the pseudo-instruction, `ExceptionProducer`, and upon receiving new instances each time the `ProdCons` analysis massively slowed down. This commit just captures the data we need: the stack top of the handler frame. 
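Concretely, the pseudo-instruction's case-class parameters change from the whole frame to just the handler label and stack depth; roughly (types as used in ProdConsAnalyzer.scala, where `InitialProducer` is defined):

```
import scala.tools.asm.tree.LabelNode
import scala.tools.asm.tree.analysis.{Frame, Value}

// Before: equality depended on a Frame that ASM 6.2 re-allocates on every loop iteration.
//   case class ExceptionProducer(handlerFrame: Frame[_ <: Value]) extends InitialProducer

// After: equality depends only on the data the prod/cons analysis actually needs.
case class ExceptionProducer[V <: Value](handlerLabel: LabelNode, handlerStackTop: Int) extends InitialProducer
```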
Upgrade to scala-asm 6.2 See: https://github.com/scala/scala-asm/issues/5 Upstream changes in ASM: https://github.com/scala/scala-asm/compare/ASM_6_0...ASM_6_2 http://asm.ow2.io/versions.html The motivations, other than just keeping current, are: - support for Java 9/10/11 updates to the classfile format. - reducing needless String => Array[Char] conversions thanks to internal changes in ASM. This PR will fail to build until we publish artifact from scala/scala-asm. Includes a workaround for scala/bug#10418 Move to the standard way of defining a custom asm.Attribute It seems we don't need CustomAttr in our fork of scala-asm, we can just override Attribute.write. Customise label handling without needing to modify ASM directly Comment on our customizations to asm.tree.*Node (cherry picked from commit 79b7f2a56427835c0a8375404fee460def5551b8) --- .../tools/nsc/backend/jvm/AsmUtils.scala | 57 +++++++++++++++-- .../tools/nsc/backend/jvm/BCodeHelpers.scala | 14 +++-- .../nsc/backend/jvm/BCodeSkelBuilder.scala | 2 +- .../scala/tools/nsc/backend/jvm/BTypes.scala | 6 +- .../tools/nsc/backend/jvm/ClassNode1.java | 31 ++++++++++ .../scala/tools/nsc/backend/jvm/GenASM.scala | 10 ++- .../tools/nsc/backend/jvm/LabelNode1.java | 23 +++++++ .../tools/nsc/backend/jvm/MethodNode1.java | 39 ++++++++++++ .../backend/jvm/analysis/BackendUtils.scala | 0 .../jvm/analysis/ProdConsAnalyzer.scala | 22 ++++--- .../backend/jvm/opt/ByteCodeRepository.scala | 2 +- .../nsc/backend/jvm/opt/BytecodeUtils.scala | 2 +- .../tools/partest/nest/StreamCapture.scala | 61 +++++++++++++++++++ .../nsc/backend/jvm/ProdConsBenchmark.scala | 43 +++++++++++++ .../scala/tools/testing/BytecodeTesting.scala | 0 versions.properties | 2 +- 16 files changed, 289 insertions(+), 25 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java create mode 100644 src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java create mode 100644 src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java create mode 100644 src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala create mode 100644 src/partest/scala/tools/partest/nest/StreamCapture.scala create mode 100644 test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala create mode 100644 test/junit/scala/tools/testing/BytecodeTesting.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala index cd7e0b83e8ec..5ba7d0bccced 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/AsmUtils.scala @@ -5,11 +5,14 @@ package scala.tools.nsc.backend.jvm -import scala.tools.asm.tree.{InsnList, AbstractInsnNode, ClassNode, MethodNode} -import java.io.{StringWriter, PrintWriter} -import scala.tools.asm.util.{CheckClassAdapter, TraceClassVisitor, TraceMethodVisitor, Textifier} -import scala.tools.asm.{ClassWriter, Attribute, ClassReader} +import scala.tools.asm.tree.{AbstractInsnNode, ClassNode, FieldNode, InsnList, MethodNode} +import java.io.{PrintWriter, StringWriter} +import java.util.Comparator + +import scala.tools.asm.util.{CheckClassAdapter, Textifier, TraceClassVisitor, TraceMethodVisitor} +import scala.tools.asm.{Attribute, ClassReader, ClassWriter} import scala.collection.convert.decorateAsScala._ +import scala.collection.convert.decorateAsJava._ import scala.tools.nsc.backend.jvm.analysis.InitialProducer import scala.tools.nsc.backend.jvm.opt.InlineInfoAttributePrototype 
@@ -55,6 +58,52 @@ object AsmUtils { node } + def readClass(filename: String): ClassNode = readClass(classBytes(filename)) + + def classBytes(file: String): Array[Byte] = { + val f = new java.io.RandomAccessFile(file, "r") + val bytes = new Array[Byte](f.length.toInt) + f.read(bytes) + bytes + } + + def classFromBytes(bytes: Array[Byte]): ClassNode = { + val node = new ClassNode1() + new ClassReader(bytes).accept(node, ClassReader.SKIP_DEBUG | ClassReader.SKIP_FRAMES) + + node + } + +// def main(args: Array[String]): Unit = println(textify(sortedClassRead(classBytes(args.head)))) + + def sortClassMembers(node: ClassNode): node.type = { + node.fields.sort(new Comparator[FieldNode] { + override def compare(o1: FieldNode, o2: FieldNode): Int = o1.name compareTo o2.name + }) + node.methods.sort(new Comparator[MethodNode] { + override def compare(o1: MethodNode, o2: MethodNode): Int = o1.name compareTo o2.name + }) + node + } + + // drop ScalaSig annotation and class attributes + def zapScalaClassAttrs(node: ClassNode): node.type = { + if (node.visibleAnnotations != null) + node.visibleAnnotations = node.visibleAnnotations.asScala.filterNot(a => a == null || a.desc.contains("Lscala/reflect/ScalaSignature")).asJava + + node.attrs = null + node + } + + def main(args: Array[String]): Unit = args.par.foreach { classFileName => + val node = zapScalaClassAttrs(sortClassMembers(classFromBytes(classBytes(classFileName)))) + + val pw = new PrintWriter(classFileName + ".asm") + val trace = new TraceClassVisitor(pw) + node.accept(trace) + pw.close() + } + /** * Returns a human-readable representation of the cnode ClassNode. */ diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 1b976817431b..15432b11af6b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -12,6 +12,7 @@ import scala.collection.mutable import scala.tools.nsc.io.AbstractFile import GenBCode._ import BackendReporting._ +import scala.tools.asm.ClassWriter /* * Traits encapsulating functionality to convert Scala AST Trees into ASM ClassNodes. 
@@ -244,9 +245,14 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { * can-multi-thread */ def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = { - val dest = new Array[Byte](len) - System.arraycopy(b, offset, dest, 0, len) - new asm.CustomAttr(name, dest) + new asm.Attribute(name) { + override def write(classWriter: ClassWriter, code: Array[Byte], + codeLength: Int, maxStack: Int, maxLocals: Int): asm.ByteVector = { + val byteVector = new asm.ByteVector(len) + byteVector.putByteArray(b, offset, len) + byteVector + } + } } /* @@ -766,7 +772,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic with BytecodeWriters { this.cunit = cunit val bType = mirrorClassClassBType(moduleClass) - val mirrorClass = new asm.tree.ClassNode + val mirrorClass = new ClassNode1 mirrorClass.visit( classfileVersion, bType.info.get.flags, diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index a9b6a312e9cb..92a017b557d6 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -104,7 +104,7 @@ abstract class BCodeSkelBuilder extends BCodeHelpers { val classBType = classBTypeFromSymbol(claszSymbol) - cnode = new asm.tree.ClassNode() + cnode = new ClassNode1() initJClass(cnode) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index 0c26e0132220..7adce4485b5d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -97,7 +97,7 @@ abstract class BTypes { /** * Obtain the BType for a type descriptor or internal name. For class descriptors, the ClassBType * is constructed by parsing the corresponding classfile. - * + * * Some JVM operations use either a full descriptor or only an internal name. Example: * ANEWARRAY java/lang/String // a new array of strings (internal name for the String class) * ANEWARRAY [Ljava/lang/String; // a new array of array of string (full descriptor for the String class) @@ -964,6 +964,8 @@ abstract class BTypes { // finds the first common one. // MOST LIKELY the answer can be found here, see the comments and links by Miguel: // - https://issues.scala-lang.org/browse/SI-3872 + // @jz Wouldn't it be better to walk the superclass chain of both types in reverse (starting from Object), and + // finding the last common link? 
That would be O(N), whereas this looks O(N^2) firstCommonSuffix(this :: this.superClassesTransitive.orThrow, other :: other.superClassesTransitive.orThrow) } @@ -1155,4 +1157,4 @@ object BTypes { // no static way (without symbol table instance) to get to nme.ScalaATTR / ScalaSignatureATTR val ScalaAttributeName = "Scala" val ScalaSigAttributeName = "ScalaSig" -} \ No newline at end of file +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java b/src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java new file mode 100644 index 000000000000..b62374dcc53b --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassNode1.java @@ -0,0 +1,31 @@ +/* NSC -- new Scala compiler + * Copyright 2018 LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc.backend.jvm; + +import scala.tools.asm.MethodVisitor; +import scala.tools.asm.Opcodes; +import scala.tools.asm.tree.ClassNode; +import scala.tools.asm.tree.MethodNode; + +/** + * A subclass of {@link ClassNode} to customize the representation of + * label nodes with {@link LabelNode1}. + */ +public class ClassNode1 extends ClassNode { + public ClassNode1() { + this(Opcodes.ASM6); + } + + public ClassNode1(int api) { + super(api); + } + + @Override + public MethodVisitor visitMethod(int access, String name, String descriptor, String signature, String[] exceptions) { + MethodNode method = new MethodNode1(access, name, descriptor, signature, exceptions); + methods.add(method); + return method; + } +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 9dba9e23ceee..2c07e93a17d6 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -512,9 +512,13 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self => } def createJAttribute(name: String, b: Array[Byte], offset: Int, len: Int): asm.Attribute = { - val dest = new Array[Byte](len) - System.arraycopy(b, offset, dest, 0, len) - new asm.CustomAttr(name, dest) + new asm.Attribute(name) { + override def write(classWriter: asm.ClassWriter, code: Array[Byte], codeLength: Int, maxStack: Int, maxLocals: Int): asm.ByteVector = { + val byteVector = new asm.ByteVector(len) + byteVector.putByteArray(b, offset, len) + byteVector + } + } } // ----------------------------------------------------------------------------------------- diff --git a/src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java b/src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java new file mode 100644 index 000000000000..5bb3c5835428 --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/LabelNode1.java @@ -0,0 +1,23 @@ +/* NSC -- new Scala compiler + * Copyright 2018 LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc.backend.jvm; + +import scala.tools.asm.Label; +import scala.tools.asm.tree.ClassNode; +import scala.tools.asm.tree.LabelNode; + +/** + * A subclass of {@link LabelNode} to add user-definable flags. 
+ */ +public class LabelNode1 extends LabelNode { + public LabelNode1() { + } + + public LabelNode1(Label label) { + super(label); + } + + public int flags; +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java b/src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java new file mode 100644 index 000000000000..9c735acdd65a --- /dev/null +++ b/src/compiler/scala/tools/nsc/backend/jvm/MethodNode1.java @@ -0,0 +1,39 @@ +/* NSC -- new Scala compiler + * Copyright 2018 LAMP/EPFL + * @author Martin Odersky + */ +package scala.tools.nsc.backend.jvm; + +import scala.tools.asm.Label; +import scala.tools.asm.Opcodes; +import scala.tools.asm.tree.LabelNode; +import scala.tools.asm.tree.MethodNode; +/** + * A subclass of {@link MethodNode} to customize the representation of + * label nodes with {@link LabelNode1}. + */ +public class MethodNode1 extends MethodNode { + public MethodNode1(int api, int access, String name, String descriptor, String signature, String[] exceptions) { + super(api, access, name, descriptor, signature, exceptions); + } + + public MethodNode1(int access, String name, String descriptor, String signature, String[] exceptions) { + this(Opcodes.ASM6, access, name, descriptor, signature, exceptions); + } + + public MethodNode1(int api) { + super(api); + } + + public MethodNode1() { + this(Opcodes.ASM6); + } + + @Override + protected LabelNode getLabelNode(Label label) { + if (!(label.info instanceof LabelNode)) { + label.info = new LabelNode1(label); + } + return (LabelNode) label.info; + } +} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzer.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzer.scala index 594fd8923c87..c24b0b16cad9 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/ProdConsAnalyzer.scala @@ -102,8 +102,13 @@ class ProdConsAnalyzer(methodNode: MethodNode, classInternalName: InternalName) inputValues(insn).iterator.flatMap(v => v.insns.asScala).toSet } - def consumersOfOutputsFrom(insn: AbstractInsnNode): Set[AbstractInsnNode] = - _consumersOfOutputsFrom.get(insn).map(v => v.indices.flatMap(v.apply)(collection.breakOut): Set[AbstractInsnNode]).getOrElse(Set.empty) + def consumersOfOutputsFrom(insn: AbstractInsnNode): Set[AbstractInsnNode] = insn match { + case _: UninitializedLocalProducer => Set.empty + case ParameterProducer(local) => consumersOfValueAt(methodNode.instructions.getFirst, local) + case ExceptionProducer(handlerLabel, handlerStackTop) => consumersOfValueAt(handlerLabel, handlerStackTop) + case _ => + _consumersOfOutputsFrom.get(insn).map(v => v.indices.flatMap(v.apply)(collection.breakOut): Set[AbstractInsnNode]).getOrElse(Set.empty) + } /** * Returns the potential initial producer instructions of a value in the frame of `insn`. 
@@ -386,7 +391,7 @@ class ProdConsAnalyzer(methodNode: MethodNode, classInternalName: InternalName) private def outputValueSlots(insn: AbstractInsnNode): Seq[Int] = insn match { case ParameterProducer(local) => Seq(local) case UninitializedLocalProducer(local) => Seq(local) - case ExceptionProducer(frame) => Seq(frame.stackTop) + case ExceptionProducer(_, stackTop) => Seq(stackTop) case _ => if (insn.getOpcode == -1) return Seq.empty if (isStore(insn)) { @@ -459,11 +464,11 @@ abstract class InitialProducer extends AbstractInsnNode(-1) { override def accept(cv: MethodVisitor): Unit = throw new UnsupportedOperationException } -case class ParameterProducer(local: Int) extends InitialProducer -case class UninitializedLocalProducer(local: Int) extends InitialProducer -case class ExceptionProducer(handlerFrame: Frame[_ <: Value]) extends InitialProducer +case class ParameterProducer(local: Int) extends InitialProducer +case class UninitializedLocalProducer(local: Int) extends InitialProducer +case class ExceptionProducer[V <: Value](handlerLabel: LabelNode, handlerStackTop: Int) extends InitialProducer -class InitialProducerSourceInterpreter extends SourceInterpreter { +class InitialProducerSourceInterpreter extends SourceInterpreter(scala.tools.asm.Opcodes.ASM7_EXPERIMENTAL) { override def newParameterValue(isInstanceMethod: Boolean, local: Int, tp: Type): SourceValue = { new SourceValue(tp.getSize, ParameterProducer(local)) } @@ -473,6 +478,7 @@ class InitialProducerSourceInterpreter extends SourceInterpreter { } override def newExceptionValue(tryCatchBlockNode: TryCatchBlockNode, handlerFrame: Frame[_ <: Value], exceptionType: Type): SourceValue = { - new SourceValue(1, ExceptionProducer(handlerFrame)) + val handlerStackTop = handlerFrame.stackTop + 1 // +1 because this value is about to be pushed onto `handlerFrame`. 
+ new SourceValue(1, ExceptionProducer(tryCatchBlockNode.handler, handlerStackTop)) } } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala index a5b85e54e790..c73da089d924 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala @@ -137,7 +137,7 @@ class ByteCodeRepository(val classPath: ClassFileLookup[AbstractFile], val isJav private def parseClass(internalName: InternalName): Either[ClassNotFound, ClassNode] = { val fullName = internalName.replace('/', '.') classPath.findClassFile(fullName) map { classFile => - val classNode = new asm.tree.ClassNode() + val classNode = new ClassNode1 val classReader = new asm.ClassReader(classFile.toByteArray) // Passing the InlineInfoAttributePrototype makes the ClassReader invoke the specific `read` diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala index 7aadd2c466a3..0d01fd6d5229 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -283,7 +283,7 @@ object BytecodeUtils { */ def newLabelNode: LabelNode = { val label = new Label - val labelNode = new LabelNode(label) + val labelNode = new LabelNode1(label) label.info = labelNode labelNode } diff --git a/src/partest/scala/tools/partest/nest/StreamCapture.scala b/src/partest/scala/tools/partest/nest/StreamCapture.scala new file mode 100644 index 000000000000..b24a4f9c768e --- /dev/null +++ b/src/partest/scala/tools/partest/nest/StreamCapture.scala @@ -0,0 +1,61 @@ +/* NEST (New Scala Test) + * Copyright 2007-2013 LAMP/EPFL + * @author Paul Phillips + */ +package scala.tools.partest +package nest + +import java.io.{Console => _, _} +import java.nio.charset.Charset + +object StreamCapture { + def savingSystem[T](body: => T): T = { + val savedOut = System.out + val savedErr = System.err + try body + finally { + System setErr savedErr + System setOut savedOut + } + } + + def capturingOutErr[A](output: OutputStream)(f: => A): A = { + import java.io._ + val charset = Charset.defaultCharset() + val printStream = new PrintStream(output, true, charset.name()) + savingSystem { + System.setOut(printStream) + System.setErr(printStream) + try { + scala.Console.withErr(printStream) { + scala.Console.withOut(printStream) { + f + } + } + } finally { + printStream.close() + } + } + } + + def withExtraProperties[A](extra: Map[String, String])(action: => A): A = { + val saved = System.getProperties() + val modified = new java.util.Properties() + // on Java 9, we need to cast our way around this: + // src/main/scala/scala/tools/partest/nest/StreamCapture.scala:44: ambiguous reference to overloaded definition, + // both method putAll in class Properties of type (x$1: java.util.Map[_, _])Unit + // and method putAll in class Hashtable of type (x$1: java.util.Map[_ <: Object, _ <: Object])Unit + // match argument types (java.util.Properties) + (modified: java.util.Hashtable[AnyRef, AnyRef]).putAll(saved) + extra.foreach { case (k, v) => modified.setProperty(k, v) } + // Trying to avoid other threads seeing the new properties object prior to the new entries + // https://github.com/scala/scala/pull/6391#issuecomment-371346171 + UnsafeAccess.U.storeFence() + System.setProperties(modified) + try { + action + } finally { + 
System.setProperties(saved) + } + } +} diff --git a/test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala b/test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala new file mode 100644 index 000000000000..761b1168576e --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala @@ -0,0 +1,43 @@ +package scala.tools.nsc +package backend.jvm + +import java.util.concurrent.TimeUnit + +import scala.tools.asm.tree.ClassNode +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import scala.collection.JavaConverters.asScalaIteratorConverter +import scala.tools.asm.tree.ClassNode + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class ProdConsBenchmark { + type G <: Global + var global: G = _ + private var classNode: ClassNode = _ + + @Setup(Level.Trial) def setup(): Unit = { + val settings = new Settings() + settings.usejavacp.value = true + val global = new Global(settings) + import global._ + this.global = global.asInstanceOf[G] + classNode = AsmUtils.readClass(global.classPath.findClassFile("scala.tools.nsc.typechecker.Implicits$ImplicitSearch").get.toByteArray) + } + + @Benchmark + def prodCons(bh: Blackhole): Unit = { + val global: G = this.global + import global.genBCode.postProcessor.backendUtils._ + for (m <- classNode.methods.iterator().asScala) { + bh.consume(new ProdConsAnalyzer(m, classNode.name)) + } + } +} + diff --git a/test/junit/scala/tools/testing/BytecodeTesting.scala b/test/junit/scala/tools/testing/BytecodeTesting.scala new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/versions.properties b/versions.properties index e5bd96b9d0ab..690ae0b4956a 100644 --- a/versions.properties +++ b/versions.properties @@ -33,7 +33,7 @@ scala-swing.version.number=1.0.2 akka-actor.version.number=2.3.16 actors-migration.version.number=1.1.0 jline.version=2.14.6 -scala-asm.version=6.0.0-scala-1 +scala-asm.version=6.2.0-scala-2 # external modules, used internally (not shipped) partest.version.number=1.0.16 From a0b85a5ef4c91c0eb46e665887d1b3e322fa6b43 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Thu, 23 May 2019 12:10:59 -0400 Subject: [PATCH 1733/2793] fix help text for multiple args to -opt-inline-from --- src/compiler/scala/tools/nsc/settings/ScalaSettings.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 804481ef709a..8b736448822d 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -363,10 +363,10 @@ trait ScalaSettings extends AbsScalaSettings | Classes defined in source files compiled in the current compilation, either | passed explicitly to the compiler or picked up from the `-sourcepath` | - |The setting accepts a list of patterns: `-opt-inline-from:p1:p2`. The setting can be passed + |The setting accepts a list of patterns: `-opt-inline-from:p1,p2`. The setting can be passed |multiple times, the list of patterns gets extended. A leading `!` marks a pattern excluding. |The last matching pattern defines whether a classfile is included or excluded (default: excluded). - |For example, `a.**:!a.b.**` includes classes in a and sub-packages, but not in a.b and sub-packages. 
+ |For example, `a.**,!a.b.**` includes classes in a and sub-packages, but not in a.b and sub-packages. | |Note: on the command-line you might need to quote patterns containing `*` to prevent the shell |from expanding it to a list of files in the current directory.""".stripMargin)) From b09f679bfacf1aabeb83e06f6ca08c056ae192f7 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 31 Aug 2016 16:33:00 +1000 Subject: [PATCH 1734/2793] [backport] Disable stack hungry test of deprecated PagedSeq (cherry picked from commit 241fb9fe204d2974e0e1b2a60c2b71298e88f3b6) --- test/junit/scala/collection/immutable/PagedSeqTest.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/junit/scala/collection/immutable/PagedSeqTest.scala b/test/junit/scala/collection/immutable/PagedSeqTest.scala index 74f8825307c8..6c974db884e0 100644 --- a/test/junit/scala/collection/immutable/PagedSeqTest.scala +++ b/test/junit/scala/collection/immutable/PagedSeqTest.scala @@ -2,13 +2,14 @@ package scala.collection.immutable import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Test +import org.junit.{Ignore, Test} import org.junit.Assert._ @RunWith(classOf[JUnit4]) class PagedSeqTest { // should not NPE, and should equal the given Seq @Test + @Ignore("This tests a non-stack safe method in a deprecated class that requires ~1.5M stack, disabling") def test_SI6615(): Unit = { assertEquals(Seq('a'), PagedSeq.fromStrings(List.fill(5000)("a")).slice(4096, 4097)) } From 248b651e102c815317b4b23157e3ea82e0c5b3d7 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 24 May 2019 18:38:00 +1000 Subject: [PATCH 1735/2793] Optimize macro reflection --- .../runtime/JavaReflectionRuntimes.scala | 32 +++++++++++-------- 1 file changed, 19 insertions(+), 13 deletions(-) diff --git a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala index 37d3c4ce213d..944e2b917745 100644 --- a/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala +++ b/src/compiler/scala/reflect/macros/runtime/JavaReflectionRuntimes.scala @@ -31,19 +31,25 @@ trait JavaReflectionRuntimes { // so every methName can resolve to at maximum one method val implMeth = implMeths getOrElse { throw new NoSuchMethodException(s"$className.$methName") } macroLogVerbose(s"successfully loaded macro impl as ($implClass, $implMeth)") - args => { - val implObj = - if (isBundle) { - def isMacroContext(clazz: Class[_]) = clazz == classOf[BlackboxContext] || clazz == classOf[WhiteboxContext] - def isBundleCtor(ctor: jConstructor[_]) = ctor.getParameterTypes match { - case Array(param) if isMacroContext(param) => true - case _ => false - } - val Array(bundleCtor) = implClass.getConstructors.filter(isBundleCtor) - bundleCtor.newInstance(args.c) - } else ReflectionUtils.staticSingletonInstance(implClass) - val implArgs = if (isBundle) args.others else args.c +: args.others - implMeth.invoke(implObj, implArgs.asInstanceOf[Seq[AnyRef]]: _*) + if (isBundle) { + def isMacroContext(clazz: Class[_]) = clazz == classOf[BlackboxContext] || clazz == classOf[WhiteboxContext] + + def isBundleCtor(ctor: jConstructor[_]) = ctor.getParameterTypes match { + case Array(param) if isMacroContext(param) => true + case _ => false + } + + val Array(bundleCtor) = implClass.getConstructors.filter(isBundleCtor) + args => { + val implObj = bundleCtor.newInstance(args.c) + implMeth.invoke(implObj, args.others.asInstanceOf[Seq[AnyRef]]: _*) + } + } else { + val 
implObj = ReflectionUtils.staticSingletonInstance(implClass) + args => { + val implArgs = args.c +: args.others + implMeth.invoke(implObj, implArgs.asInstanceOf[Seq[AnyRef]]: _*) + } } } } From 08e697db01c637b4f8216e95f8997bc7d36e2051 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 26 May 2019 13:48:12 +1000 Subject: [PATCH 1736/2793] Add comments to test --- test/junit/scala/tools/nsc/PipelineMainTest.scala | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/test/junit/scala/tools/nsc/PipelineMainTest.scala b/test/junit/scala/tools/nsc/PipelineMainTest.scala index e3e6a81fc921..8d4218029c6d 100644 --- a/test/junit/scala/tools/nsc/PipelineMainTest.scala +++ b/test/junit/scala/tools/nsc/PipelineMainTest.scala @@ -80,6 +80,7 @@ class PipelineMainTest { private lazy val allBuilds = List(m1, b2, b3, b4, b5SuperAccessor) + // Build containing a macro definition and a reference to it from another internal subproject private lazy val m1: Build = { val build = new Build(projectsBase, "m1") val macroProject = build.project("p1") @@ -106,6 +107,7 @@ class PipelineMainTest { build } + // Build containing a reference to the external macro from `b1` private lazy val b2: Build = { val build = new Build(projectsBase, "b1") val p1 = build.project("p1") @@ -120,6 +122,9 @@ class PipelineMainTest { build } + // Build containing projects with mixed Java/Scala source files. + // PipelineMain pickles the API of jointly compiled .java files and + // places these on the classpath of downstream scalac invocations. private lazy val b3: Build = { val build = new Build(projectsBase, "b3") val p1 = build.project("p1") @@ -156,6 +161,7 @@ class PipelineMainTest { build } + // External version of `b4.p2`. private lazy val b4: Build = { val build = new Build(projectsBase, "b4") val b3P1 = b3.project("p1") @@ -178,6 +184,8 @@ class PipelineMainTest { build } + // Build containing motivating test case for special handling of `Super` AST nodes + // in outline typechecking implementation. private lazy val b5SuperAccessor: Build = { val build = new Build(projectsBase, "b5") val p1 = build.project("p1") From eef9c980a9a8fbd900a7394fcd6dce93f9b7e2fb Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 24 May 2019 18:23:33 +1000 Subject: [PATCH 1737/2793] Avoid building temp set in Attachment.remove --- src/reflect/scala/reflect/macros/Attachments.scala | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/src/reflect/scala/reflect/macros/Attachments.scala b/src/reflect/scala/reflect/macros/Attachments.scala index 15dc568b8eef..7fa3e85d35b4 100644 --- a/src/reflect/scala/reflect/macros/Attachments.scala +++ b/src/reflect/scala/reflect/macros/Attachments.scala @@ -65,9 +65,12 @@ abstract class Attachments { self => /** Creates a copy of this attachment with the payload of the given class type `T` removed. */ def remove[T: ClassTag]: Attachments { type Pos = self.Pos } = { - val newAll = all filterNot matchesTag[T] - if (newAll.isEmpty) pos.asInstanceOf[Attachments { type Pos = self.Pos }] - else new NonemptyAttachments[Pos](this.pos, newAll) + if (!all.exists(matchesTag[T])) this // OPT immutable.Set.filter doesn't structurally share on 2.12 collections. 
+ else { + val newAll = all filterNot matchesTag[T] + if (newAll.isEmpty) pos.asInstanceOf[Attachments { type Pos = self.Pos }] + else new NonemptyAttachments[Pos](this.pos, newAll) + } } def isEmpty: Boolean = true From e7e4af815f345a0b3ede1e0a9b34a7dd1823c875 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 24 May 2019 18:12:59 +1000 Subject: [PATCH 1738/2793] Fuse freeTerms and freeTypes in post-macro accounting --- .../scala/tools/nsc/typechecker/Macros.scala | 2 +- src/reflect/scala/reflect/internal/Trees.scala | 14 ++++++++------ 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 6d8d87b8ef7d..8eb41f300c8f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -822,7 +822,7 @@ trait Macros extends MacroRuntimes with Traces with Helpers { def validateResultingTree(expanded: Tree) = { macroLogVerbose("original:") macroLogLite("" + expanded + "\n" + showRaw(expanded)) - val freeSyms = expanded.freeTerms ++ expanded.freeTypes + val freeSyms = expanded.freeSyms freeSyms foreach (sym => MacroFreeSymbolError(expandee, sym)) // Macros might have spliced arguments with range positions into non-compliant // locations, notably, under a tree without a range position. Or, they might diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index 4d8b5fcac421..f925b9d3a8e4 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -171,17 +171,19 @@ trait Trees extends api.Trees { if (builder eq null) Nil else builder.result() } - def freeTerms: List[FreeTermSymbol] = freeSyms[FreeTermSymbol](_.isFreeTerm, _.termSymbol) - def freeTypes: List[FreeTypeSymbol] = freeSyms[FreeTypeSymbol](_.isFreeType, _.typeSymbol) + def freeTerms: List[FreeTermSymbol] = freeSyms(terms = true, types = false).asInstanceOf[List[FreeTermSymbol]] + def freeTypes: List[FreeTypeSymbol] = freeSyms(terms = false, types = true).asInstanceOf[List[FreeTypeSymbol]] + def freeSyms: List[FreeSymbol] = freeSyms(terms = true, types = true) - private def freeSyms[S <: Symbol](isFree: Symbol => Boolean, symOfType: Type => Symbol): List[S] = { - val s = mutable.LinkedHashSet[S]() - def addIfFree(sym: Symbol): Unit = if (sym != null && isFree(sym)) s += sym.asInstanceOf[S] + private def freeSyms(terms: Boolean, types: Boolean): List[FreeSymbol] = { + val s = mutable.LinkedHashSet[FreeSymbol]() + def addIfFree(sym: Symbol): Unit = if (sym != null && (terms && sym.isFreeTerm || types && sym.isFreeType)) s += sym.asInstanceOf[FreeSymbol] for (t <- this) { addIfFree(t.symbol) if (t.tpe != null) { for (tp <- t.tpe) { - addIfFree(symOfType(tp)) + if (types) addIfFree(tp.typeSymbol) + if (types) addIfFree(tp.termSymbol) } } } From 54953638f31957293381f5050a9f5ba929ad1299 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 24 May 2019 18:01:16 +1000 Subject: [PATCH 1739/2793] Optimize symbol lookup from an import Fuse the duplicate and resetPos traversals over the import qualifier. 
--- src/compiler/scala/tools/nsc/ast/Trees.scala | 7 ------- src/compiler/scala/tools/nsc/typechecker/Contexts.scala | 2 +- src/reflect/scala/reflect/internal/Trees.scala | 8 ++++++++ src/reflect/scala/reflect/runtime/JavaUniverseForce.scala | 1 + 4 files changed, 10 insertions(+), 8 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index 6af6d0ea1ea1..a46befc86f87 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -171,13 +171,6 @@ trait Trees extends scala.reflect.internal.Trees { self: Global => transformer.treeCopy.TypeTreeWithDeferredRefCheck(tree) } - object resetPos extends Traverser { - override def traverse(t: Tree) { - if (t != EmptyTree) t.setPos(NoPosition) - super.traverse(t) - } - } - // Finally, no one uses resetAllAttrs anymore, so I'm removing it from the compiler. // Even though it's with great pleasure I'm doing that, I'll leave its body here to warn future generations about what happened in the past. // diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index c23c57f1024a..91f832b7c26b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -1300,7 +1300,7 @@ trait Contexts { self: Analyzer => } } // optimization: don't write out package prefixes - finish(resetPos(imp1.qual.duplicate), impSym) + finish(duplicateAndResetPos.transform(imp1.qual), impSym) } else finish(EmptyTree, NoSymbol) } diff --git a/src/reflect/scala/reflect/internal/Trees.scala b/src/reflect/scala/reflect/internal/Trees.scala index f925b9d3a8e4..d6dd771922e1 100644 --- a/src/reflect/scala/reflect/internal/Trees.scala +++ b/src/reflect/scala/reflect/internal/Trees.scala @@ -1710,6 +1710,14 @@ trait Trees extends api.Trees { t1 } } + object duplicateAndResetPos extends Transformer { + override val treeCopy = newStrictTreeCopier + override def transform(t: Tree) = { + val t1 = super.transform(t) + if (t1 ne EmptyTree) t1.setPos(NoPosition) + t1 + } + } trait TreeStackTraverser extends Traverser { import collection.mutable val path: mutable.Stack[Tree] = mutable.Stack() diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index 0b4d7131fbeb..a88a70149cc8 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -120,6 +120,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.pendingSuperCall this.emptyValDef this.EmptyTreeTypeSubstituter + this.duplicateAndResetPos this.UnmappableAnnotArg this.LiteralAnnotArg this.ArrayAnnotArg From d629bcf02bf108276b739913dbfee842d3d6c7ab Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 26 May 2019 14:11:09 +1000 Subject: [PATCH 1740/2793] Be lazier in assembling macro FastTrack mappings No need to force the base classes of reflect.api.Universe etc, which triggers a lot of classfile parsing. We know exactly who owns the special-cases symbols, so just use `.decl` instead. 
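As background, the difference between a declaration lookup and a full member lookup can also be seen from the public scala-reflect API. A minimal sketch, assuming scala-reflect is on the classpath; it uses `Type.decl`/`Type.member`, not the compiler-internal `getDecl*`/`getMember*` helpers touched below:

```scala
import scala.reflect.runtime.universe._

object DeclVsMemberDemo {
  trait Base { def inherited: Int = 1 }
  class Child extends Base { def own: Int = 2 }

  def main(args: Array[String]): Unit = {
    val tpe = typeOf[Child]
    println(tpe.decl(TermName("own")))         // found: declared directly on Child
    println(tpe.decl(TermName("inherited")))   // <none>: decl does not search Base
    println(tpe.member(TermName("inherited"))) // found, but only after looking at the base type too
  }
}
```

`decl` consults only the type's own scope, while `member` also searches the base classes, and that base-class walk is what triggers the extra classfile parsing in the compiler case this change avoids.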
--- .../scala/reflect/internal/Definitions.scala | 27 ++++++++++++++----- .../reflect/runtime/JavaUniverseForce.scala | 1 + 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 2828db3e01d4..f6605ce1c98c 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ -512,8 +512,8 @@ trait Definitions extends api.StandardDefinitions { lazy val ReflectPackage = requiredModule[scala.reflect.`package`.type] lazy val ReflectApiPackage = getPackageObjectIfDefined("scala.reflect.api") // defined in scala-reflect.jar, so we need to be careful lazy val ReflectRuntimePackage = getPackageObjectIfDefined("scala.reflect.runtime") // defined in scala-reflect.jar, so we need to be careful - def ReflectRuntimeUniverse = ReflectRuntimePackage.map(sym => getMemberValue(sym, nme.universe)) - def ReflectRuntimeCurrentMirror = ReflectRuntimePackage.map(sym => getMemberMethod(sym, nme.currentMirror)) + def ReflectRuntimeUniverse = ReflectRuntimePackage.map(sym => getDeclValue(sym, nme.universe)) + def ReflectRuntimeCurrentMirror = ReflectRuntimePackage.map(sym => getDeclMethod(sym, nme.currentMirror)) lazy val UniverseClass = getClassIfDefined("scala.reflect.api.Universe") // defined in scala-reflect.jar, so we need to be careful def UniverseInternal = getMemberValue(UniverseClass, nme.internal) @@ -536,6 +536,7 @@ trait Definitions extends api.StandardDefinitions { lazy val TypeTagsClass = getClassIfDefined("scala.reflect.api.TypeTags") // defined in scala-reflect.jar, so we need to be careful lazy val ApiUniverseClass = getClassIfDefined("scala.reflect.api.Universe") // defined in scala-reflect.jar, so we need to be careful + lazy val ApiQuasiquotesClass = getClassIfDefined("scala.reflect.api.Quasiquotes") // defined in scala-reflect.jar, so we need to be careful lazy val JavaUniverseClass = getClassIfDefined("scala.reflect.api.JavaUniverse") // defined in scala-reflect.jar, so we need to be careful lazy val MirrorClass = getClassIfDefined("scala.reflect.api.Mirror") // defined in scala-reflect.jar, so we need to be careful @@ -560,10 +561,10 @@ trait Definitions extends api.StandardDefinitions { // scala/bug#8392 a reflection universe on classpath may not have // quasiquotes, if e.g. 
crosstyping with -Xsource on - lazy val QuasiquoteClass = if (ApiUniverseClass != NoSymbol) getMemberIfDefined(ApiUniverseClass, tpnme.Quasiquote) else NoSymbol - lazy val QuasiquoteClass_api = if (QuasiquoteClass != NoSymbol) getMember(QuasiquoteClass, tpnme.api) else NoSymbol - lazy val QuasiquoteClass_api_apply = if (QuasiquoteClass_api != NoSymbol) getMember(QuasiquoteClass_api, nme.apply) else NoSymbol - lazy val QuasiquoteClass_api_unapply = if (QuasiquoteClass_api != NoSymbol) getMember(QuasiquoteClass_api, nme.unapply) else NoSymbol + lazy val QuasiquoteClass = if (ApiUniverseClass != NoSymbol) ApiQuasiquotesClass.info.decl(tpnme.Quasiquote) else NoSymbol + lazy val QuasiquoteClass_api = if (QuasiquoteClass != NoSymbol) QuasiquoteClass.info.decl(tpnme.api) else NoSymbol + lazy val QuasiquoteClass_api_apply = if (QuasiquoteClass_api != NoSymbol) getDeclMethod(QuasiquoteClass_api, nme.apply) else NoSymbol + lazy val QuasiquoteClass_api_unapply = if (QuasiquoteClass_api != NoSymbol) getDeclMethod(QuasiquoteClass_api, nme.unapply) else NoSymbol lazy val ScalaSignatureAnnotation = requiredClass[scala.reflect.ScalaSignature] lazy val ScalaLongSignatureAnnotation = requiredClass[scala.reflect.ScalaLongSignature] @@ -1308,6 +1309,18 @@ trait Definitions extends api.StandardDefinitions { case _ => fatalMissingSymbol(owner, name, "method") } } + def getDeclMethod(owner: Symbol, name: Name): TermSymbol = { + getDecl(owner, name.toTermName) match { + case x: TermSymbol => x + case _ => fatalMissingSymbol(owner, name, "method") + } + } + def getDeclValue(owner: Symbol, name: Name): TermSymbol = { + getDecl(owner, name.toTermName) match { + case x: TermSymbol => x + case _ => fatalMissingSymbol(owner, name, "declared value") + } + } private lazy val erasurePhase = findPhaseWithName("erasure") def getMemberIfDefined(owner: Symbol, name: Name): Symbol = @@ -1574,7 +1587,7 @@ trait Definitions extends api.StandardDefinitions { lazy val HigherKindsFeature = getLanguageFeature("higherKinds") lazy val ExistentialsFeature = getLanguageFeature("existentials") - lazy val ApiUniverseReify = ApiUniverseClass.map(sym => getMemberMethod(sym, nme.reify)) + lazy val ApiUniverseReify = ApiUniverseClass.map(sym => getDeclIfDefined(sym, nme.reify)) lazy val ReflectRuntimeUniverse = DefinitionsClass.this.ReflectRuntimeUniverse lazy val ReflectRuntimeCurrentMirror = DefinitionsClass.this.ReflectRuntimeCurrentMirror diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index a88a70149cc8..2dae947f6579 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -326,6 +326,7 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => definitions.ClassTagClass definitions.TypeTagsClass definitions.ApiUniverseClass + definitions.ApiQuasiquotesClass definitions.JavaUniverseClass definitions.MirrorClass definitions.TypeCreatorClass From 4c2de35af3b76db4fa91e596ca9db6781b796ac5 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 26 May 2019 14:12:51 +1000 Subject: [PATCH 1741/2793] Cache macro impl binding lookup This is needed to check the 'boxity' of the macro each time it is referred to. 
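The change is plain per-key memoization of a deterministic, relatively expensive derivation. A standalone sketch of the pattern (`expensiveParse` is a hypothetical stand-in for unpickling the binding from the annotation; the real code keys a per-run cache by the macro definition's `Symbol`):

```scala
import scala.collection.mutable
import scala.util.Try

object MemoizedLookupDemo {
  // Stand-in for the expensive step; deterministic for a given key, so caching is safe.
  private def expensiveParse(key: String): Option[Int] = {
    println(s"parsing $key") // visible side effect: should happen once per key
    Try(key.toInt).toOption
  }

  private val cache = mutable.AnyRefMap.empty[String, Option[Int]]

  def lookup(key: String): Option[Int] =
    cache.getOrElseUpdate(key, expensiveParse(key))

  def main(args: Array[String]): Unit = {
    println(lookup("42"))   // parses, then Some(42)
    println(lookup("42"))   // cache hit: no second "parsing 42"
    println(lookup("nope")) // parses, then None
  }
}
```

In the compiler the cache lives in `perRunCaches`, so it is cleared between runs and cannot serve stale bindings.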
--- .../scala/tools/nsc/typechecker/Macros.scala | 32 +++++++++++-------- 1 file changed, 18 insertions(+), 14 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 8eb41f300c8f..c72d6f570a49 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -312,10 +312,14 @@ trait Macros extends MacroRuntimes with Traces with Helpers { macroDef withAnnotation AnnotationInfo(MacroImplAnnotation.tpe, List(pickle), Nil) } - def loadMacroImplBinding(macroDef: Symbol): Option[MacroImplBinding] = - macroDef.getAnnotation(MacroImplAnnotation) collect { - case AnnotationInfo(_, List(pickle), _) => MacroImplBinding.unpickle(pickle) - } + def loadMacroImplBinding(macroDef: Symbol): Option[MacroImplBinding] = { + macroImplBindingCache.getOrElseUpdate(macroDef, + macroDef.getAnnotation(MacroImplAnnotation) collect { + case AnnotationInfo(_, List(pickle), _) => MacroImplBinding.unpickle(pickle) + } + ) + } + private val macroImplBindingCache = perRunCaches.newAnyRefMap[Symbol, Option[MacroImplBinding]]() def isBlackbox(expandee: Tree): Boolean = isBlackbox(dissectApplied(expandee).core.symbol) def isBlackbox(macroDef: Symbol): Boolean = pluginsIsBlackbox(macroDef) @@ -906,33 +910,33 @@ trait Macros extends MacroRuntimes with Traces with Helpers { var hasPendingMacroExpansions = false // JZ this is never reset to false. What is its purpose? Should it not be stored in Context? def typerShouldExpandDeferredMacros: Boolean = hasPendingMacroExpansions && !delayed.isEmpty private val forced = perRunCaches.newWeakSet[Tree] - private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Int]]() - private def isDelayed(expandee: Tree) = delayed contains expandee + private val delayed = perRunCaches.newWeakMap[Tree, scala.collection.mutable.Set[Symbol]]() + private def isDelayed(expandee: Tree) = !delayed.isEmpty && (delayed contains expandee) def clearDelayed(): Unit = delayed.clear() - private def calculateUndetparams(expandee: Tree): scala.collection.mutable.Set[Int] = - if (forced(expandee)) scala.collection.mutable.Set[Int]() + private def calculateUndetparams(expandee: Tree): scala.collection.mutable.Set[Symbol] = + if (forced(expandee)) scala.collection.mutable.Set[Symbol]() else delayed.getOrElse(expandee, { val calculated = scala.collection.mutable.Set[Symbol]() expandee foreach (sub => { - def traverse(sym: Symbol) = if (sym != null && (undetparams contains sym.id)) calculated += sym + def traverse(sym: Symbol) = if (sym != null && (undetparams contains sym)) calculated += sym if (sub.symbol != null) traverse(sub.symbol) if (sub.tpe != null) sub.tpe foreach (sub => traverse(sub.typeSymbol)) }) macroLogVerbose("calculateUndetparams: %s".format(calculated)) - calculated map (_.id) + calculated }) - private val undetparams = perRunCaches.newSet[Int]() + private val undetparams = perRunCaches.newSet[Symbol]() def notifyUndetparamsAdded(newUndets: List[Symbol]): Unit = { - undetparams ++= newUndets map (_.id) + undetparams ++= newUndets if (macroDebugVerbose) newUndets foreach (sym => println("undetParam added: %s".format(sym))) } def notifyUndetparamsInferred(undetNoMore: List[Symbol], inferreds: List[Type]): Unit = { - undetparams --= undetNoMore map (_.id) + undetparams --= undetNoMore if (macroDebugVerbose) (undetNoMore zip inferreds) foreach { case (sym, tpe) => println("undetParam inferred: %s as %s".format(sym, tpe))} if 
(!delayed.isEmpty) delayed.toList foreach { case (expandee, undetparams) if !undetparams.isEmpty => - undetparams --= undetNoMore map (_.id) + undetparams --= undetNoMore if (undetparams.isEmpty) { hasPendingMacroExpansions = true macroLogVerbose(s"macro expansion is pending: $expandee") From fdc59d2c780486de9798fab67a4138b6fdea4432 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 26 May 2019 14:01:10 +1000 Subject: [PATCH 1742/2793] Optimize macro context creation - Use lazy vals in Enclosures where possible. - Avoid temporary lists like `enclosingContextChain` --- .../reflect/macros/contexts/Enclosures.scala | 28 +++++++++++-------- 1 file changed, 16 insertions(+), 12 deletions(-) diff --git a/src/compiler/scala/reflect/macros/contexts/Enclosures.scala b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala index 19ce230d0dd9..694aff3232ff 100644 --- a/src/compiler/scala/reflect/macros/contexts/Enclosures.scala +++ b/src/compiler/scala/reflect/macros/contexts/Enclosures.scala @@ -21,24 +21,28 @@ trait Enclosures { import universe._ private lazy val site = callsiteTyper.context - private lazy val enclTrees = site.enclosingContextChain map (_.tree) - private lazy val enclPoses = enclosingMacros map (_.macroApplication.pos) filterNot (_ eq NoPosition) - private def lenientEnclosure[T <: Tree : ClassTag]: Tree = enclTrees collectFirst { case x: T => x } getOrElse EmptyTree - private def strictEnclosure[T <: Tree : ClassTag]: T = enclTrees collectFirst { case x: T => x } getOrElse (throw EnclosureException(classTag[T].runtimeClass, enclTrees)) + private def lenientEnclosure[T <: Tree : ClassTag]: Tree = site.nextEnclosing(c => classTag[T].runtimeClass.isInstance(c.tree)).tree + private def strictEnclosure[T <: Tree : ClassTag]: T = site.nextEnclosing(c => classTag[T].runtimeClass.isInstance(c.tree)) match { + case analyzer.NoContext => throw EnclosureException(classTag[T].runtimeClass, site.enclosingContextChain map (_.tree)) + case cx => cx.tree.asInstanceOf[T] + } - // vals are eager to simplify debugging - // after all we wouldn't save that much time by making them lazy val macroApplication: Tree = expandee - def enclosingPackage: PackageDef = strictEnclosure[PackageDef] - val enclosingClass: Tree = lenientEnclosure[ImplDef] + def enclosingPackage: PackageDef = site.nextEnclosing(_.tree.isInstanceOf[PackageDef]).tree.asInstanceOf[PackageDef] + lazy val enclosingClass: Tree = lenientEnclosure[ImplDef] def enclosingImpl: ImplDef = strictEnclosure[ImplDef] def enclosingTemplate: Template = strictEnclosure[Template] - val enclosingImplicits: List[ImplicitCandidate] = site.openImplicits.map(_.toImplicitCandidate) - val enclosingMacros: List[Context] = this :: universe.analyzer.openMacros // include self - val enclosingMethod: Tree = lenientEnclosure[DefDef] + lazy val enclosingImplicits: List[ImplicitCandidate] = site.openImplicits.map(_.toImplicitCandidate) + private val analyzerOpenMacros = universe.analyzer.openMacros + val enclosingMacros: List[Context] = this :: analyzerOpenMacros // include self + lazy val enclosingMethod: Tree = lenientEnclosure[DefDef] def enclosingDef: DefDef = strictEnclosure[DefDef] - val enclosingPosition: Position = if (enclPoses.isEmpty) NoPosition else enclPoses.head.pos + lazy val enclosingPosition: Position = if (this.macroApplication.pos ne NoPosition) this.macroApplication.pos else { + analyzerOpenMacros.collectFirst { + case x if x.macroApplication.pos ne NoPosition => x.macroApplication.pos + }.getOrElse(NoPosition) + } val enclosingUnit: 
CompilationUnit = universe.currentRun.currentUnit val enclosingRun: Run = universe.currentRun } From c88d6209b1c34528f89b0b02d76b6e9ba58e7d65 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 29 Jan 2015 14:40:45 +1000 Subject: [PATCH 1743/2793] Require and target Java 8 - Require Java 8 in ant build - use -source 1.8 and -target 1.8 for javac - Default scalac's -target to `jvm-1.8`, ignore and deprecate attempts to use `jvm-1.{6.7}` - Remove fragile javap-app test. The feature itself is slated for removal. - Remove obsolete Java6 checkfile - Adapt DCE tests - Remove deprecated/redundant -target:jvm-1.6 from flags where the intent was to trigger generation of stack map frames. - Remove tests with -target:jvm-1.5 that tested without stack map frames - Ignore OpenJDK JVM warnings (via test/[files|scaladoc]/filters). (cherry picked from commit 8d2d3c702d2010d40ed6facb771add48999216c1) --- build-ant-macros.xml | 4 ++-- build.sbt | 2 +- build.xml | 10 +--------- src/compiler/scala/tools/nsc/Global.scala | 7 +++++-- .../tools/nsc/backend/jvm/BCodeBodyBuilder.scala | 2 +- .../tools/nsc/backend/jvm/BCodeIdiomatic.scala | 3 --- .../scala/tools/nsc/backend/jvm/GenASM.scala | 3 --- .../tools/nsc/settings/AbsScalaSettings.scala | 1 + .../tools/nsc/settings/MutableSettings.scala | 7 +++++++ .../nsc/settings/StandardScalaSettings.scala | 4 ++-- .../scala/tools/nsc/transform/Delambdafy.scala | 7 +++---- .../scala/tools/nsc/transform/UnCurry.scala | 2 +- src/manual/scala/man1/scalac.scala | 9 +++------ test/files/neg/deprecated-target.check | 4 ++++ test/files/neg/deprecated-target.flags | 1 + test/files/neg/deprecated-target.scala | 1 + test/files/neg/t6289.check | 6 ------ test/files/run/nothingTypeDce.flags | 2 +- test/files/run/nothingTypeDce.scala | 3 +-- test/files/run/nothingTypeNoFramesNoDce.check | 1 - test/files/run/nothingTypeNoFramesNoDce.flags | 1 - test/files/run/nothingTypeNoOpt.flags | 2 +- .../jvm/opt/CompactLocalVariablesTest.scala | 4 ++-- .../nsc/backend/jvm/opt/MethodLevelOpts.scala | 2 +- .../backend/jvm/opt/UnreachableCodeTest.scala | 16 +++------------- 25 files changed, 42 insertions(+), 62 deletions(-) create mode 100644 test/files/neg/deprecated-target.check create mode 100644 test/files/neg/deprecated-target.flags create mode 100644 test/files/neg/deprecated-target.scala delete mode 100644 test/files/run/nothingTypeNoFramesNoDce.check delete mode 100644 test/files/run/nothingTypeNoFramesNoDce.flags diff --git a/build-ant-macros.xml b/build-ant-macros.xml index ace86cac499d..ca01f4ce1446 100644 --- a/build-ant-macros.xml +++ b/build-ant-macros.xml @@ -105,7 +105,7 @@ - + @@ -132,7 +132,7 @@ - + diff --git a/build.sbt b/build.sbt index 1c94aa343dfc..b3eaf7cb3df0 100644 --- a/build.sbt +++ b/build.sbt @@ -144,7 +144,7 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings + // we always assume that Java classes are standalone and do not have any dependency // on Scala classes compileOrder := CompileOrder.JavaThenScala, - javacOptions in Compile ++= Seq("-g", "-source", "1.5", "-target", "1.6"), + javacOptions in Compile ++= Seq("-g", "-source", "1.8", "-target", "1.8"), // we don't want any unmanaged jars; as a reminder: unmanaged jar is a jar stored // directly on the file system and it's not resolved through Ivy // Ant's build stored unmanaged jars in `lib/` directory diff --git a/build.xml b/build.xml index 1470c666141b..32d39656a4da 100644 --- a/build.xml +++ b/build.xml @@ -193,7 +193,7 @@ TODO: - + @@ -492,20 +492,12 @@ TODO: - - - - - - - - 
diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index a54b92cef8fc..d31c41abf903 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1405,8 +1405,11 @@ class Global(var currentSettings: Settings, var reporter: Reporter) settings.userSetSettings filter (_.isDeprecated) foreach { s => currentRun.reporting.deprecationWarning(NoPosition, s.name + " is deprecated: " + s.deprecationMessage.get) } - if (settings.target.value.contains("jvm-1.5")) - currentRun.reporting.deprecationWarning(NoPosition, settings.target.name + ":" + settings.target.value + " is deprecated: use target for Java 1.6 or above.") + val supportedTarget = "jvm-1.8" + if (settings.target.value != supportedTarget) { + currentRun.reporting.deprecationWarning(NoPosition, settings.target.name + ":" + settings.target.value + " is deprecated and has no effect, setting to " + supportedTarget) + settings.target.value = supportedTarget + } } /* An iterator returning all the units being compiled in this run */ diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala index 416628d5ba7c..4f9a5bceb834 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeBodyBuilder.scala @@ -843,7 +843,7 @@ abstract class BCodeBodyBuilder extends BCodeSkelBuilder { * loading another throwable first). * * New (http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4.10.1) - * - Requires consistent stack map frames. GenBCode generates stack frames if -target:jvm-1.6 + * - Requires consistent stack map frames. GenBCode always generates stack frames. * or higher. * - In practice: the ASM library computes stack map frames for us (ClassWriter). 
Emitting * correct frames after an ATHROW is probably complex, so ASM uses the following strategy: diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala index eb0da7caef69..535e1a862023 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeIdiomatic.scala @@ -28,9 +28,6 @@ abstract class BCodeIdiomatic extends SubComponent { import coreBTypes._ val classfileVersion: Int = settings.target.value match { - case "jvm-1.5" => asm.Opcodes.V1_5 - case "jvm-1.6" => asm.Opcodes.V1_6 - case "jvm-1.7" => asm.Opcodes.V1_7 case "jvm-1.8" => asm.Opcodes.V1_8 } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala index 2c07e93a17d6..74f9cbcde9a7 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GenASM.scala @@ -441,9 +441,6 @@ abstract class GenASM extends SubComponent with BytecodeWriters { self => // ----------------------------------------------------------------------------------------- private val classfileVersion: Int = settings.target.value match { - case "jvm-1.5" => asm.Opcodes.V1_5 - case "jvm-1.6" => asm.Opcodes.V1_6 - case "jvm-1.7" => asm.Opcodes.V1_7 case "jvm-1.8" => asm.Opcodes.V1_8 } diff --git a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala index 6b339b2a6da4..8386722b6357 100644 --- a/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/AbsScalaSettings.scala @@ -31,6 +31,7 @@ trait AbsScalaSettings { def BooleanSetting(name: String, descr: String): BooleanSetting def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String): ChoiceSetting + def ChoiceSettingForcedDefault(name: String, helpArg: String, descr: String, choices: List[String], default: String): ChoiceSetting def IntSetting(name: String, descr: String, default: Int, range: Option[(Int, Int)], parser: String => Option[Int]): IntSetting def MultiStringSetting(name: String, helpArg: String, descr: String): MultiStringSetting def MultiChoiceSetting[E <: MultiChoiceEnumeration](name: String, helpArg: String, descr: String, domain: E, default: Option[List[String]]): MultiChoiceSetting[E] diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index 11cde935f22b..6212469f73aa 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -221,6 +221,13 @@ class MutableSettings(val errorFn: String => Unit) def BooleanSetting(name: String, descr: String) = add(new BooleanSetting(name, descr)) def ChoiceSetting(name: String, helpArg: String, descr: String, choices: List[String], default: String) = add(new ChoiceSetting(name, helpArg, descr, choices, default)) + def ChoiceSettingForcedDefault(name: String, helpArg: String, descr: String, choices: List[String], default: String) = + ChoiceSetting(name, helpArg, descr, choices, default).withPostSetHook(sett => + if (sett.value != default) { + sett.withDeprecationMessage(s"${name}:${sett.value} is deprecated, forcing use of $default") + sett.value = default + } + ) def IntSetting(name: String, descr: String, default: Int, range: Option[(Int, Int)], parser: String => Option[Int]) 
= add(new IntSetting(name, descr, default, range, parser)) def MultiStringSetting(name: String, arg: String, descr: String) = add(new MultiStringSetting(name, arg, descr)) diff --git a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala index d42c0dd730d8..f197a4930da5 100644 --- a/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/StandardScalaSettings.scala @@ -38,8 +38,8 @@ trait StandardScalaSettings { val nowarn = BooleanSetting ("-nowarn", "Generate no warnings.") val optimise: BooleanSetting // depends on post hook which mutates other settings val print = BooleanSetting ("-print", "Print program with Scala-specific features removed.") - val target = ChoiceSetting ("-target", "target", "Target platform for object files. All JVM 1.5 targets are deprecated.", - List("jvm-1.5", "jvm-1.6", "jvm-1.7", "jvm-1.8"), "jvm-1.6") + val target = ChoiceSettingForcedDefault ("-target", "target", "Target platform for object files. All JVM 1.5 - 1.7 targets are deprecated.", + List("jvm-1.5", "jvm-1.6", "jvm-1.7", "jvm-1.8"), "jvm-1.8") val unchecked = BooleanSetting ("-unchecked", "Enable additional warnings where generated code depends on assumptions.") val uniqid = BooleanSetting ("-uniqid", "Uniquely tag all identifiers in debugging output.") val usejavacp = BooleanSetting ("-usejavacp", "Utilize the java.class.path in classpath resolution.") diff --git a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala index 8e323de62377..57aaffe54fa8 100644 --- a/src/compiler/scala/tools/nsc/transform/Delambdafy.scala +++ b/src/compiler/scala/tools/nsc/transform/Delambdafy.scala @@ -18,7 +18,7 @@ import scala.collection.mutable.LinkedHashMap * * From a lambda, Delambdafy will create: * - * Under -target:jvm-1.7 and below: + * Under GenASM * * 1) a new top level class that a) has fields and a constructor taking the captured environment (including possibly the "this" @@ -27,7 +27,7 @@ import scala.collection.mutable.LinkedHashMap * c) if needed a bridge method for the apply method * 2) an instantiation of the newly created class which replaces the lambda * - * Under -target:jvm-1.8 with GenBCode: + * Under GenBCode: * * 1) An application of the captured arguments to a fictional symbol representing the lambda factory. * This will be translated by the backed into an invokedynamic using a bootstrap method in JDK8's `LambdaMetaFactory`. @@ -573,8 +573,7 @@ abstract class Delambdafy extends Transform with TypingTransformers with ast.Tre // given function type. Returns `NoSymbol` if the compiler settings are unsuitable. 
private def java8CompatFunctionalInterface(target: Symbol, functionType: Type): (Symbol, Boolean) = { val canUseLambdaMetafactory: Boolean = { - val isTarget18 = settings.target.value.contains("jvm-1.8") - settings.isBCodeActive && isTarget18 + settings.isBCodeActive } val sym = functionType.typeSymbol diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index d5a7213cfb71..f817eca3b9d9 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -241,7 +241,7 @@ abstract class UnCurry extends InfoTransform def canUseDelamdafyMethod = ( (inConstructorFlag == 0) // Avoiding synthesizing code prone to SI-6666, SI-8363 by using old-style lambda translation - && (!isSpecialized || (settings.isBCodeActive && settings.target.value == "jvm-1.8")) // DelambdafyTransformer currently only emits generic FunctionN-s, use the old style in the meantime + && (!isSpecialized || settings.isBCodeActive) // DelambdafyTransformer currently only emits generic FunctionN-s, use the old style in the meantime ) if (inlineFunctionExpansion || !canUseDelamdafyMethod) { val parents = addSerializable(abstractFunctionForFunctionType(fun.tpe)) diff --git a/src/manual/scala/man1/scalac.scala b/src/manual/scala/man1/scalac.scala index a20c1ac2e6b3..41dae1b322b4 100644 --- a/src/manual/scala/man1/scalac.scala +++ b/src/manual/scala/man1/scalac.scala @@ -148,12 +148,9 @@ object scalac extends Command { CmdOption("sourcepath", Argument("path")), "Specify location(s) of source files."), Definition( - CmdOptionBound("target:", "{jvm-1.5,jvm-1.6,jvm-1.7,jvm-1.8}"), - SeqPara( - Mono("\"jvm-1.5\"") & " target JVM 1.5 (deprecated),", - Mono("\"jvm-1.6\"") & " target JVM 1.6 (default),", - Mono("\"jvm-1.7\"") & " target JVM 1.7,", - Mono("\"jvm-1.8\"") & " target JVM 1.8,")), + CmdOptionBound("target:", "{jvm-1.8}"), + SeqPara( + Mono("\"jvm-1.8\"") & " target JVM 1.8 (default)")), Definition( CmdOption("toolcp", Argument("path")), "Add to the runner classpath."), diff --git a/test/files/neg/deprecated-target.check b/test/files/neg/deprecated-target.check new file mode 100644 index 000000000000..307d3d25ab41 --- /dev/null +++ b/test/files/neg/deprecated-target.check @@ -0,0 +1,4 @@ +warning: -target is deprecated: -target:jvm-1.7 is deprecated, forcing use of jvm-1.8 +error: No warnings can be incurred under -Xfatal-warnings. 
+one warning found +one error found diff --git a/test/files/neg/deprecated-target.flags b/test/files/neg/deprecated-target.flags new file mode 100644 index 000000000000..458ded8123b5 --- /dev/null +++ b/test/files/neg/deprecated-target.flags @@ -0,0 +1 @@ +-target:jvm-1.7 -deprecation -Xfatal-warnings diff --git a/test/files/neg/deprecated-target.scala b/test/files/neg/deprecated-target.scala new file mode 100644 index 000000000000..9dccdd5e5954 --- /dev/null +++ b/test/files/neg/deprecated-target.scala @@ -0,0 +1 @@ +class C \ No newline at end of file diff --git a/test/files/neg/t6289.check b/test/files/neg/t6289.check index 989932750f2f..7b2b4b2d32b7 100644 --- a/test/files/neg/t6289.check +++ b/test/files/neg/t6289.check @@ -1,9 +1,3 @@ -#partest java6 -t6289/J.java:2: method does not override or implement a method from a supertype - @Override public void foo() { } - ^ -1 error -#partest !java6 t6289/J.java:2: error: method does not override or implement a method from a supertype @Override public void foo() { } ^ diff --git a/test/files/run/nothingTypeDce.flags b/test/files/run/nothingTypeDce.flags index d85321ca0eaa..fde52cc7dfeb 100644 --- a/test/files/run/nothingTypeDce.flags +++ b/test/files/run/nothingTypeDce.flags @@ -1 +1 @@ --target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code +-Ybackend:GenBCode -Yopt:unreachable-code diff --git a/test/files/run/nothingTypeDce.scala b/test/files/run/nothingTypeDce.scala index 5f3692fd3366..92d3ca6f89cd 100644 --- a/test/files/run/nothingTypeDce.scala +++ b/test/files/run/nothingTypeDce.scala @@ -1,7 +1,6 @@ // See comment in BCodeBodyBuilder -// -target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code -// target enables stack map frames generation +// -Ybackend:GenBCode -Yopt:unreachable-code class C { // can't just emit a call to ???, that returns value of type Nothing$ (not Int). diff --git a/test/files/run/nothingTypeNoFramesNoDce.check b/test/files/run/nothingTypeNoFramesNoDce.check deleted file mode 100644 index b1d08b45ffef..000000000000 --- a/test/files/run/nothingTypeNoFramesNoDce.check +++ /dev/null @@ -1 +0,0 @@ -warning: -target:jvm-1.5 is deprecated: use target for Java 1.6 or above. diff --git a/test/files/run/nothingTypeNoFramesNoDce.flags b/test/files/run/nothingTypeNoFramesNoDce.flags deleted file mode 100644 index a035c861798f..000000000000 --- a/test/files/run/nothingTypeNoFramesNoDce.flags +++ /dev/null @@ -1 +0,0 @@ --target:jvm-1.5 -Ybackend:GenBCode -Yopt:l:none -deprecation diff --git a/test/files/run/nothingTypeNoOpt.flags b/test/files/run/nothingTypeNoOpt.flags index b3b518051b6f..d3e4d61e19c8 100644 --- a/test/files/run/nothingTypeNoOpt.flags +++ b/test/files/run/nothingTypeNoOpt.flags @@ -1 +1 @@ --target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:none +-Ybackend:GenBCode -Yopt:l:none diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala index 76492cfa2335..cd298f822ac7 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/CompactLocalVariablesTest.scala @@ -17,8 +17,8 @@ class CompactLocalVariablesTest { // recurse-unreachable-jumps is required for eliminating catch blocks, in the first dce round they // are still live.only after eliminating the empty handler the catch blocks become unreachable. 
- val methodOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code,compact-locals") - val noCompactVarsCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code") + val methodOptCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:unreachable-code,compact-locals") + val noCompactVarsCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:unreachable-code") @Test def compactUnused(): Unit = { diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala index 5ef2458c0a27..8d910629ca12 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/MethodLevelOpts.scala @@ -16,7 +16,7 @@ import ASMConverters._ import scala.tools.testing.ClearAfterClass object MethodLevelOpts extends ClearAfterClass.Clearable { - var methodOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:method") + var methodOptCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:method") def clear(): Unit = { methodOptCompiler = null } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala index 902af7b7fae8..0ac206669a5a 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/UnreachableCodeTest.scala @@ -18,18 +18,14 @@ import scala.tools.testing.ClearAfterClass object UnreachableCodeTest extends ClearAfterClass.Clearable { // jvm-1.6 enables emitting stack map frames, which impacts the code generation wrt dead basic blocks, // see comment in BCodeBodyBuilder - var methodOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:method") - var dceCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:unreachable-code") - var noOptCompiler = newCompiler(extraArgs = "-target:jvm-1.6 -Ybackend:GenBCode -Yopt:l:none") - - // jvm-1.5 disables computing stack map frames, and it emits dead code as-is. note that this flag triggers a deprecation warning - var noOptNoFramesCompiler = newCompiler(extraArgs = "-target:jvm-1.5 -Ybackend:GenBCode -Yopt:l:none -deprecation") + var methodOptCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:method") + var dceCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:unreachable-code") + var noOptCompiler = newCompiler(extraArgs = "-Ybackend:GenBCode -Yopt:l:none") def clear(): Unit = { methodOptCompiler = null dceCompiler = null noOptCompiler = null - noOptNoFramesCompiler = null } } @@ -40,7 +36,6 @@ class UnreachableCodeTest extends ClearAfterClass { val methodOptCompiler = UnreachableCodeTest.methodOptCompiler val dceCompiler = UnreachableCodeTest.dceCompiler val noOptCompiler = UnreachableCodeTest.noOptCompiler - val noOptNoFramesCompiler = UnreachableCodeTest.noOptNoFramesCompiler def assertEliminateDead(code: (Instruction, Boolean)*): Unit = { val method = genMethod()(code.map(_._1): _*) @@ -152,11 +147,6 @@ class UnreachableCodeTest extends ClearAfterClass { // Finally, instructions in the dead basic blocks are replaced by ATHROW, as explained in // a comment in BCodeBodyBuilder. 
assertSameCode(noDce.dropNonOp, List(Op(ICONST_1), Op(IRETURN), Op(ATHROW), Op(ATHROW))) - - // when NOT computing stack map frames, ASM's ClassWriter does not replace dead code by NOP/ATHROW - val warn = "target:jvm-1.5 is deprecated" - val noDceNoFrames = singleMethodInstructions(noOptNoFramesCompiler)(code, allowMessage = _.msg contains warn) - assertSameCode(noDceNoFrames.dropNonOp, List(Op(ICONST_1), Op(IRETURN), Op(ICONST_2), Op(IRETURN))) } @Test From 95a5ac6f58741db83ee634c274db507149ec52e5 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 28 May 2019 15:04:05 +1000 Subject: [PATCH 1744/2793] Prefer Type.foreach to Type.filter The latter creates temporary lists in FilterTypeCollector, which was showing as an allocation hotspot for builds with `-Ywarn-unused` enabled. --- src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 6a7e527f9ad8..778ed35267bf 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -549,7 +549,7 @@ trait TypeDiagnostics { } if (t.tpe ne null) { - for (tp <- t.tpe if !treeTypes(tp)) { + for (tp <- t.tpe) if (!treeTypes(tp)) { // Include references to private/local aliases (which might otherwise refer to an enclosing class) val isAlias = { val td = tp.typeSymbolDirect From 6909e6df048402ae4cc12f2dca95c3293af2b8f5 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 28 May 2019 15:27:06 +0200 Subject: [PATCH 1745/2793] Update callcc-interpreter.check Not sure what went wrong in 6b883e1 --- test/files/presentation/callcc-interpreter.check | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/files/presentation/callcc-interpreter.check b/test/files/presentation/callcc-interpreter.check index 62d1db11e605..94a3d64d68dc 100644 --- a/test/files/presentation/callcc-interpreter.check +++ b/test/files/presentation/callcc-interpreter.check @@ -18,7 +18,7 @@ case class Var extends callccInterpreter.Term with Product with Serializable case object Wrong def +(other: String): String def ->[B](y: B): (callccInterpreter.type, B) -def add(a: callccInterpreter.Value,b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Serializable with Product with callccInterpreter.Value] +def add(a: callccInterpreter.Value,b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with Serializable with callccInterpreter.Value] def apply(a: callccInterpreter.Value,b: callccInterpreter.Value): callccInterpreter.M[callccInterpreter.Value] def callCC[A](h: (A => callccInterpreter.M[A]) => callccInterpreter.M[A]): callccInterpreter.M[A] def ensuring(cond: Boolean): callccInterpreter.type @@ -90,7 +90,7 @@ def showM(m: callccInterpreter.M[callccInterpreter.Value]): String = m.in.apply( askType at CallccInterpreter.scala(50,30) ================================================================================ [response] askTypeAt (50,30) -def add(a: callccInterpreter.Value, b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Serializable with Product with callccInterpreter.Value] = scala.Tuple2.apply[callccInterpreter.Value, callccInterpreter.Value](a, b) match { +def add(a: 
callccInterpreter.Value, b: callccInterpreter.Value): callccInterpreter.M[_ >: callccInterpreter.Num with callccInterpreter.Wrong.type <: Product with Serializable with callccInterpreter.Value] = scala.Tuple2.apply[callccInterpreter.Value, callccInterpreter.Value](a, b) match { case (_1: callccInterpreter.Value, _2: callccInterpreter.Value)(callccInterpreter.Value, callccInterpreter.Value)((n: Int)callccInterpreter.Num((m @ _)), (n: Int)callccInterpreter.Num((n @ _))) => this.unitM[callccInterpreter.Num](callccInterpreter.this.Num.apply(m.+(n))) case _ => callccInterpreter.this.unitM[callccInterpreter.Wrong.type](callccInterpreter.this.Wrong) } From efd31fffd1025e55072115d82d215d93a76846ec Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 28 May 2019 17:43:33 +0200 Subject: [PATCH 1746/2793] Update t7747-repl.check --- test/files/run/t7747-repl.check | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/files/run/t7747-repl.check b/test/files/run/t7747-repl.check index 687d432ea008..d698ea668d5a 100644 --- a/test/files/run/t7747-repl.check +++ b/test/files/run/t7747-repl.check @@ -280,7 +280,7 @@ object $read extends scala.AnyRef { }; val INSTANCE = new $read. } -res3: List[Serializable with Product] = List(BippyBups(), PuppyPups(), Bingo()) +res3: List[Product with Serializable] = List(BippyBups(), PuppyPups(), Bingo()) scala> case class Sum(exp: String, exp2: String) defined class Sum From 21b585dc237388cd52398602d661027c6781f3c4 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 28 May 2019 18:44:30 +1000 Subject: [PATCH 1747/2793] Introduce a new implementation of implicit shadowing Avoid building up a set of all in-scope implicits during each implicit search. Instead, do the filtering of shadowed implicits in a second pass. --- .../tools/nsc/typechecker/Implicits.scala | 76 +++++++++++++++++++ 1 file changed, 76 insertions(+) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index e340e45516e2..0f50db503f3e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -257,6 +257,7 @@ trait Implicits { var useCountArg: Int = 0 var useCountView: Int = 0 + def useCount(isView: Boolean): Int = if (isView) useCountView else useCountArg /** Does type `tp` contain an Error type as parameter or result? */ @@ -995,8 +996,83 @@ trait Implicits { // most frequent one first matches sortBy (x => if (isView) -x.useCountView else -x.useCountArg) } + + /** Sorted list of eligible implicits. 
+ */ + private def eligibleNew = { + final case class Candidate(info: ImplicitInfo, level: Int) + var matches: java.util.ArrayList[Candidate] = null + var matchesNames: java.util.HashSet[Name] = null + + var maxCandidateLevel = 0 + + { + var i = 0 + // Collect candidates, the level at which each was found and build a set of their names + var iss = this.iss + while (!iss.isEmpty) { + var is = iss.head + while (!is.isEmpty) { + val info = is.head + if (checkValid(info.sym) && survives(info, NoShadower)) { + if (matches == null) { + matches = new java.util.ArrayList(16) + matchesNames = new java.util.HashSet(16) + } + matches.add(Candidate(info, i)) + matchesNames.add(info.name) + maxCandidateLevel = i + } + is = is.tail + } + iss = iss.tail + i += 1 + } + } + + if (matches == null) + Nil // OPT common case: no candidates + else { + if (isLocalToCallsite) { + // A second pass to filter out results that are shadowed by implicits in inner scopes. + var i = 0 + var removed = false + var iss = this.iss + while (!iss.isEmpty && i < maxCandidateLevel) { + var is = iss.head + while (!is.isEmpty) { + val info = is.head + if (matchesNames.contains(info.name)) { + var j = 0 + val numMatches = matches.size() + while (j < numMatches) { + val matchInfo = matches.get(j) + if (matchInfo != null && matchInfo.info.name == info.name && matchInfo.level > i) { + // Shadowed. For now set to null, so as not to mess up the indexing our current loop. + matches.set(j, null) + removed = true + } + j += 1 + } + } + is = is.tail + } + iss = iss.tail + i += 1 + } + if (removed) matches.removeIf(_ == null) // remove for real now. + } + // most frequent one first. Sort in-place. + matches.sort(((x, y) => java.lang.Integer.compare(y.info.useCount(isView), x.info.useCount(isView)))) + val result = new ListBuffer[ImplicitInfo] + matches.forEach(x => result += x.info) + result.toList + } + } + if (eligible.nonEmpty) printTyping(tree, eligible.size + s" eligible for pt=$pt at ${fullSiteString(context)}") + assert(eligibleNew == eligible, (eligibleNew, eligible)) /** Faster implicit search. Overall idea: * - prune aggressively From 3d1ab81a847c5bd18615af38fff9447e57b19618 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 28 May 2019 18:58:04 +1000 Subject: [PATCH 1748/2793] Enable new implementation by default With an opt-out system property --- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 0f50db503f3e..29b17d1c85a9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -181,7 +181,7 @@ trait Implicits { private val infoMapCache = new LinkedHashMap[Symbol, InfoMap] private val improvesCache = perRunCaches.newMap[(ImplicitInfo, ImplicitInfo), Boolean]() private val implicitSearchId = { var id = 1 ; () => try id finally id += 1 } - + private val shadowerUseOldImplementation = java.lang.Boolean.getBoolean("scalac.implicit.shadow.old") def resetImplicits() { implicitsCache.clear() infoMapCache.clear() @@ -986,7 +986,7 @@ trait Implicits { /** Sorted list of eligible implicits. 
*/ - val eligible = Shadower.using(isLocalToCallsite){ shadower => + private def eligibleOld = Shadower.using(isLocalToCallsite){ shadower => val matches = iss flatMap { is => val result = is filter (info => checkValid(info.sym) && survives(info, shadower)) shadower addInfos is @@ -1070,9 +1070,10 @@ trait Implicits { } } + val eligible = if (shadowerUseOldImplementation) eligibleOld else eligibleNew + if (eligible.nonEmpty) printTyping(tree, eligible.size + s" eligible for pt=$pt at ${fullSiteString(context)}") - assert(eligibleNew == eligible, (eligibleNew, eligible)) /** Faster implicit search. Overall idea: * - prune aggressively From b14e79ae0f42cbfecbff7c7b096f8f99216a4471 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Sun, 31 Mar 2019 22:15:27 +0100 Subject: [PATCH 1749/2793] RefChecks: avoid List allocations from flatMap and Map In the RefChecks module, the code of `lessAccessibleSymsInType` was using a `List.flatMap` in a recursive loop, which could create several extra allocations. We replace this with a `ListBuffer` and a recursive procedural code. Merge List.map on annots into the List.foreach succeeding it. `transformedAnnots` was the result of applying a `List.map`, and it was immediately consumed in a `List.foreach` following it. Instead, we put the map into the beginning of the foreach. --- .../tools/nsc/typechecker/RefChecks.scala | 22 +++++++++++-------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 09d1115e9dc1..7d44439817d1 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1314,16 +1314,20 @@ abstract class RefChecks extends Transform { && (otherSym isLessAccessibleThan memberSym.enclClass) ) private def lessAccessibleSymsInType(other: Type, memberSym: Symbol): List[Symbol] = { - val extras = other match { - case TypeRef(pre, _, args) => + val res: ListBuffer[Symbol] = ListBuffer.empty[Symbol] + def loop(tp: Type): Unit = { + if (lessAccessible(tp.typeSymbol, memberSym)) + res += tp.typeSymbol + tp match { // checking the prefix here gives us spurious errors on e.g. a private[process] // object which contains a type alias, which normalizes to a visible type. - args filterNot (_ eq NoPrefix) flatMap (tp => lessAccessibleSymsInType(tp, memberSym)) - case _ => - Nil + case TypeRef(pre, _, args) => + args foreach { arg => if (arg ne NoPrefix) loop(arg) } + case _ => () + } } - if (lessAccessible(other.typeSymbol, memberSym)) other.typeSymbol :: extras - else extras + loop(other) + res.toList } private def warnLessAccessible(otherSym: Symbol, memberSym: Symbol) { val comparison = accessFlagsToString(memberSym) match { @@ -1440,8 +1444,8 @@ abstract class RefChecks extends Transform { } val annotsBySymbol = new mutable.LinkedHashMap[Symbol, ListBuffer[AnnotationInfo]]() - val transformedAnnots = annots.map(_.transformArgs(transformTrees)) - for (transformedAnnot <- transformedAnnots) { + annots foreach { annot => + val transformedAnnot = annot.transformArgs(transformTrees) val buffer = annotsBySymbol.getOrElseUpdate(transformedAnnot.symbol, new ListBuffer) buffer += transformedAnnot } From e01f945b2fdda507bf19682f08cfb5340a860a76 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso-Blas" Date: Sat, 6 Apr 2019 20:35:03 +0100 Subject: [PATCH 1750/2793] TypeBounds: try to avoid creation of the type bounds. 
The method "bounds" from the Type class hierarchy is usually implemented in terms of the "lowerBound" and "upperBound" methods. Thus, it is better to use the upper or lower bounds directly, even if both of them are used, to avoid creating the TypeBounds object. --- .../scala/tools/nsc/typechecker/Infer.scala | 32 +++++++++++-------- .../scala/tools/nsc/typechecker/Typers.scala | 15 ++++++--- .../scala/reflect/internal/Types.scala | 6 ++-- .../scala/reflect/internal/tpe/GlbLubs.scala | 6 ++-- .../reflect/internal/transform/UnCurry.scala | 2 +- .../tools/nsc/doc/model/ModelFactory.scala | 8 ++--- .../doc/model/ModelFactoryTypeSupport.scala | 2 +- 7 files changed, 41 insertions(+), 30 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index b896b09aa1c0..3a0dd470244d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1082,21 +1082,23 @@ trait Infer extends Checkable { } } - def instBounds(tvar: TypeVar): TypeBounds = { - val tparam = tvar.origin.typeSymbol - val instType = toOrigin(tvar.constr.inst) - val TypeBounds(lo, hi) = tparam.info.bounds - val (loBounds, hiBounds) = - if (isFullyDefined(instType)) (List(instType), List(instType)) - else (tvar.constr.loBounds, tvar.constr.hiBounds) - + @inline + private[this] def instBounds(tvar: TypeVar): TypeBounds = { + val tparam = tvar.origin.typeSymbol + val instType = toOrigin(tvar.constr.inst) + val lo = tparam.info.lowerBound + val hi = tparam.info.upperBound + val ifd = isFullyDefined(instType) + val loBounds = if (ifd) List(instType) else tvar.constr.loBounds + val hiBounds = if (ifd) List(instType) else tvar.constr.hiBounds TypeBounds( lub(lo :: loBounds map toOrigin), glb(hi :: hiBounds map toOrigin) ) } - def isInstantiatable(tvars: List[TypeVar]) = { + @inline + private[this] def isInstantiatable(tvars: List[TypeVar]) = { val tvars1 = tvars map (_.cloneInternal) // Note: right now it's not clear that solving is complete, or how it can be made complete! // So we should come back to this and investigate. 
@@ -1106,12 +1108,14 @@ trait Infer extends Checkable { // this is quite nasty: it destructively changes the info of the syms of e.g., method type params // (see #3692, where the type param T's bounds were set to > : T <: T, so that parts looped) // the changes are rolled back by restoreTypeBounds, but might be unintentionally observed in the mean time - def instantiateTypeVar(tvar: TypeVar) { - val tparam = tvar.origin.typeSymbol - val TypeBounds(lo0, hi0) = tparam.info.bounds + private[this] def instantiateTypeVar(tvar: TypeVar): Unit = { + val tparam = tvar.origin.typeSymbol + val tpinfo = tparam.info + val lo0 = tpinfo.lowerBound + val hi0 = tpinfo.upperBound val tb @ TypeBounds(lo1, hi1) = instBounds(tvar) - val enclCase = context.enclosingCaseDef - def enclCase_s = enclCase.toString.replaceAll("\\n", " ").take(60) + val enclCase = context.enclosingCaseDef + def enclCase_s = enclCase.toString.replaceAll("\\n", " ").take(60) if (enclCase.savedTypeBounds.nonEmpty) log( sm"""|instantiateTypeVar with nonEmpty saved type bounds { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 17af06e2398d..4bdf7b2b118d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -5289,15 +5289,20 @@ trait Typers extends Adaptations with Tags with TypersTracking with PatternTyper foreach2(args, tparams) { (arg, tparam) => // note: can't use args1 in selector, because Binds got replaced val asym = arg.symbol - def abounds = asym.info.bounds - def tbounds = tparam.info.bounds def enhanceBounds(): Unit = { - val TypeBounds(lo0, hi0) = abounds - val TypeBounds(lo1, hi1) = tbounds.subst(tparams, argtypes) + val info0 = asym.info + val lo0 = info0.lowerBound + val hi0 = info0.upperBound + val tpinfo = tparam.info + val lo1 = tpinfo.lowerBound.subst(tparams, argtypes) + val hi1 = tpinfo.upperBound.subst(tparams, argtypes) val lo = lub(List(lo0, lo1)) val hi = glb(List(hi0, hi1)) if (!(lo =:= lo0 && hi =:= hi0)) - asym setInfo logResult(s"Updating bounds of ${asym.fullLocationString} in $tree from '$abounds' to")(TypeBounds(lo, hi)) + asym setInfo logResult({ + val abounds = TypeBounds(lo0, hi0) + s"Updating bounds of ${asym.fullLocationString} in $tree from '$abounds' to" + })(TypeBounds(lo, hi)) } if (asym != null && asym.isAbstractType) { arg match { diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index fe261147bedf..92ac84ff876d 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1322,8 +1322,8 @@ trait Types case TypeBounds(_, _) => that <:< this case _ => lo <:< that && that <:< hi } - def emptyLowerBound = typeIsNothing(lo) || lo.isWildcard - def emptyUpperBound = typeIsAny(hi) || hi.isWildcard + def emptyLowerBound = TypeBounds.isEmptyLower(lo) + def emptyUpperBound = TypeBounds.isEmptyUpper(hi) def isEmptyBounds = emptyLowerBound && emptyUpperBound override def safeToString = scalaNotation(_.toString) @@ -1355,6 +1355,8 @@ trait Types def apply(lo: Type, hi: Type): TypeBounds = { unique(new UniqueTypeBounds(lo, hi)).asInstanceOf[TypeBounds] } + def isEmptyUpper(hi: Type): Boolean = typeIsAny(hi) || hi.isWildcard + def isEmptyLower(lo: Type): Boolean = typeIsNothing(lo) || lo.isWildcard } object CompoundType { diff --git a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala index 
3a4a07d0d6fe..4a0546318778 100644 --- a/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala +++ b/src/reflect/scala/reflect/internal/tpe/GlbLubs.scala @@ -356,10 +356,10 @@ private[internal] trait GlbLubs { else if (symtypes.tail forall (symtypes.head =:= _)) proto.cloneSymbol(lubRefined.typeSymbol).setInfoOwnerAdjusted(symtypes.head) else { - def lubBounds(bnds: List[TypeBounds]): TypeBounds = - TypeBounds(glb(bnds map (_.lo), depth.decr), lub(bnds map (_.hi), depth.decr)) + val lo = glb(symtypes map (_.lowerBound), depth.decr) + val hi = lub(symtypes map (_.upperBound), depth.decr) lubRefined.typeSymbol.newAbstractType(proto.name.toTypeName, proto.pos) - .setInfoOwnerAdjusted(lubBounds(symtypes map (_.bounds))) + .setInfoOwnerAdjusted(TypeBounds(lo, hi)) } } } diff --git a/src/reflect/scala/reflect/internal/transform/UnCurry.scala b/src/reflect/scala/reflect/internal/transform/UnCurry.scala index f8783e36fd6c..6bdbeccb4518 100644 --- a/src/reflect/scala/reflect/internal/transform/UnCurry.scala +++ b/src/reflect/scala/reflect/internal/transform/UnCurry.scala @@ -70,7 +70,7 @@ trait UnCurry { object DesugaredParameterType { def isUnboundedGeneric(tp: Type) = tp match { case t @ TypeRef(_, sym, _) if sym.isAbstractType => - sym.info.resultType.bounds.emptyUpperBound + TypeBounds.isEmptyUpper(sym.info.resultType.upperBound) case _ => false } diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala index dee00a35cf5d..69e6db65a0ad 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactory.scala @@ -542,13 +542,13 @@ class ModelFactory(val global: Global, val settings: doc.Settings) { private trait TypeBoundsImpl { def sym: Symbol def inTpl: TemplateImpl - def lo = sym.info.bounds match { - case TypeBounds(lo, hi) if lo.typeSymbol != NothingClass => + def lo = sym.info.lowerBound match { + case lo if lo.typeSymbol != NothingClass => Some(makeTypeInTemplateContext(appliedType(lo, sym.info.typeParams map {_.tpe}), inTpl, sym)) case _ => None } - def hi = sym.info.bounds match { - case TypeBounds(lo, hi) if hi.typeSymbol != AnyClass => + def hi = sym.info.upperBound match { + case hi if hi.typeSymbol != AnyClass => Some(makeTypeInTemplateContext(appliedType(hi, sym.info.typeParams map {_.tpe}), inTpl, sym)) case _ => None } diff --git a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala index 805604bfd58f..a534a3851190 100644 --- a/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala +++ b/src/scaladoc/scala/tools/nsc/doc/model/ModelFactoryTypeSupport.scala @@ -240,7 +240,7 @@ trait ModelFactoryTypeSupport { nameBuffer append "val " nameBuffer append tpnme.dropSingletonName(sym.name) nameBuffer append ": " - appendType0(dropSingletonType(sym.info.bounds.hi)) + appendType0(dropSingletonType(sym.info.upperBound)) } else { if (sym.flagString != "") nameBuffer append (sym.flagString + " ") if (sym.keyString != "") nameBuffer append (sym.keyString + " ") From fc5132843fc8dc8cc96eef11d2de7ff1db737e58 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Tue, 4 Jun 2019 17:16:13 -0400 Subject: [PATCH 1751/2793] Also deprecate backing field symbols. 
Compiling @deprecated val foo: T = some.deprecated(call) yielded private[this] val `foo `: T = some.deprecated(call) @deprecated def foo: T = this.`foo ` where the `@deprecated` has been slapped on the def (where it'll incur deprecation warnings on callers) but not on the val (where it'll suppress deprecation warnings on the body. Just copy the annotation across. Fixes scala/bug#11538 in an expedient manner. --- src/library/scala/deprecated.scala | 2 +- test/files/pos/t11538.flags | 1 + test/files/pos/t11538.scala | 13 +++++++++++++ 3 files changed, 15 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t11538.flags create mode 100644 test/files/pos/t11538.scala diff --git a/src/library/scala/deprecated.scala b/src/library/scala/deprecated.scala index b35288a22915..42dccf60cb69 100644 --- a/src/library/scala/deprecated.scala +++ b/src/library/scala/deprecated.scala @@ -64,5 +64,5 @@ import scala.annotation.meta._ * @see [[scala.deprecatedOverriding]] * @see [[scala.deprecatedName]] */ -@getter @setter @beanGetter @beanSetter +@getter @setter @beanGetter @beanSetter @field class deprecated(message: String = "", since: String = "") extends scala.annotation.StaticAnnotation diff --git a/test/files/pos/t11538.flags b/test/files/pos/t11538.flags new file mode 100644 index 000000000000..7882ee62698f --- /dev/null +++ b/test/files/pos/t11538.flags @@ -0,0 +1 @@ +-Xfatal-warnings -deprecation -stop:refchecks \ No newline at end of file diff --git a/test/files/pos/t11538.scala b/test/files/pos/t11538.scala new file mode 100644 index 000000000000..77c931e2c202 --- /dev/null +++ b/test/files/pos/t11538.scala @@ -0,0 +1,13 @@ +package t11538 + +@deprecated("not for you", since = "just now") +class Abhorrent + +object Bizzle { + @deprecated("use mipple instead", since = "recently") + val wibble: Abhorrent = mipple + @deprecated("use wobble instead", since = "recently") + def mipple: Abhorrent = wobble + @deprecated("use wibble instead", since = "recently") + var wobble: Abhorrent = wibble +} \ No newline at end of file From 4b5835c839eb914836eac808c9ee020c2b33d820 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 6 Jun 2019 03:57:46 +0200 Subject: [PATCH 1752/2793] Avoid invalid paths in the pickle cache on Windows --- src/compiler/scala/tools/nsc/PipelineMain.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 2e5d6d0d0530..258ebfc6430c 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -41,7 +41,10 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe private val pickleCache: Path = configuredPickleCache.getOrElse(Files.createTempDirectory("scala.picklecache")) private def cachePath(file: Path): Path = { val newExtension = if (useJars) ".jar" else "" - changeExtension(pickleCache.resolve("./" + file).normalize(), newExtension) + val root = file.getRoot + // An empty component on Unix, just the drive letter on Windows + val validRootPathComponent = root.toString.replaceAllLiterally("/", "").replaceAllLiterally(":", "") + changeExtension(pickleCache.resolve(validRootPathComponent).resolve(root.relativize(file)).normalize(), newExtension) } private val strippedAndExportedClassPath = mutable.HashMap[Path, Path]() From 369c74329b3d5d30bd2f5d9c6d2107d3d7af125b Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Thu, 6 Jun 2019 12:22:05 -0400 Subject: [PATCH 1753/2793] 
Correct Java signature for value classes appearing in type arguments Value class values are always boxed when used in a generic context. Specifically, in val foo: Option[VC] = Some(vc) the runtime value of `foo` will be a `Some` wrapping a value of the (boxed) class `VC`. This is analogous to what happens with primitive value classes in this situation. Renamed `primitiveOK` to imply that it affects the signature generated for any value class, not just primitives. Fixes scala/bug#11321. --- .../scala/tools/nsc/transform/Erasure.scala | 31 ++++++++----------- test/files/jvm/t11321.check | 3 ++ test/files/jvm/t11321.scala | 26 ++++++++++++++++ test/files/jvm/t11321b.check | 2 ++ test/files/jvm/t11321b/Test.java | 9 ++++++ test/files/jvm/t11321b/XFoo.scala | 4 +++ test/files/run/t6344.check | 16 +++++----- 7 files changed, 65 insertions(+), 26 deletions(-) create mode 100644 test/files/jvm/t11321.check create mode 100644 test/files/jvm/t11321.scala create mode 100644 test/files/jvm/t11321b.check create mode 100644 test/files/jvm/t11321b/Test.java create mode 100644 test/files/jvm/t11321b/XFoo.scala diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 0501dfd91126..36ac2ab55330 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -238,7 +238,7 @@ abstract class Erasure extends InfoTransform val ps = ensureClassAsFirstParent(validParents) ps.foreach(boxedSig) } - def boxedSig(tp: Type): Unit = jsig(tp, primitiveOK = false) + def boxedSig(tp: Type): Unit = jsig(tp, unboxedVCs = false) def boundsSig(bounds: List[Type]): Unit = { val (isTrait, isClass) = bounds partition (_.typeSymbol.isTrait) isClass match { @@ -268,13 +268,13 @@ abstract class Erasure extends InfoTransform def fullNameInSig(sym: Symbol): Unit = builder.append('L').append(enteringJVM(sym.javaBinaryNameString)) @noinline - def jsig(tp0: Type, existentiallyBound: List[Symbol] = Nil, toplevel: Boolean = false, primitiveOK: Boolean = true): Unit = { + def jsig(tp0: Type, existentiallyBound: List[Symbol] = Nil, toplevel: Boolean = false, unboxedVCs: Boolean = true): Unit = { val tp = tp0.dealias tp match { case st: SubType => - jsig(st.supertype, existentiallyBound, toplevel, primitiveOK) + jsig(st.supertype, existentiallyBound, toplevel, unboxedVCs) case ExistentialType(tparams, tpe) => - jsig(tpe, tparams, toplevel, primitiveOK) + jsig(tpe, tparams, toplevel, unboxedVCs) case TypeRef(pre, sym, args) => def argSig(tp: Type): Unit = if (existentiallyBound contains tp.typeSymbol) { @@ -347,25 +347,20 @@ abstract class Erasure extends InfoTransform else if (sym == NullClass) jsig(RuntimeNullClass.tpe) else if (isPrimitiveValueClass(sym)) { - if (!primitiveOK) jsig(ObjectTpe) + if (!unboxedVCs) jsig(ObjectTpe) else if (sym == UnitClass) jsig(BoxedUnitTpe) else builder.append(abbrvTag(sym)) } else if (sym.isDerivedValueClass) { - val unboxed = sym.derivedValueClassUnbox.tpe_*.finalResultType - val unboxedSeen = (tp memberType sym.derivedValueClassUnbox).finalResultType - def unboxedMsg = if (unboxed == unboxedSeen) "" else s", seen within ${sym.simpleName} as $unboxedSeen" - logResult(s"Erasure of value class $sym (underlying type $unboxed$unboxedMsg) is") { - if (isPrimitiveValueType(unboxedSeen) && !primitiveOK) - classSig - else - jsig(unboxedSeen, existentiallyBound, toplevel, primitiveOK) - } + if (unboxedVCs) { + val unboxedSeen = (tp memberType 
sym.derivedValueClassUnbox).finalResultType + jsig(unboxedSeen, existentiallyBound, toplevel) + } else classSig } else if (sym.isClass) classSig else - jsig(erasure(sym0)(tp), existentiallyBound, toplevel, primitiveOK) + jsig(erasure(sym0)(tp), existentiallyBound, toplevel, unboxedVCs) case PolyType(tparams, restpe) => assert(tparams.nonEmpty) if (toplevel) polyParamSig(tparams) @@ -392,14 +387,14 @@ abstract class Erasure extends InfoTransform if (restpe.typeSymbol == UnitClass || sym0.isConstructor) builder.append(VOID_TAG) else jsig(restpe) case RefinedType(parents, decls) => - jsig(intersectionDominator(parents), primitiveOK = primitiveOK) + jsig(intersectionDominator(parents), unboxedVCs = unboxedVCs) case ClassInfoType(parents, _, _) => superSig(tp.typeSymbol, parents) case AnnotatedType(_, atp) => - jsig(atp, existentiallyBound, toplevel, primitiveOK) + jsig(atp, existentiallyBound, toplevel, unboxedVCs) case BoundedWildcardType(bounds) => println("something's wrong: "+sym0+":"+sym0.tpe+" has a bounded wildcard type") - jsig(bounds.hi, existentiallyBound, toplevel, primitiveOK) + jsig(bounds.hi, existentiallyBound, toplevel, unboxedVCs) case _ => val etp = erasure(sym0)(tp) if (etp eq tp) throw new UnknownSig diff --git a/test/files/jvm/t11321.check b/test/files/jvm/t11321.check new file mode 100644 index 000000000000..9f0979ba1dab --- /dev/null +++ b/test/files/jvm/t11321.check @@ -0,0 +1,3 @@ +t11321.V: scala.Option +t11321.U: scala.Option +t11321.W: scala.Option>> diff --git a/test/files/jvm/t11321.scala b/test/files/jvm/t11321.scala new file mode 100644 index 000000000000..fe91e0f31afd --- /dev/null +++ b/test/files/jvm/t11321.scala @@ -0,0 +1,26 @@ +package t11321 { + final class V(val x: Int) extends AnyVal + object V { def get: Option[V] = null } + + final class U(val y: String) extends AnyVal + object U { def get: Option[U] = null } + + final class W[T](val z: T) extends AnyVal + object W { def get: Option[W[Int => String]] = null } +} + + +object Test extends App { + def check[T](implicit tt: reflect.ClassTag[T]): Unit = { + val companion = tt.runtimeClass.getClassLoader.loadClass(tt.runtimeClass.getName + '$') + val get = companion.getMethod("get") + assert(get.getReturnType == classOf[Option[_]]) + println(s"${tt.runtimeClass.getName}: ${get.getGenericReturnType}") + } + + import t11321._ + + check[V] + check[U] + check[W[_]] +} \ No newline at end of file diff --git a/test/files/jvm/t11321b.check b/test/files/jvm/t11321b.check new file mode 100644 index 000000000000..e008cea3575a --- /dev/null +++ b/test/files/jvm/t11321b.check @@ -0,0 +1,2 @@ +minnow +class java.lang.String diff --git a/test/files/jvm/t11321b/Test.java b/test/files/jvm/t11321b/Test.java new file mode 100644 index 000000000000..4801bb43173c --- /dev/null +++ b/test/files/jvm/t11321b/Test.java @@ -0,0 +1,9 @@ +import t11321.*; + +public class Test { + public static void main(String ...args) { + scala.Option b = new Foo().b(); + System.out.println(b.get().x()); + System.out.println(b.get().x().getClass()); + } +} \ No newline at end of file diff --git a/test/files/jvm/t11321b/XFoo.scala b/test/files/jvm/t11321b/XFoo.scala new file mode 100644 index 000000000000..846ba4a6fc84 --- /dev/null +++ b/test/files/jvm/t11321b/XFoo.scala @@ -0,0 +1,4 @@ +package t11321 + +class X(val x: String) extends AnyVal +class Foo { def b = Option(new X("minnow")); def get = b.get } \ No newline at end of file diff --git a/test/files/run/t6344.check b/test/files/run/t6344.check index 8d9adac849d4..b535f31cca05 100644 --- 
a/test/files/run/t6344.check +++ b/test/files/run/t6344.check @@ -14,9 +14,9 @@ public java.lang.Object C1.v1(java.lang.Object) public java.lang.Object C1.v3() public java.lang.Object C1.v3() public java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List) -public java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List) +public java.lang.Object C1.v4(java.lang.Object,scala.collection.immutable.List>) public scala.collection.immutable.List C1.v2() -public scala.collection.immutable.List C1.v2() +public scala.collection.immutable.List> C1.v2() C2 public java.lang.String C2.v1(java.lang.String) @@ -24,9 +24,9 @@ public java.lang.String C2.v1(java.lang.String) public java.lang.String C2.v3() public java.lang.String C2.v3() public java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List) -public java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List) +public java.lang.String C2.v4(java.lang.String,scala.collection.immutable.List>) public scala.collection.immutable.List C2.v2() -public scala.collection.immutable.List C2.v2() +public scala.collection.immutable.List> C2.v2() C3 public java.lang.Object C3.v1(java.lang.Object) @@ -34,11 +34,11 @@ public A C3.v1(A) public java.lang.Object C3.v3() public A C3.v3() public java.lang.Object C3.v4(java.lang.Object,scala.collection.immutable.List) -public A C3.v4(A,scala.collection.immutable.List) +public A C3.v4(A,scala.collection.immutable.List>) public java.lang.Object C3.x() public A C3.x() public scala.collection.immutable.List C3.v2() -public scala.collection.immutable.List C3.v2() +public scala.collection.immutable.List> C3.v2() C4 public java.lang.Integer C4.v1(java.lang.Integer) @@ -56,9 +56,9 @@ public java.lang.String C4B.v1(java.lang.String) public java.lang.String C4B.v3() public java.lang.String C4B.v3() public java.lang.String C4B.v4(java.lang.String,scala.collection.immutable.List) -public java.lang.String C4B.v4(java.lang.String,scala.collection.immutable.List) +public java.lang.String C4B.v4(java.lang.String,scala.collection.immutable.List>) public scala.collection.immutable.List C4B.v2() -public scala.collection.immutable.List C4B.v2() +public scala.collection.immutable.List> C4B.v2() C5 public double C5.f2(int,java.lang.Object,java.lang.String,double) From 4627c4e0031526bdd3a151f096c958e485217f62 Mon Sep 17 00:00:00 2001 From: "ta.tanaka" Date: Fri, 7 Jun 2019 01:02:37 +0900 Subject: [PATCH 1754/2793] Awaitable.result should have a throws annotation of TimeoutException and InterruptedException as well as Awaitable.ready. 
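To illustrate the effect of the narrowed annotations (a minimal, hypothetical sketch, not code from this patch; the `BlockingResult` trait is invented for the example): `@throws` with concrete exception classes emits a precise `throws` clause in the bytecode, so Java callers see the specific checked exceptions rather than a blanket `Exception`.

```scala
import java.util.concurrent.TimeoutException

trait BlockingResult[T] {
  // The concrete exception classes end up in the method's bytecode `throws`
  // clause, which Java callers and tooling can see and handle precisely.
  @throws(classOf[TimeoutException])
  @throws(classOf[InterruptedException])
  def result(timeoutMillis: Long): T
}
```

The diff below makes the corresponding change on `Awaitable.result` and the related `result` methods.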
--- src/library/scala/concurrent/Awaitable.scala | 3 ++- src/library/scala/concurrent/Future.scala | 3 ++- src/library/scala/concurrent/package.scala | 3 ++- test/files/jvm/future-spec/main.scala | 2 +- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/library/scala/concurrent/Awaitable.scala b/src/library/scala/concurrent/Awaitable.scala index 4714b351944b..d201a14570f2 100644 --- a/src/library/scala/concurrent/Awaitable.scala +++ b/src/library/scala/concurrent/Awaitable.scala @@ -60,7 +60,8 @@ trait Awaitable[+T] { * @throws TimeoutException if after waiting for the specified time this `Awaitable` is still not ready * @throws IllegalArgumentException if `atMost` is [[scala.concurrent.duration.Duration.Undefined Duration.Undefined]] */ - @throws(classOf[Exception]) + @throws(classOf[TimeoutException]) + @throws(classOf[InterruptedException]) def result(atMost: Duration)(implicit permit: CanAwait): T } diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index 8f6983b27d1d..4f12a8379419 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -578,7 +578,8 @@ object Future { throw new TimeoutException(s"Future timed out after [$atMost]") } - @throws(classOf[Exception]) + @throws(classOf[TimeoutException]) + @throws(classOf[InterruptedException]) override def result(atMost: Duration)(implicit permit: CanAwait): Nothing = { ready(atMost) throw new TimeoutException(s"Future timed out after [$atMost]") diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala index 042b1ab636d0..bc3853a0b98b 100644 --- a/src/library/scala/concurrent/package.scala +++ b/src/library/scala/concurrent/package.scala @@ -214,7 +214,8 @@ package concurrent { * @throws TimeoutException if after waiting for the specified time `awaitable` is still not ready * @throws IllegalArgumentException if `atMost` is [[scala.concurrent.duration.Duration.Undefined Duration.Undefined]] */ - @throws(classOf[Exception]) + @throws(classOf[TimeoutException]) + @throws(classOf[InterruptedException]) def result[T](awaitable: Awaitable[T], atMost: Duration): T = blocking(awaitable.result(atMost)(AwaitPermission)) } diff --git a/test/files/jvm/future-spec/main.scala b/test/files/jvm/future-spec/main.scala index 697d0fe91f3f..f5db78e30b11 100644 --- a/test/files/jvm/future-spec/main.scala +++ b/test/files/jvm/future-spec/main.scala @@ -107,7 +107,7 @@ class TestLatch(count: Int = 1) extends Awaitable[Unit] { this } - @throws(classOf[Exception]) + @throws(classOf[TimeoutException]) def result(atMost: Duration)(implicit permit: CanAwait): Unit = { ready(atMost) } From a3fdd73783b82bb94e5c01f74a6773bc7ccc4b53 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sun, 16 Jun 2019 16:29:43 +1000 Subject: [PATCH 1755/2793] Windows compat for PipelineMainTest, finally? 
https://stackoverflow.com/questions/39628328/trying-to-create-a-directory-immediately-after-a-successful-deleteifexists-throw --- test/junit/scala/tools/nsc/PipelineMainTest.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/junit/scala/tools/nsc/PipelineMainTest.scala b/test/junit/scala/tools/nsc/PipelineMainTest.scala index 8d4218029c6d..e779cfc774e7 100644 --- a/test/junit/scala/tools/nsc/PipelineMainTest.scala +++ b/test/junit/scala/tools/nsc/PipelineMainTest.scala @@ -263,8 +263,8 @@ class PipelineMainTest { class CleanVisitor() extends SimpleFileVisitor[Path] { override def preVisitDirectory(dir: Path, attrs: BasicFileAttributes): FileVisitResult = { if (dir.getFileName.toString == "target") { - deleteRecursive(dir) - Files.createDirectories(dir) + for (p <- Files.list(dir).iterator.asScala) + deleteRecursive(p) FileVisitResult.SKIP_SUBTREE } else super.preVisitDirectory(dir, attrs) } From 205f1c532d0a1b54a2b1874db4c4a553284911b6 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 17 Jun 2019 07:25:06 +1000 Subject: [PATCH 1756/2793] Close .args file after reading --- src/compiler/scala/tools/nsc/CompilerCommand.scala | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala index 66eb574d97d8..86f9e0aa6c1e 100644 --- a/src/compiler/scala/tools/nsc/CompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala @@ -12,6 +12,8 @@ package scala.tools.nsc +import java.nio.file.Files + import io.File /** A class representing command line info for scalac */ @@ -119,11 +121,12 @@ class CompilerCommand(arguments: List[String], val settings: Settings) { */ def expandArg(arg: String): List[String] = { def stripComment(s: String) = s takeWhile (_ != '#') - val file = File(arg stripPrefix "@") - if (!file.exists) - throw new java.io.FileNotFoundException("argument file %s could not be found" format file.name) - - settings splitParams (file.lines() map stripComment mkString " ") + import java.nio.file._ + import collection.JavaConverters._ + val file = Paths.get(arg stripPrefix "@") + if (!Files.exists(file)) + throw new java.io.FileNotFoundException("argument file %s could not be found" format file) + settings splitParams (Files.readAllLines(file).asScala map stripComment mkString " ") } // override this if you don't want arguments processed here From d37ff076906a7bb1e09877654bca77becf3f350e Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Mon, 17 Jun 2019 14:49:33 +0200 Subject: [PATCH 1757/2793] Update windows job to use default java (i.e., 8) --- scripts/jobs/integrate/windows | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/scripts/jobs/integrate/windows b/scripts/jobs/integrate/windows index 426958b3215b..ec4224c32990 100755 --- a/scripts/jobs/integrate/windows +++ b/scripts/jobs/integrate/windows @@ -4,15 +4,16 @@ export ANT_OPTS="-Dfile.encoding=UTF-8 -server -XX:+AggressiveOpts -XX:+UseParNewGC -Xmx2G -Xss1M -XX:MaxPermSize=512M -XX:ReservedCodeCacheSize=128M" -export JAVA_HOME="C:/java/jdk-1.6" -export PATH="$(cygpath $JAVA_HOME)/bin:$PATH" +# scala 2.11.13 and up will be built with Java 8 (until 2.11.12 we used Java 6) +#export JAVA_HOME="C:/java/jdk-1.6" +#export PATH="$(cygpath $JAVA_HOME)/bin:$PATH" java -version javac -version ant -version ant \ - -Dstarr.version=2.11.11 \ + -Dstarr.version=2.11.12 \ -Dscalac.args.optimise=-optimise \ -Dlocker.skip=1 \ test From 
df44d5c17da45863b35c40711c4482f5bda08b4c Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 19 Jun 2019 13:36:39 +1000 Subject: [PATCH 1758/2793] Test case for fixed Java interop bug Fixed in #7671, which was backported to 2.12.x in #7738 --- test/files/neg/t9111b.check | 6 ++++++ test/files/neg/t9111b/A.java | 12 ++++++++++++ test/files/neg/t9111b/Test.scala | 5 +++++ test/files/pos/t9111/A.java | 8 ++++++++ test/files/pos/t9111/C.scala | 4 ++++ 5 files changed, 35 insertions(+) create mode 100644 test/files/neg/t9111b.check create mode 100644 test/files/neg/t9111b/A.java create mode 100644 test/files/neg/t9111b/Test.scala create mode 100644 test/files/pos/t9111/A.java create mode 100644 test/files/pos/t9111/C.scala diff --git a/test/files/neg/t9111b.check b/test/files/neg/t9111b.check new file mode 100644 index 000000000000..668cacbcfd8c --- /dev/null +++ b/test/files/neg/t9111b.check @@ -0,0 +1,6 @@ +Test.scala:4: error: type mismatch; + found : A.T + required: A.P.T + println(j.foo(new A.T())) // compiles in mixed compilation (it should not) + ^ +one error found diff --git a/test/files/neg/t9111b/A.java b/test/files/neg/t9111b/A.java new file mode 100644 index 000000000000..78a0b2f37952 --- /dev/null +++ b/test/files/neg/t9111b/A.java @@ -0,0 +1,12 @@ +public class A { + public static class P { + public static class T { public void f() { } } + } + public static class T { public void g() { } } + public static class Inner extends P { + public class Deeper { + public void foo(T t) { t.f(); } + } + } + } + \ No newline at end of file diff --git a/test/files/neg/t9111b/Test.scala b/test/files/neg/t9111b/Test.scala new file mode 100644 index 000000000000..a6b937b08897 --- /dev/null +++ b/test/files/neg/t9111b/Test.scala @@ -0,0 +1,5 @@ +object Test extends App { + val i = new A.Inner() + val j = new i.Deeper() + println(j.foo(new A.T())) // compiles in mixed compilation (it should not) +} diff --git a/test/files/pos/t9111/A.java b/test/files/pos/t9111/A.java new file mode 100644 index 000000000000..eec221d35ebb --- /dev/null +++ b/test/files/pos/t9111/A.java @@ -0,0 +1,8 @@ +public final class A { + public static final class T { } + public static final class Inner { + public static final class T { } + public T newT() { return null; } + } + } + \ No newline at end of file diff --git a/test/files/pos/t9111/C.scala b/test/files/pos/t9111/C.scala new file mode 100644 index 000000000000..5282862b6ead --- /dev/null +++ b/test/files/pos/t9111/C.scala @@ -0,0 +1,4 @@ +class C { + val i = new A.Inner() + println(i.newT()) +} From 4d3c01bd90fe68ae735669f59841a3e46863c161 Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Wed, 19 Jun 2019 08:08:15 +0200 Subject: [PATCH 1759/2793] mark 2.12 spec as not current anymore partially addresses scala/bug#11566 --- spec/_config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spec/_config.yml b/spec/_config.yml index 22bccafc90bd..ad57339382d2 100644 --- a/spec/_config.yml +++ b/spec/_config.yml @@ -1,5 +1,5 @@ baseurl: /files/archive/spec/2.12 -latestScalaVersion: 2.12 +latestScalaVersion: 2.13 thisScalaVersion: 2.12 safe: true lsi: false From cacfe1ed175baf6677d6755b3249ffa37552c5d7 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Fri, 22 Mar 2019 04:47:48 -0400 Subject: [PATCH 1760/2793] Fix invisible dead link in Scaladoc Fixes scala/bug#11300 scaladoc was producing `` from `scala.collection` package page. scaladoc will produce `` from `scala.collection` package page. 
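A rough, self-contained illustration of the link computation this fix relies on (simplified; the `link` helper below is hypothetical, not scaladoc's actual code): the `../` prefix has to be derived from the depth of the page currently being rendered, and a member's permalink should target the member's own page rather than an anchor on the package page.

```scala
object PermalinkSketch extends App {
  // currentPagePath is deepest-first, mirroring scaladoc's thisPage.path
  def link(currentPagePath: List[String], targetPath: List[String]): String =
    "../" * (currentPagePath.size - 1) + targetPath.mkString("/")

  // Rendering some/pack/index.html, the permalink of object SomeType points at
  // its own page, matching the updated HtmlFactoryTest expectation below:
  println(link(List("index.html", "pack", "some"), List("some", "pack", "SomeType$.html")))
  // prints: ../../some/pack/SomeType$.html
}
```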
--- src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala | 9 ++++----- src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala | 2 +- .../scala/tools/nsc/scaladoc/HtmlFactoryTest.scala | 4 ++-- 3 files changed, 7 insertions(+), 8 deletions(-) diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala index ef5e0cc27b48..469541aabb50 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlPage.scala @@ -260,9 +260,9 @@ abstract class HtmlPage extends Page { thisPage => val Trait, Class, Type, Object, Package = Value } - def permalink(template: Entity, isSelf: Boolean = true): Elem = + def permalink(template: Entity): Elem = - + @@ -297,16 +297,15 @@ abstract class HtmlPage extends Page { thisPage => } } - private def memberToUrl(template: Entity, isSelf: Boolean = true): String = { + private def memberToUrl(template: Entity): String = { val (signature: Option[String], containingTemplate: TemplateEntity) = template match { - case dte: DocTemplateEntity if (!isSelf) => (Some(dte.signature), dte.inTemplate) case dte: DocTemplateEntity => (None, dte) case me: MemberEntity => (Some(me.signature), me.inTemplate) case tpl => (None, tpl) } val templatePath = templateToPath(containingTemplate) - val url = "../" * (templatePath.size - 1) + templatePath.reverse.mkString("/") + val url = "../" * (thisPage.path.size - 1) + templatePath.reverse.mkString("/") url + signature.map("#" + _).getOrElse("") } } diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala index 42a88f537498..9c701e960508 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala @@ -987,7 +987,7 @@ trait EntityPage extends HtmlPage { mbr match { case dte: DocTemplateEntity if !isSelf => - permalink(dte, isSelf) ++ { inside(hasLinks = true, nameLink = relativeLinkTo(dte)) } + permalink(dte) ++ { inside(hasLinks = true, nameLink = relativeLinkTo(dte)) } case _ if isSelf =>

{ inside(hasLinks = true) }

case _ => diff --git a/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala b/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala index 91a38084c92a..289e04987ad7 100644 --- a/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala +++ b/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala @@ -749,8 +749,8 @@ object HtmlFactoryTest extends Properties("HtmlFactory") { property("scala/bug#8144: Members' permalink - inner package") = check("some/pack/index.html") { node => ("type link" |: node.assertTypeLink("../../some/pack/index.html")) && - ("member: SomeType (object)" |: node.assertValuesLink("some.pack.SomeType", "../../some/pack/index.html#SomeType")) && - ("member: SomeType (class)" |: node.assertMemberLink("types")("some.pack.SomeType", "../../some/pack/index.html#SomeTypeextendsAnyRef")) + ("member: SomeType (object)" |: node.assertValuesLink("some.pack.SomeType", "../../some/pack/SomeType$.html")) && + ("member: SomeType (class)" |: node.assertMemberLink("types")("some.pack.SomeType", "../../some/pack/SomeType.html")) } property("scala/bug#8144: Members' permalink - companion object") = check("some/pack/SomeType$.html") { node => From b18bdddde0357158796dfbfd77581f7cb98e20e8 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 20 Jun 2019 07:21:41 +1000 Subject: [PATCH 1761/2793] Integrate benchmarks into the main build and compile the benchmarks in testAll (cherry picked from commit 7b85527ed578bb9a0670af2e0621cb4ec2c325b3) --- build.sbt | 23 ++++++++-- project/plugins.sbt | 2 + test/benchmarks/README.md | 34 +++++---------- test/benchmarks/build.sbt | 11 ----- test/benchmarks/project/build.properties | 1 - test/benchmarks/project/plugins.sbt | 3 -- .../nsc/backend/jvm/ProdConsBenchmark.scala | 43 ------------------- 7 files changed, 31 insertions(+), 86 deletions(-) delete mode 100644 test/benchmarks/build.sbt delete mode 100644 test/benchmarks/project/build.properties delete mode 100644 test/benchmarks/project/plugins.sbt delete mode 100644 test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala diff --git a/build.sbt b/build.sbt index 0651a09e3758..a8de5a531a17 100644 --- a/build.sbt +++ b/build.sbt @@ -116,7 +116,7 @@ mimaReferenceVersion in Global := Some("2.12.0") scalaVersion in Global := versionProps("starr.version") -lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings ++ Seq[Setting[_]]( +lazy val instanceSettings = Seq[Setting[_]]( // we don't cross build Scala itself crossPaths := false, // do not add Scala library jar as a dependency automatically @@ -142,6 +142,10 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings + // This doesn't work in the scala/scala build because the version of scala-library and the scalaVersion of // scala-library are correct to be different. So disable overriding. ivyScala ~= (_ map (_ copy (overrideScalaVersion = false))), + Quiet.silenceScalaBinaryVersionWarning +) + +lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories ++ publishSettings ++ Seq[Setting[_]]( // we always assume that Java classes are standalone and do not have any dependency // on Scala classes compileOrder := CompileOrder.JavaThenScala, @@ -238,8 +242,7 @@ lazy val commonSettings = clearSourceAndResourceDirectories ++ publishSettings + // Don't log process output (e.g. 
of forked `compiler/runMain ...Main`), just pass it // directly to stdout - outputStrategy in run := Some(StdoutOutput), - Quiet.silenceScalaBinaryVersionWarning + outputStrategy in run := Some(StdoutOutput) ) ++ removePomDependencies /** Extra post-processing for the published POM files. These are needed to create POMs that @@ -669,6 +672,17 @@ lazy val specLib = project.in(file("test") / "instrumented") }.taskValue ) +lazy val bench = project.in(file("test") / "benchmarks") + .dependsOn(library) + .settings(instanceSettings) + .settings(disableDocs) + .settings(disablePublishing) + .enablePlugins(JmhPlugin) + .settings( + name := "test-benchmarks", + libraryDependencies += "org.openjdk.jol" % "jol-core" % "0.6", + scalacOptions ++= Seq("-feature", "-opt:l:inline", "-opt-inline-from:**") + ) lazy val junit = project.in(file("test") / "junit") .dependsOn(library, reflect, compiler, partestExtras, scaladoc) @@ -947,7 +961,7 @@ lazy val root: Project = (project in file(".")) (Keys.test in Test in osgiTestFelix).result, (Keys.test in Test in osgiTestEclipse).result)).value, - // all of testRun, testPosPres, testRest + // all of testRun, testPosPres, testRest and more testAll := { val results = ScriptCommands.sequence[(Result[Unit], String)](List( (Keys.test in Test in junit).result map (_ -> "junit/test"), @@ -961,6 +975,7 @@ lazy val root: Project = (project in file(".")) (Keys.test in Test in osgiTestEclipse).result map (_ -> "osgiTestEclipse/test"), (mimaReportBinaryIssues in library).result map (_ -> "library/mimaReportBinaryIssues"), (mimaReportBinaryIssues in reflect).result map (_ -> "reflect/mimaReportBinaryIssues"), + (compile in Compile in bench).map(_ => ()).result map (_ -> "bench/compile"), Def.task(()).dependsOn( // Run these in parallel: doc in Compile in library, doc in Compile in reflect, diff --git a/project/plugins.sbt b/project/plugins.sbt index 96f27899ff8f..73ea2e392f49 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -32,3 +32,5 @@ concurrentRestrictions in Global := Seq( ) addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.0.0") + +addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.27") diff --git a/test/benchmarks/README.md b/test/benchmarks/README.md index a5f1e0f6bee6..994297110f20 100644 --- a/test/benchmarks/README.md +++ b/test/benchmarks/README.md @@ -1,11 +1,12 @@ # Scala library benchmarks -This directory is a standalone sbt project, within the Scala project, -that makes use of the [sbt plugin](https://github.com/ktoso/sbt-jmh) for [JMH](http://openjdk.java.net/projects/code-tools/jmh/). +This directory is used by the `bench` subproject of the Scala sbt build. +It makes use of the [sbt plugin](https://github.com/ktoso/sbt-jmh) for [JMH](http://openjdk.java.net/projects/code-tools/jmh/). ## Running a benchmark -The benchmarks require first building Scala into `../../build/pack`. +Benchmarks are built with the bootstrap compiler ("starr") using the library built from the `library` project ("quick"). +If you want to test compiler changes you need to bootstrap with the new compiler. You'll then need to know the fully-qualified name of the benchmark runner class. The benchmarking classes are organized under `src/main/scala`, @@ -14,12 +15,12 @@ Assuming that we're benchmarking `scala.collection.mutable.OpenHashMap`, the benchmark runner would likely be named `scala.collection.mutable.OpenHashMapRunner`. 
Using this example, one would simply run - jmh:runMain scala.collection.mutable.OpenHashMapRunner + bench/jmh:runMain scala.collection.mutable.OpenHashMapRunner -in sbt, run _from this directory_ (`test/benchmarks`). +in the Scala sbt build. -The JMH results can be found under `target/jmh-results/`. -`target` gets deleted on an sbt `clean`, +The JMH results can be found under `../../target/jmh-results/` (i.e. the main Scala build's `target`, +not the one that contains the benchmark class files). `jmh-results` gets deleted on an sbt `bench/clean`, so you should copy these files out of `target` if you wish to preserve them. ## Creating a benchmark and runner @@ -30,9 +31,9 @@ should that be necessary for benchmarking. There are two types of classes in the source directory: those suffixed `Benchmark` and those suffixed `Runner`. -The former are benchmarks that can be run directly using `jmh:run`; +The former are benchmarks that can be run directly using `bench/jmh:run`; however, they are normally run from a corresponding class of the latter type, -which is run using `jmh:runMain` (as described above). +which is run using `bench/jmh:runMain` (as described above). This …`Runner` class is useful for setting appropriate JMH command options, and for processing the JMH results into files that can be read by other tools, such as Gnuplot. @@ -85,18 +86,3 @@ To show it for _all_ methods, add `-XX:+PrintAssembly`. * "[Anatomy of a flawed benchmark](http://www.ibm.com/developerworks/java/library/j-jtp02225/)" * [Doug Lea's JSR 166 benchmarks](http://gee.cs.oswego.edu/cgi-bin/viewcvs.cgi/jsr166/src/test/loops/) * "[Measuring performance](http://docs.scala-lang.org/overviews/parallel-collections/performance.html)" of Scala parallel collections - -## Legacy frameworks - -An older version of the benchmarking framework is still present in this directory, in the following locations: - -
-<dt>bench</dt>
-<dd>A script to run the old benchmarks.</dd>
-<dt>source.list</dt>
-<dd>A temporary file used by bench.</dd>
-<dt>src/scala/</dt>
-<dd>The older benchmarks, including the previous framework.</dd>
-</dl>
-
- Add some more auto-imports to our hand-rolled definitions of the scala-build project, to eliminate some highlighting errors in build.sbt (cherry picked from commit cac5a86bae0e05b7e080aa6a78f97ba5351096ff) --- build.sbt | 1 + src/intellij/benchmarks.iml.SAMPLE | 20 ++++++++++++++++++++ src/intellij/junit.iml.SAMPLE | 2 +- src/intellij/scala-build.iml.SAMPLE | 2 +- src/intellij/scala.ipr.SAMPLE | 11 +++++++++++ 5 files changed, 34 insertions(+), 2 deletions(-) create mode 100644 src/intellij/benchmarks.iml.SAMPLE diff --git a/build.sbt b/build.sbt index a8de5a531a17..91484cbc1e2f 100644 --- a/build.sbt +++ b/build.sbt @@ -1208,6 +1208,7 @@ intellij := { val buildModule = ("scala-build", scalabuild.BuildInfo.buildClasspath.split(java.io.File.pathSeparator).toSeq.map(new File(_))) // `sbt projects` lists all modules in the build buildModule :: List( + moduleDeps(bench).value, moduleDeps(compilerP).value, // moduleDeps(dist).value, // No sources, therefore no module in IntelliJ moduleDeps(interactive).value, diff --git a/src/intellij/benchmarks.iml.SAMPLE b/src/intellij/benchmarks.iml.SAMPLE new file mode 100644 index 000000000000..60beb65ec0f3 --- /dev/null +++ b/src/intellij/benchmarks.iml.SAMPLE @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/intellij/junit.iml.SAMPLE b/src/intellij/junit.iml.SAMPLE index 87ca58676109..dc0dd9c1199d 100644 --- a/src/intellij/junit.iml.SAMPLE +++ b/src/intellij/junit.iml.SAMPLE @@ -2,7 +2,7 @@ - + diff --git a/src/intellij/scala-build.iml.SAMPLE b/src/intellij/scala-build.iml.SAMPLE index b8f066a2ef98..9bd319bacd53 100644 --- a/src/intellij/scala-build.iml.SAMPLE +++ b/src/intellij/scala-build.iml.SAMPLE @@ -1,5 +1,5 @@ - + diff --git a/src/intellij/scala.ipr.SAMPLE b/src/intellij/scala.ipr.SAMPLE index ed483d019c86..fdad3dbe6895 100644 --- a/src/intellij/scala.ipr.SAMPLE +++ b/src/intellij/scala.ipr.SAMPLE @@ -166,6 +166,7 @@ + @@ -198,6 +199,16 @@ + + + + + + + + + + From 26a27f27f09f5c1a5f973e0161a1ab535c944cfa Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 20 Jun 2019 16:34:42 +1000 Subject: [PATCH 1763/2793] Improve assertion failure in bytecode diffing tests List "added", "deleted" files, and show ASM bytecode diff for the first changed file. 
Sample output: ``` java.lang.AssertionError: assertion failed: Difference detected between recompiling OutlineTypePipeline Run: jardiff -r /var/folders/b7/xcc2k0ln6ldcv247ffpy2d1w0000gp/T/pipelineBase7354584447902237582/Traditional/classes /var/folders/b7/xcc2k0ln6ldcv247ffpy2d1w0000gp/T/pipelineBase7354584447902237582/OutlineTypePipeline/classes ContentsDiffer(b5/p2/target/b5/p2/ScalaSub.class)--- /var/folders/b7/xcc2k0ln6ldcv247ffpy2d1w0000gp/T/pipelineBase7354584447902237582/Traditional/classes/b5/p2/target/b5/p2/ScalaSub.class +++ /var/folders/b7/xcc2k0ln6ldcv247ffpy2d1w0000gp/T/pipelineBase7354584447902237582/OutlineTypePipeline/classes/b5/p2/target/b5/p2/ScalaSub.class @@ -3,32 +3,20 @@ public class b5/p2/ScalaSub extends b5/p1/JavaProtectedMethod implements b5/p1/NeedSuperAccessor { // compiled from: ScalaSub.scala @Lscala/reflect/ScalaSignature;(bytes="\u0006\u0001Y1AAA\u0002\u0001\u0011!)!\u0003\u0001C\u0001'\u0009A1kY1mCN+(M\u0003\u0002\u0005\u000b\u0005\u0011\u0001O\r\u0006\u0002\r\u0005\u0011!-N\u0002\u0001'\r\u0001\u0011b\u0004\u0009\u0003\u00155i\u0011a\u0003\u0006\u0003\u0019\u0015\u0009!\u0001]\u0019\n\u00059Y!a\u0005&bm\u0006\u0004&o\u001c;fGR,G-T3uQ>$\u0007C\u0001\u0006\u0011\u0013\u0009\u00092BA\u0009OK\u0016$7+\u001e9fe\u0006\u001b7-Z:t_J\u000ca\u0001P5oSRtD#\u0001\u000b\u0011\u0005U\u0001Q\"A\u0002") ATTRIBUTE ScalaSig : unknown ATTRIBUTE ScalaInlineInfo : unknown - // access flags 0x1001 - public synthetic b5$p1$NeedSuperAccessor$$super$foo$JavaProtectedMethod()Ljava/lang/String; - L0 - LINENUMBER 3 L0 - ALOAD 0 - INVOKESPECIAL b5/p1/JavaProtectedMethod.foo ()Ljava/lang/String; - ARETURN - L1 - LOCALVARIABLE this Lb5/p2/ScalaSub; L0 L1 0 - MAXSTACK = 1 - MAXLOCALS = 1 - // access flags 0x1 public foo()Ljava/lang/String; L0 LINENUMBER 3 L0 ALOAD 0 INVOKESTATIC b5/p1/NeedSuperAccessor.foo$ (Lb5/p1/NeedSuperAccessor;)Ljava/lang/String; (itf) ARETURN L1 LOCALVARIABLE this Lb5/p2/ScalaSub; L0 L1 0 MAXSTACK = 1 at scala.Predef$.assert(Predef.scala:223) at scala.tools.nsc.FileUtils$.assertDirectorySame(FileUtils.scala:27) at scala.tools.nsc.PipelineMainTest.$anonfun$check$3(PipelineMainTest.scala:76) at scala.tools.nsc.PipelineMainTest.$anonfun$check$3$adapted(PipelineMainTest.scala:71) at scala.collection.immutable.List.foreach(List.scala:392) at scala.tools.nsc.PipelineMainTest.check(PipelineMainTest.scala:71) at scala.tools.nsc.PipelineMainTest.pipelineMainBuildsSeparate(PipelineMainTest.scala:36) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47) at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12) at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44) at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17) at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26) at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27) at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271) at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70) at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50) at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238) at 
org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63) at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236) at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53) at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229) at org.junit.runners.ParentRunner.run(ParentRunner.java:309) at org.junit.runner.JUnitCore.run(JUnitCore.java:160) at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68) at com.intellij.rt.execution.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:47) at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:242) at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:70) ``` --- test/junit/scala/tools/nsc/FileUtils.scala | 95 ++++++++++++++++++++-- 1 file changed, 87 insertions(+), 8 deletions(-) diff --git a/test/junit/scala/tools/nsc/FileUtils.scala b/test/junit/scala/tools/nsc/FileUtils.scala index 03befd661cab..a3443febc036 100644 --- a/test/junit/scala/tools/nsc/FileUtils.scala +++ b/test/junit/scala/tools/nsc/FileUtils.scala @@ -3,23 +3,87 @@ package scala.tools.nsc import java.nio.file.attribute.BasicFileAttributes import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor} -import scala.collection.JavaConverters.asScalaIteratorConverter +import difflib.DiffUtils + +import scala.collection.JavaConverters.{asJavaIteratorConverter, asScalaBufferConverter, asScalaIteratorConverter} import scala.reflect.io.PlainNioFile +import scala.tools.nsc.backend.jvm.AsmUtils object FileUtils { def assertDirectorySame(dir1: Path, dir2: Path, dir2Label: String): Unit = { - assert(FileUtils.diff(dir1, dir2), s"Difference detected between recompiling $dir2Label Run:\njardiff -r $dir1 $dir2\n") + val diffs = FileUtils.diff(dir1, dir2) + def diffText = { + val builder = new java.lang.StringBuilder + var showDetail = 1 // limit printing of diff to first class + diffs.foreach { diff => + val showDiff = { + try showDetail > 0 + finally showDetail -= 1 + } + diff.diffString(builder, showDiff) + } + builder.toString + } + assert(diffs.isEmpty, s"Difference detected between recompiling $dir2Label Run:\njardiff -r $dir1 $dir2\n$diffText") + } + sealed abstract class Diff(path: Path) { + def diffString(builder: java.lang.StringBuilder, showDiff: Boolean): Unit = builder.append(toString) + } + final case class ContentsDiffer(relativePath: Path, path1: Path, path2: Path, left: Array[Byte], right: Array[Byte]) extends Diff(relativePath) { + override def toString: String = { + s"ContentsDiffer($relativePath)" + } + override def diffString(builder: java.lang.StringBuilder, showDiff: Boolean): Unit = { + builder.append(productPrefix).append("(").append(relativePath).append(")") + if (relativePath.getFileName.toString.endsWith(".class")) { + if (showDiff) { + val class1 = AsmUtils.readClass(path1.toFile.getAbsolutePath) + val class2 = AsmUtils.readClass(path2.toFile.getAbsolutePath) + val text1 = AsmUtils.textify(class1) + val text2 = AsmUtils.textify(class2) + builder.append(unifiedDiff(path1, path2, text1, text2)) + } else { + builder.append("[diff suppressed for brevity]") + } + } + } } - def diff(dir1: Path, dir2: Path): Boolean = { - def allFiles(dir: Path) = Files.walk(dir).iterator().asScala.map(x => (dir.relativize(x), x)).toList.filter(_._2.getFileName.toString.endsWith(".class")).sortBy(_._1.toString) + final case class Missing(relativePath: Path, foundPath: Path) extends Diff(relativePath) + + def diff(dir1: Path, dir2: Path): 
List[Diff] = { + val diffs = collection.mutable.ListBuffer[Diff]() + def allFiles(dir: Path): Map[Path, Map[String, Path]] = { + val classFiles: List[(Path, Path)] = Files.walk(dir).iterator().asScala.map(x => (dir.relativize(x), x)).toList.filter(_._2.getFileName.toString.endsWith(".class")).toList + classFiles.groupBy(_._1).mapValues(ps => ps.map { case (_, p) => (p.getFileName.toString, p)}.toMap).toMap + } val dir1Files = allFiles(dir1) val dir2Files = allFiles(dir2) - val identical = dir1Files.corresponds(dir2Files) { - case ((rel1, file1), (rel2, file2)) => - rel1 == rel2 && java.util.Arrays.equals(Files.readAllBytes(file1), Files.readAllBytes(file2)) + val allSubDirs = dir1Files.keySet ++ dir2Files.keySet + for (subDir <- allSubDirs.toList.sortBy(_.iterator().asScala.map(_.toString).toIterable)) { + val files1 = dir1Files.getOrElse(subDir, Map.empty) + val files2 = dir2Files.getOrElse(subDir, Map.empty) + val allFileNames = files1.keySet ++ files2.keySet + for (name <- allFileNames.toList.sorted) { + (files1.get(name), files2.get(name)) match { + case (Some(file1), Some(file2)) => + val bytes1 = Files.readAllBytes(file1) + val bytes2 = Files.readAllBytes(file2) + if (!java.util.Arrays.equals(bytes1, bytes2)) { + diffs += ContentsDiffer(dir1.relativize(file1), file1, file2, bytes1, bytes2) + } + case (Some(file1), None) => + val relativePath = file1.relativize(dir1) + diffs += Missing(relativePath, file1) + case (None, Some(file2)) => + val relativePath = file2.relativize(dir2) + diffs += Missing(relativePath, file2) + case (None, None) => + throw new IllegalStateException() + } + } } - identical + diffs.toList } def deleteRecursive(f: Path) = new PlainNioFile(f).delete() @@ -36,4 +100,19 @@ object FileUtils { } Files.walkFileTree(src, new CopyVisitor(src, dest)) } + + private def unifiedDiff(path1: Path, path2: Path, text1: String, text2: String) = { + def lines(s: String) = { + val result = new java.util.ArrayList[String]() + s.linesIterator.foreach(result.add) + result + } + + val lines1 = lines(text1) + val lines2 = lines(text2) + val patch = DiffUtils.diff(lines1, lines2) + val value = DiffUtils.generateUnifiedDiff(path1.toString, path2.toString, lines1, patch, 10) + val diffToString = value.asScala.mkString("\n") + diffToString + } } From 0b4b1c0caf099bbe4cf3db653d7fc7baf0a688ec Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 20 Jun 2019 17:46:37 +1000 Subject: [PATCH 1764/2793] Disable flaky tests for now I'm able to reproduce the failure on a branch and am working on a fix, but I'm not sure how long it will take. Let's turn the tests off until that lands. 
--- test/junit/scala/tools/nsc/PipelineMainTest.scala | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/test/junit/scala/tools/nsc/PipelineMainTest.scala b/test/junit/scala/tools/nsc/PipelineMainTest.scala index e779cfc774e7..5614c2fd007f 100644 --- a/test/junit/scala/tools/nsc/PipelineMainTest.scala +++ b/test/junit/scala/tools/nsc/PipelineMainTest.scala @@ -5,7 +5,7 @@ import java.nio.charset.Charset import java.nio.file.attribute.BasicFileAttributes import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor} -import org.junit.{After, Before, Test} +import org.junit.{After, Before, Ignore, Test} import scala.collection.JavaConverters._ import scala.collection.mutable @@ -32,14 +32,17 @@ class PipelineMainTest { private def projectsBase = createDir(base, "projects") + @Ignore("scala/scala-dev#637") @Test def pipelineMainBuildsSeparate(): Unit = { check(allBuilds.map(_.projects)) } + @Ignore("scala/scala-dev#637") @Test def pipelineMainBuildsCombined(): Unit = { check(List(allBuilds.flatMap(_.projects))) } + @Ignore("scala/scala-dev#637") @Test def pipelineMainBuildsJavaAccessor(): Unit = { // Tests the special case in Typer:::canSkipRhs to make outline typing descend into method bodies might // give rise to super accssors From 25cd14e53e92b2bcf139fe5e891af5c1299bb993 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 20 Jun 2019 18:23:30 +1000 Subject: [PATCH 1765/2793] [backport] +compiler/reflect the the bench classpath Partial backport of #6622 Restores a ProdConsBenchmark now that it will compile. --- build.sbt | 2 +- .../nsc/backend/jvm/ProdConsBenchmark.scala | 43 +++++++++++++++++++ 2 files changed, 44 insertions(+), 1 deletion(-) create mode 100644 test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala diff --git a/build.sbt b/build.sbt index 91484cbc1e2f..9ee4c76a3fb0 100644 --- a/build.sbt +++ b/build.sbt @@ -673,7 +673,7 @@ lazy val specLib = project.in(file("test") / "instrumented") ) lazy val bench = project.in(file("test") / "benchmarks") - .dependsOn(library) + .dependsOn(library, compiler) .settings(instanceSettings) .settings(disableDocs) .settings(disablePublishing) diff --git a/test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala b/test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala new file mode 100644 index 000000000000..761b1168576e --- /dev/null +++ b/test/benchmarks/src/main/scala/scala/tools/nsc/backend/jvm/ProdConsBenchmark.scala @@ -0,0 +1,43 @@ +package scala.tools.nsc +package backend.jvm + +import java.util.concurrent.TimeUnit + +import scala.tools.asm.tree.ClassNode +import org.openjdk.jmh.annotations._ +import org.openjdk.jmh.infra.Blackhole + +import scala.collection.JavaConverters.asScalaIteratorConverter +import scala.tools.asm.tree.ClassNode + +@BenchmarkMode(Array(Mode.AverageTime)) +@Fork(2) +@Threads(1) +@Warmup(iterations = 10) +@Measurement(iterations = 10) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Benchmark) +class ProdConsBenchmark { + type G <: Global + var global: G = _ + private var classNode: ClassNode = _ + + @Setup(Level.Trial) def setup(): Unit = { + val settings = new Settings() + settings.usejavacp.value = true + val global = new Global(settings) + import global._ + this.global = global.asInstanceOf[G] + classNode = AsmUtils.readClass(global.classPath.findClassFile("scala.tools.nsc.typechecker.Implicits$ImplicitSearch").get.toByteArray) + } + + @Benchmark + def prodCons(bh: Blackhole): Unit = { + val 
global: G = this.global + import global.genBCode.postProcessor.backendUtils._ + for (m <- classNode.methods.iterator().asScala) { + bh.consume(new ProdConsAnalyzer(m, classNode.name)) + } + } +} + From 35501d9b3119073db138c3e8c7b0248629a44ae3 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 31 May 2019 10:51:11 +1000 Subject: [PATCH 1766/2793] Cache materialized TypeTags Type tags summoned with `universe.typeTag` or an implicit search are expanded thusly: ``` object Test { def materializeTag = reflect.runtime.universe.typeTag[Option[String]] def main(args: Array[String]): Unit = { val tag1 = materializeTag val tag2 = materializeTag println(tag1 eq tag2) } } ``` ``` def materializeTag: reflect.runtime.universe.TypeTag[Option[String]] = scala.reflect.runtime.`package`.universe.typeTag[Option[String]](({ val $u: reflect.runtime.universe.type = scala.this.reflect.runtime.`package`.universe; val $m: $u.Mirror = scala.this.reflect.runtime.`package`.universe.runtimeMirror(this.getClass().getClassLoader()); $u.TypeTag.apply[Option[String]]($m, { final class $typecreator1 extends TypeCreator { def (): $typecreator1 = { $typecreator1.super.(); () }; def apply[U <: scala.reflect.api.Universe with Singleton]($m$untyped: scala.reflect.api.Mirror[U]): U#Type = { val $u: U = $m$untyped.universe; val $m: $u.Mirror = $m$untyped.asInstanceOf[$u.Mirror]; $u.internal.reificationSupport.TypeRef($u.internal.reificationSupport.ThisType($m.staticPackage("scala").asModule.moduleClass), $m.staticClass("scala.Option"), scala.collection.immutable.List.apply[$u.Type]($u.internal.reificationSupport.TypeRef($u.internal.reificationSupport.SingleType($m.staticPackage("scala").asModule.moduleClass.asType.toTypeConstructor, $m.staticModule("scala.Predef")), $u.internal.reificationSupport.selectType($m.staticModule("scala.Predef").asModule.moduleClass, "String"), scala.collection.immutable.Nil))) } }; new $typecreator1() }) }: reflect.runtime.universe.TypeTag[Option[String]])); ``` A new TypeTag is created time `def materializeTag` is called above; the program prints `false`. This commit introduces a cache, keyed by the synthetic `$typecreator1`, and hosted in the `JavaMirror`. We know that the `apply` method is a pure, so the caching is sound. Using `ClassValue` means that we're not introducing a classloader leak. We are extending the lifetime of the `TypeTag` and contained type itself, which represents a small risk to existing applications, so I've included an opt-out System property. 
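The cache added to `JavaMirror` in the diff below follows the pattern sketched here. This is a standalone approximation, not the actual compiler code; `TypeTagCache`, `pendingCreator`, and `typeTagFor` are names invented for the example. A `ClassValue` keyed by the generated `$typecreator` class holds the tag, and a `ThreadLocal` passes the creator instance into `computeValue`; caching is sound because the creator's `apply` is pure, and `ClassValue` ties each entry's lifetime to the defining class loader, so no loader is leaked.

```scala
import scala.reflect.api.TypeCreator
import scala.reflect.runtime.universe._

// Standalone sketch of the caching scheme, not the code added to JavaMirror.
final class TypeTagCache(mirror: Mirror) {
  private val pendingCreator = new ThreadLocal[TypeCreator]

  private val cache = new ClassValue[TypeTag[_]] {
    // Invoked at most once per $typecreator class; safe because apply is pure.
    override protected def computeValue(cls: Class[_]): TypeTag[_] =
      TypeTag[AnyRef](mirror, pendingCreator.get())
  }

  def typeTagFor(creator: TypeCreator): TypeTag[_] = {
    pendingCreator.set(creator)
    try cache.get(creator.getClass) // entry is released together with its class loader
    finally pendingCreator.remove()
  }
}
```

With such a cache in place, repeated materialisations of the same tag (as in the `materializeTag` example above) return the identical `TypeTag` instance, which is what the new test below asserts.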
--- .../mima-filters/2.12.0.forwards.excludes | 7 ++++++- src/reflect/scala/reflect/api/TypeTags.scala | 18 +++++++++++++---- .../scala/reflect/runtime/JavaMirrors.scala | 20 +++++++++++++++++++ test/files/run/typetags_caching.scala | 15 ++++++++++++++ 4 files changed, 55 insertions(+), 5 deletions(-) create mode 100644 test/files/run/typetags_caching.scala diff --git a/src/reflect/mima-filters/2.12.0.forwards.excludes b/src/reflect/mima-filters/2.12.0.forwards.excludes index 0f3b81cd3cc7..ee9004e6f2ec 100644 --- a/src/reflect/mima-filters/2.12.0.forwards.excludes +++ b/src/reflect/mima-filters/2.12.0.forwards.excludes @@ -31,4 +31,9 @@ ProblemFilters.exclude[MissingClassProblem]("scala.reflect.io.RootPath$") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.URLZipArchive.close") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.FileZipArchive.close") ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ManifestResources.close") -ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.close") \ No newline at end of file +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.io.ZipArchive.close") + +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.runtime.JavaMirrors#JavaMirror.typeTag") +ProblemFilters.exclude[MissingClassProblem]("scala.reflect.runtime.JavaMirrors$JavaMirror$typeTagCache$") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.api.TypeTags.TypeTagImpl") +ProblemFilters.exclude[DirectMissingMethodProblem]("scala.reflect.api.Universe.TypeTagImpl") \ No newline at end of file diff --git a/src/reflect/scala/reflect/api/TypeTags.scala b/src/reflect/scala/reflect/api/TypeTags.scala index cdcd8b6926eb..f61ca3862760 100644 --- a/src/reflect/scala/reflect/api/TypeTags.scala +++ b/src/reflect/scala/reflect/api/TypeTags.scala @@ -288,12 +288,22 @@ trait TypeTags { self: Universe => val Nothing: TypeTag[scala.Nothing] = new PredefTypeTag[scala.Nothing] (NothingTpe, _.TypeTag.Nothing) val Null: TypeTag[scala.Null] = new PredefTypeTag[scala.Null] (NullTpe, _.TypeTag.Null) - def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): TypeTag[T] = - new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1) - + def apply[T](mirror1: scala.reflect.api.Mirror[self.type], tpec1: TypeCreator): TypeTag[T] = { + (mirror1: AnyRef) match { + case m: scala.reflect.runtime.JavaMirrors#JavaMirror + if cacheMaterializedTypeTags && tpec1.getClass.getName.contains("$typecreator") + && tpec1.getClass.getDeclaredFields.length == 0 => // excludes type creators that splice in bound types. 
+ + m.typeTag(tpec1).asInstanceOf[TypeTag[T]] + case _ => + new TypeTagImpl[T](mirror1.asInstanceOf[Mirror], tpec1) + } + } def unapply[T](ttag: TypeTag[T]): Option[Type] = Some(ttag.tpe) - } + private val cacheMaterializedTypeTags = !java.lang.Boolean.getBoolean("scala.reflect.runtime.disable.typetag.cache") + } + private[reflect] def TypeTagImpl[T](mirror: Mirror, tpec: TypeCreator): TypeTag[T] = new TypeTagImpl[T](mirror, tpec) /* @group TypeTags */ private class TypeTagImpl[T](mirror: Mirror, tpec: TypeCreator) extends WeakTypeTagImpl[T](mirror, tpec) with TypeTag[T] { override def in[U <: Universe with Singleton](otherMirror: scala.reflect.api.Mirror[U]): U # TypeTag[T] = { diff --git a/src/reflect/scala/reflect/runtime/JavaMirrors.scala b/src/reflect/scala/reflect/runtime/JavaMirrors.scala index 59f6005261e7..fc15d8ddbe7f 100644 --- a/src/reflect/scala/reflect/runtime/JavaMirrors.scala +++ b/src/reflect/scala/reflect/runtime/JavaMirrors.scala @@ -33,6 +33,7 @@ import internal.pickling.UnPickler import scala.collection.mutable.ListBuffer import internal.Flags._ import ReflectionUtils._ +import scala.reflect.api.TypeCreator import scala.runtime.{ScalaRunTime, BoxesRunTime} private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUniverse with TwoWayCaches { thisUniverse: SymbolTable => @@ -104,6 +105,25 @@ private[scala] trait JavaMirrors extends internal.SymbolTable with api.JavaUnive private val fieldCache = new TwoWayCache[jField, TermSymbol] private val tparamCache = new TwoWayCache[jTypeVariable[_ <: GenericDeclaration], TypeSymbol] + private[this] object typeTagCache extends ClassValue[TypeTag[_]]() { + val typeCreator = new ThreadLocal[TypeCreator]() + + override protected def computeValue(cls: jClass[_]): TypeTag[_] = { + val creator = typeCreator.get() + assert(creator.getClass == cls, (creator, cls)) + TypeTagImpl[AnyRef](thisMirror.asInstanceOf[Mirror], creator) + } + } + + final def typeTag(typeCreator: TypeCreator): TypeTag[_] = { + typeTagCache.typeCreator.set(typeCreator) + try { + typeTagCache.get(typeCreator.getClass) + } finally { + typeTagCache.typeCreator.remove() + } + } + private[runtime] def toScala[J: HasJavaClass, S](cache: TwoWayCache[J, S], key: J)(body: (JavaMirror, J) => S): S = cache.toScala(key){ val jclazz = implicitly[HasJavaClass[J]] getClazz key diff --git a/test/files/run/typetags_caching.scala b/test/files/run/typetags_caching.scala new file mode 100644 index 000000000000..3d47518896c6 --- /dev/null +++ b/test/files/run/typetags_caching.scala @@ -0,0 +1,15 @@ +object Test { + + def materializeTag = reflect.runtime.universe.typeTag[Option[String]] + + def materializeTagBinder[T: reflect.runtime.universe.TypeTag] = reflect.runtime.universe.typeTag[Option[T]] + + def main(args: Array[String]): Unit = { + val tag1 = materializeTag + val tag2 = materializeTag + assert(tag1 eq tag2) // materialized TypeTags are now cached + assert(tag1.tpe eq tag2.tpe) // TypeTags themselves have always cached the created Type in a lazy val. + + assert(materializeTagBinder[String] ne materializeTagBinder[Object]) // type creators that splice bound types aren't cacheable. + } +} From b894a1a426602f037ecc085d0c8b932d0037c6e9 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 21 Jun 2019 09:51:19 +1000 Subject: [PATCH 1767/2793] Fix race condition in pipeline builds Javac must await completion of javac for internal projects on its classpath. I hadn't noticed this problem before because javac is so fast! 
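To illustrate the ordering constraint, here is a hedged, standalone sketch of the dependency wait this patch adds (the real code lives in `PipelineMain`'s for-comprehensions): before running javac for a project, traverse the `javaDone` futures of its upstream projects. `Project`, `dependsOn`, `javaDone` and `compileJava` are simplified stand-ins, not the actual `PipelineMain` types.

```
import scala.concurrent.{Await, Future, Promise}
import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.duration.Duration

object JavacOrdering {
  // Simplified model: each project completes a promise when its javac run finishes.
  final class Project(val name: String) {
    val javaDone: Promise[Unit] = Promise[Unit]()
    def javaCompile(): Unit = { println(s"javac $name"); javaDone.trySuccess(()) }
  }

  // Before running javac for `p`, wait until javac has finished for every internal
  // project on its classpath; otherwise javac can race against upstream output
  // directories that are still being written.
  def compileJava(p: Project, dependsOn: Map[Project, List[Project]]): Future[Unit] =
    for {
      _ <- Future.traverse(dependsOn.getOrElse(p, Nil))(dep => dep.javaDone.future)
    } yield p.javaCompile()

  def main(args: Array[String]): Unit = {
    val (core, app) = (new Project("core"), new Project("app"))
    val done = compileJava(app, Map(app -> List(core)))
    core.javaCompile()               // completes core.javaDone
    Await.result(done, Duration.Inf) // only now does "javac app" run
  }
}
```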
This commit also fixes the return status of `PipelineMain.process` based on whether the reporter has errors or not. I also close Javac's filemanager explicitly, which is good practice but not actually related to this bug. --- src/compiler/scala/tools/nsc/PipelineMain.scala | 5 +++-- test/junit/scala/tools/nsc/PipelineMainTest.scala | 5 +---- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 258ebfc6430c..29b9c560bcec 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -64,7 +64,6 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe } implicit val executor = ExecutionContext.fromExecutor(new java.util.concurrent.ForkJoinPool(parallelism), t => handler.uncaughtException(Thread.currentThread(), t)) - val fileManager = ToolProvider.getSystemJavaCompiler.getStandardFileManager(null, null, null) def changeExtension(p: Path, newExtension: String): Path = { val fileName = p.getFileName.toString val changedFileName = fileName.lastIndexOf('.') match { @@ -255,6 +254,7 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe p.fullCompile() Future.traverse(p.groups)(_.done.future) } + _ <- Future.traverse(dependsOn.getOrElse(p, Nil))(task => task.t.javaDone.future) } yield { p.javaCompile() } @@ -294,6 +294,7 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe // Start javac after scalac has completely finished Future.traverse(p.groups)(_.done.future) } + _ <- Future.traverse(dependsOn.getOrElse(p, Nil))(task => task.t.javaDone.future) } yield { p.javaCompile() } @@ -351,7 +352,7 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe writeChromeTrace(dir, projects) } deleteTempPickleCache() - true + !reporter.hasErrors } private def deleteTempPickleCache(): Unit = { diff --git a/test/junit/scala/tools/nsc/PipelineMainTest.scala b/test/junit/scala/tools/nsc/PipelineMainTest.scala index 5614c2fd007f..e779cfc774e7 100644 --- a/test/junit/scala/tools/nsc/PipelineMainTest.scala +++ b/test/junit/scala/tools/nsc/PipelineMainTest.scala @@ -5,7 +5,7 @@ import java.nio.charset.Charset import java.nio.file.attribute.BasicFileAttributes import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor} -import org.junit.{After, Before, Ignore, Test} +import org.junit.{After, Before, Test} import scala.collection.JavaConverters._ import scala.collection.mutable @@ -32,17 +32,14 @@ class PipelineMainTest { private def projectsBase = createDir(base, "projects") - @Ignore("scala/scala-dev#637") @Test def pipelineMainBuildsSeparate(): Unit = { check(allBuilds.map(_.projects)) } - @Ignore("scala/scala-dev#637") @Test def pipelineMainBuildsCombined(): Unit = { check(List(allBuilds.flatMap(_.projects))) } - @Ignore("scala/scala-dev#637") @Test def pipelineMainBuildsJavaAccessor(): Unit = { // Tests the special case in Typer:::canSkipRhs to make outline typing descend into method bodies might // give rise to super accssors From e48cfd26f66c8f128f966ecd100d23171d9429cf Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 19 Jun 2019 16:36:12 +1000 Subject: [PATCH 1768/2793] Backport changes to Names and ClassfileParser ------------------------------------------------------------------------ Reuse the buffer for classfile reading Classfile parsing does re-enter when we're reading package objects or classfiles for things 
like `scala/native.class`. But for the most part the prior refactorings mean that we typically only parse a single classfile at a time, and as such we can profit from a one-element cache for the buffer to read this into (a standalone sketch of this one-element cache follows at the end of this message).

(cherry picked from commit ed8d95eb3092a6fd239820362034b42ad636d85b)

------------------------------------------------------------------------

Eagerly read from the constant pool as a basis for lazy java class/method types

I've used lazy types for field/method/class infos, which is analogous to what we do in `Unpickler` for Scala-originated types. We read all data needed by the inner class table and the type completers from the pool eagerly, but are still lazy about interning strings to Names and about completing the field/method types themselves.

This fixes some long-standing spurious cyclic errors. Manually tested with:

```
$ scalac -cp $(coursier fetch -q -p com.datastax.cassandra:dse-driver:1.0.0) test.scala
test.scala:2: error: illegal cyclic reference involving class Cluster
  new com.datastax.driver.dse.DseCluster.Builder()
      ^
one error found
$ /code/scala/build/quick/bin/scalac -cp $(coursier fetch -q -p com.datastax.cassandra:dse-driver:1.0.0) test.scala
$ cat test.scala
class Test {
  new com.datastax.driver.dse.DseCluster.Builder()
}
```

------------------------------------------------------------------------

Avoid using Names for fully qualified class names

There is no good reason for these dotted names to be Names and stick around in the name table. Let's use short-lived strings instead.

Reduces the name table by 5% in terms of entries and 10% in terms of characters when compiling src/scalap/**/*.scala

(cherry picked from commit ae18049a6c5f8851e01ac5baebb4b95262df0685)

------------------------------------------------------------------------

Avoid Names for descriptors, generic sigs, and string constants

We can just keep these as short-lived Strings, rather than interning them into the Name table for the entire lifetime of Global.

(cherry picked from commit 688bf0fcae4ced47fa440def73e3940005c841b1)

------------------------------------------------------------------------

Invalidate symbols for artifact classfiles, refactor classfile parser

No longer run the classfile parser on Scala-generated classfiles that don't have a Scala signature (module classes, inner classes, etc).

Various cleanups in the classfile parser minimize the work performed on Scala classfiles. Before, the attributes section was parsed twice: once to find the ScalaSig attribute, and a second time to find the ScalaSignature in the RuntimeVisibleAnnotations. Now everything happens in the first iteration.

Also fixes a bug in the backend: classes ending in `$` did not get a ScalaSignature by mistake. They were filtered out by the name-based test that is supposed to identify module classes.

(cherry picked from commit 3aea776ca1aa82c9de44cc6806dcdb242f3b40f8)

------------------------------------------------------------------------

Remove unnecessary abstraction

Added in ced7214959, no longer needed since ICodeReader is gone.
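Returning to the buffer-reuse change mentioned at the top of this message: below is a minimal sketch of the one-element instance cache idea, in the spirit of `ReusableInstance`/`ReusableDataReader` but not the actual implementation. `OneElementCache`, `ReusableBuffer` and the 64k starting size are invented for illustration; the sketch is single-threaded.

```
// Hand out one cached instance, unless the caller is already inside a use of it;
// re-entrant calls (e.g. parsing one classfile triggers parsing another) fall
// back to a fresh, throw-away instance.
final class OneElementCache[T <: AnyRef](make: () => T) {
  private[this] val cached = make()
  private[this] var inUse  = false

  def using[R](body: T => R): R =
    if (inUse) body(make())
    else {
      inUse = true
      try body(cached)
      finally inUse = false
    }
}

object ClassfileReadDemo {
  final class ReusableBuffer { var bytes = new Array[Byte](64 * 1024) }

  private val readers = new OneElementCache(() => new ReusableBuffer)

  def parse(classfile: Array[Byte]): Unit = readers.using { buf =>
    // Grow the buffer if needed, then read through it; nested parse() calls
    // transparently get their own buffer instead of clobbering this one.
    if (buf.bytes.length < classfile.length) buf.bytes = new Array[Byte](classfile.length)
    System.arraycopy(classfile, 0, buf.bytes, 0, classfile.length)
  }
}
```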
(cherry picked from commit e216e0ef0376c550846de974d5b71b39b92120b8) --- src/compiler/scala/tools/nsc/Global.scala | 3 + .../tools/nsc/backend/jvm/BCodeHelpers.scala | 2 +- .../tools/nsc/symtab/SymbolLoaders.scala | 10 +- .../symtab/classfile/AbstractFileReader.scala | 32 +- .../symtab/classfile/ClassfileParser.scala | 977 ++++++++++-------- .../nsc/symtab/classfile/DataReader.scala | 68 ++ .../symtab/classfile/ReusableDataReader.scala | 156 +++ .../scala/reflect/internal/Definitions.scala | 25 +- .../scala/reflect/internal/Mirrors.scala | 73 +- .../scala/reflect/internal/Names.scala | 2 + .../scala/reflect/internal/StdNames.scala | 26 +- .../scala/reflect/internal/Symbols.scala | 11 +- .../scala/reflect/internal/Types.scala | 48 +- .../scala/reflect/io/AbstractFile.scala | 1 + src/reflect/scala/reflect/io/PlainFile.scala | 4 + .../reflect/runtime/JavaUniverseForce.scala | 1 - test/files/jvm/throws-annot-from-java.check | 10 +- .../jvm/throws-annot-from-java/Test_3.scala | 6 +- test/files/neg/moduleClassReference.check | 4 + test/files/neg/moduleClassReference.scala | 3 + test/files/neg/t7251.check | 2 +- test/files/run/compiler-asSeenFrom.scala | 2 +- test/files/run/existentials-in-compiler.scala | 4 +- .../t7008-scala-defined/Impls_Macros_2.scala | 2 + test/files/run/t7008/Impls_Macros_2.scala | 2 + test/files/run/t7096.scala | 2 +- test/files/run/t7455/Test.scala | 2 +- 27 files changed, 897 insertions(+), 581 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/symtab/classfile/DataReader.scala create mode 100644 src/compiler/scala/tools/nsc/symtab/classfile/ReusableDataReader.scala create mode 100644 test/files/neg/moduleClassReference.check create mode 100644 test/files/neg/moduleClassReference.scala diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index eaaba1e99b2e..9bf44d789767 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1548,6 +1548,9 @@ class Global(var currentSettings: Settings, reporter0: Reporter) reporting.summarizeErrors() + // val allNamesArray: Array[String] = allNames().map(_.toString).toArray.sorted + // allNamesArray.foreach(println(_)) + if (traceSymbolActivity) units map (_.body) foreach (traceSymbols recordSymbolsInTree _) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index 5fe51011b856..df9aa82a6792 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -415,7 +415,7 @@ abstract class BCodeHelpers extends BCodeIdiomatic { */ def getAnnotPickle(jclassName: String, sym: Symbol): Option[AnnotationInfo] = { currentRun.symData get sym match { - case Some(pickle) if !sym.isModuleClass => + case Some(pickle) if !sym.isModuleClass => // pickles for module classes are in the companion / mirror class val scalaAnnot = { val sigBytes = ScalaSigBytes(pickle.bytes.take(pickle.writeIndex)) AnnotationInfo(sigBytes.sigAnnot, Nil, (nme.bytes, sigBytes) :: Nil) diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 2ad68f4d6203..847b1837bbe7 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -13,13 +13,13 @@ package scala.tools.nsc package symtab -import classfile.ClassfileParser +import classfile.{ClassfileParser, ReusableDataReader} 
import java.io.IOException import scala.reflect.internal.MissingRequirementError import scala.reflect.io.{AbstractFile, NoAbstractFile} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} import scala.reflect.internal.TypesStats -import scala.reflect.internal.util.StatisticsStatics +import scala.reflect.internal.util.{ReusableInstance, StatisticsStatics} /** This class ... * @@ -301,13 +301,11 @@ abstract class SymbolLoaders { } } } - + private val classFileDataReader: ReusableInstance[ReusableDataReader] = new ReusableInstance[ReusableDataReader](() => new ReusableDataReader()) class ClassfileLoader(val classfile: AbstractFile, clazz: ClassSymbol, module: ModuleSymbol) extends SymbolLoader with FlagAssigningCompleter { private object classfileParser extends { val symbolTable: SymbolLoaders.this.symbolTable.type = SymbolLoaders.this.symbolTable - } with ClassfileParser { - override protected type ThisConstantPool = ConstantPool - override protected def newConstantPool: ThisConstantPool = new ConstantPool + } with ClassfileParser(classFileDataReader) { override protected def lookupMemberAtTyperPhaseIfPossible(sym: Symbol, name: Name): Symbol = SymbolLoaders.this.lookupMemberAtTyperPhaseIfPossible(sym, name) /* diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala index 19be00dd686a..17d70998f3d9 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala @@ -14,8 +14,10 @@ package scala.tools.nsc package symtab package classfile -import java.lang.Float.intBitsToFloat +import java.io.{ByteArrayInputStream, DataInputStream} import java.lang.Double.longBitsToDouble +import java.lang.Float.intBitsToFloat +import java.util import scala.tools.nsc.io.AbstractFile @@ -25,8 +27,11 @@ import scala.tools.nsc.io.AbstractFile * @author Philippe Altherr * @version 1.0, 23/03/2004 */ -class AbstractFileReader(val file: AbstractFile, val buf: Array[Byte]) { - def this(file: AbstractFile) = this(file, file.toByteArray) +final class AbstractFileReader(val buf: Array[Byte]) extends DataReader { + @deprecated("Use other constructor", "2.13.0") + def this(file: AbstractFile) { + this(file.toByteArray) + } /** the current input pointer */ @@ -59,17 +64,25 @@ class AbstractFileReader(val file: AbstractFile, val buf: Array[Byte]) { ((nextByte & 0xff) << 24) + ((nextByte & 0xff) << 16) + ((nextByte & 0xff) << 8) + (nextByte & 0xff) + /** extract a byte at position bp from buf + */ + def getByte(mybp: Int): Byte = + buf(mybp) + + def getBytes(mybp: Int, bytes: Array[Byte]): Unit = { + System.arraycopy(buf, mybp, bytes, 0, bytes.length) + } /** extract a character at position bp from buf */ def getChar(mybp: Int): Char = - (((buf(mybp) & 0xff) << 8) + (buf(mybp+1) & 0xff)).toChar + (((getByte(mybp) & 0xff) << 8) + (getByte(mybp+1) & 0xff)).toChar /** extract an integer at position bp from buf */ def getInt(mybp: Int): Int = - ((buf(mybp ) & 0xff) << 24) + ((buf(mybp+1) & 0xff) << 16) + - ((buf(mybp+2) & 0xff) << 8) + (buf(mybp+3) & 0xff) + ((getByte(mybp) & 0xff) << 24) + ((getByte(mybp + 1) & 0xff) << 16) + + ((getByte(mybp + 2) & 0xff) << 8) + (getByte(mybp + 3) & 0xff) /** extract a long integer at position bp from buf */ @@ -84,8 +97,11 @@ class AbstractFileReader(val file: AbstractFile, val buf: Array[Byte]) { */ def getDouble(mybp: Int): Double = longBitsToDouble(getLong(mybp)) + def getUTF(mybp: Int, 
len: Int): String = { + new DataInputStream(new ByteArrayInputStream(buf, mybp, len)).readUTF + } + /** skip next 'n' bytes */ - def skip(n: Int) { bp += n } - + def skip(n: Int): Unit = { bp += n } } diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index c855f1c11bb6..f637f28d4ecf 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -17,13 +17,15 @@ package classfile import java.io.{ByteArrayInputStream, DataInputStream, File, IOException} import java.lang.Integer.toHexString +import java.nio.ByteBuffer import scala.collection.{immutable, mutable} import scala.collection.mutable.{ArrayBuffer, ListBuffer} import scala.annotation.switch import scala.reflect.internal.JavaAccFlags -import scala.reflect.internal.pickling.{ByteCodecs, PickleBuffer} -import scala.reflect.io.NoAbstractFile +import scala.reflect.internal.pickling.ByteCodecs +import scala.reflect.internal.util.ReusableInstance +import scala.reflect.io.{NoAbstractFile, VirtualFile} import scala.reflect.internal.util.Collections._ import scala.tools.nsc.util.ClassPath import scala.tools.nsc.io.AbstractFile @@ -34,7 +36,7 @@ import scala.util.control.NonFatal * @author Martin Odersky * @version 1.0 */ -abstract class ClassfileParser { +abstract class ClassfileParser(reader: ReusableInstance[ReusableDataReader]) { val symbolTable: SymbolTable { def settings: Settings } @@ -60,21 +62,17 @@ abstract class ClassfileParser { import scala.reflect.internal.ClassfileConstants._ import Flags._ - protected type ThisConstantPool <: ConstantPool - protected def newConstantPool: ThisConstantPool - - protected var file: AbstractFile = _ // the class file - protected var in: AbstractFileReader = _ // the class file reader + protected var file: AbstractFile = _ // the class file + protected var in: DataReader = _ // the class file reader protected var clazz: ClassSymbol = _ // the class symbol containing dynamic members protected var staticModule: ModuleSymbol = _ // the module symbol containing static members protected var instanceScope: Scope = _ // the scope of all instance definitions protected var staticScope: Scope = _ // the scope of all static definitions - protected var pool: ThisConstantPool = _ // the classfile's constant pool + protected var pool: ConstantPool = _ // the classfile's constant pool protected var isScala: Boolean = _ // does class file describe a scala class? - protected var isScalaAnnot: Boolean = _ // does class file describe a scala class with its pickled info in an annotation? 
protected var isScalaRaw: Boolean = _ // this class file is a scala class with no pickled info protected var busy: Symbol = _ // lock to detect recursive reads - protected var currentClass: Name = _ // JVM name of the current class + protected var currentClass: String = _ // JVM name of the current class protected var classTParams = Map[Name,Symbol]() protected var srcfile0 : Option[AbstractFile] = None protected def moduleClass: Symbol = staticModule.moduleClass @@ -100,7 +98,7 @@ abstract class ClassfileParser { private def readMethodFlags() = JavaAccFlags methodFlags u2 private def readFieldFlags() = JavaAccFlags fieldFlags u2 private def readTypeName() = readName().toTypeName - private def readName() = pool getName u2 + private def readName() = pool.getName(u2).name private def readType() = pool getType u2 private object unpickler extends scala.reflect.internal.pickling.UnPickler { @@ -134,11 +132,6 @@ abstract class ClassfileParser { catch parseErrorHandler finally busy = NoSymbol } - @inline private def raiseLoaderLevel[T](body: => T): T = { - loaders.parentsLevel += 1 - try body - finally loaders.parentsLevel -= 1 - } /** * `clazz` and `module` are the class and module symbols corresponding to the classfile being @@ -152,20 +145,23 @@ abstract class ClassfileParser { def parse(file: AbstractFile, clazz: ClassSymbol, module: ModuleSymbol): Unit = { this.file = file pushBusy(clazz) { - this.clazz = clazz - this.staticModule = module - this.isScala = false - - this.in = new AbstractFileReader(file) - val magic = in.getInt(in.bp) - if (magic != JAVA_MAGIC && file.name.endsWith(".sig")) { - currentClass = TermName(clazz.javaClassName) - isScala = true - unpickler.unpickle(in.buf, 0, clazz, staticModule, file.name) - } else { - parseHeader() - this.pool = newConstantPool - parseClass() + reader.using { reader => + this.clazz = clazz + this.staticModule = module + this.isScala = false + + val fileContents = file.toByteArray + this.in = new AbstractFileReader(fileContents) + val magic = in.getInt(in.bp) + if (magic != JAVA_MAGIC && file.name.endsWith(".sig")) { + currentClass = clazz.javaClassName + isScala = true + unpickler.unpickle(fileContents, 0, clazz, staticModule, file.name) + } else { + parseHeader() + this.pool = new ConstantPool + parseClass() + } } } } @@ -173,11 +169,26 @@ abstract class ClassfileParser { private def parseHeader() { val magic = u4 if (magic != JAVA_MAGIC) - abort(s"class file ${in.file} has wrong magic number 0x${toHexString(magic)}") + abort(s"class file ${file} has wrong magic number 0x${toHexString(magic)}") val minor, major = u2 if (major < JAVA_MAJOR_VERSION || major == JAVA_MAJOR_VERSION && minor < JAVA_MINOR_VERSION) - abort(s"class file ${in.file} has unknown version $major.$minor, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION") + abort(s"class file ${file} has unknown version $major.$minor, should be at least $JAVA_MAJOR_VERSION.$JAVA_MINOR_VERSION") + } + + protected class NameOrString(val value: String) { + private var _name: Name = null + def name: Name = { + if (_name eq null) _name = TermName(value) + _name + } + } + + def getClassSymbol(name: String): Symbol = { + name match { + case name if name.endsWith(nme.MODULE_SUFFIX_STRING) => rootMirror getModuleByName newTermName(name).dropModule + case name => classNameToSymbol(name) + } } /** @@ -187,7 +198,9 @@ abstract class ClassfileParser { protected val len = u2 protected val starts = new Array[Int](len) protected val values = new Array[AnyRef](len) - protected val internalized = new 
Array[Name](len) + protected val internalized = new Array[NameOrString](len) + + val initBp = in.bp { var i = 1 while (i < starts.length) { @@ -205,7 +218,7 @@ abstract class ClassfileParser { } } } - + val endBp = in.bp def recordAtIndex[T <: AnyRef](value: T, idx: Int): T = { values(idx) = value value @@ -213,33 +226,30 @@ abstract class ClassfileParser { def firstExpecting(index: Int, expected: Int): Int = { val start = starts(index) - val first = in.buf(start).toInt + val first = in.getByte(start).toInt if (first == expected) start + 1 else this errorBadTag start } /** Return the name found at given index. */ - def getName(index: Int): Name = ( + def getName(index: Int): NameOrString = ( if (index <= 0 || len <= index) errorBadIndex(index) else values(index) match { - case name: Name => name + case name: NameOrString => name case _ => val start = firstExpecting(index, CONSTANT_UTF8) val len = in.getChar(start).toInt - recordAtIndex(TermName(fromMUTF8(in.buf, start, len + 2)), index) + recordAtIndex(new NameOrString(in.getUTF(start, len + 2)), index) } ) - private def fromMUTF8(bytes: Array[Byte], offset: Int, len: Int): String = - new DataInputStream(new ByteArrayInputStream(bytes, offset, len)).readUTF - /** Return the name found at given index in the constant pool, with '/' replaced by '.'. */ - def getExternalName(index: Int): Name = { + def getExternalName(index: Int): NameOrString = { if (index <= 0 || len <= index) errorBadIndex(index) if (internalized(index) == null) - internalized(index) = getName(index).replace('/', '.') + internalized(index) = new NameOrString(getName(index).value.replace('/', '.')) internalized(index) } @@ -249,10 +259,7 @@ abstract class ClassfileParser { values(index) match { case sym: Symbol => sym case _ => - val result = getClassName(index) match { - case name if nme.isModuleName(name) => rootMirror getModuleByName name.dropModule - case name => classNameToSymbol(name) - } + val result = ClassfileParser.this.getClassSymbol(getClassName(index).value) recordAtIndex(result, index) } } @@ -260,9 +267,9 @@ abstract class ClassfileParser { /** Return the external name of the class info structure found at 'index'. * Use 'getClassSymbol' if the class is sure to be a top-level class. */ - def getClassName(index: Int): Name = { + def getClassName(index: Int): NameOrString = { val start = firstExpecting(index, CONSTANT_CLASS) - getExternalName((in getChar start).toInt) + getExternalName((in.getChar(start)).toInt) } /** Return a name and a type at the given index. 
If the type is a method @@ -279,14 +286,14 @@ abstract class ClassfileParser { val start = firstExpecting(index, CONSTANT_NAMEANDTYPE) val name = getName(in.getChar(start).toInt) // create a dummy symbol for method types - val dummy = ownerTpe.typeSymbol.newMethod(name.toTermName, ownerTpe.typeSymbol.pos) + val dummy = ownerTpe.typeSymbol.newMethod(name.name.toTermName, ownerTpe.typeSymbol.pos) val tpe = getType(dummy, in.getChar(start + 2).toInt) // fix the return type, which is blindly set to the class currently parsed val restpe = tpe match { - case MethodType(formals, _) if name == nme.CONSTRUCTOR => MethodType(formals, ownerTpe) - case _ => tpe + case MethodType(formals, _) if name.name == nme.CONSTRUCTOR => MethodType(formals, ownerTpe) + case _ => tpe } - ((name, restpe)) + ((name.name, restpe)) } } @@ -301,21 +308,21 @@ abstract class ClassfileParser { case cls: Symbol => cls.tpe_* case _ => val name = getClassName(index) - name charAt 0 match { - case ARRAY_TAG => recordAtIndex(sigToType(null, name), index) - case _ => recordAtIndex(classNameToSymbol(name), index).tpe_* + name.value.charAt(0) match { + case ARRAY_TAG => recordAtIndex(sigToType(null, name.value), index) + case _ => recordAtIndex(classNameToSymbol(name.value), index).tpe_* } } } def getType(index: Int): Type = getType(null, index) - def getType(sym: Symbol, index: Int): Type = sigToType(sym, getExternalName(index)) - def getSuperClass(index: Int): Symbol = if (index == 0) AnyClass else getClassSymbol(index) // the only classfile that is allowed to have `0` in the super_class is java/lang/Object (see jvm spec) + def getType(sym: Symbol, index: Int): Type = sigToType(sym, getExternalName(index).value) + def getSuperClassName(index: Int): NameOrString = if (index == 0) null else getClassName(index) // the only classfile that is allowed to have `0` in the super_class is java/lang/Object (see jvm spec) private def createConstant(index: Int): Constant = { val start = starts(index) - Constant((in.buf(start).toInt: @switch) match { - case CONSTANT_STRING => getName(in.getChar(start + 1).toInt).toString + Constant((in.getByte(start).toInt: @switch) match { + case CONSTANT_STRING => getName(in.getChar(start + 1).toInt).value case CONSTANT_INTEGER => in.getInt(start + 1) case CONSTANT_FLOAT => in.getFloat(start + 1) case CONSTANT_LONG => in.getLong(start + 1) @@ -350,7 +357,7 @@ abstract class ClassfileParser { val start = firstExpecting(index, CONSTANT_UTF8) val len = (in getChar start).toInt val bytes = new Array[Byte](len) - System.arraycopy(in.buf, start + 2, bytes, 0, len) + in.getBytes(start + 2, bytes) recordAtIndex(getSubArray(bytes), index) } ) @@ -364,7 +371,10 @@ abstract class ClassfileParser { if (index <= 0 || ConstantPool.this.len <= index) errorBadIndex(index) val start = firstExpecting(index, CONSTANT_UTF8) val len = (in getChar start).toInt - in.buf drop start + 2 take len + val s = start + 2 + val result = new Array[Byte](len) + in.getBytes(s, result) + result } recordAtIndex(getSubArray(arr), head) } @@ -376,7 +386,7 @@ abstract class ClassfileParser { /** Throws an exception signaling a bad tag at given address. 
*/ protected def errorBadTag(start: Int) = - abort(s"bad constant pool tag ${in.buf(start)} at byte $start") + abort(s"bad constant pool tag ${in.getByte(start)} at byte $start") } def stubClassSymbol(name: Name): Symbol = { @@ -392,13 +402,13 @@ abstract class ClassfileParser { NoSymbol.newStubSymbol(name.toTypeName, msg) } - private def lookupClass(name: Name) = try { + private def lookupClass(name: String) = try { def lookupTopLevel = { - if (name containsChar '.') + if (name contains '.') rootMirror getClassByName name else // FIXME - we shouldn't be doing ad hoc lookups in the empty package, getClassByName should return the class - definitions.getMember(rootMirror.EmptyPackageClass, name.toTypeName) + definitions.getMember(rootMirror.EmptyPackageClass, newTypeName(name)) } // For inner classes we usually don't get here: `classNameToSymbol` already returns the symbol @@ -409,21 +419,23 @@ abstract class ClassfileParser { // what the logic below is for (see PR #5822 / scala/bug#9937). val split = if (isScalaRaw) -1 else name.lastIndexOf('$') if (split > 0 && split < name.length) { - val outerName = name.subName(0, split) - val innerName = name.subName(split + 1, name.length).toTypeName + val outerName = name.substring(0, split) + val innerName = name.substring(split + 1, name.length) val outerSym = classNameToSymbol(outerName) // If the outer class C cannot be found, look for a top-level class C$D if (outerSym.isInstanceOf[StubSymbol]) lookupTopLevel else { + val innerNameAsName = newTypeName(innerName) + // We have a java-defined class name C$D and look for a member D of C. But we don't know if // D is declared static or not, so we have to search both in class C and its companion. val r = if (outerSym == clazz) - staticScope.lookup(innerName) orElse - instanceScope.lookup(innerName) + staticScope.lookup(innerNameAsName) orElse + instanceScope.lookup(innerNameAsName) else - lookupMemberAtTyperPhaseIfPossible(outerSym, innerName) orElse - lookupMemberAtTyperPhaseIfPossible(outerSym.companionModule, innerName) + lookupMemberAtTyperPhaseIfPossible(outerSym, innerNameAsName) orElse + lookupMemberAtTyperPhaseIfPossible(outerSym.companionModule, innerNameAsName) r orElse lookupTopLevel } } else @@ -434,14 +446,16 @@ abstract class ClassfileParser { // - was referenced in the bugfix commit for scala/bug#3756 (4fb0d53), not sure why // - covers the case when a type alias in a package object shadows a class symbol, // getClassByName throws a MissingRequirementError (scala-dev#248) - case _: FatalError => + case ex: FatalError => // getClassByName can throw a MissingRequirementError (which extends FatalError) // definitions.getMember can throw a FatalError, for example in pos/t5165b - stubClassSymbol(name) + if (settings.debug) + ex.printStackTrace() + stubClassSymbol(newTypeName(name)) } /** Return the class symbol of the given name. */ - def classNameToSymbol(name: Name): Symbol = { + def classNameToSymbol(name: String): Symbol = { if (innerClasses contains name) innerClasses innerSymbol name else @@ -449,87 +463,90 @@ abstract class ClassfileParser { } def parseClass() { - val jflags = readClassFlags() - val sflags = jflags.toScalaFlags - val nameIdx = u2 - currentClass = pool.getClassName(nameIdx) - - /* Parse parents for Java classes. For Scala, return AnyRef, since the real type will be unpickled. - * Updates the read pointer of 'in'. */ - def parseParents: List[Type] = { - if (isScala) { - u2 // skip superclass - val ifaces = u2 - in.bp += ifaces * 2 // .. 
and iface count interfaces - List(AnyRefTpe) // dummy superclass, will be replaced by pickled information - } - else raiseLoaderLevel { - val superType = if (jflags.isAnnotation) { u2; AnnotationClass.tpe } - else pool.getSuperClass(u2).tpe_* - val ifaceCount = u2 - var ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClass(u2).tpe_* - if (jflags.isAnnotation) ifaces ::= ClassfileAnnotationClass.tpe - superType :: ifaces - } - } + unpickleOrParseInnerClasses() - val isTopLevel = !(currentClass containsChar '$') // Java class name; *don't* try to to use Scala name decoding (scala/bug#7532) + val jflags = readClassFlags() + val classNameIndex = u2 + currentClass = pool.getClassName(classNameIndex).value + + // Ensure that (top-level) classfiles are in the correct directory + val isTopLevel = !(currentClass contains '$') // Java class name; *don't* try to to use Scala name decoding (scala/bug#7532) if (isTopLevel) { - val c = pool.getClassSymbol(nameIdx) + val c = pool.getClassSymbol(classNameIndex) // scala-dev#248: when a type alias (in a package object) shadows a class symbol, getClassSymbol returns a stub + // TODO: this also prevents the error when it would be useful (`mv a/C.class .`) if (!c.isInstanceOf[StubSymbol] && c != clazz) mismatchError(c) } - addEnclosingTParams(clazz) - parseInnerClasses() // also sets the isScala / isScalaRaw flags, see r15956 - // get the class file parser to reuse scopes. - instanceScope = newScope - staticScope = newScope + // TODO: remove after the next 2.13 milestone + // A bug in the backend caused classes ending in `$` do get only a Scala marker attribute + // instead of a ScalaSig and a Signature annotaiton. This went unnoticed because isScalaRaw + // classes were parsed like Java classes. The below covers the cases in the std lib. 
+ def isNothingOrNull = { + val n = clazz.fullName.toString + n == "scala.runtime.Nothing$" || n == "scala.runtime.Null$" + } + + if (isScala) { + () // We're done + } else if (isScalaRaw && !isNothingOrNull) { + val decls = clazz.enclosingPackage.info.decls + for (c <- List(clazz, staticModule, staticModule.moduleClass)) { + c.setInfo(NoType) + decls.unlink(c) + } + } else { + val sflags = jflags.toScalaFlags // includes JAVA + + addEnclosingTParams(clazz) - val classInfo = ClassInfoType(parseParents, instanceScope, clazz) - val staticInfo = ClassInfoType(List(), staticScope, moduleClass) + // Create scopes before calling `enterOwnInnerClasses` + instanceScope = newScope + staticScope = newScope + val staticInfo = ClassInfoType(List(), staticScope, moduleClass) + + val parentIndex = u2 + val parentName = if (parentIndex == 0) null else pool.getClassName(parentIndex) + val ifaceCount = u2 + val ifaces = for (i <- List.range(0, ifaceCount)) yield pool.getSuperClassName(u2) + val completer = new ClassTypeCompleter(clazz.name, jflags, parentName, ifaces) - if (!isScala && !isScalaRaw) enterOwnInnerClasses() - val curbp = in.bp - skipMembers() // fields - skipMembers() // methods - if (!isScala) { + clazz setInfo completer clazz setFlag sflags - propagatePackageBoundary(jflags, clazz, staticModule, staticModule.moduleClass) - clazz setInfo classInfo moduleClass setInfo staticInfo + moduleClass setFlag JAVA staticModule setInfo moduleClass.tpe staticModule setFlag JAVA - staticModule.moduleClass setFlag JAVA - // attributes now depend on having infos set already - parseAttributes(clazz, classInfo) - - def queueLoad() { - in.bp = curbp - 0 until u2 foreach (_ => parseField()) - sawPrivateConstructor = false - 0 until u2 foreach (_ => parseMethod()) - val needsConstructor = ( - !sawPrivateConstructor - && !(instanceScope containsName nme.CONSTRUCTOR) - && (sflags & INTERFACE) == 0 - ) - if (needsConstructor) - instanceScope enter clazz.newClassConstructor(NoPosition) - } - loaders.pendingLoadActions ::= (queueLoad _) - if (loaders.parentsLevel == 0) { - while (loaders.pendingLoadActions.nonEmpty) { - val item = loaders.pendingLoadActions.head - loaders.pendingLoadActions = loaders.pendingLoadActions.tail - item() - } - } - } else - parseAttributes(clazz, classInfo) + propagatePackageBoundary(jflags, clazz, staticModule, moduleClass) + + val fieldsStartBp = in.bp + skipMembers() // fields + skipMembers() // methods + + parseAttributes(clazz, completer) + + in.bp = fieldsStartBp + 0 until u2 foreach (_ => parseField()) + sawPrivateConstructor = false + 0 until u2 foreach (_ => parseMethod()) + val needsConstructor = ( + !sawPrivateConstructor + && !(instanceScope containsName nme.CONSTRUCTOR) + && ((sflags & INTERFACE) == 0) + ) + if (needsConstructor) + instanceScope enter clazz.newClassConstructor(NoPosition) + + // we could avoid this if we eagerly created class type param symbols here to expose through the + // ClassTypeCompleter to satisfy the calls to rawInfo.typeParams from Symbol.typeParams. That would + // require a refactor of `sigToType`. + // + // We would also need to make sure that clazzTParams is populated before member type completers called sig2type. 
+ clazz.initialize + } } /** Add type parameters of enclosing classes */ @@ -551,17 +568,17 @@ abstract class ClassfileParser { in.skip(4); skipAttributes() } else { val name = readName() - val info = readType() + val lazyInfo = new MemberTypeCompleter(name, jflags, pool.getExternalName(u2).value) val sym = ownerForFlags(jflags).newValue(name.toTermName, NoPosition, sflags) // Note: the info may be overwritten later with a generic signature // parsed from SignatureATTR sym setInfo { if (jflags.isEnum) ConstantType(Constant(sym)) - else info + else lazyInfo } propagatePackageBoundary(jflags, sym) - parseAttributes(sym, info) + parseAttributes(sym, lazyInfo) addJavaFlagsAnnotations(sym, jflags) getScope(jflags) enter sym @@ -586,8 +603,8 @@ abstract class ClassfileParser { val jflags = readMethodFlags() val sflags = jflags.toScalaFlags if (jflags.isPrivate) { - val name = readName() - if (name == nme.CONSTRUCTOR) + val isConstructor = pool.getName(u2).value == "" // opt avoid interning a Name for private methods we're about to discard + if (isConstructor) sawPrivateConstructor = true in.skip(2); skipAttributes() } else { @@ -596,63 +613,30 @@ abstract class ClassfileParser { } else { val name = readName() val sym = ownerForFlags(jflags).newMethod(name.toTermName, NoPosition, sflags) - var info = pool.getType(sym, u2) - var removedOuterParameter = false - if (name == nme.CONSTRUCTOR) - info match { - case MethodType(params, restpe) => - // if this is a non-static inner class, remove the explicit outer parameter - val paramsNoOuter = innerClasses getEntry currentClass match { - case Some(entry) if !isScalaRaw && !entry.jflags.isStatic => - /* About `clazz.owner.hasPackageFlag` below: scala/bug#5957 - * For every nested java class A$B, there are two symbols in the scala compiler. - * 1. created by SymbolLoader, because of the existence of the A$B.class file, owner: package - * 2. created by ClassfileParser of A when reading the inner classes, owner: A - * If symbol 1 gets completed (e.g. because the compiled source mentions `A$B`, not `A#B`), the - * ClassfileParser for 1 executes, and clazz.owner is the package. - */ - assert(params.head.tpe.typeSymbol == clazz.owner || clazz.owner.hasPackageFlag, params.head.tpe.typeSymbol + ": " + clazz.owner) - removedOuterParameter = true - params.tail - case _ => - params - } - val newParams = paramsNoOuter match { - case (init :+ tail) if jflags.isSynthetic => - // scala/bug#7455 strip trailing dummy argument ("access constructor tag") from synthetic constructors which - // are added when an inner class needs to access a private constructor. 
- init - case _ => - paramsNoOuter - } - - info = MethodType(newParams, clazz.tpe) - } // Note: the info may be overwritten later with a generic signature // parsed from SignatureATTR - sym setInfo info + val lazyInfo = new MemberTypeCompleter(name, jflags, pool.getExternalName(u2).value) + sym.info = lazyInfo propagatePackageBoundary(jflags, sym) - parseAttributes(sym, info, removedOuterParameter) + parseAttributes(sym, lazyInfo) addJavaFlagsAnnotations(sym, jflags) - if (jflags.isVarargs) - sym modifyInfo arrayToRepeated - getScope(jflags) enter sym } } } - private def sigToType(sym: Symbol, sig: Name): Type = { + private def sigToType(sym: Symbol, sig: String): Type = { + val sigChars = sig.toCharArray var index = 0 val end = sig.length def accept(ch: Char) { assert(sig.charAt(index) == ch, (sig.charAt(index), ch)) index += 1 } - def subName(isDelimiter: Char => Boolean): Name = { + def subName(isDelimiter: Char => Boolean): String = { val start = index while (!isDelimiter(sig.charAt(index))) { index += 1 } - sig.subName(start, index) + new String(sigChars, start, index - start) } def sig2type(tparams: immutable.Map[Name,Symbol], skiptvs: Boolean): Type = { val tag = sig.charAt(index); index += 1 @@ -724,7 +708,7 @@ abstract class ClassfileParser { var tpe = processClassType(processInner(classSym.tpe_*)) while (sig.charAt(index) == '.') { accept('.') - val name = subName(c => c == ';' || c == '<' || c == '.').toTypeName + val name = newTypeName(subName(c => c == ';' || c == '<' || c == '.')) val clazz = tpe.member(name) val dummyArgs = Nil // the actual arguments are added in processClassType val inner = typeRef(pre = tpe, sym = clazz, args = dummyArgs) @@ -761,7 +745,7 @@ abstract class ClassfileParser { sig2type(tparams, skiptvs) JavaMethodType(sym.newSyntheticValueParams(paramtypes.toList), restype) case 'T' => - val n = subName(';'.==).toTypeName + val n = newTypeName(subName(';'.==)) index += 1 if (skiptvs) AnyTpe else tparams(n).typeConstructor @@ -785,7 +769,7 @@ abstract class ClassfileParser { index += 1 val start = index while (sig.charAt(index) != '>') { - val tpname = subName(':'.==).toTypeName + val tpname = newTypeName(subName(':'.==)) val s = sym.newTypeParameter(tpname) tparams = tparams + (tpname -> s) sig2typeBounds(tparams, skiptvs = true) @@ -793,7 +777,7 @@ abstract class ClassfileParser { } index = start while (sig.charAt(index) != '>') { - val tpname = subName(':'.==).toTypeName + val tpname = newTypeName(subName(':'.==)) val s = tparams(tpname) s.setInfo(sig2typeBounds(tparams, skiptvs = false)) } @@ -816,96 +800,58 @@ abstract class ClassfileParser { GenPolyType(ownTypeParams, tpe) } // sigToType - def parseAttributes(sym: Symbol, symtype: Type, removedOuterParameter: Boolean = false) { - var paramNames: ListBuffer[Name] = null // null means we didn't find any - def convertTo(c: Constant, pt: Type): Constant = { - if (pt.typeSymbol == BooleanClass && c.tag == IntTag) - Constant(c.value != 0) - else - c convertTo pt - } - def parseAttribute() { + /** + * Only invoked for java classfiles. 
+ */ + private def parseAttributes(sym: symbolTable.Symbol, completer: JavaTypeCompleter): Unit = { + def parseAttribute(): Unit = { val attrName = readTypeName() val attrLen = u4 attrName match { case tpnme.SignatureATTR => - if (!isScala && !isScalaRaw) { - val sig = pool.getExternalName(u2) - val newType = sigToType(sym, sig) - sym.setInfo(newType) - } - else in.skip(attrLen) + val sigIndex = u2 + val sig = pool.getExternalName(sigIndex) + assert(sym.rawInfo == completer, sym) + completer.sig = sig.value case tpnme.SyntheticATTR => sym.setFlag(SYNTHETIC | ARTIFACT) in.skip(attrLen) + case tpnme.BridgeATTR => sym.setFlag(BRIDGE | ARTIFACT) in.skip(attrLen) + case tpnme.DeprecatedATTR => val arg = Literal(Constant("see corresponding Javadoc for more information.")) sym.addAnnotation(DeprecatedAttr, arg, Literal(Constant(""))) in.skip(attrLen) + case tpnme.ConstantValueATTR => - val c = pool.getConstant(u2) - val c1 = convertTo(c, symtype) - if (c1 ne null) sym.setInfo(ConstantType(c1)) - else devWarning(s"failure to convert $c to $symtype") + completer.constant = pool.getConstant(u2) + case tpnme.MethodParametersATTR => def readParamNames(): Unit = { - import scala.tools.asm.Opcodes.ACC_SYNTHETIC val paramCount = u1 + val paramNames = new Array[NameOrString](paramCount) + val paramNameAccess = new Array[Int](paramCount) var i = 0 - if (removedOuterParameter && i < paramCount) { - in.skip(4) - i += 1 - } - paramNames = new ListBuffer() while (i < paramCount) { - val rawname = pool.getName(u2) - val access = u2 - - val name = - if ((access & ACC_SYNTHETIC) == 0) rawname.encode - else nme.NO_NAME - - paramNames += name + paramNames(i) = pool.getExternalName(u2) + paramNameAccess(i) = u2 i += 1 } + completer.paramNames = new ParamNames(paramNames, paramNameAccess) } readParamNames() - case tpnme.ScalaSignatureATTR => - if (!isScalaAnnot) { - devWarning(s"symbol ${sym.fullName} has pickled signature in attribute") - unpickler.unpickle(in.buf, in.bp, clazz, staticModule, in.file.name) - } - in.skip(attrLen) - case tpnme.ScalaATTR => - isScalaRaw = true - // Attribute on methods of java annotation classes when that method has a default - case tpnme.AnnotationDefaultATTR => + + case tpnme.AnnotationDefaultATTR => // Methods of java annotation classes that have a default sym.addAnnotation(AnnotationDefaultAttr) in.skip(attrLen) - // Java annotations on classes / methods / fields with RetentionPolicy.RUNTIME + case tpnme.RuntimeAnnotationATTR => - if (isScalaAnnot || !isScala) { - // For Scala classfiles we are only interested in the scala signature annotations. Other - // annotations should be skipped (the pickle contains the symbol's annotations). 
- // Skipping them also prevents some spurious warnings / errors related to scala/bug#7014, - // scala/bug#7551, pos/5165b - val scalaSigAnnot = parseAnnotations(onlyScalaSig = isScalaAnnot) - if (isScalaAnnot) scalaSigAnnot match { - case Some(san: AnnotationInfo) => - val bytes = - san.assocs.find({ _._1 == nme.bytes }).get._2.asInstanceOf[ScalaSigBytes].bytes - - unpickler.unpickle(bytes, 0, clazz, staticModule, in.file.name) - case None => - throw new RuntimeException("Scala class file does not contain Scala annotation") - } - debuglog("[class] << " + sym.fullName + sym.annotationsString) - } - else - in.skip(attrLen) + val numAnnots = u2 + for (n <- 0 until numAnnots; annot <- parseAnnotation(u2)) + sym.addAnnotation(annot) // TODO 1: parse runtime visible annotations on parameters // case tpnme.RuntimeParamAnnotationATTR @@ -913,8 +859,8 @@ abstract class ClassfileParser { // TODO 2: also parse RuntimeInvisibleAnnotation / RuntimeInvisibleParamAnnotation, // i.e. java annotations with RetentionPolicy.CLASS? - case tpnme.ExceptionsATTR if (!isScala) => - parseExceptions(attrLen) + case tpnme.ExceptionsATTR => + parseExceptions(attrLen, completer) case tpnme.SourceFileATTR => if (forInteractive) { @@ -935,196 +881,108 @@ abstract class ClassfileParser { case rootMirror.EmptyPackage => srcfileLeaf case pkg => pkg.fullName(File.separatorChar)+File.separator+srcfileLeaf } - srcfile0 = settings.outputDirs.srcFilesFor(in.file, srcpath).find(_.exists) + srcfile0 = settings.outputDirs.srcFilesFor(file, srcpath).find(_.exists) } else in.skip(attrLen) + case tpnme.CodeATTR => if (sym.owner.isInterface) { sym setFlag JAVA_DEFAULTMETHOD log(s"$sym in ${sym.owner} is a java8+ default method.") } in.skip(attrLen) + case _ => in.skip(attrLen) } } - def skipAnnotArg(): Unit = { - u1 match { - case STRING_TAG | BOOL_TAG | BYTE_TAG | CHAR_TAG | SHORT_TAG | - INT_TAG | LONG_TAG | FLOAT_TAG | DOUBLE_TAG | CLASS_TAG => - in.skip(2) - - case ENUM_TAG => - in.skip(4) - - case ARRAY_TAG => - val num = u2 - for (i <- 0 until num) skipAnnotArg() - - case ANNOTATION_TAG => - parseAnnotation(u2, onlyScalaSig = true) - } - } - - def parseAnnotArg: Option[ClassfileAnnotArg] = { - val tag = u1 - val index = u2 - tag match { - case STRING_TAG => - Some(LiteralAnnotArg(Constant(pool.getName(index).toString))) - case BOOL_TAG | BYTE_TAG | CHAR_TAG | SHORT_TAG | INT_TAG | - LONG_TAG | FLOAT_TAG | DOUBLE_TAG => - Some(LiteralAnnotArg(pool.getConstant(index))) - case CLASS_TAG => - Some(LiteralAnnotArg(Constant(pool.getType(index)))) - case ENUM_TAG => - val t = pool.getType(index) - val n = readName() - val module = t.typeSymbol.companionModule - val s = module.info.decls.lookup(n) - if (s != NoSymbol) Some(LiteralAnnotArg(Constant(s))) - else { - warning( - sm"""While parsing annotations in ${in.file}, could not find $n in enum ${module.nameString}. 
- |This is likely due to an implementation restriction: an annotation argument cannot refer to a member of the annotated class (scala/bug#7014).""" - ) - None - } - - case ARRAY_TAG => - val arr = new ArrayBuffer[ClassfileAnnotArg]() - var hasError = false - for (i <- 0 until index) - parseAnnotArg match { - case Some(c) => arr += c - case None => hasError = true - } - if (hasError) None - else Some(ArrayAnnotArg(arr.toArray)) - case ANNOTATION_TAG => - parseAnnotation(index, onlyScalaSig = false) map (NestedAnnotArg(_)) - } - } - - def parseScalaSigBytes: Option[ScalaSigBytes] = { - val tag = u1 - assert(tag == STRING_TAG, tag) - Some(ScalaSigBytes(pool getBytes u2)) - } - - def parseScalaLongSigBytes: Option[ScalaSigBytes] = { - val tag = u1 - assert(tag == ARRAY_TAG, tag) - val stringCount = u2 - val entries = - for (i <- 0 until stringCount) yield { - val stag = u1 - assert(stag == STRING_TAG, stag) - u2 - } - Some(ScalaSigBytes(pool.getBytes(entries.toList))) - } - - // TODO scala/bug#9296 duplicated code, refactor - /* Parse and return a single annotation. If it is malformed, - * return None. - */ - def parseAnnotation(attrNameIndex: Int, onlyScalaSig: Boolean): Option[AnnotationInfo] = try { - val attrType = pool.getType(attrNameIndex) - val nargs = u2 - val nvpairs = new ListBuffer[(Name, ClassfileAnnotArg)] - var hasError = false - for (i <- 0 until nargs) { - val name = readName() - // The "bytes: String" argument of the ScalaSignature attribute is parsed specially so that it is - // available as an array of bytes (the pickled Scala signature) instead of as a string. The pickled signature - // is encoded as a string because of limitations in the Java class file format. - if ((attrType == ScalaSignatureAnnotation.tpe) && (name == nme.bytes)) - parseScalaSigBytes match { - case Some(c) => nvpairs += ((name, c)) - case None => hasError = true - } - else if ((attrType == ScalaLongSignatureAnnotation.tpe) && (name == nme.bytes)) - parseScalaLongSigBytes match { - case Some(c) => nvpairs += ((name, c)) - case None => hasError = true - } - else - if (onlyScalaSig) skipAnnotArg() - else parseAnnotArg match { - case Some(c) => nvpairs += ((name, c)) - case None => hasError = true - } - } - if (hasError) None - else Some(AnnotationInfo(attrType, List(), nvpairs.toList)) - } catch { - case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found - case NonFatal(ex) => - // We want to be robust when annotations are unavailable, so the very least - // we can do is warn the user about the exception - // There was a reference to ticket 1135, but that is outdated: a reference to a class not on - // the classpath would *not* end up here. A class not found is signaled - // with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example), - // and that should never be swallowed silently. - warning(s"Caught: $ex while parsing annotations in ${in.file}") - if (settings.debug) ex.printStackTrace() - None // ignore malformed annotations - } - /* * Parse the "Exceptions" attribute which denotes the exceptions * thrown by a method. 
*/ - def parseExceptions(len: Int) { + def parseExceptions(len: Int, completer: JavaTypeCompleter): Unit = { val nClasses = u2 for (n <- 0 until nClasses) { // FIXME: this performs an equivalent of getExceptionTypes instead of getGenericExceptionTypes (scala/bug#7065) - val cls = pool.getClassSymbol(u2) - // we call initialize due to the fact that we call Symbol.isMonomorphicType in addThrowsAnnotation - // and that method requires Symbol to be forced to give the right answers, see scala/bug#7107 for details - cls.initialize - sym.addThrowsAnnotation(cls) + val cls = pool.getClassName(u2) + completer.exceptions ::= cls } } + // begin parseAttributes + for (i <- 0 until u2) parseAttribute() + } - /* Parse a sequence of annotations and attaches them to the - * current symbol sym, except for the ScalaSignature annotation that it returns, if it is available. */ - def parseAnnotations(onlyScalaSig: Boolean): Option[AnnotationInfo] = { - val nAttr = u2 - var scalaSigAnnot: Option[AnnotationInfo] = None - for (n <- 0 until nAttr) parseAnnotation(u2, onlyScalaSig) match { - case Some(scalaSig) if scalaSig.atp == ScalaSignatureAnnotation.tpe => - scalaSigAnnot = Some(scalaSig) - case Some(scalaSig) if scalaSig.atp == ScalaLongSignatureAnnotation.tpe => - scalaSigAnnot = Some(scalaSig) - case Some(annot) => - sym.addAnnotation(annot) - case None => - } - scalaSigAnnot + def parseAnnotArg(): Option[ClassfileAnnotArg] = { + val tag = u1 + val index = u2 + tag match { + case STRING_TAG => + Some(LiteralAnnotArg(Constant(pool.getName(index).value))) + case BOOL_TAG | BYTE_TAG | CHAR_TAG | SHORT_TAG | INT_TAG | + LONG_TAG | FLOAT_TAG | DOUBLE_TAG => + Some(LiteralAnnotArg(pool.getConstant(index))) + case CLASS_TAG => + Some(LiteralAnnotArg(Constant(pool.getType(index)))) + case ENUM_TAG => + val t = pool.getType(index) + val n = readName() + val module = t.typeSymbol.companionModule + val s = module.info.decls.lookup(n) + if (s != NoSymbol) Some(LiteralAnnotArg(Constant(s))) + else { + warning( + sm"""While parsing annotations in ${file}, could not find $n in enum ${module.nameString}. + |This is likely due to an implementation restriction: an annotation argument cannot refer to a member of the annotated class (scala/bug#7014).""" + ) + None + } + + case ARRAY_TAG => + val arr = new ArrayBuffer[ClassfileAnnotArg]() + var hasError = false + for (i <- 0 until index) + parseAnnotArg() match { + case Some(c) => arr += c + case None => hasError = true + } + if (hasError) None + else Some(ArrayAnnotArg(arr.toArray)) + case ANNOTATION_TAG => + parseAnnotation(index) map (NestedAnnotArg(_)) } + } - def addParamNames(): Unit = - if ((paramNames ne null) && sym.hasRawInfo && sym.isMethod) { - val params = sym.rawInfo.params - foreach2(paramNames.toList, params) { - case (nme.NO_NAME, _) => // param was ACC_SYNTHETIC; ignore - case (name, param) => - param.resetFlag(SYNTHETIC) - param.name = name - } - devWarningIf(!sameLength(paramNames.toList, params)) { - // there's not anything we can do, but it's slightly worrisome - sm"""MethodParameters length mismatch while parsing $sym: - | rawInfo.params: ${sym.rawInfo.params} - | MethodParameters: ${paramNames.toList}""" - } - } - // begin parseAttributes - for (i <- 0 until u2) parseAttribute() - addParamNames() + // TODO scala/bug#9296 duplicated code, refactor + /** + * Parse and return a single annotation. If it is malformed, return None. 
+ */ + def parseAnnotation(attrNameIndex: Int): Option[AnnotationInfo] = try { + val attrType = pool.getType(attrNameIndex) + val nargs = u2 + val nvpairs = new ListBuffer[(Name, ClassfileAnnotArg)] + var hasError = false + for (i <- 0 until nargs) { + val name = readName() + parseAnnotArg() match { + case Some(c) => nvpairs += ((name, c)) + case None => hasError = true + } + } + if (hasError) None + else Some(AnnotationInfo(attrType, List(), nvpairs.toList)) + } catch { + case f: FatalError => throw f // don't eat fatal errors, they mean a class was not found + case NonFatal(ex) => + // We want to be robust when annotations are unavailable, so the very least + // we can do is warn the user about the exception + // There was a reference to ticket 1135, but that is outdated: a reference to a class not on + // the classpath would *not* end up here. A class not found is signaled + // with a `FatalError` exception, handled above. Here you'd end up after a NPE (for example), + // and that should never be swallowed silently. + warning(s"Caught: $ex while parsing annotations in ${file}") + if (settings.debug) ex.printStackTrace() + None // ignore malformed annotations } /** Apply `@native`/`@transient`/`@volatile` annotations to `sym`, @@ -1136,9 +994,9 @@ abstract class ClassfileParser { /** Enter own inner classes in the right scope. It needs the scopes to be set up, * and implicitly current class' superclasses. */ - private def enterOwnInnerClasses() { - def className(name: Name): Name = - name.subName(name.lastPos('.') + 1, name.length) + private def enterOwnInnerClasses(): Unit = { + def className(name: String): String = + name.substring(name.lastIndexOf('.') + 1, name.length) def enterClassAndModule(entry: InnerClassEntry, file: AbstractFile) { def jflags = entry.jflags @@ -1186,8 +1044,8 @@ abstract class ClassfileParser { decls unlink e } - val cName = className(entry.externalName) - unlinkIfPresent(cName.toTermName) + val cName = newTermName(className(entry.externalName)) + unlinkIfPresent(cName) unlinkIfPresent(cName.toTypeName) } @@ -1200,54 +1058,145 @@ abstract class ClassfileParser { } } - /** Parse inner classes. Expects `in.bp` to point to the superclass entry. - * Restores the old `bp`. + /** + * Either + * - set `isScala` and invoke the unpickler, or + * - set `isScalaRaw`, or + * - parse inner classes (for Java classfiles) + * + * Expects `in.bp` to point to the `access_flags` entry, restores the old `bp`. */ - def parseInnerClasses() { + def unpickleOrParseInnerClasses() { val oldbp = in.bp + in.skip(4) // access_flags, this_class skipSuperclasses() skipMembers() // fields skipMembers() // methods - val attrs = u2 - for (i <- 0 until attrs) { + + var innersStart = -1 + var runtimeAnnotStart = -1 + + val numAttrs = u2 + var i = 0 + while (i < numAttrs) { val attrName = readTypeName() val attrLen = u4 attrName match { case tpnme.ScalaSignatureATTR => isScala = true - val pbuf = new PickleBuffer(in.buf, in.bp, in.bp + attrLen) - pbuf.readNat(); pbuf.readNat() - if (pbuf.readNat == 0) // a scala signature attribute with no entries means that the actual scala signature - isScalaAnnot = true // is in a ScalaSignature annotation. 
- in.skip(attrLen) + if (runtimeAnnotStart != -1) i = numAttrs case tpnme.ScalaATTR => isScalaRaw = true - case tpnme.InnerClassesATTR if !isScala => - val entries = u2 - for (i <- 0 until entries) { - val innerIndex, outerIndex, nameIndex = u2 - val jflags = readInnerClassFlags() - if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0) - innerClasses add InnerClassEntry(innerIndex, outerIndex, nameIndex, jflags) + i = numAttrs + case tpnme.InnerClassesATTR => + innersStart = in.bp + case tpnme.RuntimeAnnotationATTR => + runtimeAnnotStart = in.bp + if (isScala) i = numAttrs + case _ => + } + in.skip(attrLen) + i += 1 + } + + if (isScala) { + def parseScalaSigBytes(): Array[Byte] = { + val tag = u1 + assert(tag == STRING_TAG, tag) + pool.getBytes(u2) + } + + def parseScalaLongSigBytes(): Array[Byte] = { + val tag = u1 + assert(tag == ARRAY_TAG, tag) + val stringCount = u2 + val entries = + for (i <- 0 until stringCount) yield { + val stag = u1 + assert(stag == STRING_TAG, stag) + u2 } + pool.getBytes(entries.toList) + } + + def checkScalaSigAnnotArg() = { + val numArgs = u2 + assert(numArgs == 1, s"ScalaSignature has $numArgs arguments") + val name = readName() + assert(name == nme.bytes, s"ScalaSignature argument has name $name") + } + + def skipAnnotArg(): Unit = u1 match { + case STRING_TAG | BOOL_TAG | BYTE_TAG | CHAR_TAG | SHORT_TAG | + INT_TAG | LONG_TAG | FLOAT_TAG | DOUBLE_TAG | CLASS_TAG => + in.skip(2) + + case ENUM_TAG => + in.skip(4) + + case ARRAY_TAG => + val num = u2 + for (i <- 0 until num) skipAnnotArg() + + case ANNOTATION_TAG => + in.skip(2) // type + skipAnnotArgs() + } + + def skipAnnotArgs() = { + val numArgs = u2 + for (i <- 0 until numArgs) { + in.skip(2) + skipAnnotArg() + } + } + + val SigTpe = ScalaSignatureAnnotation.tpe + val LongSigTpe = ScalaLongSignatureAnnotation.tpe + + assert(runtimeAnnotStart != -1, s"No RuntimeVisibleAnnotations in classfile with ScalaSignature attribute: $clazz") + in.bp = runtimeAnnotStart + val numAnnots = u2 + var i = 0 + var bytes: Array[Byte] = null + while (i < numAnnots && bytes == null) pool.getType(u2) match { + case SigTpe => + checkScalaSigAnnotArg() + bytes = parseScalaSigBytes() + case LongSigTpe => + checkScalaSigAnnotArg() + bytes = parseScalaLongSigBytes() case _ => - in.skip(attrLen) + skipAnnotArgs() + } + + AnyRefClass // Force scala.AnyRef, otherwise we get "error: Symbol AnyRef is missing from the classpath" + assert(bytes != null, s"No Scala(Long)Signature annotation in classfile with ScalaSignature attribute: $clazz") + unpickler.unpickle(bytes, 0, clazz, staticModule, file.name) + } else if (!isScalaRaw && innersStart != -1) { + in.bp = innersStart + val entries = u2 + for (i <- 0 until entries) { + val innerIndex, outerIndex, nameIndex = u2 + val jflags = readInnerClassFlags() + if (innerIndex != 0 && outerIndex != 0 && nameIndex != 0) + innerClasses add InnerClassEntry(pool.getClassName(innerIndex), pool.getClassName(outerIndex), pool.getName(nameIndex), jflags) } } in.bp = oldbp } /** An entry in the InnerClasses attribute of this class file. 
*/ - case class InnerClassEntry(external: Int, outer: Int, name: Int, jflags: JavaAccFlags) { - def externalName = pool getClassName external - def outerName = pool getClassName outer - def originalName = pool getName name + case class InnerClassEntry(external: NameOrString, outer: NameOrString, name: NameOrString, jflags: JavaAccFlags) { + def externalName = external.value + def outerName = outer.value + def originalName = name.name def isModule = originalName.isTermName def scope = if (jflags.isStatic) staticScope else instanceScope def enclosing = if (jflags.isStatic) enclModule else enclClass // The name of the outer class, without its trailing $ if it has one. - private def strippedOuter = outerName.dropModule + private def strippedOuter = outerName.stripSuffix(nme.MODULE_SUFFIX_STRING) private def isInner = innerClasses contains strippedOuter private def enclClass = if (isInner) innerClasses innerSymbol strippedOuter else classNameToSymbol(strippedOuter) private def enclModule = enclClass.companionModule @@ -1259,10 +1208,10 @@ abstract class ClassfileParser { * If the given name is not an inner class, it returns the symbol found in `definitions`. */ object innerClasses { - private val inners = mutable.HashMap[Name, InnerClassEntry]() + private val inners = mutable.HashMap[String, InnerClassEntry]() - def contains(name: Name) = inners contains name - def getEntry(name: Name) = inners get name + def contains(name: String) = inners contains name + def getEntry(name: String) = inners get name def entries = inners.values def add(entry: InnerClassEntry): Unit = { @@ -1272,7 +1221,7 @@ abstract class ClassfileParser { } inners(entry.externalName) = entry } - def innerSymbol(externalName: Name): Symbol = this getEntry externalName match { + def innerSymbol(externalName: String): Symbol = this getEntry externalName match { case Some(entry) => innerSymbol(entry) case _ => NoSymbol } @@ -1301,6 +1250,128 @@ abstract class ClassfileParser { sym setInfo createFromClonedSymbols(alias.initialize.typeParams, alias.tpe)(typeFun) } } + private class ParamNames(val names: Array[NameOrString], val access: Array[Int]) { + assert(names.length == access.length) + def length = names.length + } + private abstract class JavaTypeCompleter extends LazyType { + var constant: Constant = _ + var sig: String = _ + var paramNames: ParamNames = _ + var exceptions: List[NameOrString] = Nil + } + private final class ClassTypeCompleter(name: Name, jflags: JavaAccFlags, parent: NameOrString, ifaces: List[NameOrString]) extends JavaTypeCompleter { + override def complete(sym: symbolTable.Symbol): Unit = { + val info = if (sig != null) sigToType(sym, sig) else { + val superType = + if (parent == null) AnyClass.tpe_* + else if (jflags.isAnnotation) { u2; AnnotationClass.tpe } + else getClassSymbol(parent.value).tpe_* + var ifacesTypes = ifaces.filterNot(_ eq null).map(x => getClassSymbol(x.value).tpe_*) + if (jflags.isAnnotation) ifacesTypes ::= ClassfileAnnotationClass.tpe + ClassInfoType(superType :: ifacesTypes, instanceScope, clazz) + } + sym.setInfo(info) + } + } + + private final class MemberTypeCompleter(name: Name, jflags: JavaAccFlags, descriptor: String) extends JavaTypeCompleter { + override def isJavaVarargsMethod: Boolean = jflags.isVarargs + override def javaThrownExceptions: List[Symbol] = exceptions.map(e => classNameToSymbol(e.value)) + override def complete(sym: symbolTable.Symbol): Unit = { + def descriptorInfo = sigToType(sym, descriptor) + val hasOuterParam = (name == nme.CONSTRUCTOR) && 
(descriptorInfo match { + case MethodType(params, restpe) => + // if this is a non-static inner class, remove the explicit outer parameter + innerClasses getEntry currentClass match { + case Some(entry) if !entry.jflags.isStatic => + /* About `clazz.owner.hasPackageFlag` below: scala/bug#5957 + * For every nested java class A$B, there are two symbols in the scala compiler. + * 1. created by SymbolLoader, because of the existence of the A$B.class file, owner: package + * 2. created by ClassfileParser of A when reading the inner classes, owner: A + * If symbol 1 gets completed (e.g. because the compiled source mentions `A$B`, not `A#B`), the + * ClassfileParser for 1 executes, and clazz.owner is the package. + */ + assert(params.head.tpe.typeSymbol == clazz.owner || clazz.owner.hasPackageFlag, "" + params.head.tpe.typeSymbol + ": " + clazz.owner) + true + case _ => + false + } + case _ => false + }) + + val info = if (sig != null) { + sigToType(sym, sig) + } else if (name == nme.CONSTRUCTOR) { + descriptorInfo match { + case MethodType(params, restpe) => + val paramsNoOuter = if (hasOuterParam) params.tail else params + val newParams = paramsNoOuter match { + case (init :+ tail) if jflags.isSynthetic => + // scala/bug#7455 strip trailing dummy argument ("access constructor tag") from synthetic constructors which + // are added when an inner class needs to access a private constructor. + init + case _ => + paramsNoOuter + } + MethodType(newParams, clazz.tpe) + case info => info + } + } else { + descriptorInfo + } + if (constant != null) { + val c1 = convertTo(constant, info.resultType) + if (c1 ne null) sym.setInfo(ConstantType(c1)) + else { + devWarning(s"failure to convert $constant to ${info.resultType}") + sym.setInfo(info) + } + } else { + sym.setInfo(if (sym.isMethod && jflags.isVarargs) arrayToRepeated(info) else info) + } + + for (e <- exceptions) { + // we call initialize due to the fact that we call Symbol.isMonomorphicType in addThrowsAnnotation + // and that method requires Symbol to be forced to give the right answers, see scala/bug#7107 for details + val cls = getClassSymbol(e.value) + sym withAnnotation AnnotationInfo.lazily { + val throwableTpe = cls.tpe_* + AnnotationInfo(appliedType(ThrowsClass, throwableTpe), List(Literal(Constant(throwableTpe))), Nil) + } + } + + // Note: the info may be overwritten later with a generic signature + // parsed from SignatureATTR + if (paramNames != null) { + import scala.tools.asm.Opcodes.ACC_SYNTHETIC + + if (sym.hasRawInfo && sym.isMethod) { + val paramNamesNoOuter = (if (hasOuterParam) 1 else 0) to paramNames.length + val params = sym.rawInfo.params + foreach2(paramNamesNoOuter.toList, params) { + case (i, param) => + val isSynthetic = (paramNames.access(i) & ACC_SYNTHETIC) != 0 + if (!isSynthetic) { + param.name = paramNames.names(i).name.toTermName.encode + param.resetFlag(SYNTHETIC) + } + } + // there's not anything we can do, but it's slightly worrisome + devWarningIf(!sameLength(paramNamesNoOuter.toList, params)) { + sm"""MethodParameters length mismatch while parsing $sym: + | rawInfo.params: ${sym.rawInfo.params}""" + } + } + } + } + private def convertTo(c: Constant, pt: Type): Constant = { + if (pt.typeSymbol == BooleanClass && c.tag == IntTag) + Constant(c.value != 0) + else + c convertTo pt + } + } def skipAttributes() { var attrCount: Int = u2 diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/DataReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/DataReader.scala new file mode 100644 index 
000000000000..8c1287ac0df7 --- /dev/null +++ b/src/compiler/scala/tools/nsc/symtab/classfile/DataReader.scala @@ -0,0 +1,68 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership. + */ + +package scala.tools.nsc.symtab.classfile + +trait DataReader { + + def bp: Int + def bp_=(i: Int): Unit + + /** read a byte + */ + @throws(classOf[IndexOutOfBoundsException]) + def nextByte: Byte + + /** read some bytes + */ + def nextBytes(len: Int): Array[Byte] + + /** read a character + */ + def nextChar: Char + + /** read an integer + */ + def nextInt: Int + + /** extract a character at position bp from buf + */ + def getChar(mybp: Int): Char + + /** extract a byte at position bp from buf + */ + def getByte(mybp: Int): Byte + + def getBytes(mybp: Int, bytes: Array[Byte]): Unit + + /** extract an integer at position bp from buf + */ + def getInt(mybp: Int): Int + + /** extract a long integer at position bp from buf + */ + def getLong(mybp: Int): Long + + /** extract a float at position bp from buf + */ + def getFloat(mybp: Int): Float + + /** extract a double at position bp from buf + */ + def getDouble(mybp: Int): Double + + def getUTF(mybp: Int, len: Int): String + + /** skip next 'n' bytes + */ + def skip(n: Int): Unit +} diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ReusableDataReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ReusableDataReader.scala new file mode 100644 index 000000000000..8bbbc4a3cce6 --- /dev/null +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ReusableDataReader.scala @@ -0,0 +1,156 @@ +/* + * Scala (https://www.scala-lang.org) + * + * Copyright EPFL and Lightbend, Inc. + * + * Licensed under Apache License 2.0 + * (http://www.apache.org/licenses/LICENSE-2.0). + * + * See the NOTICE file distributed with this work for + * additional information regarding copyright ownership.
+ */ + +package scala.tools.nsc.symtab.classfile + +import java.io.{ByteArrayInputStream, DataInputStream, InputStream} +import java.nio.channels.Channels +import java.nio.{BufferUnderflowException, ByteBuffer} + +final class ReusableDataReader() extends DataReader { + private[this] var data = new Array[Byte](32768) + private[this] var bb: ByteBuffer = ByteBuffer.wrap(data) + private[this] var size = 0 + private[this] val reader: DataInputStream = { + val stream = new InputStream { + override def read(): Int = try { + bb.get & 0xff + } catch { + case _: BufferUnderflowException => -1 + } + + override def read(b: Array[Byte], off: Int, len: Int): Int = { + val pos = bb.position() + bb.get(b, off, len) + bb.position() - pos + } + + override def markSupported(): Boolean = false + } + new DataInputStream(stream) + } + + private def nextPositivePowerOfTwo(target: Int): Int = 1 << -Integer.numberOfLeadingZeros(target - 1) + + def reset(file: scala.reflect.io.AbstractFile): this.type = { + this.size = 0 + file.sizeOption match { + case Some(size) => + if (size > data.length) { + data = new Array[Byte](nextPositivePowerOfTwo(size)) + } else { + java.util.Arrays.fill(data, 0.toByte) + } + val input = file.input + try { + var endOfInput = false + while (!endOfInput) { + val remaining = data.length - this.size + if (remaining == 0) endOfInput = true + else { + val read = input.read(data, this.size, remaining) + if (read < 0) endOfInput = true + else this.size += read + } + } + bb = ByteBuffer.wrap(data, 0, size) + } finally { + input.close() + } + case None => + val input = file.input + try { + var endOfInput = false + while (!endOfInput) { + val remaining = data.length - size + if (remaining == 0) { + data = java.util.Arrays.copyOf(data, nextPositivePowerOfTwo(size)) + } + val read = input.read(data, this.size, data.length - this.size) + if (read < 0) endOfInput = true + else this.size += read + } + bb = ByteBuffer.wrap(data, 0, size) + } finally { + input.close() + } + } + this + } + + @throws(classOf[IndexOutOfBoundsException]) + def nextByte: Byte = bb.get + + def nextBytes(len: Int): Array[Byte] = { + val result = new Array[Byte](len) + reader.readFully(result) + result + } + + def nextChar: Char = bb.getChar() + + def nextInt: Int = bb.getInt() + + def getChar(mybp: Int): Char = { + bb.getChar(mybp) + } + + def getInt(mybp: Int): Int = { + bb.getInt(mybp) + } + + def getLong(mybp: Int): Long = { + bb.getLong(mybp) + } + + def getFloat(mybp: Int): Float = { + bb.getFloat(mybp) + } + + def getDouble(mybp: Int): Double = { + bb.getDouble(mybp) + } + + def skip(n: Int): Unit = { + bb.position(bb.position() + n) + } + def bp: Int = bb.position() + def bp_=(i: Int): Unit = { + try { + bb.position(i) + } catch { + case ex: IllegalArgumentException => + throw ex + } + } + + def getByte(mybp: Int): Byte = { + bb.get(mybp) + } + def getBytes(mybp: Int, bytes: Array[Byte]): Unit = { + val saved = bb.position() + bb.position(mybp) + try reader.readFully(bytes) + finally bb.position(saved) + } + def getUTF(mybp: Int, len: Int): String = { + val saved = bb.position() + val savedLimit = bb.limit() + bb.position(mybp) + bb.limit(mybp + len) + try reader.readUTF() + finally { + bb.limit(savedLimit) + bb.position(saved) + } + } +} diff --git a/src/reflect/scala/reflect/internal/Definitions.scala b/src/reflect/scala/reflect/internal/Definitions.scala index 2828db3e01d4..eba017a6ae87 100644 --- a/src/reflect/scala/reflect/internal/Definitions.scala +++ b/src/reflect/scala/reflect/internal/Definitions.scala @@ 
-192,11 +192,11 @@ trait Definitions extends api.StandardDefinitions { // It becomes tricky to create dedicated objects for other symbols because // of initialization order issues. - lazy val JavaLangPackage = getPackage(TermName("java.lang")) + lazy val JavaLangPackage = getPackage("java.lang") lazy val JavaLangPackageClass = JavaLangPackage.moduleClass.asClass - lazy val ScalaPackage = getPackage(TermName("scala")) + lazy val ScalaPackage = getPackage("scala") lazy val ScalaPackageClass = ScalaPackage.moduleClass.asClass - lazy val RuntimePackage = getPackage(TermName("scala.runtime")) + lazy val RuntimePackage = getPackage("scala.runtime") lazy val RuntimePackageClass = RuntimePackage.moduleClass.asClass def javaTypeToValueClass(jtype: Class[_]): Symbol = jtype match { @@ -292,7 +292,7 @@ trait Definitions extends api.StandardDefinitions { // top types lazy val AnyClass = enterNewClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT) markAllCompleted lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectTpe) markAllCompleted - lazy val ObjectClass = getRequiredClass(sn.Object.toString) + lazy val ObjectClass = getRequiredClass("java.lang.Object") // Cached types for core monomorphic classes lazy val AnyRefTpe = AnyRefClass.tpe @@ -343,12 +343,12 @@ trait Definitions extends api.StandardDefinitions { // exceptions and other throwables lazy val ClassCastExceptionClass = requiredClass[ClassCastException] - lazy val IndexOutOfBoundsExceptionClass = getClassByName(sn.IOOBException) - lazy val InvocationTargetExceptionClass = getClassByName(sn.InvTargetException) + lazy val IndexOutOfBoundsExceptionClass = getClassByName("java.lang.IndexOutOfBoundsException") + lazy val InvocationTargetExceptionClass = getClassByName("java.lang.reflect.InvocationTargetException") lazy val MatchErrorClass = requiredClass[MatchError] lazy val NonLocalReturnControlClass = requiredClass[scala.runtime.NonLocalReturnControl[_]] - lazy val NullPointerExceptionClass = getClassByName(sn.NPException) - lazy val ThrowableClass = getClassByName(sn.Throwable) + lazy val NullPointerExceptionClass = getClassByName("java.lang.NullPointerException") + lazy val ThrowableClass = getClassByName("java.lang.Throwable") lazy val UninitializedErrorClass = requiredClass[UninitializedFieldError] lazy val IllegalArgExceptionClass = requiredClass[IllegalArgumentException] @@ -422,7 +422,10 @@ trait Definitions extends api.StandardDefinitions { def isByName(param: Symbol) = isByNameParamType(param.tpe_*) def isCastSymbol(sym: Symbol) = sym == Any_asInstanceOf || sym == Object_asInstanceOf - def isJavaVarArgsMethod(m: Symbol) = m.isMethod && isJavaVarArgs(m.info.params) + def isJavaVarArgsMethod(m: Symbol) = m.isMethod && (m.rawInfo match { + case completer: LazyType => completer.isJavaVarargsMethod + case _ => isJavaVarArgs(m.info.params) + }) def isJavaVarArgs(params: Seq[Symbol]) = !params.isEmpty && isJavaRepeatedParamType(params.last.tpe) def isScalaVarArgs(params: Seq[Symbol]) = !params.isEmpty && isScalaRepeatedParamType(params.last.tpe) def isVarArgsList(params: Seq[Symbol]) = !params.isEmpty && isRepeatedParamType(params.last.tpe) @@ -488,7 +491,7 @@ trait Definitions extends api.StandardDefinitions { // reflection / structural types lazy val SoftReferenceClass = requiredClass[java.lang.ref.SoftReference[_]] - lazy val MethodClass = getClassByName(sn.MethodAsObject) + lazy val MethodClass = getClassByName("java.lang.reflect.Method") lazy val EmptyMethodCacheClass = requiredClass[scala.runtime.EmptyMethodCache] lazy 
val MethodCacheClass = requiredClass[scala.runtime.MethodCache] def methodCache_find = getMemberMethod(MethodCacheClass, nme.find_) @@ -1219,7 +1222,7 @@ trait Definitions extends api.StandardDefinitions { // Trying to allow for deprecated locations sym.isAliasType && isMetaAnnotation(sym.info.typeSymbol) ) - lazy val metaAnnotations: Set[Symbol] = getPackage(TermName("scala.annotation.meta")).info.members filter (_ isSubClass StaticAnnotationClass) toSet + lazy val metaAnnotations: Set[Symbol] = getPackage("scala.annotation.meta").info.members filter (_ isSubClass StaticAnnotationClass) toSet // According to the scala.annotation.meta package object: // * By default, annotations on (`val`-, `var`- or plain) constructor parameters diff --git a/src/reflect/scala/reflect/internal/Mirrors.scala b/src/reflect/scala/reflect/internal/Mirrors.scala index befaa49175a1..0ca0794600a9 100644 --- a/src/reflect/scala/reflect/internal/Mirrors.scala +++ b/src/reflect/scala/reflect/internal/Mirrors.scala @@ -46,19 +46,23 @@ trait Mirrors extends api.Mirrors { } /** Todo: organize similar to mkStatic in scala.reflect.Base */ - private def getModuleOrClass(path: Name, len: Int): Symbol = { - val point = path lastPos('.', len - 1) + private def getModuleOrClass(path: Name, len: Int): Symbol = + getModuleOrClass(path.toString, len, path.newName(_)) + + private def getModuleOrClass(path: String, len: Int, toName: String => Name): Symbol = { + val point = path lastIndexOf ('.', len - 1) val owner = - if (point > 0) getModuleOrClass(path.toTermName, point) + if (point > 0) getModuleOrClass(path, point, newTermName(_)) else RootClass - val name = path subName (point + 1, len) + + val name = toName(path.substring(point + 1, len)) val sym = owner.info member name - val result = if (path.isTermName) sym.suchThat(_ hasFlag MODULE) else sym + val result = if (name.isTermName) sym.suchThat(_ hasFlag MODULE) else sym if (result != NoSymbol) result else { if (settings.debug) { log(sym.info); log(sym.info.members) }//debug thisMirror.missingHook(owner, name) orElse { - MissingRequirementError.notFound((if (path.isTermName) "object " else "class ")+path+" in "+thisMirror) + MissingRequirementError.notFound((if (name.isTermName) "object " else "class ")+path+" in "+thisMirror) } } } @@ -69,8 +73,8 @@ trait Mirrors extends api.Mirrors { * Unlike `getModuleOrClass`, this function * loads unqualified names from the root package. */ - private def getModuleOrClass(path: Name): Symbol = - getModuleOrClass(path, path.length) + private def getModuleOrClass(path: String, toName: String => Name): Symbol = + getModuleOrClass(path, path.length, toName) /** If you're looking for a class, pass a type name. * If a module, a term name. @@ -78,10 +82,10 @@ trait Mirrors extends api.Mirrors { * Unlike `getModuleOrClass`, this function * loads unqualified names from the empty package. 
*/ - private def staticModuleOrClass(path: Name): Symbol = { - val isPackageless = path.pos('.') == path.length - if (isPackageless) EmptyPackageClass.info decl path - else getModuleOrClass(path) + private def staticModuleOrClass(path: String, toName: String => Name): Symbol = { + val isPackageless = !path.contains('.') + if (isPackageless) EmptyPackageClass.info decl toName(path) + else getModuleOrClass(path, toName) } protected def mirrorMissingHook(owner: Symbol, name: Name): Symbol = NoSymbol @@ -104,28 +108,41 @@ trait Mirrors extends api.Mirrors { } } + @deprecated("Use overload that accepts a String.", "2.13.0") def getClassByName(fullname: Name): ClassSymbol = - ensureClassSymbol(fullname.toString, getModuleOrClass(fullname.toTypeName)) + ensureClassSymbol(fullname.toString, getModuleOrClass(fullname.toString, fullname.length, newTypeName(_))) + + def getClassByName(fullname: String): ClassSymbol = + getRequiredClass(fullname) + + // TODO_NAMES + def getRequiredClass(fullname: String, toName: String => Name): ClassSymbol = + ensureClassSymbol(fullname, getModuleOrClass(fullname, fullname.length, toName)) def getRequiredClass(fullname: String): ClassSymbol = - getClassByName(newTypeNameCached(fullname)) + ensureClassSymbol(fullname, getModuleOrClass(fullname, fullname.length, newTypeName(_))) def requiredClass[T: ClassTag] : ClassSymbol = - getRequiredClass(erasureName[T]) + getRequiredClass(erasureName[T], newTypeName(_)) def getClassIfDefined(fullname: String): Symbol = - getClassIfDefined(newTypeNameCached(fullname)) + getClassIfDefined(fullname, newTypeName(_)) + @deprecated("Use overload that accepts a String.", "2.13.0") def getClassIfDefined(fullname: Name): Symbol = wrapMissing(getClassByName(fullname.toTypeName)) + // TODO_NAMES + def getClassIfDefined(fullname: String, toName: String => Name): Symbol = + wrapMissing(getRequiredClass(fullname, toName)) + /** @inheritdoc * * Unlike getClassByName/getRequiredClass this function can also load packageless symbols. * Compiler might ignore them, but they should be loadable with macros. */ override def staticClass(fullname: String): ClassSymbol = - try ensureClassSymbol(fullname, staticModuleOrClass(newTypeNameCached(fullname))) + try ensureClassSymbol(fullname, staticModuleOrClass(fullname, newTypeName(_))) catch { case mre: MissingRequirementError => throw new ScalaReflectionException(mre.msg) } /************************ loaders of module symbols ************************/ @@ -136,11 +153,15 @@ trait Mirrors extends api.Mirrors { case _ => MissingRequirementError.notFound("object " + fullname) } + @deprecated("Use overload that accepts a String.", "2.13.0") def getModuleByName(fullname: Name): ModuleSymbol = - ensureModuleSymbol(fullname.toString, getModuleOrClass(fullname.toTermName), allowPackages = true) + getModuleByName(fullname.toString) + + def getModuleByName(fullname: String): ModuleSymbol = + ensureModuleSymbol(fullname, getModuleOrClass(fullname, fullname.length, newTermName(_)), allowPackages = true) def getRequiredModule(fullname: String): ModuleSymbol = - getModuleByName(newTermNameCached(fullname)) + getModuleByName(fullname) // TODO: What syntax do we think should work here? Say you have an object // like scala.Predef. 
You can't say requiredModule[scala.Predef] since there's @@ -153,10 +174,11 @@ trait Mirrors extends api.Mirrors { getRequiredModule(erasureName[T] stripSuffix "$") def getModuleIfDefined(fullname: String): Symbol = - getModuleIfDefined(newTermNameCached(fullname)) + wrapMissing(getModuleByName(fullname)) + @deprecated("Use overload that accepts a String.", "2.13.0") def getModuleIfDefined(fullname: Name): Symbol = - wrapMissing(getModuleByName(fullname.toTermName)) + getModuleIfDefined(fullname.toString) /** @inheritdoc * @@ -164,7 +186,7 @@ trait Mirrors extends api.Mirrors { * Compiler might ignore them, but they should be loadable with macros. */ override def staticModule(fullname: String): ModuleSymbol = - try ensureModuleSymbol(fullname, staticModuleOrClass(newTermNameCached(fullname)), allowPackages = false) + try ensureModuleSymbol(fullname, staticModuleOrClass(fullname, newTermName(_)), allowPackages = false) catch { case mre: MissingRequirementError => throw new ScalaReflectionException(mre.msg) } /************************ loaders of package symbols ************************/ @@ -175,8 +197,11 @@ trait Mirrors extends api.Mirrors { case _ => MissingRequirementError.notFound("package " + fullname) } + @deprecated("Use overload that accepts a String.", "2.13.0") def getPackage(fullname: TermName): ModuleSymbol = - ensurePackageSymbol(fullname.toString, getModuleOrClass(fullname), allowModules = true) + getPackage(fullname.toString) + def getPackage(fullname: String): ModuleSymbol = + ensurePackageSymbol(fullname, getModuleOrClass(fullname, newTermName(_)), allowModules = true) def getPackageIfDefined(fullname: TermName): Symbol = wrapMissing(getPackage(fullname)) @@ -198,7 +223,7 @@ trait Mirrors extends api.Mirrors { wrapMissing(getPackageObject(fullname)) override def staticPackage(fullname: String): ModuleSymbol = - try ensurePackageSymbol(fullname.toString, getModuleOrClass(newTermNameCached(fullname)), allowModules = false) + try ensurePackageSymbol(fullname.toString, getModuleOrClass(fullname, fullname.length, newTermName(_)), allowModules = false) catch { case mre: MissingRequirementError => throw new ScalaReflectionException(mre.msg) } /************************ helpers ************************/ diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index 7e19e72e9ea7..e74257dde1d2 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -52,6 +52,8 @@ trait Names extends api.Names { /** Hashtable for finding type names quickly. 
*/ private val typeHashtable = new Array[TypeName](HASH_SIZE) + final def allNames(): Iterator[TermName] = termHashtable.iterator.filter(_ ne null).flatMap(n => Iterator.iterate(n)(_.next).takeWhile(_ ne null)) + private def hashValue(cs: Array[Char], offset: Int, len: Int): Int = { var h = 0 var i = 0 diff --git a/src/reflect/scala/reflect/internal/StdNames.scala b/src/reflect/scala/reflect/internal/StdNames.scala index 6428d83cdf18..75935982a85e 100644 --- a/src/reflect/scala/reflect/internal/StdNames.scala +++ b/src/reflect/scala/reflect/internal/StdNames.scala @@ -1170,21 +1170,15 @@ trait StdNames { protected val stringToTypeName = null protected implicit def createNameType(s: String): TypeName = newTypeNameCached(s) - final val BoxedBoolean: TypeName = "java.lang.Boolean" - final val BoxedByte: TypeName = "java.lang.Byte" - final val BoxedCharacter: TypeName = "java.lang.Character" - final val BoxedDouble: TypeName = "java.lang.Double" - final val BoxedFloat: TypeName = "java.lang.Float" - final val BoxedInteger: TypeName = "java.lang.Integer" - final val BoxedLong: TypeName = "java.lang.Long" - final val BoxedNumber: TypeName = "java.lang.Number" - final val BoxedShort: TypeName = "java.lang.Short" - final val IOOBException: TypeName = "java.lang.IndexOutOfBoundsException" - final val InvTargetException: TypeName = "java.lang.reflect.InvocationTargetException" - final val MethodAsObject: TypeName = "java.lang.reflect.Method" - final val NPException: TypeName = "java.lang.NullPointerException" - final val Object: TypeName = "java.lang.Object" - final val Throwable: TypeName = "java.lang.Throwable" + final val BoxedBoolean: String = "java.lang.Boolean" + final val BoxedByte: String = "java.lang.Byte" + final val BoxedCharacter: String = "java.lang.Character" + final val BoxedDouble: String = "java.lang.Double" + final val BoxedFloat: String = "java.lang.Float" + final val BoxedInteger: String = "java.lang.Integer" + final val BoxedLong: String = "java.lang.Long" + final val BoxedNumber: String = "java.lang.Number" + final val BoxedShort: String = "java.lang.Short" final val GetCause: TermName = newTermName("getCause") final val GetClass: TermName = newTermName("getClass") @@ -1197,7 +1191,7 @@ trait StdNames { final val AltMetafactory: TermName = newTermName("altMetafactory") final val Bootstrap: TermName = newTermName("bootstrap") - val Boxed = immutable.Map[TypeName, TypeName]( + val Boxed = immutable.Map[TypeName, String]( tpnme.Boolean -> BoxedBoolean, tpnme.Byte -> BoxedByte, tpnme.Char -> BoxedCharacter, diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 7982e71000c9..8d9d87c7c2ad 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -24,7 +24,7 @@ import scala.collection.mutable.ListBuffer import util.{ Statistics, shortClassOfInstance, StatisticsStatics } import Flags._ import scala.annotation.tailrec -import scala.reflect.io.{ AbstractFile, NoAbstractFile } +import scala.reflect.io.{AbstractFile, NoAbstractFile} import Variance._ trait Symbols extends api.Symbols { self: SymbolTable => @@ -3030,7 +3030,14 @@ trait Symbols extends api.Symbols { self: SymbolTable => loop(info) } - override def exceptions = for (ThrownException(tp) <- annotations) yield tp.typeSymbol + override def exceptions = { + rawInfo match { + case lt: LazyType if isJava => + lt.javaThrownExceptions + case _ => + for (ThrownException(tp) <- annotations) yield tp.typeSymbol + 
} + } } implicit val MethodSymbolTag = ClassTag[MethodSymbol](classOf[MethodSymbol]) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 92ac84ff876d..6710f0abbe7f 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -1540,53 +1540,9 @@ trait Types throw new TypeError("illegal cyclic inheritance involving " + tpe.typeSymbol) } - object baseClassesCycleMonitor { - private var open: List[Symbol] = Nil - @inline private def cycleLog(msg: => String) { - if (settings.debug) - Console.err.println(msg) - } - def size = open.size - def push(clazz: Symbol) { - cycleLog("+ " + (" " * size) + clazz.fullNameString) - open ::= clazz - } - def pop(clazz: Symbol) { - assert(open.head eq clazz, (clazz, open)) - open = open.tail - } - def isOpen(clazz: Symbol) = open contains clazz - } - protected def defineBaseClassesOfCompoundType(tpe: CompoundType) { - def define() = defineBaseClassesOfCompoundType(tpe, force = false) - if (!breakCycles || isPastTyper) define() - else tpe match { - // non-empty parents helpfully excludes all package classes - case tpe @ ClassInfoType(_ :: _, _, clazz) if !clazz.isAnonOrRefinementClass => - // Cycle: force update - if (baseClassesCycleMonitor isOpen clazz) - defineBaseClassesOfCompoundType(tpe, force = true) - else { - baseClassesCycleMonitor push clazz - try define() - finally baseClassesCycleMonitor pop clazz - } - case _ => - define() - } - } - private def defineBaseClassesOfCompoundType(tpe: CompoundType, force: Boolean) { val period = tpe.baseClassesPeriod - if (period == currentPeriod) { - if (force && breakCycles) { - def what = tpe.typeSymbol + " in " + tpe.typeSymbol.owner.fullNameString - val bcs = computeBaseClasses(tpe) - tpe.baseClassesCache = bcs - warning(s"Breaking cycle in base class computation of $what ($bcs)") - } - } - else { + if (period != currentPeriod) { tpe.baseClassesPeriod = currentPeriod if (!isValidForBaseClasses(period)) { val start = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.pushTimer(typeOpsStack, baseClassesNanos) else null @@ -3555,6 +3511,8 @@ trait Types override def complete(sym: Symbol) override def safeToString = "" override def kind = "LazyType" + def isJavaVarargsMethod: Boolean = false + def javaThrownExceptions: List[Symbol] = Nil } /** A marker trait representing an as-yet unevaluated type diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index 714f4f4b5274..996725a65a9a 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -17,6 +17,7 @@ package io import java.io.{ IOException, InputStream, OutputStream, BufferedOutputStream, ByteArrayOutputStream } import java.io.{ File => JFile } import java.net.URL +import java.nio.ByteBuffer /** * An abstraction over files for use in the reflection/compiler libraries. 
diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala index 75ba6e852023..cb1f73b41644 100644 --- a/src/reflect/scala/reflect/io/PlainFile.scala +++ b/src/reflect/scala/reflect/io/PlainFile.scala @@ -14,6 +14,10 @@ package scala package reflect package io +import java.nio.ByteBuffer +import java.nio.file.StandardOpenOption +import java.util + /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { override def isDirectory = true diff --git a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala index 0b4d7131fbeb..264a3cd9afda 100644 --- a/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala +++ b/src/reflect/scala/reflect/runtime/JavaUniverseForce.scala @@ -162,7 +162,6 @@ trait JavaUniverseForce { self: runtime.JavaUniverse => this.SuperType this.TypeBounds this.CompoundType - this.baseClassesCycleMonitor this.RefinedType this.ClassInfoType this.ConstantType diff --git a/test/files/jvm/throws-annot-from-java.check b/test/files/jvm/throws-annot-from-java.check index bf639260e77a..4a4bd6ad2110 100644 --- a/test/files/jvm/throws-annot-from-java.check +++ b/test/files/jvm/throws-annot-from-java.check @@ -8,10 +8,10 @@ scala> :paste // Entering paste mode (ctrl-D to finish) { - val clazz = rootMirror.getClassByName(newTermName("test.ThrowsDeclaration_2")); + val clazz = rootMirror.getClassByName("test.ThrowsDeclaration_2"); { val method = clazz.info.member(newTermName("foo")) - val throwsAnn = method.annotations.head + val throwsAnn = method.initialize.annotations.head val atp = throwsAnn.atp println("foo") println("atp.typeParams.isEmpty: " + atp.typeParams.isEmpty) @@ -21,7 +21,7 @@ scala> :paste { val method = clazz.info.member(newTermName("bar")) - val throwsAnn = method.annotations.head + val throwsAnn = method.initialize.annotations.head val Literal(const) = throwsAnn.args.head val tp = const.typeValue println("bar") @@ -37,7 +37,7 @@ atp.typeParams.isEmpty: true throws[IllegalStateException](classOf[java.lang.IllegalStateException]) bar -tp.typeParams.isEmpty: true -throws[test.PolymorphicException[_]](classOf[test.PolymorphicException]) +tp.typeParams.isEmpty: false +throws[test.PolymorphicException](classOf[test.PolymorphicException]) scala> :quit diff --git a/test/files/jvm/throws-annot-from-java/Test_3.scala b/test/files/jvm/throws-annot-from-java/Test_3.scala index de1d9845732a..df62e032262e 100644 --- a/test/files/jvm/throws-annot-from-java/Test_3.scala +++ b/test/files/jvm/throws-annot-from-java/Test_3.scala @@ -4,10 +4,10 @@ object Test extends ReplTest { def code = """:power :paste { - val clazz = rootMirror.getClassByName(newTermName("test.ThrowsDeclaration_2")); + val clazz = rootMirror.getClassByName("test.ThrowsDeclaration_2"); { val method = clazz.info.member(newTermName("foo")) - val throwsAnn = method.annotations.head + val throwsAnn = method.initialize.annotations.head val atp = throwsAnn.atp println("foo") println("atp.typeParams.isEmpty: " + atp.typeParams.isEmpty) @@ -17,7 +17,7 @@ object Test extends ReplTest { { val method = clazz.info.member(newTermName("bar")) - val throwsAnn = method.annotations.head + val throwsAnn = method.initialize.annotations.head val Literal(const) = throwsAnn.args.head val tp = const.typeValue println("bar") diff --git a/test/files/neg/moduleClassReference.check 
b/test/files/neg/moduleClassReference.check new file mode 100644 index 000000000000..1f16aeb2509f --- /dev/null +++ b/test/files/neg/moduleClassReference.check @@ -0,0 +1,4 @@ +moduleClassReference.scala:2: error: not found: value Predef$ + def foo = Predef$.MODULE$ == Predef + ^ +one error found diff --git a/test/files/neg/moduleClassReference.scala b/test/files/neg/moduleClassReference.scala new file mode 100644 index 000000000000..dbf688840e2f --- /dev/null +++ b/test/files/neg/moduleClassReference.scala @@ -0,0 +1,3 @@ +object Test { + def foo = Predef$.MODULE$ == Predef +} diff --git a/test/files/neg/t7251.check b/test/files/neg/t7251.check index 33fdafc2ee1e..a17e710d367e 100644 --- a/test/files/neg/t7251.check +++ b/test/files/neg/t7251.check @@ -1,4 +1,4 @@ -B_2.scala:5: error: class s.Outer$Triple$ is not a value +B_2.scala:5: error: object Outer$Triple$ is not a member of package s println( s.Outer$Triple$ ) ^ one error found diff --git a/test/files/run/compiler-asSeenFrom.scala b/test/files/run/compiler-asSeenFrom.scala index a60c2e892524..940907665820 100644 --- a/test/files/run/compiler-asSeenFrom.scala +++ b/test/files/run/compiler-asSeenFrom.scala @@ -42,7 +42,7 @@ abstract class CompilerTest extends DirectTest { } class SymsInPackage(pkgName: String) { - def pkg = rootMirror.getPackage(TermName(pkgName)) + def pkg = rootMirror.getPackage(pkgName) def classes = allMembers(pkg) filter (_.isClass) def modules = allMembers(pkg) filter (_.isModule) def symbols = classes ++ terms filterNot (_ eq NoSymbol) diff --git a/test/files/run/existentials-in-compiler.scala b/test/files/run/existentials-in-compiler.scala index e35b7231c2d3..2984d81e6009 100644 --- a/test/files/run/existentials-in-compiler.scala +++ b/test/files/run/existentials-in-compiler.scala @@ -74,8 +74,8 @@ package extest { } """ - override def check(source: String, unit: global.CompilationUnit) { - getPackage(TermName("extest")).moduleClass.info.decls.toList.filter(_.isType).map(_.initialize).sortBy(_.name.toString) foreach { clazz => + override def check(source: String, unit: global.CompilationUnit): Unit = { + getPackage("extest").moduleClass.info.decls.toList.filter(_.isType).map(_.initialize).sortBy(_.name.toString) foreach { clazz => exitingTyper { clazz.info println(clazz.defString) diff --git a/test/files/run/t7008-scala-defined/Impls_Macros_2.scala b/test/files/run/t7008-scala-defined/Impls_Macros_2.scala index 330db8da753b..d49cfff1aa11 100644 --- a/test/files/run/t7008-scala-defined/Impls_Macros_2.scala +++ b/test/files/run/t7008-scala-defined/Impls_Macros_2.scala @@ -5,6 +5,8 @@ object Macros { def impl(c: Context) = { import c.universe._ val decls = c.typeOf[ScalaClassWithCheckedExceptions_1[_]].decls.toList + decls.foreach(_.info) + decls.foreach(_.annotations.foreach(_.tpe)) val s = decls.sortBy(_.name.toString).map(decl => (s"${decl.name}: ${decl.annotations}")).mkString(scala.compat.Platform.EOL) reify(println(c.Expr[String](Literal(Constant(s))).splice)) } diff --git a/test/files/run/t7008/Impls_Macros_2.scala b/test/files/run/t7008/Impls_Macros_2.scala index 3c6fe116ce2e..e55cbbfdbf8d 100644 --- a/test/files/run/t7008/Impls_Macros_2.scala +++ b/test/files/run/t7008/Impls_Macros_2.scala @@ -5,6 +5,8 @@ object Macros { def impl(c: Context) = { import c.universe._ val decls = c.typeOf[JavaClassWithCheckedExceptions_1[_]].decls.toList + decls.foreach(_.info) + decls.foreach(_.annotations.foreach(_.tpe)) val s = decls.sortBy(_.name.toString).map(decl => (s"${decl.name}: 
${decl.annotations}")).mkString(scala.compat.Platform.EOL) reify(println(c.Expr[String](Literal(Constant(s))).splice)) } diff --git a/test/files/run/t7096.scala b/test/files/run/t7096.scala index f723d70abef8..44485e5da1e8 100644 --- a/test/files/run/t7096.scala +++ b/test/files/run/t7096.scala @@ -41,7 +41,7 @@ abstract class CompilerTest extends DirectTest { } class SymsInPackage(pkgName: String) { - def pkg = rootMirror.getPackage(TermName(pkgName)) + def pkg = rootMirror.getPackage(pkgName) def classes = allMembers(pkg) filter (_.isClass) def modules = allMembers(pkg) filter (_.isModule) def symbols = classes ++ terms filterNot (_ eq NoSymbol) diff --git a/test/files/run/t7455/Test.scala b/test/files/run/t7455/Test.scala index 2cda9225f4fa..afe3f09fb57e 100644 --- a/test/files/run/t7455/Test.scala +++ b/test/files/run/t7455/Test.scala @@ -23,8 +23,8 @@ object Test extends DirectTest { clazz = compiler.rootMirror.staticClass(name) constr <- clazz.info.member(termNames.CONSTRUCTOR).alternatives } { - println(constr.defString) fullyInitializeSymbol(constr) + println(constr.defString) } } } From 3ee75e09950b8482651ae84f1bedaa548cd20362 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 14 Jun 2019 09:25:14 +0400 Subject: [PATCH 1769/2793] Add a compiler option to write .sig files to disk Refactor `ClassfileWriters` to share code with this new output type. Take advantage of this in PipelineMain. ``` $ tail sandbox/{test,client}.scala ==> sandbox/test.scala <== package p1.p2 case class Test() ==> sandbox/client.scala <== package p3 class Client { new p1.p2.Test() } $ qscalac -Youtline -Ypickle-java -Ypickle-write /tmp/out -Ystop-after:pickler -d /tmp/out sandbox/test.scala $ find /tmp/out /tmp/out /tmp/out/p1 /tmp/out/p1/p2 /tmp/out/p1/p2/Test.sig $ qscalac -cp /tmp/out -d /tmp/out sandbox/client.scala $ find /tmp/out /tmp/out /tmp/out/p3 /tmp/out/p3/Client.class /tmp/out/p1 /tmp/out/p1/p2 /tmp/out/p1/p2/Test.sig ``` --- .../scala/tools/nsc/PipelineMain.scala | 67 +--- .../nsc/backend/jvm/ClassfileWriters.scala | 335 +++++++++--------- .../backend/jvm/GeneratedClassHandler.scala | 14 +- .../tools/nsc/backend/jvm/PostProcessor.scala | 4 +- .../tools/nsc/settings/ScalaSettings.scala | 1 + .../tools/nsc/symtab/classfile/Pickler.scala | 31 ++ test/files/run/t5717.scala | 5 +- 7 files changed, 214 insertions(+), 243 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 29b9c560bcec..44f46cbc9366 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -17,6 +17,7 @@ import java.lang.Thread.UncaughtExceptionHandler import java.nio.file.attribute.FileTime import java.nio.file.{Files, Path, Paths} import java.time.Instant +import java.util.concurrent.ConcurrentHashMap import java.util.{Collections, Locale} import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger} @@ -44,10 +45,13 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe val root = file.getRoot // An empty component on Unix, just the drive letter on Windows val validRootPathComponent = root.toString.replaceAllLiterally("/", "").replaceAllLiterally(":", "") - changeExtension(pickleCache.resolve(validRootPathComponent).resolve(root.relativize(file)).normalize(), newExtension) + val result = changeExtension(pickleCache.resolve(validRootPathComponent).resolve(root.relativize(file)).normalize(), newExtension) + if (useJars) Files.createDirectories(result.getParent) + 
strippedAndExportedClassPath.put(file.toRealPath().normalize(), result) + result } - private val strippedAndExportedClassPath = mutable.HashMap[Path, Path]() + private val strippedAndExportedClassPath = new ConcurrentHashMap[Path, Path]().asScala /** Forward errors to the (current) reporter. */ protected def scalacError(msg: String): Unit = { @@ -73,51 +77,6 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe p.getParent.resolve(changedFileName) } - def registerPickleClassPath[G <: Global](output: Path, data: mutable.AnyRefMap[G#Symbol, PickleBuffer]): Unit = { - val jarPath = cachePath(output) - val root = RootPath(jarPath, writable = true) - Files.createDirectories(root.root) - - val dirs = mutable.Map[G#Symbol, Path]() - def packageDir(packSymbol: G#Symbol): Path = { - if (packSymbol.isEmptyPackageClass) root.root - else if (dirs.contains(packSymbol)) dirs(packSymbol) - else if (packSymbol.owner.isRoot) { - val subDir = root.root.resolve(packSymbol.encodedName) - Files.createDirectories(subDir) - dirs.put(packSymbol, subDir) - subDir - } else { - val base = packageDir(packSymbol.owner) - val subDir = base.resolve(packSymbol.encodedName) - Files.createDirectories(subDir) - dirs.put(packSymbol, subDir) - subDir - } - } - val written = new java.util.IdentityHashMap[AnyRef, Unit]() - try { - for ((symbol, pickle) <- data) { - if (!written.containsKey(pickle)) { - val base = packageDir(symbol.owner) - val primary = base.resolve(symbol.encodedName + ".sig") - val writer = new BufferedOutputStream(Files.newOutputStream(primary)) - try { - writer.write(pickle.bytes, 0, pickle.writeIndex) - } finally { - writer.close() - } - written.put(pickle, ()) - } - } - } finally { - root.close() - } - Files.setLastModifiedTime(jarPath, FileTime.from(Instant.now())) - strippedAndExportedClassPath.put(output.toRealPath().normalize(), jarPath) - } - - def writeDotFile(logDir: Path, dependsOn: mutable.LinkedHashMap[Task, List[Dependency]]): Unit = { val builder = new java.lang.StringBuilder() builder.append("digraph projects {\n") @@ -375,7 +334,6 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe if (p.outlineTimer.durationMicros > 0d) { val desc = if (strategy == OutlineTypePipeline) "outline-type" else "parser-to-pickler" events += durationEvent(p.label, desc, p.outlineTimer) - events += durationEvent(p.label, "pickle-export", p.pickleExportTimer) } for ((g, ix) <- p.groups.zipWithIndex) { if (g.timer.durationMicros > 0d) @@ -453,7 +411,6 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe val isGrouped = groups.size > 1 val outlineTimer = new Timer() - val pickleExportTimer = new Timer val javaTimer = new Timer() var outlineCriticalPathMs = 0d @@ -491,14 +448,11 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe command.settings.Youtline.value = true command.settings.stopAfter.value = List("pickler") command.settings.Ymacroexpand.value = command.settings.MacroExpand.None + command.settings.YpickleWrite.value = cachePath(command.settings.outputDirs.getSingleOutput.get.file.toPath).toAbsolutePath.toString val run1 = new compiler.Run() run1 compile files outlineTimer.stop() log(f"scalac outline: done ${outlineTimer.durationMs}%.0f ms") - pickleExportTimer.start() - registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, run1.symData) - pickleExportTimer.stop() - log(f"scalac: exported pickles ${pickleExportTimer.durationMs}%.0f ms") reporter.finish() if 
(reporter.hasErrors) { log("scalac outline: failed") @@ -518,6 +472,7 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe command.settings.Youtline.value = false command.settings.stopAfter.value = Nil command.settings.Ymacroexpand.value = command.settings.MacroExpand.Normal + command.settings.YpickleWrite.value = "" val groupCount = groups.size for ((group, ix) <- groups.zipWithIndex) { @@ -552,18 +507,14 @@ class PipelineMainClass(argFiles: Seq[Path], pipelineSettings: PipelineMain.Pipe assert(groups.size == 1) val group = groups.head log("scalac: start") + command.settings.YpickleWrite.value = cachePath(command.settings.outputDirs.getSingleOutput.get.file.toPath).toString outlineTimer.start() try { val run2 = new compiler.Run() { - override def advancePhase(): Unit = { if (compiler.phase == this.picklerPhase) { outlineTimer.stop() log(f"scalac outline: done ${outlineTimer.durationMs}%.0f ms") - pickleExportTimer.start() - registerPickleClassPath(command.settings.outputDirs.getSingleOutput.get.file.toPath, symData) - pickleExportTimer.stop() - log(f"scalac: exported pickles ${pickleExportTimer.durationMs}%.0f ms") outlineDone.complete(Success(())) group.timer.start() } diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala index 8109add34c40..629316fed6b0 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala @@ -23,6 +23,7 @@ import java.util.concurrent.ConcurrentHashMap import java.util.zip.{CRC32, Deflater, ZipEntry, ZipOutputStream} import scala.reflect.internal.util.{NoPosition, Statistics} +import scala.reflect.io.{PlainNioFile, VirtualFile} import scala.tools.nsc.Global import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.io.AbstractFile @@ -44,12 +45,15 @@ abstract class ClassfileWriters { /** * Write a classfile */ - def write(name: InternalName, bytes: Array[Byte], paths: CompilationUnitPaths) + def writeClass(name: InternalName, bytes: Array[Byte], sourceFile: AbstractFile) /** * Close the writer. Behavior is undefined after a call to `close`. 
*/ def close(): Unit + + protected def classRelativePath(className: InternalName, suffix: String = ".class"): Path = + Paths.get(className.replace('.', '/') + suffix) } object ClassfileWriter { @@ -68,125 +72,173 @@ abstract class ClassfileWriters { } } - def singleWriter(file: AbstractFile): UnderlyingClassfileWriter = { - if (file hasExtension "jar") { - new JarClassWriter(file, jarManifestMainClass, settings.YjarCompressionLevel.value) - } else if (file.isVirtual) { - new VirtualClassWriter() - } else if (file.isDirectory) { - new DirClassWriter() - } else { - throw new IllegalStateException(s"don't know how to handle an output of $file [${file.getClass}]") - } - } - val basicClassWriter = settings.outputDirs.getSingleOutput match { - case Some(dest) => singleWriter(dest) + case Some(dest) => new SingleClassWriter(FileWriter(global, dest, jarManifestMainClass)) case None => val distinctOutputs: Set[AbstractFile] = settings.outputDirs.outputs.map(_._2)(scala.collection.breakOut) - if (distinctOutputs.size == 1) singleWriter(distinctOutputs.head) - else new MultiClassWriter(distinctOutputs.map { output: AbstractFile => output -> singleWriter(output) }(scala.collection.breakOut)) + if (distinctOutputs.size == 1) new SingleClassWriter(FileWriter(global, distinctOutputs.head, jarManifestMainClass)) + else { + val sourceToOutput: Map[AbstractFile, AbstractFile] = global.currentRun.units.map(unit => (unit.source.file, frontendAccess.compilerSettings.outputDirectory(unit.source.file))).toMap + new MultiClassWriter(sourceToOutput, distinctOutputs.map { output: AbstractFile => output -> FileWriter(global, output, jarManifestMainClass) }(scala.collection.breakOut)) + } } val withAdditionalFormats = if (settings.Ygenasmp.valueSetByUser.isEmpty && settings.Ydumpclasses.valueSetByUser.isEmpty) basicClassWriter else { - val asmp = settings.Ygenasmp.valueSetByUser map { dir: String => new AsmClassWriter(getDirectory(dir)) } - val dump = settings.Ydumpclasses.valueSetByUser map { dir: String => new DumpClassWriter(getDirectory(dir)) } - new AllClassWriter(basicClassWriter, asmp, dump) + val asmp = settings.Ygenasmp.valueSetByUser map { dir: String => FileWriter(global, new PlainNioFile(getDirectory(dir)), None) } + val dump = settings.Ydumpclasses.valueSetByUser map { dir: String => FileWriter(global, new PlainNioFile(getDirectory(dir)), None) } + new DebugClassWriter(basicClassWriter, asmp, dump) } val enableStats = statistics.enabled && settings.YaddBackendThreads.value == 1 if (enableStats) new WithStatsWriter(withAdditionalFormats) else withAdditionalFormats } - /** - * A marker trait for Classfilewriters that actually write, rather than layer functionality - */ - sealed trait UnderlyingClassfileWriter extends ClassfileWriter - - private final class JarClassWriter(file: AbstractFile, mainClass: Option[String], compressionLevel: Int) extends UnderlyingClassfileWriter { - //keep these imports local - avoid confusion with scala naming - import java.util.jar.Attributes.Name - import java.util.jar.{JarOutputStream, Manifest} - - val storeOnly = compressionLevel == Deflater.NO_COMPRESSION - - val jarWriter: JarOutputStream = { - val manifest = new Manifest() - mainClass foreach { c => manifest.getMainAttributes.put(Name.MAIN_CLASS, c) } - val jar = new JarOutputStream(new BufferedOutputStream(new FileOutputStream(file.file), 64000), manifest) - jar.setLevel(compressionLevel) - if (storeOnly) jar.setMethod(ZipOutputStream.STORED) - jar + /** Writes to the output directory corresponding to the source file, 
if multiple output directories are specified */ + private final class MultiClassWriter(sourceToOutput: Map[AbstractFile, AbstractFile], underlying: Map[AbstractFile, FileWriter]) extends ClassfileWriter { + private def getUnderlying(sourceFile: AbstractFile, outputDir: AbstractFile) = underlying.getOrElse(outputDir, { + throw new Exception(s"Cannot determine output directory for ${sourceFile} with output ${outputDir}. Configured outputs are ${underlying.keySet}") + }) + + override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): Unit = { + getUnderlying(sourceFile, sourceToOutput(sourceFile)).writeFile(classRelativePath(className), bytes) } + override def close(): Unit = underlying.values.foreach(_.close()) + } + private final class SingleClassWriter(underlying: FileWriter) extends ClassfileWriter { + override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): Unit = { + underlying.writeFile(classRelativePath(className), bytes) + } + override def close(): Unit = underlying.close() + } - lazy val crc = new CRC32 - - override def write(className: InternalName, bytes: Array[Byte], paths: CompilationUnitPaths): Unit = this.synchronized { - val path = className + ".class" - val entry = new ZipEntry(path) - if (storeOnly) { - // When using compression method `STORED`, the ZIP spec requires the CRC and compressed/ - // uncompressed sizes to be written before the data. The JarOutputStream could compute the - // values while writing the data, but not patch them into the stream after the fact. So we - // need to pre-compute them here. The compressed size is taken from size. - // https://stackoverflow.com/questions/1206970/how-to-create-uncompressed-zip-archive-in-java/5868403 - // With compression method `DEFLATED` JarOutputStream computes and sets the values. 
- entry.setSize(bytes.length) - crc.reset() - crc.update(bytes) - entry.setCrc(crc.getValue) + private final class DebugClassWriter(basic: ClassfileWriter, asmp: Option[FileWriter], dump: Option[FileWriter]) extends ClassfileWriter { + override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): Unit = { + basic.writeClass(className, bytes, sourceFile) + asmp.foreach { writer => + val asmBytes = AsmUtils.textify(AsmUtils.readClass(bytes)).getBytes(StandardCharsets.UTF_8) + writer.writeFile(classRelativePath(className, ".asm"), asmBytes) } - jarWriter.putNextEntry(entry) - try jarWriter.write(bytes, 0, bytes.length) - finally jarWriter.flush() + dump.foreach { writer => + writer.writeFile(classRelativePath(className), bytes) + } + } + + override def close(): Unit = { + basic.close() + asmp.foreach(_.close()) + dump.foreach(_.close()) + } + } + + private final class WithStatsWriter(underlying: ClassfileWriter) extends ClassfileWriter { + override def writeClass(className: InternalName, bytes: Array[Byte], sourceFile: AbstractFile): Unit = { + val statistics = frontendAccess.unsafeStatistics + val snap = statistics.startTimer(statistics.bcodeWriteTimer) + try underlying.writeClass(className, bytes, sourceFile) + finally statistics.stopTimer(statistics.bcodeWriteTimer, snap) } - override def close(): Unit = this.synchronized(jarWriter.close()) + override def close(): Unit = underlying.close() } + } - private sealed class DirClassWriter extends UnderlyingClassfileWriter { - val builtPaths = new ConcurrentHashMap[Path, java.lang.Boolean]() - val noAttributes = Array.empty[FileAttribute[_]] - private val isWindows = scala.util.Properties.isWin + sealed trait FileWriter { + def writeFile(relativePath: Path, bytes: Array[Byte]): Unit + def close(): Unit + } - def ensureDirForPath(baseDir: Path, filePath: Path): Unit = { - import java.lang.Boolean.TRUE - val parent = filePath.getParent - if (!builtPaths.containsKey(parent)) { - try Files.createDirectories(parent, noAttributes: _*) - catch { - case e: FileAlreadyExistsException => - // `createDirectories` reports this exception if `parent` is an existing symlink to a directory - // but that's fine for us (and common enough, `scalac -d /tmp` on mac targets symlink). 
- if (!Files.isDirectory(parent)) - throw new FileConflictException(s"Can't create directory $parent; there is an existing (non-directory) file in its path", e) - } - builtPaths.put(baseDir, TRUE) - var current = parent - while ((current ne null) && (null ne builtPaths.put(current, TRUE))) { - current = current.getParent - } - } + object FileWriter { + def apply(global: Global, file: AbstractFile, jarManifestMainClass: Option[String]): FileWriter = { + if (file hasExtension "jar") { + val jarCompressionLevel = global.settings.YjarCompressionLevel.value + new JarEntryWriter(file, jarManifestMainClass, jarCompressionLevel) + } else if (file.isVirtual) { + new VirtualFileWriter(file) + } else if (file.isDirectory) { + new DirEntryWriter(file.file.toPath) + } else { + throw new IllegalStateException(s"don't know how to handle an output of $file [${file.getClass}]") } + } + } - protected def getPath(className: InternalName, paths: CompilationUnitPaths) = paths.outputPath.resolve(className + ".class") + private final class JarEntryWriter(file: AbstractFile, mainClass: Option[String], compressionLevel: Int) extends FileWriter { + //keep these imports local - avoid confusion with scala naming + import java.util.jar.Attributes.Name + import java.util.jar.{JarOutputStream, Manifest} - protected def formatData(rawBytes: Array[Byte]) = rawBytes + val storeOnly = compressionLevel == Deflater.NO_COMPRESSION - protected def qualifier: String = "" + val jarWriter: JarOutputStream = { + val manifest = new Manifest() + mainClass foreach { c => manifest.getMainAttributes.put(Name.MAIN_CLASS, c) } + val jar = new JarOutputStream(new BufferedOutputStream(new FileOutputStream(file.file), 64000), manifest) + jar.setLevel(compressionLevel) + if (storeOnly) jar.setMethod(ZipOutputStream.STORED) + jar + } - // the common case is that we are are creating a new file, and on MS Windows the create and truncate is expensive - // because there is not an options in the windows API that corresponds to this so the truncate is applied as a separate call - // even if the file is new. - // as this is rare, its best to always try to create a new file, and it that fails, then open with truncate if that fails + lazy val crc = new CRC32 + + override def writeFile(relativePath: Path, bytes: Array[Byte]): Unit = this.synchronized { + val entry = new ZipEntry(relativePath.toString) + if (storeOnly) { + // When using compression method `STORED`, the ZIP spec requires the CRC and compressed/ + // uncompressed sizes to be written before the data. The JarOutputStream could compute the + // values while writing the data, but not patch them into the stream after the fact. So we + // need to pre-compute them here. The compressed size is taken from size. + // https://stackoverflow.com/questions/1206970/how-to-create-uncompressed-zip-archive-in-java/5868403 + // With compression method `DEFLATED` JarOutputStream computes and sets the values. 
+ entry.setSize(bytes.length) + crc.reset() + crc.update(bytes) + entry.setCrc(crc.getValue) + } + jarWriter.putNextEntry(entry) + try jarWriter.write(bytes, 0, bytes.length) + finally jarWriter.flush() + } - private val fastOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE) - private val fallbackOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING) + override def close(): Unit = this.synchronized(jarWriter.close()) + } - override def write(className: InternalName, rawBytes: Array[Byte], paths: CompilationUnitPaths): Unit = try { - val path = getPath(className, paths) - val bytes = formatData(rawBytes) - ensureDirForPath(paths.outputPath, path) + private final class DirEntryWriter(base: Path) extends FileWriter { + val builtPaths = new ConcurrentHashMap[Path, java.lang.Boolean]() + val noAttributes = Array.empty[FileAttribute[_]] + private val isWindows = scala.util.Properties.isWin + + def ensureDirForPath(baseDir: Path, filePath: Path): Unit = { + import java.lang.Boolean.TRUE + val parent = filePath.getParent + if (!builtPaths.containsKey(parent)) { + try Files.createDirectories(parent, noAttributes: _*) + catch { + case e: FileAlreadyExistsException => + // `createDirectories` reports this exception if `parent` is an existing symlink to a directory + // but that's fine for us (and common enough, `scalac -d /tmp` on mac targets symlink). + if (!Files.isDirectory(parent)) + throw new FileConflictException(s"Can't create directory $parent; there is an existing (non-directory) file in its path", e) + } + builtPaths.put(baseDir, TRUE) + var current = parent + while ((current ne null) && (null ne builtPaths.put(current, TRUE))) { + current = current.getParent + } + } + } + + // the common case is that we are are creating a new file, and on MS Windows the create and truncate is expensive + // because there is not an options in the windows API that corresponds to this so the truncate is applied as a separate call + // even if the file is new. 
+ // as this is rare, its best to always try to create a new file, and it that fails, then open with truncate if that fails + + private val fastOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE) + private val fallbackOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING) + + override def writeFile(relativePath: Path, bytes: Array[Byte]): Unit = { + val path = base.resolve(relativePath) + try { + ensureDirForPath(base, path) val os = if (isWindows) { try FileChannel.open(path, fastOpenOptions) catch { @@ -208,95 +260,38 @@ abstract class ClassfileWriters { os.close() } catch { case e: FileConflictException => - frontendAccess.backendReporting.error(NoPosition, s"error writing $className$qualifier: ${e.getMessage}") + frontendAccess.backendReporting.error(NoPosition, s"error writing $path: ${e.getMessage}") case e: java.nio.file.FileSystemException => if (frontendAccess.compilerSettings.debug) e.printStackTrace() - frontendAccess.backendReporting.error(NoPosition, s"error writing $className$qualifier: ${e.getClass.getName} ${e.getMessage}") - + frontendAccess.backendReporting.error(NoPosition, s"error writing $path: ${e.getClass.getName} ${e.getMessage}") } - - override def close(): Unit = () - } - - private final class AsmClassWriter(asmOutputPath: Path) extends DirClassWriter { - override protected def getPath(className: InternalName, paths: CompilationUnitPaths) = asmOutputPath.resolve(className + ".asmp") - - override protected def formatData(rawBytes: Array[Byte]) = AsmUtils.textify(AsmUtils.readClass(rawBytes)).getBytes(StandardCharsets.UTF_8) - - override protected def qualifier: String = " [for asmp]" } - private final class DumpClassWriter(dumpOutputPath: Path) extends DirClassWriter { - override protected def getPath(className: InternalName, paths: CompilationUnitPaths) = dumpOutputPath.resolve(className + ".class") - - override protected def qualifier: String = " [for dump]" - } - - private final class VirtualClassWriter extends UnderlyingClassfileWriter { - private def getFile(base: AbstractFile, clsName: String, suffix: String): AbstractFile = { - def ensureDirectory(dir: AbstractFile): AbstractFile = - if (dir.isDirectory) dir - else throw new FileConflictException(s"${base.path}/$clsName$suffix: ${dir.path} is not a directory") - - var dir = base - val pathParts = clsName.split("[./]").toList - for (part <- pathParts.init) dir = ensureDirectory(dir) subdirectoryNamed part - ensureDirectory(dir) fileNamed pathParts.last + suffix - } - - private def writeBytes(outFile: AbstractFile, bytes: Array[Byte]): Unit = { - val out = new DataOutputStream(outFile.bufferedOutput) - try out.write(bytes, 0, bytes.length) - finally out.close() - } - - override def write(className: InternalName, bytes: Array[Byte], paths: CompilationUnitPaths): Unit = { - val outFile = getFile(paths.outputDir, className, ".class") - writeBytes(outFile, bytes) - } - - override def close(): Unit = () - } - - private final class MultiClassWriter(underlying: Map[AbstractFile, UnderlyingClassfileWriter]) extends ClassfileWriter { - private def getUnderlying(paths: CompilationUnitPaths) = underlying.getOrElse(paths.outputDir, { - throw new Exception(s"Cannot determine output directory for ${paths.sourceFile} with output ${paths.outputDir}. 
Configured outputs are ${underlying.keySet}") - }) - - override def write(className: InternalName, bytes: Array[Byte], paths: CompilationUnitPaths): Unit = { - getUnderlying(paths).write(className, bytes, paths) - } + override def close(): Unit = () + } - override def close(): Unit = underlying.values.foreach(_.close()) + private final class VirtualFileWriter(base: AbstractFile) extends FileWriter { + private def getFile(base: AbstractFile, path: Path): AbstractFile = { + def ensureDirectory(dir: AbstractFile): AbstractFile = + if (dir.isDirectory) dir + else throw new FileConflictException(s"${base.path}/${path}: ${dir.path} is not a directory") + var dir = base + for (i <- 0 until path.getNameCount - 1) dir = ensureDirectory(dir) subdirectoryNamed path.getName(i).toString + ensureDirectory(dir) fileNamed path.getFileName.toString } - private final class AllClassWriter(basic: ClassfileWriter, asmp: Option[UnderlyingClassfileWriter], dump: Option[UnderlyingClassfileWriter]) extends ClassfileWriter { - override def write(className: InternalName, bytes: Array[Byte], paths: CompilationUnitPaths): Unit = { - basic.write(className, bytes, paths) - asmp.foreach(_.write(className, bytes, paths)) - dump.foreach(_.write(className, bytes, paths)) - } - - override def close(): Unit = { - basic.close() - asmp.foreach(_.close()) - dump.foreach(_.close()) - } + private def writeBytes(outFile: AbstractFile, bytes: Array[Byte]): Unit = { + val out = new DataOutputStream(outFile.bufferedOutput) + try out.write(bytes, 0, bytes.length) + finally out.close() } - private final class WithStatsWriter(underlying: ClassfileWriter) - extends ClassfileWriter { - override def write(className: InternalName, bytes: Array[Byte], paths: CompilationUnitPaths): Unit = { - val statistics = frontendAccess.unsafeStatistics - val snap = statistics.startTimer(statistics.bcodeWriteTimer) - underlying.write(className, bytes, paths) - statistics.stopTimer(statistics.bcodeWriteTimer, snap) - } - - override def close(): Unit = underlying.close() + override def writeFile(relativePath: Path, bytes: Array[Byte]): Unit = { + val outFile = getFile(base, relativePath) + writeBytes(outFile, bytes) } - + override def close(): Unit = () } /** Can't output a file due to the state of the file system. 
*/ diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala index ae7d772bd629..ce02b31a1a58 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala @@ -108,8 +108,7 @@ private[jvm] object GeneratedClassHandler { private val processingUnits = ListBuffer.empty[CompilationUnitInPostProcess] def process(unit: GeneratedCompilationUnit): Unit = { - val unitInPostProcess = new CompilationUnitInPostProcess(unit.classes, - CompilationUnitPaths(unit.sourceFile, frontendAccess.compilerSettings.outputDirectory(unit.sourceFile))) + val unitInPostProcess = new CompilationUnitInPostProcess(unit.classes, unit.sourceFile) postProcessUnit(unitInPostProcess) processingUnits += unitInPostProcess } @@ -122,7 +121,7 @@ private[jvm] object GeneratedClassHandler { // we 'take' classes to reduce the memory pressure // as soon as the class is consumed and written, we release its data unitInPostProcess.takeClasses() foreach { - postProcessor.sendToDisk(_, unitInPostProcess.paths) + postProcessor.sendToDisk(_, unitInPostProcess.sourceFile) } } } @@ -169,7 +168,7 @@ private[jvm] object GeneratedClassHandler { case _: ClosedByInterruptException => throw new InterruptedException() case NonFatal(t) => t.printStackTrace() - frontendAccess.backendReporting.error(NoPosition, s"unable to write ${unitInPostProcess.paths.sourceFile} $t") + frontendAccess.backendReporting.error(NoPosition, s"unable to write ${unitInPostProcess.sourceFile} $t") } } } @@ -198,18 +197,13 @@ private[jvm] object GeneratedClassHandler { } -/** Paths for a compilation unit, used during classfile writing */ -final case class CompilationUnitPaths(sourceFile: AbstractFile, outputDir: AbstractFile) { - def outputPath: Path = outputDir.file.toPath // `toPath` caches its result -} - /** * State for a compilation unit being post-processed. 
* - Holds the classes to post-process (released for GC when no longer used) * - Keeps a reference to the future that runs the post-processor * - Buffers messages reported during post-processing */ -final class CompilationUnitInPostProcess(private var classes: List[GeneratedClass], val paths: CompilationUnitPaths) { +final class CompilationUnitInPostProcess(private var classes: List[GeneratedClass], val sourceFile: AbstractFile) { def takeClasses(): List[GeneratedClass] = { val c = classes classes = Nil diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index c42a02c58439..52b39e40d204 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -58,7 +58,7 @@ abstract class PostProcessor extends PerRunInit { classfileWriter = classfileWriters.ClassfileWriter(global) } - def sendToDisk(clazz: GeneratedClass, paths: CompilationUnitPaths): Unit = { + def sendToDisk(clazz: GeneratedClass, sourceFile: AbstractFile): Unit = { val classNode = clazz.classNode val internalName = classNode.name val bytes = try { @@ -85,7 +85,7 @@ abstract class PostProcessor extends PerRunInit { if (AsmUtils.traceSerializedClassEnabled && internalName.contains(AsmUtils.traceSerializedClassPattern)) AsmUtils.traceClass(bytes) - classfileWriter.write(internalName, bytes, paths) + classfileWriter.writeClass(internalName, bytes, sourceFile) } } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 8b736448822d..1ef4b8c3120c 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -254,6 +254,7 @@ trait ScalaSettings extends AbsScalaSettings val YjarCompressionLevel = IntSetting("-Yjar-compression-level", "compression level to use when writing jar files", Deflater.DEFAULT_COMPRESSION, Some((Deflater.DEFAULT_COMPRESSION,Deflater.BEST_COMPRESSION)), (x: String) => None) val YpickleJava = BooleanSetting("-Ypickle-java", "Pickler phase should compute pickles for .java defined symbols for use by build tools").internalOnly() + val YpickleWrite = StringSetting("-Ypickle-write", "directory|jar", "destination for generated .sig files containing type signatures.", "", None).internalOnly() sealed abstract class CachePolicy(val name: String, val help: String) object CachePolicy { diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 1fd7690763e5..b7fb20f590ca 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -16,6 +16,7 @@ package classfile import java.lang.Float.floatToIntBits import java.lang.Double.doubleToLongBits +import java.nio.file.Paths import scala.io.Codec import scala.reflect.internal.pickling.{PickleBuffer, PickleFormat} @@ -23,6 +24,7 @@ import scala.reflect.internal.util.shortClassOfInstance import scala.collection.mutable import PickleFormat._ import Flags._ +import scala.reflect.io.{AbstractFile, NoAbstractFile, PlainFile, PlainNioFile} /** * Serialize a top-level module and/or class. 
@@ -40,6 +42,13 @@ abstract class Pickler extends SubComponent { def newPhase(prev: Phase): StdPhase = new PicklePhase(prev) class PicklePhase(prev: Phase) extends StdPhase(prev) { + import global.genBCode.postProcessor.classfileWriters.FileWriter + private lazy val sigWriter: Option[FileWriter] = + if (settings.YpickleWrite.isSetByUser && !settings.YpickleWrite.value.isEmpty) + Some(FileWriter(global, new PlainFile(settings.YpickleWrite.value), None)) + else + None + def apply(unit: CompilationUnit): Unit = { def pickle(tree: Tree): Unit = { tree match { @@ -64,6 +73,7 @@ abstract class Pickler extends SubComponent { currentRun.symData(sym) = pickle } pickle.writeArray() + writeSigFile(sym, pickle) currentRun registerPickle sym } case _ => @@ -91,6 +101,27 @@ abstract class Pickler extends SubComponent { } } + override def run(): Unit = { + try super.run() + finally closeSigWriter() + } + + private def writeSigFile(sym: Symbol, pickle: PickleBuffer): Unit = { + sigWriter.foreach { writer => + val binaryName = sym.javaBinaryNameString + val binaryClassName = if (sym.isModule) binaryName.stripSuffix(nme.MODULE_SUFFIX_STRING) else binaryName + val relativePath = java.nio.file.Paths.get(binaryClassName + ".sig") + val data = pickle.bytes.take(pickle.writeIndex) + writer.writeFile(relativePath, data) + } + } + private def closeSigWriter(): Unit = { + sigWriter.foreach { writer => + writer.close() + reporter.info(NoPosition, "[sig files written]", force = false) + } + } + override protected def shouldSkipThisPhaseForJava: Boolean = !settings.YpickleJava.value } diff --git a/test/files/run/t5717.scala b/test/files/run/t5717.scala index 880d3c8e9128..c92ad650fdd8 100644 --- a/test/files/run/t5717.scala +++ b/test/files/run/t5717.scala @@ -20,9 +20,8 @@ object Test extends StoreReporterDirectTest { val List(i) = filteredInfos // for some reason, nio doesn't throw the same exception on windows and linux/mac val path = if(util.Properties.isWin)"\\a" else "/a" - val expected = "error writing a/B: Can't create directory " + path + + val expected = s"error writing ${testOutput.path}/a/B.class: Can't create directory ${testOutput.path}${path}" + "; there is an existing (non-directory) file in its path" - val actual = i.msg.replace(testOutput.path, "") - assert(actual == expected, actual) + assert(i.msg == expected, i.msg) } } From 9f6f54f1871a50e9216e5e78ea0bda73d7caeef5 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 26 Jun 2019 15:36:27 +1000 Subject: [PATCH 1770/2793] Avoid redundant field in TermName, reducing size 40->32 bytes MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Before: ``` ➜ scala git:(topic/name-waste) ✗ java -Djdk.attach.allowAttachSelf=true -cp $(coursier fetch -q -p 'org.openjdk.jol:jol-cli:0.9') org.openjdk.jol.Main internals -cp $(scala-classpath $(scala-ref-version 2.13.x)) 'scala.reflect.internal.Names$TermName' Failed to find matching constructor, falling back to class-only introspection. 
scala.reflect.internal.Names$TermName object internals: OFFSET SIZE TYPE DESCRIPTION VALUE 0 12 (object header) N/A 12 4 scala.reflect.api.Names NameApi.$outer N/A 16 4 int Name.index N/A 20 4 int Name.len N/A 24 4 java.lang.String Name.cachedString N/A 28 4 scala.reflect.internal.Names.TermName TermName.next N/A 32 4 java.lang.String TermName.cachedString N/A 36 4 (loss due to the next object alignment) Instance size: 40 bytes Space losses: 0 bytes internal + 4 bytes external = 4 bytes total ``` After: ``` ➜ scala git:(topic/name-waste) ✗ java -Djdk.attach.allowAttachSelf=true -cp $(coursier fetch -q -p 'org.openjdk.jol:jol-cli:0.9') org.openjdk.jol.Main internals -cp build/quick/classes/reflect 'scala.reflect.internal.Names$TermName' Failed to find matching constructor, falling back to class-only introspection. scala.reflect.internal.Names$TermName object internals: OFFSET SIZE TYPE DESCRIPTION VALUE 0 12 (object header) N/A 12 4 scala.reflect.api.Names NameApi.$outer N/A 16 4 int Name.index N/A 20 4 int Name.len N/A 24 4 java.lang.String Name.cachedString N/A 28 4 scala.reflect.internal.Names.TermName TermName.next N/A Instance size: 32 bytes Space losses: 0 bytes internal + 0 bytes external = 0 bytes total ``` Exposing `Name.cachedString` as a protected val makes it eligible for the parameter aliasing layout optimization. --- src/reflect/scala/reflect/internal/Names.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/reflect/scala/reflect/internal/Names.scala b/src/reflect/scala/reflect/internal/Names.scala index 7e19e72e9ea7..8a7f701f31ea 100644 --- a/src/reflect/scala/reflect/internal/Names.scala +++ b/src/reflect/scala/reflect/internal/Names.scala @@ -190,7 +190,7 @@ trait Names extends api.Names { * or Strings as Names. Give names the key functions the absence of which * make people want Strings all the time. */ - sealed abstract class Name(protected val index: Int, protected val len: Int, cachedString: String) extends NameApi with CharSequence { + sealed abstract class Name(protected val index: Int, protected val len: Int, protected val cachedString: String) extends NameApi with CharSequence { type ThisNameType >: Null <: Name protected[this] def thisName: ThisNameType From fe17b84e75427835d8185667363e4d1bdec308c7 Mon Sep 17 00:00:00 2001 From: "ta.tanaka" Date: Fri, 28 Jun 2019 11:20:13 +0900 Subject: [PATCH 1771/2793] remove unused private method. --- src/library/scala/collection/concurrent/TrieMap.scala | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/src/library/scala/collection/concurrent/TrieMap.scala b/src/library/scala/collection/concurrent/TrieMap.scala index 0e4ad733789c..0a5233eadadc 100644 --- a/src/library/scala/collection/concurrent/TrieMap.scala +++ b/src/library/scala/collection/concurrent/TrieMap.scala @@ -570,16 +570,6 @@ private[collection] final class CNode[K, V](val bitmap: Int, val array: Array[Ba private[concurrent] def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n")) - /* quiescently consistent - don't call concurrently to anything involving a GCAS!! 
*/ - private def collectElems: Seq[(K, V)] = array flatMap { - case sn: SNode[K, V] => Some(sn.kvPair) - case in: INode[K, V] => in.mainnode match { - case tn: TNode[K, V] => Some(tn.kvPair) - case ln: LNode[K, V] => ln.listmap.toList - case cn: CNode[K, V] => cn.collectElems - } - } - private def collectLocalElems: Seq[String] = array flatMap { case sn: SNode[K, V] => Some(sn.kvPair._2.toString) case in: INode[K, V] => Some(in.toString.drop(14) + "(" + in.gen + ")") From 516e6e4652284df34027dc17374ae3742e33a279 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Thu, 6 Jun 2019 12:54:20 -0400 Subject: [PATCH 1772/2793] -Ywarn-unused:privates doesn't warn on unused locals Narrow the eponymous test case to privates warnings only (there are similarly-named partests for the other warnable cases). Refactor for alignment. --- .../nsc/typechecker/TypeDiagnostics.scala | 23 ++++---- test/files/neg/warn-unused-locals.check | 24 ++++++++ test/files/neg/warn-unused-locals.flags | 1 + test/files/neg/warn-unused-locals.scala | 36 ++++++++++++ test/files/neg/warn-unused-privates.check | 56 ++----------------- test/files/neg/warn-unused-privates.flags | 2 +- 6 files changed, 78 insertions(+), 64 deletions(-) create mode 100644 test/files/neg/warn-unused-locals.check create mode 100644 test/files/neg/warn-unused-locals.flags create mode 100644 test/files/neg/warn-unused-locals.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 778ed35267bf..5a401215ea8e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -653,8 +653,9 @@ trait TypeDiagnostics { unusedPrivates.traverse(body) if (settings.warnUnusedLocals || settings.warnUnusedPrivates) { + def shouldWarnOn(sym: Symbol) = if (sym.isPrivate) settings.warnUnusedPrivates else settings.warnUnusedLocals val valAdvice = "is never updated: consider using immutable val" - for (defn: DefTree <- unusedPrivates.unusedTerms) { + def termWarning(defn: SymTree): Unit = { val sym = defn.symbol val pos = ( if (defn.pos.isDefined) defn.pos @@ -663,7 +664,7 @@ trait TypeDiagnostics { case sym: TermSymbol => sym.referenced.pos case _ => NoPosition } - ) + ) val why = if (sym.isPrivate) "private" else "local" var cond = "is never used" val what = ( @@ -682,20 +683,20 @@ trait TypeDiagnostics { else if (sym.isMethod) s"method ${sym.name.decoded}" else if (sym.isModule) s"object ${sym.name.decoded}" else "term" - ) + ) typer.context.warning(pos, s"$why $what in ${sym.owner} $cond") } + def typeWarning(defn: SymTree): Unit = { + val why = if (defn.symbol.isPrivate) "private" else "local" + typer.context.warning(defn.pos, s"$why ${defn.symbol.fullLocationString} is never used") + } + + for (defn <- unusedPrivates.unusedTerms if shouldWarnOn(defn.symbol)) { termWarning(defn) } + for (defn <- unusedPrivates.unusedTypes if shouldWarnOn(defn.symbol)) { typeWarning(defn) } + for (v <- unusedPrivates.unsetVars) { typer.context.warning(v.pos, s"local var ${v.name} in ${v.owner} ${valAdvice}") } - for (t <- unusedPrivates.unusedTypes) { - val sym = t.symbol - val wrn = if (sym.isPrivate) settings.warnUnusedPrivates else settings.warnUnusedLocals - if (wrn) { - val why = if (sym.isPrivate) "private" else "local" - typer.context.warning(t.pos, s"$why ${sym.fullLocationString} is never used") - } - } } if (settings.warnUnusedPatVars) { for (v <- unusedPrivates.unusedPatVars) diff --git 
a/test/files/neg/warn-unused-locals.check b/test/files/neg/warn-unused-locals.check new file mode 100644 index 000000000000..bc74cb2c1bc4 --- /dev/null +++ b/test/files/neg/warn-unused-locals.check @@ -0,0 +1,24 @@ +warn-unused-locals.scala:7: warning: local var x in method f0 is never used + var x = 1 // warn + ^ +warn-unused-locals.scala:14: warning: local val b in method f1 is never used + val b = new Outer // warn + ^ +warn-unused-locals.scala:25: warning: local object HiObject in method l1 is never used + object HiObject { def f = this } // warn + ^ +warn-unused-locals.scala:26: warning: local class Hi is never used + class Hi { // warn + ^ +warn-unused-locals.scala:30: warning: local class DingDongDoobie is never used + class DingDongDoobie // warn + ^ +warn-unused-locals.scala:33: warning: local type OtherThing is never used + type OtherThing = String // warn + ^ +warn-unused-locals.scala:18: warning: local var x in method f2 is never updated: consider using immutable val + var x = 100 // warn about it being a var + ^ +error: No warnings can be incurred under -Xfatal-warnings. +7 warnings found +one error found diff --git a/test/files/neg/warn-unused-locals.flags b/test/files/neg/warn-unused-locals.flags new file mode 100644 index 000000000000..d5de20558576 --- /dev/null +++ b/test/files/neg/warn-unused-locals.flags @@ -0,0 +1 @@ +-Ywarn-unused:locals -Xfatal-warnings \ No newline at end of file diff --git a/test/files/neg/warn-unused-locals.scala b/test/files/neg/warn-unused-locals.scala new file mode 100644 index 000000000000..712f3c221a1e --- /dev/null +++ b/test/files/neg/warn-unused-locals.scala @@ -0,0 +1,36 @@ +class Outer { + class Inner +} + +trait Locals { + def f0 = { + var x = 1 // warn + var y = 2 // no warn + y = 3 + y + y + } + def f1 = { + val a = new Outer // no warn + val b = new Outer // warn + new a.Inner + } + def f2 = { + var x = 100 // warn about it being a var + x + } +} + +object Types { + def l1() = { + object HiObject { def f = this } // warn + class Hi { // warn + def f1: Hi = new Hi + def f2(x: Hi) = x + } + class DingDongDoobie // warn + class Bippy // no warn + type Something = Bippy // no warn + type OtherThing = String // warn + (new Bippy): Something + } +} diff --git a/test/files/neg/warn-unused-privates.check b/test/files/neg/warn-unused-privates.check index cdb5f21b48d2..36fe7eae1e21 100644 --- a/test/files/neg/warn-unused-privates.check +++ b/test/files/neg/warn-unused-privates.check @@ -40,18 +40,9 @@ warn-unused-privates.scala:70: warning: private default argument in trait Defaul warn-unused-privates.scala:70: warning: private default argument in trait DefaultArgs is never used private def bippy(x1: Int, x2: Int = 10, x3: Int = 15): Int = x1 + x2 + x3 ^ -warn-unused-privates.scala:86: warning: local var x in method f0 is never used - var x = 1 // warn - ^ -warn-unused-privates.scala:93: warning: local val b in method f1 is never used - val b = new Outer // warn - ^ warn-unused-privates.scala:103: warning: private object Dongo in object Types is never used private object Dongo { def f = this } // warn ^ -warn-unused-privates.scala:113: warning: local object HiObject in method l1 is never used - object HiObject { def f = this } // warn - ^ warn-unused-privates.scala:136: warning: private method x_= in class OtherNames is never used private def x_=(i: Int): Unit = () ^ @@ -61,60 +52,21 @@ warn-unused-privates.scala:137: warning: private method x in class OtherNames is warn-unused-privates.scala:138: warning: private method y_= in class OtherNames 
is never used private def y_=(i: Int): Unit = () ^ -warn-unused-privates.scala:97: warning: local var x in method f2 is never updated: consider using immutable val - var x = 100 // warn about it being a var - ^ warn-unused-privates.scala:104: warning: private class Bar1 in object Types is never used private class Bar1 // warn ^ warn-unused-privates.scala:106: warning: private type Alias1 in object Types is never used private type Alias1 = String // warn ^ -warn-unused-privates.scala:114: warning: local class Hi is never used - class Hi { // warn - ^ -warn-unused-privates.scala:118: warning: local class DingDongDoobie is never used - class DingDongDoobie // warn - ^ -warn-unused-privates.scala:121: warning: local type OtherThing is never used - type OtherThing = String // warn - ^ warn-unused-privates.scala:216: warning: private class for your eyes only in object not even using companion privates is never used private implicit class `for your eyes only`(i: Int) { // warn ^ warn-unused-privates.scala:232: warning: private class D in class nonprivate alias is enclosing is never used private class D extends C2 // warn ^ -warn-unused-privates.scala:153: warning: pattern var x in method f is never used; `x@_' suppresses this warning - val C(x, y, Some(z)) = c // warn - ^ -warn-unused-privates.scala:153: warning: pattern var y in method f is never used; `y@_' suppresses this warning - val C(x, y, Some(z)) = c // warn - ^ -warn-unused-privates.scala:153: warning: pattern var z in method f is never used; `z@_' suppresses this warning - val C(x, y, Some(z)) = c // warn - ^ -warn-unused-privates.scala:161: warning: pattern var z in method h is never used; `z@_' suppresses this warning - val C(x @ _, y @ _, z @ Some(_)) = c // warn for z? - ^ -warn-unused-privates.scala:166: warning: pattern var x in method v is never used; `x@_' suppresses this warning - val D(x) = d // warn - ^ -warn-unused-privates.scala:201: warning: pattern var z in method f is never used; `z@_' suppresses this warning - case z => "warn" - ^ -warn-unused-privates.scala:208: warning: pattern var z in method f is never used; `z@_' suppresses this warning - case Some(z) => "warn" - ^ -warn-unused-privates.scala:20: warning: parameter value msg0 in class B3 is never used -class B3(msg0: String) extends A("msg") - ^ -warn-unused-privates.scala:136: warning: parameter value i in method x_= is never used - private def x_=(i: Int): Unit = () - ^ -warn-unused-privates.scala:138: warning: parameter value i in method y_= is never used - private def y_=(i: Int): Unit = () - ^ +warn-unused-privates.scala:97: warning: local var x in method f2 is never updated: consider using immutable val + var x = 100 // warn about it being a var + ^ error: No warnings can be incurred under -Xfatal-warnings. -39 warnings found +23 warnings found one error found diff --git a/test/files/neg/warn-unused-privates.flags b/test/files/neg/warn-unused-privates.flags index 25474aefb362..9479643bd5c2 100644 --- a/test/files/neg/warn-unused-privates.flags +++ b/test/files/neg/warn-unused-privates.flags @@ -1 +1 @@ --Ywarn-unused -Xfatal-warnings +-Ywarn-unused:privates -Xfatal-warnings From 56f60d18f1d96476c5e25fcd16a0bdaaa6d54f2b Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Fri, 28 Jun 2019 20:58:35 -0400 Subject: [PATCH 1773/2793] [nomerge] set every sourcesInBase := false This would have saved me a headache, although it would also have been saved by me being minimally hygienic and not leaving `.scala` files lying around in my git repos. 
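
For anyone hitting the same headache, a minimal sketch of the failure mode and the opt-out (the stray file name here is made up):

```scala
// A leftover Scratch.scala sitting in the repository root is, by default, picked up and
// compiled into the root project, because sbt defaults to sourcesInBase := true.

// build.sbt — stop treating *.scala files in the project base directory as sources:
sourcesInBase := false
```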
--- build.sbt | 3 +++ 1 file changed, 3 insertions(+) diff --git a/build.sbt b/build.sbt index 9ee4c76a3fb0..12d20a4a2e52 100644 --- a/build.sbt +++ b/build.sbt @@ -240,6 +240,9 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories baseDirectory in Compile := (baseDirectory in ThisBuild).value, baseDirectory in Test := (baseDirectory in ThisBuild).value, + // Don't pick up source files from the project root. + sourcesInBase := false, + // Don't log process output (e.g. of forked `compiler/runMain ...Main`), just pass it // directly to stdout outputStrategy in run := Some(StdoutOutput) From 2d3a47723de3fce630c2360896f79e879786cf37 Mon Sep 17 00:00:00 2001 From: tgodzik Date: Tue, 2 Jul 2019 17:30:30 +0200 Subject: [PATCH 1774/2793] Fix the issue when completions would break in case comments between qualifier and the point at which we do completion. --- .../scala/tools/nsc/interactive/Global.scala | 6 ++--- .../nsc/interpreter/CompletionTest.scala | 22 +++++++++++++++++++ 2 files changed, 25 insertions(+), 3 deletions(-) diff --git a/src/interactive/scala/tools/nsc/interactive/Global.scala b/src/interactive/scala/tools/nsc/interactive/Global.scala index 082a9b825b41..0594ff25ae5e 100644 --- a/src/interactive/scala/tools/nsc/interactive/Global.scala +++ b/src/interactive/scala/tools/nsc/interactive/Global.scala @@ -1247,9 +1247,9 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") val qualPos = qual.pos def fallback = qualPos.end + 2 val source = pos.source - val nameStart: Int = (qualPos.end + 1 until focus1.pos.end).find(p => - source.identifier(source.position(p)).exists(_.length > 0) - ).getOrElse(fallback) + val nameStart: Int = (focus1.pos.end - 1 to qualPos.end by -1).find(p => + source.identifier(source.position(p)).exists(_.length == 0) + ).map(_ + 1).getOrElse(fallback) typeCompletions(sel, qual, nameStart, name) case Ident(name) => val allMembers = scopeMembers(pos) diff --git a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala index 2873bca8c668..492082b5c487 100644 --- a/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala +++ b/test/junit/scala/tools/nsc/interpreter/CompletionTest.scala @@ -231,6 +231,28 @@ class CompletionTest { checkExact(completer, s"($ident: Int) => tia")(ident) } + @Test + def completionWithComment(): Unit = { + val intp = newIMain() + val completer = new PresentationCompilerCompleter(intp) + + val withMultilineCommit = + """|Array(1, 2, 3) + | .map(_ + 1) /* then we do reverse */ + | .rev""".stripMargin + assert( + completer.complete(withMultilineCommit).candidates.contains("reverseMap") + ) + + val withInlineCommit = + """|Array(1, 2, 3) + | .map(_ + 1) // then we do reverse + | .rev""".stripMargin + assert( + completer.complete(withInlineCommit).candidates.contains("reverseMap") + ) + } + @Test def dependentTypeImplicits_t10353(): Unit = { val code = From 347127707ed9eafd3100a6552d16c75f1727e862 Mon Sep 17 00:00:00 2001 From: xuwei-k <6b656e6a69@gmail.com> Date: Thu, 4 Jul 2019 19:57:18 +0900 Subject: [PATCH 1775/2793] optimize immutable.TreeSet#{max, min} --- .../scala/collection/immutable/TreeSet.scala | 16 +++++++++++ .../collection/immutable/TreeSetTest.scala | 28 +++++++++++++++++++ 2 files changed, 44 insertions(+) diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 38cee881482b..f6157085c6f6 100644 --- 
a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -68,6 +68,22 @@ final class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: O override def tail = new TreeSet(RB.delete(tree, firstKey)) override def init = new TreeSet(RB.delete(tree, lastKey)) + override def min[A1 >: A](implicit ord: Ordering[A1]): A = { + if ((ord eq ordering) && nonEmpty) { + head + } else { + super.min(ord) + } + } + + override def max[A1 >: A](implicit ord: Ordering[A1]): A = { + if ((ord eq ordering) && nonEmpty) { + last + } else { + super.max(ord) + } + } + override def drop(n: Int) = { if (n <= 0) this else if (n >= size) empty diff --git a/test/junit/scala/collection/immutable/TreeSetTest.scala b/test/junit/scala/collection/immutable/TreeSetTest.scala index 8efe1bfeb8fb..7921a5cfea0a 100644 --- a/test/junit/scala/collection/immutable/TreeSetTest.scala +++ b/test/junit/scala/collection/immutable/TreeSetTest.scala @@ -17,4 +17,32 @@ class TreeSetTest { assertEquals(set, set drop Int.MinValue) assertEquals(set, set dropRight Int.MinValue) } + + @Test + def min(): Unit = { + assertEquals(1, TreeSet(1, 2, 3).min) + assertEquals(3, TreeSet(1, 2, 3).min(implicitly[Ordering[Int]].reverse)) + + try { + TreeSet.empty[Int].min + fail("expect UnsupportedOperationException") + } catch { + case e: UnsupportedOperationException => + assertEquals("empty.min", e.getMessage) + } + } + + @Test + def max(): Unit = { + assertEquals(3, TreeSet(1, 2, 3).max) + assertEquals(1, TreeSet(1, 2, 3).max(implicitly[Ordering[Int]].reverse)) + + try { + TreeSet.empty[Int].max + fail("expect UnsupportedOperationException") + } catch { + case e: UnsupportedOperationException => + assertEquals("empty.max", e.getMessage) + } + } } From 8f7fdc4ee96059f14c2ab645e3447a1fe3ec2173 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Tue, 26 Mar 2019 23:33:59 -0400 Subject: [PATCH 1776/2793] Use AdoptOpenJDK JDK 8 for testing Ref scala/scala-dev#587 --- .travis.yml | 34 ++++++++++++++++++++++------------ 1 file changed, 22 insertions(+), 12 deletions(-) diff --git a/.travis.yml b/.travis.yml index 25c9a81c3b5e..b73010ba6016 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,14 +1,18 @@ -# GCE VMs have better performance (will be upgrading to premium VMs soon) -sudo: required - +dist: xenial +group: stable language: scala -jdk: openjdk8 -cache: - directories: - - $HOME/.ivy2/cache - - $HOME/.sbt - - $HOME/.rvm/ +before_install: + # adding $HOME/.sdkman to cache would create an empty directory, which interferes with the initial installation + - "[[ -d $HOME/.sdkman/bin/ ]] || rm -rf $HOME/.sdkman/" + - curl -sL https://get.sdkman.io | bash + - echo sdkman_auto_answer=true > $HOME/.sdkman/etc/config + - source "$HOME/.sdkman/bin/sdkman-init.sh" + +install: + - sdk install java $(sdk list java | grep -o "$ADOPTOPENJDK\.[0-9\.]*hs-adpt" | head -1) + - java -Xmx32m -version + - javac -J-Xmx32m -version stages: - name: build @@ -38,6 +42,7 @@ jobs: # pull request validation (w/ mini-bootstrap) - stage: build + name: "JDK 8 pr validation" if: type = pull_request script: - sbt -warn setupPublishCore generateBuildCharacterPropertiesFile headerCheck publishLocal @@ -48,10 +53,8 @@ jobs: - stage: build language: ruby install: - - rvm install 2.2 - - rvm use 2.2 - - rvm info - ruby -v + - gem install bundler - bundler --version - bundle install script: @@ -62,6 +65,7 @@ jobs: env: global: + - ADOPTOPENJDK=8 - secure: 
"TuJOUtALynPd+MV1AuMeIpVb8BUBHr7Ul7FS48XhS2PyuTRpEBkSWybYcNg3AXyzmWDAuOjUxbaNMQBvP8vvehTbIYls5H5wTGKvj0D0TNVaPIXjF8bA8KyNat9xGNzhnWm2/2BMaWpKBJWRF7Jb+zHhijMYCJEbkMtoiE5R/mY=" # PRIV_KEY_SECRET, for scripts/travis-publish-spec.sh - secure: "T1fxtvLTxioyXJYiC/zVYdNYsBOt+0Piw+xE04rB1pzeKahm9+G2mISdcAyqv6/vze9eIJt6jNHHpKX32/Z3Cs1/Ruha4m3k+jblj3S0SbxV6ht2ieJXLT5WoUPFRrU68KXI8wqUadXpjxeJJV53qF2FC4lhfMUsw1IwwMhdaE8=" # PRIVATE_REPO_PASS - secure: "feE5A8mYNpkNQKVwCj3aXrwjVrJWh/4ENpRfFlr2HOD9ORk1GORD5Yq907WZd+dTkYK54Lh1gA+qHOCIDgJHbi9ZLU+kjzEjtYKF6lQy6Wb0LI8smTOnAA6IWVVYifiXw8d66MI2MKZb2jjGeIzy8Q00SZjLhEGjLyTeCIB88Ws=" # SONA_USER @@ -73,6 +77,12 @@ before_cache: # Cleanup the cached directories to avoid unnecessary cache updates - find $HOME/.ivy2/cache -name "ivydata-*.properties" -print -delete - find $HOME/.sbt -name "*.lock" -print -delete +cache: + directories: + - $HOME/.ivy2/cache + - $HOME/.sbt + - $HOME/.rvm/ + - $HOME/.sdkman notifications: webhooks: https://scala-ci.typesafe.com/benchq/webhooks/travis From df4df5f4cc767f176f13d38b37f62e3c54d0ba1d Mon Sep 17 00:00:00 2001 From: Seth Tisue Date: Tue, 9 Jul 2019 19:03:43 -0700 Subject: [PATCH 1777/2793] [nomerge] add sbt-whitesource --- build.sbt | 15 ++++++++++++--- project/plugins.sbt | 2 ++ 2 files changed, 14 insertions(+), 3 deletions(-) diff --git a/build.sbt b/build.sbt index 12d20a4a2e52..cd146b85fccc 100644 --- a/build.sbt +++ b/build.sbt @@ -53,6 +53,7 @@ val scalacheckDep = "org.scalacheck" % "scalacheck_2.12" % "1.13.4" val jolDep = "org.openjdk.jol" % "jol-core" % "0.5" val asmDep = "org.scala-lang.modules" % "scala-asm" % versionProps("scala-asm.version") val jlineDep = "jline" % "jline" % versionProps("jline.version") +val jansiDep = "org.fusesource.jansi" % "jansi" % "1.12" val antDep = "org.apache.ant" % "ant" % "1.9.4" /** Publish to ./dists/maven-sbt, similar to the Ant build which publishes to ./dists/maven. This @@ -329,7 +330,8 @@ val disablePublishing = Seq[Setting[_]]( publishArtifact := false, // The above is enough for Maven repos but it doesn't prevent publishing of ivy.xml files publish := {}, - publishLocal := {} + publishLocal := {}, + whitesourceIgnore := true ) lazy val setJarLocation: Setting[_] = @@ -454,8 +456,10 @@ lazy val compiler = configureAsSubproject(project) name := "scala-compiler", description := "Scala Compiler", libraryDependencies ++= Seq(antDep, asmDep), - // These are only needed for the POM: - libraryDependencies ++= Seq(scalaXmlDep, jlineDep % "optional"), + // These are only needed for the POM. (And, note that the jansi dependency is a fiction + // for WhiteSource purposes; the JLine JAR contains a shaded jansi, but WhiteSource + // won't know about that unless we tell it.) 
+ libraryDependencies ++= Seq(scalaXmlDep, jlineDep % "optional", jansiDep % "optional"), buildCharacterPropertiesFile := (resourceManaged in Compile).value / "scala-buildcharacter.properties", resourceGenerators in Compile += generateBuildCharacterPropertiesFile.map(file => Seq(file)).taskValue, // this a way to make sure that classes from interactive and scaladoc projects @@ -1358,3 +1362,8 @@ def findJar(files: Seq[Attributed[File]], dep: ModuleID): Option[Attributed[File def extract(m: ModuleID) = (m.organization, m.name) files.find(_.get(moduleID.key).map(extract _) == Some(extract(dep))) } + +// WhiteSource +whitesourceProduct := "Lightbend Reactive Platform" +whitesourceAggregateProjectName := "scala-2.12-stable" +whitesourceIgnoredScopes := Vector("test", "scala-tool") diff --git a/project/plugins.sbt b/project/plugins.sbt index 73ea2e392f49..2ee6b5408ebf 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -34,3 +34,5 @@ concurrentRestrictions in Global := Seq( addSbtPlugin("de.heikoseeberger" % "sbt-header" % "5.0.0") addSbtPlugin("pl.project13.scala" % "sbt-jmh" % "0.2.27") + +addSbtPlugin("com.lightbend" % "sbt-whitesource" % "0.1.16") From 0aec26384fd1a59f2c81d30962c6f9e577c4892b Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 10 Jul 2019 18:37:25 +1000 Subject: [PATCH 1778/2793] Fix scalac compatibility with JDK 13 This change in the JDK: https://github.com/openjdk/jdk/commit/ac6c642cf4fe243d88c2b762502860fdd41676f4#diff-e9881878ce74700a8063f67f65ec0657 Led us to the "cannot be represented as URI" exception. This commit just hard-codes jrt:/ as the URI --- src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala index 96a4b51e9f0c..17e71f047dd6 100644 --- a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala @@ -216,7 +216,7 @@ final class JrtClassPath(fs: java.nio.file.FileSystem) extends ClassPath with No if (inPackage == "") ClassPathEntries(packages(inPackage), Nil) else ClassPathEntries(packages(inPackage), classes(inPackage)) - def asURLs: Seq[URL] = Seq(dir.toUri.toURL) + def asURLs: Seq[URL] = Seq(new URL("jrt:/")) // We don't yet have a scheme to represent the JDK modules in our `-classpath`. // java models them as entries in the new "module path", we'll probably need to follow this. def asClassPathStrings: Seq[String] = Nil From 54e6b7f566c09c1a8a07a98e0deffb9e5d9d2352 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 16 Nov 2018 11:59:49 +0100 Subject: [PATCH 1779/2793] [backport] Inline: list all lambda methods in `$deserializeLambda$` When inlining an IndyLambda from the classpath, ensure that the lambda impl method is listed in the class's `$deserializeLambda$`. Backport of 88960039cb. 
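
The scenario, as a minimal sketch condensed from the new `run/t11255` test added below (the two snippets are separate compilation runs; the second is compiled with `-opt:l:inline -opt-inline-from:**`):

```scala
// Library, compiled first and picked up from the classpath:
class K(val f: Int => Int) extends Serializable
class A {
  @inline final def f = new K(x => x + 1)  // IndyLambda; the impl method $anonfun$f$1 lives in A
}

// Application, compiled with the inliner enabled so that A.f is inlined:
object Test {
  def roundTrip(obj: Object): Object = {
    import java.io._
    val buffer = new ByteArrayOutputStream
    val out = new ObjectOutputStream(buffer)
    out.writeObject(obj)
    new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray)).readObject
  }
  def main(args: Array[String]): Unit = {
    // After inlining, Test (not A) is the capturing class recorded for the lambda, so
    // Test.$deserializeLambda$ must list A.$anonfun$f$1, otherwise readObject fails.
    assert(roundTrip((new A).f).asInstanceOf[K].f(10) == 11)
  }
}
```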
--- .../backend/jvm/opt/ByteCodeRepository.scala | 25 ++++++++++++-- .../nsc/backend/jvm/opt/BytecodeUtils.scala | 24 +++---------- test/files/run/t11255.flags | 1 + test/files/run/t11255/A_1.scala | 4 +++ test/files/run/t11255/Test_2.scala | 14 ++++++++ .../nsc/backend/jvm/opt/InlinerTest.scala | 34 +++++++++++++++++++ 6 files changed, 81 insertions(+), 21 deletions(-) create mode 100644 test/files/run/t11255.flags create mode 100644 test/files/run/t11255/A_1.scala create mode 100644 test/files/run/t11255/Test_2.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala index a74982f68d65..2a97e5b89cee 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala @@ -32,7 +32,7 @@ import scala.tools.nsc.backend.jvm.opt.BytecodeUtils._ abstract class ByteCodeRepository extends PerRunInit { val postProcessor: PostProcessor - import postProcessor.{bTypes, bTypesFromClassfile} + import postProcessor.{bTypes, bTypesFromClassfile, callGraph} import bTypes._ import frontendAccess.{backendClassPath, recordPerRunCache} @@ -252,6 +252,27 @@ abstract class ByteCodeRepository extends PerRunInit { } } + private def removeLineNumbersAndAddLMFImplMethods(classNode: ClassNode): Unit = { + for (m <- classNode.methods.asScala) { + val iter = m.instructions.iterator + while (iter.hasNext) { + val insn = iter.next() + insn.getType match { + case AbstractInsnNode.LINE => + iter.remove() + case AbstractInsnNode.INVOKE_DYNAMIC_INSN => insn match { + case callGraph.LambdaMetaFactoryCall(_, _, implMethod, _) => + postProcessor.backendUtils.addIndyLambdaImplMethod(classNode.name, implMethod) + case _ => + } + case _ => + } + } + + } + } + + private def parseClass(internalName: InternalName): Either[ClassNotFound, ClassNode] = { val fullName = internalName.replace('/', '.') backendClassPath.findClassFile(fullName) map { classFile => @@ -271,7 +292,7 @@ abstract class ByteCodeRepository extends PerRunInit { // attribute that contains JSR-45 data that encodes debugging info. 
// http://docs.oracle.com/javase/specs/jvms/se7/html/jvms-4.html#jvms-4.7.11 // https://jcp.org/aboutJava/communityprocess/final/jsr045/index.html - removeLineNumberNodes(classNode) + removeLineNumbersAndAddLMFImplMethods(classNode) classNode } match { case Some(node) => Right(node) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala index 6036c720756b..b99b0e747251 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/BytecodeUtils.scala @@ -14,17 +14,15 @@ package scala.tools.nsc package backend.jvm package opt -import scala.annotation.{tailrec, switch} - -import scala.collection.mutable +import scala.annotation.{switch, tailrec} +import scala.collection.JavaConverters._ import scala.reflect.internal.util.Collections._ +import scala.tools.asm.Opcodes._ import scala.tools.asm.commons.CodeSizeEvaluator +import scala.tools.asm.tree._ import scala.tools.asm.tree.analysis._ import scala.tools.asm.{Label, Type} -import scala.tools.asm.Opcodes._ -import scala.tools.asm.tree._ -import GenBCode._ -import scala.collection.JavaConverters._ +import scala.tools.nsc.backend.jvm.GenBCode._ import scala.tools.nsc.backend.jvm.analysis.InstructionStackEffect object BytecodeUtils { @@ -301,18 +299,6 @@ object BytecodeUtils { (maxSize(caller) + maxSize(callee) > maxMethodSizeAfterInline) } - def removeLineNumberNodes(classNode: ClassNode): Unit = { - for (m <- classNode.methods.asScala) removeLineNumberNodes(m.instructions) - } - - def removeLineNumberNodes(instructions: InsnList): Unit = { - val iter = instructions.iterator() - while (iter.hasNext) iter.next() match { - case _: LineNumberNode => iter.remove() - case _ => - } - } - def cloneLabels(methodNode: MethodNode): Map[LabelNode, LabelNode] = { methodNode.instructions.iterator().asScala.collect({ case labelNode: LabelNode => (labelNode, newLabelNode) diff --git a/test/files/run/t11255.flags b/test/files/run/t11255.flags new file mode 100644 index 000000000000..0d25de8ef6a9 --- /dev/null +++ b/test/files/run/t11255.flags @@ -0,0 +1 @@ +-opt:l:inline -opt-inline-from:** diff --git a/test/files/run/t11255/A_1.scala b/test/files/run/t11255/A_1.scala new file mode 100644 index 000000000000..f18a5c632468 --- /dev/null +++ b/test/files/run/t11255/A_1.scala @@ -0,0 +1,4 @@ +class K(val f: Int => Int) extends Serializable +class A { + @inline final def f = new K(x => x + 1) +} diff --git a/test/files/run/t11255/Test_2.scala b/test/files/run/t11255/Test_2.scala new file mode 100644 index 000000000000..4a252e44b57e --- /dev/null +++ b/test/files/run/t11255/Test_2.scala @@ -0,0 +1,14 @@ +object Test { + def serializeDeserialize(obj: Object): Object = { + import java.io._ + val buffer = new ByteArrayOutputStream + val out = new ObjectOutputStream(buffer) + out.writeObject(obj) + val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray)) + in.readObject + } + + def main(args: Array[String]): Unit = { + assert(serializeDeserialize((new A).f).asInstanceOf[K].f(10) == 11) + } +} diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala index 0d4408998989..4eb55688340d 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlinerTest.scala @@ -1773,4 +1773,38 @@ class InlinerTest extends BytecodeTesting { val i = getMethod(t, "bar") 
assertSameCode(i.instructions, List(Label(0), LineNumber(7, Label(0)), VarOp(ALOAD, 1), Invoke(INVOKEVIRTUAL, "java/lang/Object", "toString", "()Ljava/lang/String;", false), Op(ARETURN), Label(5))) } + + @Test + def t11255(): Unit = { + val codeA = + """class K(val f: Int => Int) extends Serializable + |class A { + | @inline final def f = new K(x => x + 1) + |} + """.stripMargin + val codeB = + """class C { + | def serializeDeserialize(obj: Object): Object = { + | import java.io._ + | val buffer = new ByteArrayOutputStream + | val out = new ObjectOutputStream(buffer) + | out.writeObject(obj) + | val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray)) + | in.readObject + | } + | + | def t = { + | serializeDeserialize((new A).f).asInstanceOf[K].f(10) + | } + |} + """.stripMargin + val List(a, c, k) = compileClassesSeparately(List(codeA, codeB), extraArgs = "-opt:l:inline -opt-inline-from:**") + val m = getMethod(c, "$deserializeLambda$") + val args = m.instructions collect { + case InvokeDynamic(opcode, name, desc, bsm, bsmArgs) => + val mh = bsmArgs.head.asInstanceOf[MethodHandle] + List(mh.owner, mh.name) + } + assertEquals(List("A", "$anonfun$f$1"), args.head) + } } From 3b3eaf4805b890d9e334db64e90a07ab83eb7559 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 16 Nov 2018 14:10:31 +0100 Subject: [PATCH 1780/2793] [backport] Fix LambdaMetaFactoryCall for java-generated LMF Fix issues with Java LMF where the impl method is non-static, or a constructor. Test inlining of Java methods that have an LMF. Backport of 4903f691b0. --- .../tools/nsc/backend/jvm/opt/CallGraph.scala | 25 +++++++-- .../backend/jvm/opt/ClosureOptimizer.scala | 48 +++++++++------- test/files/run/indyLambdaKinds.check | 25 +++++++++ test/files/run/indyLambdaKinds/A_1.java | 16 ++++++ test/files/run/indyLambdaKinds/Test_2.scala | 55 +++++++++++++++++++ 5 files changed, 142 insertions(+), 27 deletions(-) create mode 100644 test/files/run/indyLambdaKinds.check create mode 100644 test/files/run/indyLambdaKinds/A_1.java create mode 100644 test/files/run/indyLambdaKinds/Test_2.scala diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala index 11fd4df644e1..f637343a554b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala @@ -472,7 +472,8 @@ abstract class CallGraph { // The check below ensures that // (1) the implMethod type has the expected signature (captured types plus argument types // from instantiatedMethodType) - // (2) the receiver of the implMethod matches the first captured type + // (2) the receiver of the implMethod matches the first captured type, if any, otherwise + // the first parameter type of instantiatedMethodType // (3) all parameters that are not the same in samMethodType and instantiatedMethodType // are reference types, so that we can insert casts to perform the same adaptation // that the closure object would. 
@@ -480,14 +481,26 @@ abstract class CallGraph { val isStatic = implMethod.getTag == Opcodes.H_INVOKESTATIC val indyParamTypes = Type.getArgumentTypes(indy.desc) val instantiatedMethodArgTypes = instantiatedMethodType.getArgumentTypes - val expectedImplMethodType = { - val paramTypes = (if (isStatic) indyParamTypes else indyParamTypes.tail) ++ instantiatedMethodArgTypes - Type.getMethodType(instantiatedMethodType.getReturnType, paramTypes: _*) - } + + val (receiverType, expectedImplMethodType) = + if (isStatic) { + val paramTypes = indyParamTypes ++ instantiatedMethodArgTypes + (None, Type.getMethodType(instantiatedMethodType.getReturnType, paramTypes: _*)) + } else if (implMethod.getTag == Opcodes.H_NEWINVOKESPECIAL) { + (Some(instantiatedMethodType.getReturnType), Type.getMethodType(Type.VOID_TYPE, instantiatedMethodArgTypes: _*)) + } else { + if (indyParamTypes.nonEmpty) { + val paramTypes = indyParamTypes.tail ++ instantiatedMethodArgTypes + (Some(indyParamTypes(0)), Type.getMethodType(instantiatedMethodType.getReturnType, paramTypes: _*)) + } else { + val paramTypes = instantiatedMethodArgTypes.tail + (Some(instantiatedMethodArgTypes(0)), Type.getMethodType(instantiatedMethodType.getReturnType, paramTypes: _*)) + } + } val isIndyLambda = ( Type.getType(implMethod.getDesc) == expectedImplMethodType // (1) - && (isStatic || implMethod.getOwner == indyParamTypes(0).getInternalName) // (2) + && receiverType.forall(rt => implMethod.getOwner == rt.getInternalName) // (2) && samMethodType.getArgumentTypes.corresponds(instantiatedMethodArgTypes)((samArgType, instArgType) => samArgType == instArgType || isReference(samArgType) && isReference(instArgType)) // (3) ) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala index b3f6765abc7f..f84bc84e0945 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ClosureOptimizer.scala @@ -316,6 +316,14 @@ abstract class ClosureOptimizer { // drop the closure from the stack ownerMethod.instructions.insertBefore(invocation, new InsnNode(POP)) + val isNew = lambdaBodyHandle.getTag == H_NEWINVOKESPECIAL + + if (isNew) { + val insns = ownerMethod.instructions + insns.insertBefore(invocation, new TypeInsnNode(NEW, lambdaBodyHandle.getOwner)) + insns.insertBefore(invocation, new InsnNode(DUP)) + } + // load captured values and arguments insertLoadOps(invocation, ownerMethod, localsForCapturedValues) insertLoadOps(invocation, ownerMethod, argumentLocalsList) @@ -323,7 +331,7 @@ abstract class ClosureOptimizer { // update maxStack // One slot per value is correct for long / double, see comment in the `analysis` package object. 
val numCapturedValues = localsForCapturedValues.locals.length - val invocationStackHeight = stackHeight + numCapturedValues - 1 // -1 because the closure is gone + val invocationStackHeight = stackHeight + numCapturedValues - 1 + (if (isNew) 2 else 0) // -1 because the closure is gone if (invocationStackHeight > ownerMethod.maxStack) ownerMethod.maxStack = invocationStackHeight @@ -333,30 +341,28 @@ abstract class ClosureOptimizer { case H_INVOKESTATIC => INVOKESTATIC case H_INVOKESPECIAL => INVOKESPECIAL case H_INVOKEINTERFACE => INVOKEINTERFACE - case H_NEWINVOKESPECIAL => - val insns = ownerMethod.instructions - insns.insertBefore(invocation, new TypeInsnNode(NEW, lambdaBodyHandle.getOwner)) - insns.insertBefore(invocation, new InsnNode(DUP)) - INVOKESPECIAL + case H_NEWINVOKESPECIAL => INVOKESPECIAL } val bodyInvocation = new MethodInsnNode(bodyOpcode, lambdaBodyHandle.getOwner, lambdaBodyHandle.getName, lambdaBodyHandle.getDesc, lambdaBodyHandle.isInterface) ownerMethod.instructions.insertBefore(invocation, bodyInvocation) - val bodyReturnType = Type.getReturnType(lambdaBodyHandle.getDesc) - val invocationReturnType = Type.getReturnType(invocation.desc) - if (isPrimitiveType(invocationReturnType) && bodyReturnType.getDescriptor == ObjectRef.descriptor) { - val op = - if (invocationReturnType.getSort == Type.VOID) getPop(1) - else getScalaUnbox(invocationReturnType) - ownerMethod.instructions.insertBefore(invocation, op) - } else if (isPrimitiveType(bodyReturnType) && invocationReturnType.getDescriptor == ObjectRef.descriptor) { - val op = - if (bodyReturnType.getSort == Type.VOID) getBoxedUnit - else getScalaBox(bodyReturnType) - ownerMethod.instructions.insertBefore(invocation, op) - } else { - // see comment of that method - fixLoadedNothingOrNullValue(bodyReturnType, bodyInvocation, ownerMethod, bTypes) + if (!isNew) { + val bodyReturnType = Type.getReturnType(lambdaBodyHandle.getDesc) + val invocationReturnType = Type.getReturnType(invocation.desc) + if (isPrimitiveType(invocationReturnType) && bodyReturnType.getDescriptor == ObjectRef.descriptor) { + val op = + if (invocationReturnType.getSort == Type.VOID) getPop(1) + else getScalaUnbox(invocationReturnType) + ownerMethod.instructions.insertBefore(invocation, op) + } else if (isPrimitiveType(bodyReturnType) && invocationReturnType.getDescriptor == ObjectRef.descriptor) { + val op = + if (bodyReturnType.getSort == Type.VOID) getBoxedUnit + else getScalaBox(bodyReturnType) + ownerMethod.instructions.insertBefore(invocation, op) + } else { + // see comment of that method + fixLoadedNothingOrNullValue(bodyReturnType, bodyInvocation, ownerMethod, bTypes) + } } ownerMethod.instructions.remove(invocation) diff --git a/test/files/run/indyLambdaKinds.check b/test/files/run/indyLambdaKinds.check new file mode 100644 index 000000000000..e661b8878db7 --- /dev/null +++ b/test/files/run/indyLambdaKinds.check @@ -0,0 +1,25 @@ +Inline into Main$.t1a: inlined A_1.a. Before: 7 ins, inlined: 3 ins. +Inline into Main$.t1b: inlined A_1.a. Before: 11 ins, inlined: 3 ins. +Inline into Main$.t2a: inlined A_1.b. Before: 7 ins, inlined: 3 ins. +Inline into Main$.t2b: inlined A_1.b. Before: 10 ins, inlined: 3 ins. +Inline into Main$.t3a: inlined A_1.c. Before: 7 ins, inlined: 3 ins. +Inline into Main$.t3b: inlined A_1.c. Before: 10 ins, inlined: 3 ins. +Inline into Main$.t4a: failed A_1.d. 
A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; is annotated @inline but could not be inlined: The callee A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/BiFunction; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$d$0(Ljava/lang/String;LA_1;Ljava/lang/String;)Ljava/lang/String;, (LA_1;Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. +Inline into Main$.t4b: failed A_1.d. A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; is annotated @inline but could not be inlined: The callee A_1::d(Ljava/lang/String;)Ljava/util/function/BiFunction; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/BiFunction; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$d$0(Ljava/lang/String;LA_1;Ljava/lang/String;)Ljava/lang/String;, (LA_1;Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. +Inline into Main$.t5a: failed A_1.e. A_1::e(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::e(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$e$1(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;, (Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. +Inline into Main$.t5b: failed A_1.e. A_1::e(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::e(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$e$1(Ljava/lang/String;Ljava/lang/String;)Ljava/lang/String;, (Ljava/lang/String;)Ljava/lang/String; ] that would cause an IllegalAccessError when inlined into class Main$. +Inline into Main$.t6a: failed A_1.f. 
A_1::f(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::f(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$f$2(Ljava/lang/String;Ljava/lang/String;)LA_1;, (Ljava/lang/String;)LA_1; ] that would cause an IllegalAccessError when inlined into class Main$. +Inline into Main$.t6b: failed A_1.f. A_1::f(Ljava/lang/String;)Ljava/util/function/Function; is annotated @inline but could not be inlined: The callee A_1::f(Ljava/lang/String;)Ljava/util/function/Function; contains the instruction INVOKEDYNAMIC apply(Ljava/lang/String;)Ljava/util/function/Function; [ // handle kind 0x6 : INVOKESTATIC java/lang/invoke/LambdaMetafactory.metafactory(Ljava/lang/invoke/MethodHandles$Lookup;Ljava/lang/String;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodType;Ljava/lang/invoke/MethodHandle;Ljava/lang/invoke/MethodType;)Ljava/lang/invoke/CallSite; // arguments: (Ljava/lang/Object;)Ljava/lang/Object;, // handle kind 0x6 : INVOKESTATIC A_1.lambda$f$2(Ljava/lang/String;Ljava/lang/String;)LA_1;, (Ljava/lang/String;)LA_1; ] that would cause an IllegalAccessError when inlined into class Main$. +warning: there were 6 inliner warnings; re-run enabling -opt-warnings for details, or try -help +m1 +m1 +m2 +m2 +m1 +m1 +m1 +m1 +m2 +m2 +m1 +m1 diff --git a/test/files/run/indyLambdaKinds/A_1.java b/test/files/run/indyLambdaKinds/A_1.java new file mode 100644 index 000000000000..ecd9c52d3848 --- /dev/null +++ b/test/files/run/indyLambdaKinds/A_1.java @@ -0,0 +1,16 @@ +import java.util.function.*; +import java.lang.annotation.Annotation; + +public class A_1 { + public final String m1(String x) { return "m1"; } + public final static String m2(String x) { return "m2"; } + public A_1(String x) { } + + public final BiFunction a() { return A_1::m1; } + public final Function b() { return A_1::m2; } + public final Function c() { return A_1::new; } + + public final BiFunction d(String x) { return (a, s) -> a.m1(s + x); } + public final Function e(String x) { return s -> A_1.m2(s + x); } + public final Function f(String x) { return s -> new A_1(s + x); } +} diff --git a/test/files/run/indyLambdaKinds/Test_2.scala b/test/files/run/indyLambdaKinds/Test_2.scala new file mode 100644 index 000000000000..d876dd5fd72b --- /dev/null +++ b/test/files/run/indyLambdaKinds/Test_2.scala @@ -0,0 +1,55 @@ +import tools.partest.DirectTest +import reflect.internal.util._ + +object Test extends DirectTest { + + override def extraSettings: String = s"-usejavacp -cp ${testOutput.path} -opt:l:inline -opt-inline-from:** -Yopt-log-inline _ -d ${testOutput.path}" + + override def code = """object Main { + @noinline def t1a(a: A_1) = a.a(): @inline + @noinline def t1b(a: A_1) = (a.a(): @inline).apply(a, "") + + @noinline def t2a(a: A_1) = a.b(): @inline + @noinline def t2b(a: A_1) = (a.b(): @inline).apply("") + + @noinline def t3a(a: A_1) = a.c(): @inline + @noinline def t3b(a: A_1) = (a.c(): @inline).apply("") + + @noinline def t4a(a: A_1) = a.d(""): @inline + @noinline def t4b(a: A_1) = (a.d(""): @inline).apply(a, "") + + 
@noinline def t5a(a: A_1) = a.e(""): @inline + @noinline def t5b(a: A_1) = (a.e(""): @inline).apply("") + + @noinline def t6a(a: A_1) = a.f(""): @inline + @noinline def t6b(a: A_1) = (a.f(""): @inline).apply("") + + def main(args: Array[String]): Unit = { + val a = new A_1("") + + println(t1a(a).apply(a, "")) + println(t1b(a)) + + println(t2a(a).apply("")) + println(t2b(a)) + + println(t3a(a).apply("").m1("")) + println(t3b(a).m1("")) + + println(t4a(a).apply(a, "")) + println(t4b(a)) + + println(t5a(a).apply("")) + println(t5b(a)) + + println(t6a(a).apply("").m1("")) + println(t6b(a).m1("")) + } +}""" + + override def show(): Unit = { + compile() + ScalaClassLoader(getClass.getClassLoader) run ("Main", Nil) + + } +} From cdfc33132f28f68e00dded3924a56c096b15b174 Mon Sep 17 00:00:00 2001 From: "ta.tanaka" Date: Thu, 11 Jul 2019 07:14:56 +0900 Subject: [PATCH 1781/2793] [nomerge] remove unused import. --- build.sbt | 13 ++++++------- .../reflect/macros/runtime/MacroRuntimes.scala | 2 -- src/compiler/scala/tools/nsc/CompilerCommand.scala | 1 - src/compiler/scala/tools/nsc/Global.scala | 7 +++---- src/compiler/scala/tools/nsc/MainBench.scala | 1 - src/compiler/scala/tools/nsc/PickleExtractor.scala | 1 - src/compiler/scala/tools/nsc/PipelineMain.scala | 7 ++----- src/compiler/scala/tools/nsc/ScriptRunner.scala | 2 +- .../scala/tools/nsc/backend/jvm/BCodeHelpers.scala | 6 +----- .../tools/nsc/backend/jvm/BCodeSkelBuilder.scala | 1 - .../tools/nsc/backend/jvm/ClassfileWriters.scala | 4 ++-- .../scala/tools/nsc/backend/jvm/CodeGen.scala | 1 - .../nsc/backend/jvm/GeneratedClassHandler.scala | 1 - .../scala/tools/nsc/backend/jvm/PostProcessor.scala | 2 -- .../nsc/backend/jvm/analysis/BackendUtils.scala | 2 +- .../nsc/backend/jvm/opt/ByteCodeRepository.scala | 1 - .../tools/nsc/classpath/AggregateClassPath.scala | 1 - .../tools/nsc/classpath/DirectoryClassPath.scala | 7 ++----- .../nsc/classpath/VirtualDirectoryClassPath.scala | 2 +- src/compiler/scala/tools/nsc/io/SourceReader.scala | 2 +- src/compiler/scala/tools/nsc/plugins/Plugin.scala | 4 +--- src/compiler/scala/tools/nsc/plugins/Plugins.scala | 4 ---- src/compiler/scala/tools/nsc/profile/Profiler.scala | 5 ++--- .../scala/tools/nsc/settings/ScalaSettings.scala | 1 - .../scala/tools/nsc/symtab/SymbolLoaders.scala | 1 - .../nsc/symtab/classfile/AbstractFileReader.scala | 1 - .../nsc/symtab/classfile/ClassfileParser.scala | 6 ++---- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 3 +-- .../nsc/symtab/classfile/ReusableDataReader.scala | 3 +-- .../scala/tools/nsc/transform/SpecializeTypes.scala | 1 - .../scala/tools/nsc/typechecker/Infer.scala | 2 +- .../tools/nsc/typechecker/MethodSynthesis.scala | 2 -- .../scala/tools/nsc/typechecker/NamesDefaults.scala | 1 - .../scala/tools/nsc/typechecker/Typers.scala | 5 ++--- .../scala/tools/reflect/ToolBoxFactory.scala | 2 +- .../scala/collection/mutable/WrappedArray.scala | 1 - .../concurrent/impl/ExecutionContextImpl.scala | 4 ++-- src/manual/scala/tools/docutil/EmitHtml.scala | 1 - .../scala/reflect/internal/SymbolTable.scala | 1 - .../scala/reflect/internal/util/ChromeTrace.scala | 3 +-- .../reflect/internal/util/OwnerOnlyChmod.scala | 1 - src/reflect/scala/reflect/io/AbstractFile.scala | 1 - src/reflect/scala/reflect/io/IOStats.scala | 1 - src/reflect/scala/reflect/io/PlainFile.scala | 3 --- .../tools/nsc/interpreter/jline/JLineReader.scala | 3 +-- src/repl/scala/tools/nsc/interpreter/ILoop.scala | 3 +-- src/repl/scala/tools/nsc/interpreter/IMain.scala | 1 - 
.../nsc/interpreter/PresentationCompilation.scala | 3 +-- .../interpreter/PresentationCompilerCompleter.scala | 2 +- .../scala/tools/nsc/interpreter/ReplGlobal.scala | 1 - src/repl/scala/tools/nsc/interpreter/Scripted.scala | 4 +--- .../scala/tools/nsc/doc/ScaladocAnalyzer.scala | 1 - .../scala/tools/nsc/doc/html/HtmlFactory.scala | 2 +- .../scala/tools/nsc/doc/html/page/Entity.scala | 1 - .../scala/PartialFunctionSerializationTest.scala | 1 - test/junit/scala/collection/IndexedSeqTest.scala | 2 +- .../scala/collection/IterableViewLikeTest.scala | 1 - test/junit/scala/collection/IteratorTest.scala | 1 - test/junit/scala/collection/NewBuilderTest.scala | 2 +- test/junit/scala/collection/SeqViewTest.scala | 1 - .../scala/collection/TraversableOnceTest.scala | 2 -- .../convert/WrapperSerializationTest.scala | 1 - .../scala/collection/mutable/AnyRefMapTest.scala | 1 - .../junit/scala/collection/mutable/VectorTest.scala | 1 - test/junit/scala/concurrent/FutureTest.scala | 2 -- .../scala/concurrent/impl/DefaultPromiseTest.scala | 2 +- test/junit/scala/io/SourceTest.scala | 1 - test/junit/scala/lang/primitives/BoxUnboxTest.scala | 1 - test/junit/scala/math/BigIntTest.scala | 1 - test/junit/scala/math/NumericTest.scala | 1 - test/junit/scala/reflect/ClassTagTest.scala | 1 - test/junit/scala/reflect/QTest.scala | 1 - test/junit/scala/reflect/internal/ScopeTest.scala | 2 -- .../scala/reflect/internal/util/StringOpsTest.scala | 1 - .../reflect/internal/util/WeakHashSetTest.scala | 1 - test/junit/scala/tools/nsc/DeterminismTest.scala | 1 - test/junit/scala/tools/nsc/FileUtils.scala | 2 +- .../tools/nsc/backend/jvm/DefaultMethodTest.scala | 2 -- .../backend/jvm/NestedClassesCollectorTest.scala | 1 - .../jvm/opt/EmptyLabelsAndLineNumbersTest.scala | 1 - .../nsc/backend/jvm/opt/SimplifyJumpsTest.scala | 1 - .../scala/tools/nsc/doc/html/HtmlDocletTest.scala | 1 - .../tools/nsc/reporters/ConsoleReporterTest.scala | 2 +- .../scala/tools/nsc/settings/ScalaVersionTest.scala | 1 - .../tools/nsc/transform/SpecializationTest.scala | 2 +- .../scala/tools/nsc/typechecker/Implicits.scala | 1 - .../junit/scala/tools/nsc/util/StackTraceTest.scala | 2 -- test/junit/scala/tools/testing/AssertUtilTest.scala | 1 - test/junit/scala/util/matching/CharRegexTest.scala | 1 - test/scalacheck/CheckEither.scala | 4 +--- test/scalacheck/array-new.scala | 1 - test/scalacheck/array-old.scala | 1 - .../scala/collection/parallel/IntValues.scala | 4 ---- .../scala/collection/parallel/PairValues.scala | 4 ---- .../collection/parallel/ParallelHashTrieCheck.scala | 4 ---- .../collection/parallel/ParallelIterableCheck.scala | 1 - .../collection/parallel/ParallelMapCheck1.scala | 6 ------ .../collection/parallel/ParallelRangeCheck.scala | 4 ---- .../collection/parallel/ParallelSeqCheck.scala | 2 -- .../collection/parallel/ParallelSetCheck.scala | 6 ------ .../parallel/immutable/ParallelVectorCheck.scala | 5 ----- .../parallel/mutable/ParallelArrayCheck.scala | 2 -- .../parallel/mutable/ParallelCtrieCheck.scala | 4 ---- .../parallel/mutable/ParallelHashMapCheck.scala | 4 ---- .../parallel/mutable/ParallelHashSetCheck.scala | 4 ---- .../quasiquotes/ArbitraryTreesAndNames.scala | 4 ++-- .../reflect/quasiquotes/DeprecationProps.scala | 1 - .../scala/reflect/quasiquotes/ErrorProps.scala | 1 - .../scala/reflect/quasiquotes/ForProps.scala | 2 +- .../scala/reflect/quasiquotes/LiftableProps.scala | 3 +-- .../quasiquotes/PatternConstructionProps.scala | 2 +- .../quasiquotes/PatternDeconstructionProps.scala | 2 +- 
.../reflect/quasiquotes/QuasiquoteProperties.scala | 4 ++-- .../reflect/quasiquotes/RuntimeErrorProps.scala | 3 +-- .../reflect/quasiquotes/TermConstructionProps.scala | 2 +- .../quasiquotes/TermDeconstructionProps.scala | 2 +- .../reflect/quasiquotes/TypeConstructionProps.scala | 2 +- .../quasiquotes/TypeDeconstructionProps.scala | 2 +- .../reflect/quasiquotes/TypecheckedProps.scala | 3 +-- .../scala/reflect/quasiquotes/UnliftableProps.scala | 1 - .../scala/tools/nsc/scaladoc/HtmlFactoryTest.scala | 2 -- test/scalacheck/t2460.scala | 1 - test/scalacheck/treeset.scala | 1 - 123 files changed, 61 insertions(+), 221 deletions(-) diff --git a/build.sbt b/build.sbt index cd146b85fccc..eb0d96e2c63a 100644 --- a/build.sbt +++ b/build.sbt @@ -32,10 +32,7 @@ * - to modularize the Scala compiler or library further */ -import java.io.{PrintWriter, StringWriter} - import sbt.TestResult -import sbt.testing.TestSelector import scala.build._ import VersionUtil._ @@ -188,6 +185,7 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories // END: Copy/pasted from SBT }, fork in run := true, + scalacOptions += "-Ywarn-unused:imports", scalacOptions in Compile in doc ++= Seq( "-doc-footer", "epfl", "-diagrams", @@ -817,6 +815,7 @@ lazy val test = project fork in IntegrationTest := true, // enable this in 2.13, when tests pass //scalacOptions in Compile += "-Yvalidate-pos:parser,typer", + scalacOptions -= "-Ywarn-unused:imports", javaOptions in IntegrationTest ++= List("-Xmx2G", "-Dpartest.exec.in.process=true", "-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), testOptions in IntegrationTest += Tests.Argument("-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), testFrameworks += new TestFramework("scala.tools.partest.sbt.Framework"), @@ -1006,13 +1005,13 @@ lazy val root: Project = (project in file(".")) def findRootCauses(i: Incomplete, currentTask: String): Vector[(String, Option[Throwable])] = { val sk = i.node match { case Some(t: Task[_]) => - t.info.attributes.entries.collect { case e if e.key == Keys.taskDefinitionKey => e.value.asInstanceOf[Def.ScopedKey[_]] } - .headOption.map(showScopedKey) + t.info.attributes.entries.collectFirst { case e if e.key == Keys.taskDefinitionKey => e.value.asInstanceOf[Def.ScopedKey[_]] } + .map(showScopedKey) case _ => None } val task = sk.getOrElse(currentTask) - val dup = sk.map(s => !loggedAny.add(s)).getOrElse(false) - if(sk.map(s => !loggedThis.add(s)).getOrElse(false)) Vector.empty + val dup = sk.exists(s => !loggedAny.add(s)) + if(sk.exists(s => !loggedThis.add(s))) Vector.empty else i.directCause match { case Some(e) => Vector((task, if(dup) None else Some(e))) case None => i.causes.toVector.flatMap(ch => findRootCauses(ch, task)) diff --git a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala index 557385744ac2..66589f76f509 100644 --- a/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala +++ b/src/compiler/scala/reflect/macros/runtime/MacroRuntimes.scala @@ -13,11 +13,9 @@ package scala.reflect.macros package runtime -import java.net.URLClassLoader import scala.reflect.internal.Flags._ import scala.reflect.runtime.ReflectionUtils -import scala.reflect.internal.util.AbstractFileClassLoader trait MacroRuntimes extends JavaReflectionRuntimes { self: scala.tools.nsc.typechecker.Analyzer => diff --git a/src/compiler/scala/tools/nsc/CompilerCommand.scala b/src/compiler/scala/tools/nsc/CompilerCommand.scala index 
86f9e0aa6c1e..315bc20136d5 100644 --- a/src/compiler/scala/tools/nsc/CompilerCommand.scala +++ b/src/compiler/scala/tools/nsc/CompilerCommand.scala @@ -14,7 +14,6 @@ package scala.tools.nsc import java.nio.file.Files -import io.File /** A class representing command line info for scalac */ class CompilerCommand(arguments: List[String], val settings: Settings) { diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 9bf44d789767..c05cecd49a33 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -14,16 +14,16 @@ package scala package tools package nsc -import java.io.{File, FileNotFoundException, IOException} +import java.io.{FileNotFoundException, IOException} import java.net.URL import java.nio.charset.{Charset, CharsetDecoder, IllegalCharsetNameException, UnsupportedCharsetException} import scala.collection.{immutable, mutable} -import io.{AbstractFile, Path, SourceReader} +import io.{AbstractFile, SourceReader} import reporters.Reporter import util.{ClassPath, returning} import scala.reflect.ClassTag -import scala.reflect.internal.util.{BatchSourceFile, FreshNameCreator, NoSourceFile, ScalaClassLoader, ScriptSourceFile, SourceFile, StatisticsStatics} +import scala.reflect.internal.util.{BatchSourceFile, FreshNameCreator, NoSourceFile, ScalaClassLoader, ScriptSourceFile, SourceFile} import scala.reflect.internal.pickling.PickleBuffer import symtab.{Flags, SymbolTable, SymbolTrackers} import symtab.classfile.Pickler @@ -35,7 +35,6 @@ import transform.patmat.PatternMatching import transform._ import backend.{JavaPlatform, ScalaPrimitives} import backend.jvm.{BackendStats, GenBCode} -import scala.concurrent.Future import scala.language.postfixOps import scala.tools.nsc.ast.{TreeGen => AstTreeGen} import scala.tools.nsc.classpath._ diff --git a/src/compiler/scala/tools/nsc/MainBench.scala b/src/compiler/scala/tools/nsc/MainBench.scala index 34914c3734d9..d84a2eee6ec8 100644 --- a/src/compiler/scala/tools/nsc/MainBench.scala +++ b/src/compiler/scala/tools/nsc/MainBench.scala @@ -12,7 +12,6 @@ package scala.tools.nsc -import scala.reflect.internal.util.Statistics /** The main class for NSC, a compiler for the programming * language Scala. 
diff --git a/src/compiler/scala/tools/nsc/PickleExtractor.scala b/src/compiler/scala/tools/nsc/PickleExtractor.scala index 4e35defdbec9..42c552c24334 100644 --- a/src/compiler/scala/tools/nsc/PickleExtractor.scala +++ b/src/compiler/scala/tools/nsc/PickleExtractor.scala @@ -12,7 +12,6 @@ package scala.tools.nsc -import java.io.Closeable import java.nio.file.attribute.BasicFileAttributes import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor, _} diff --git a/src/compiler/scala/tools/nsc/PipelineMain.scala b/src/compiler/scala/tools/nsc/PipelineMain.scala index 44f46cbc9366..ee977974470a 100644 --- a/src/compiler/scala/tools/nsc/PipelineMain.scala +++ b/src/compiler/scala/tools/nsc/PipelineMain.scala @@ -12,11 +12,9 @@ package scala.tools.nsc -import java.io.{BufferedOutputStream, File} +import java.io.File import java.lang.Thread.UncaughtExceptionHandler -import java.nio.file.attribute.FileTime import java.nio.file.{Files, Path, Paths} -import java.time.Instant import java.util.concurrent.ConcurrentHashMap import java.util.{Collections, Locale} import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger} @@ -28,9 +26,8 @@ import scala.collection.JavaConverters._ import scala.collection.{immutable, mutable, parallel} import scala.concurrent._ import scala.concurrent.duration.Duration -import scala.reflect.internal.pickling.PickleBuffer import scala.reflect.internal.util.{BatchSourceFile, FakePos, NoPosition, Position} -import scala.reflect.io.{PlainNioFile, RootPath} +import scala.reflect.io.PlainNioFile import scala.tools.nsc.io.AbstractFile import scala.tools.nsc.reporters.{ConsoleReporter, Reporter} import scala.tools.nsc.util.ClassPath diff --git a/src/compiler/scala/tools/nsc/ScriptRunner.scala b/src/compiler/scala/tools/nsc/ScriptRunner.scala index b6c2fcd7d959..79eaba84bb34 100644 --- a/src/compiler/scala/tools/nsc/ScriptRunner.scala +++ b/src/compiler/scala/tools/nsc/ScriptRunner.scala @@ -13,7 +13,7 @@ package scala package tools.nsc -import io.{AbstractFile, Directory, File, Path} +import io.{Directory, File, Path} import java.io.IOException import scala.tools.nsc.classpath.DirectoryClassPath import scala.tools.nsc.reporters.{Reporter,ConsoleReporter} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala index df9aa82a6792..6a6dfc17d327 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeHelpers.scala @@ -15,12 +15,9 @@ package tools.nsc package backend.jvm import scala.tools.asm -import scala.tools.nsc.io.AbstractFile import GenBCode._ import BackendReporting._ -import scala.reflect.internal.Flags -import scala.tools.asm.{ByteVector, ClassWriter} -import scala.reflect.internal.Flags +import scala.tools.asm.ClassWriter import scala.tools.nsc.reporters.NoReporter /* @@ -36,7 +33,6 @@ abstract class BCodeHelpers extends BCodeIdiomatic { import bTypes._ import coreBTypes._ import genBCode.postProcessor.backendUtils - import BTypes.{InternalName, InlineInfo, MethodInlineInfo} /** * True for classes generated by the Scala compiler that are considered top-level in terms of diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala index 1643d6ac4b10..6cf2a1a7536d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BCodeSkelBuilder.scala @@ -19,7 +19,6 @@ import 
scala.tools.nsc.symtab._ import scala.tools.asm import GenBCode._ import BackendReporting._ -import scala.tools.nsc.backend.jvm.BCodeHelpers.InvokeStyle /* * diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala index 629316fed6b0..5419937e020b 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala @@ -22,8 +22,8 @@ import java.util import java.util.concurrent.ConcurrentHashMap import java.util.zip.{CRC32, Deflater, ZipEntry, ZipOutputStream} -import scala.reflect.internal.util.{NoPosition, Statistics} -import scala.reflect.io.{PlainNioFile, VirtualFile} +import scala.reflect.internal.util.NoPosition +import scala.reflect.io.PlainNioFile import scala.tools.nsc.Global import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.io.AbstractFile diff --git a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala index bc090f145b8d..62915c6557cc 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/CodeGen.scala @@ -14,7 +14,6 @@ package scala.tools.nsc package backend.jvm import scala.collection.mutable.ListBuffer -import scala.reflect.internal.util.Statistics import scala.tools.asm.tree.ClassNode abstract class CodeGen[G <: Global](val global: G) extends PerRunInit { diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala index ce02b31a1a58..aeda19019cf7 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/GeneratedClassHandler.scala @@ -14,7 +14,6 @@ package scala.tools.nsc package backend.jvm import java.nio.channels.ClosedByInterruptException -import java.nio.file.Path import java.util.concurrent.ThreadPoolExecutor.CallerRunsPolicy import java.util.concurrent._ diff --git a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala index 52b39e40d204..d70ae20bba61 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/PostProcessor.scala @@ -13,8 +13,6 @@ package scala.tools.nsc package backend.jvm -import java.nio.channels.ClosedByInterruptException -import java.nio.channels.ClosedByInterruptException import java.util.concurrent.ConcurrentHashMap import scala.reflect.internal.util.{NoPosition, Position, StringContextStripMarginOps} diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index 40543b2fce4c..c60b829ca5e6 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -25,7 +25,7 @@ import scala.tools.asm import scala.tools.asm.Opcodes._ import scala.tools.asm.tree._ import scala.tools.asm.tree.analysis._ -import scala.tools.asm.{Handle, Label, Type} +import scala.tools.asm.{Handle, Type} import scala.tools.nsc.backend.jvm.BTypes._ import scala.tools.nsc.backend.jvm.GenBCode._ import scala.tools.nsc.backend.jvm.analysis.BackendUtils._ diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala 
b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala index 2a97e5b89cee..b99c8ff6d05a 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/ByteCodeRepository.scala @@ -14,7 +14,6 @@ package scala.tools.nsc package backend.jvm package opt -import java.util.concurrent.atomic.AtomicLong import scala.collection.JavaConverters._ import scala.collection.{concurrent, mutable} diff --git a/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala index 68fb3000b8c0..f82e9a72b08e 100644 --- a/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/AggregateClassPath.scala @@ -13,7 +13,6 @@ package scala.tools.nsc.classpath import java.net.URL -import scala.annotation.tailrec import scala.collection.mutable.ArrayBuffer import scala.reflect.internal.FatalError import scala.reflect.io.AbstractFile diff --git a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala index 17e71f047dd6..6ecae9a7ca18 100644 --- a/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/DirectoryClassPath.scala @@ -13,17 +13,14 @@ package scala.tools.nsc.classpath import java.io.{Closeable, File} -import java.net.{URI, URL} -import java.nio.file.{FileSystems, Files, SimpleFileVisitor} -import java.util.function.IntFunction +import java.net.URL +import java.nio.file.{FileSystems, Files} import java.util -import java.util.Comparator import scala.reflect.io.{AbstractFile, PlainFile, PlainNioFile} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} import FileUtils._ import scala.collection.JavaConverters._ -import scala.collection.immutable import scala.reflect.internal.JDK9Reflectors import scala.tools.nsc.CloseableRegistry import scala.tools.nsc.classpath.PackageNameUtils.{packageContains, separatePkgAndClassNames} diff --git a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala index 04ddc61b2107..af13a720d795 100644 --- a/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala +++ b/src/compiler/scala/tools/nsc/classpath/VirtualDirectoryClassPath.scala @@ -13,7 +13,7 @@ package scala.tools.nsc.classpath import scala.tools.nsc.util.ClassRepresentation -import scala.reflect.io.{AbstractFile, Path, PlainFile, VirtualDirectory} +import scala.reflect.io.{AbstractFile, VirtualDirectory} import FileUtils._ import java.net.URL diff --git a/src/compiler/scala/tools/nsc/io/SourceReader.scala b/src/compiler/scala/tools/nsc/io/SourceReader.scala index 88d8091d2e35..13b5e435bf6a 100644 --- a/src/compiler/scala/tools/nsc/io/SourceReader.scala +++ b/src/compiler/scala/tools/nsc/io/SourceReader.scala @@ -15,7 +15,7 @@ package io import java.io.{FileInputStream, IOException} import java.nio.{ByteBuffer, CharBuffer} -import java.nio.channels.{AsynchronousCloseException, Channels, ClosedByInterruptException, ReadableByteChannel} +import java.nio.channels.{Channels, ClosedByInterruptException, ReadableByteChannel} import java.nio.charset.{CharsetDecoder, CoderResult} import scala.tools.nsc.reporters._ diff --git a/src/compiler/scala/tools/nsc/plugins/Plugin.scala b/src/compiler/scala/tools/nsc/plugins/Plugin.scala index a38cbf8a504e..83da2b1d992b 100644 --- 
a/src/compiler/scala/tools/nsc/plugins/Plugin.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugin.scala @@ -13,10 +13,8 @@ package scala.tools.nsc package plugins -import scala.tools.nsc.io.Jar import scala.reflect.internal.util.ScalaClassLoader -import scala.reflect.io.{Directory, File, Path} -import java.io.InputStream +import scala.reflect.io.{File, Path} import scala.collection.mutable import scala.tools.nsc.classpath.FileBasedCache diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 8d47bfa329dc..8b84c93aa163 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -13,14 +13,10 @@ package scala.tools.nsc package plugins -import java.net.URL import scala.reflect.internal.util.ScalaClassLoader import scala.reflect.io.Path -import scala.tools.nsc -import scala.tools.nsc.io.Jar import scala.tools.nsc.plugins.Plugin.pluginClassLoadersCache -import scala.tools.nsc.typechecker.Macros import scala.tools.nsc.util.ClassPath import scala.tools.util.PathResolver.Defaults diff --git a/src/compiler/scala/tools/nsc/profile/Profiler.scala b/src/compiler/scala/tools/nsc/profile/Profiler.scala index 68cfab2f16e3..a3a9360d076c 100644 --- a/src/compiler/scala/tools/nsc/profile/Profiler.scala +++ b/src/compiler/scala/tools/nsc/profile/Profiler.scala @@ -14,7 +14,7 @@ package scala.tools.nsc.profile import java.io.{FileWriter, PrintWriter} import java.lang.management.ManagementFactory -import java.nio.file.{Files, Paths} +import java.nio.file.Paths import java.util.ServiceLoader import java.util.concurrent.TimeUnit import java.util.concurrent.atomic.AtomicInteger @@ -22,10 +22,9 @@ import java.util.concurrent.atomic.AtomicInteger import javax.management.openmbean.CompositeData import javax.management.{Notification, NotificationEmitter, NotificationListener} -import scala.collection.mutable import scala.collection.mutable.ArrayBuffer import scala.reflect.internal.util.ChromeTrace -import scala.reflect.io.{AbstractFile, File} +import scala.reflect.io.AbstractFile import scala.tools.nsc.{Global, Phase, Settings} object Profiler { diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 1ef4b8c3120c..b0bb402d394a 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -412,7 +412,6 @@ trait ScalaSettings extends AbsScalaSettings val YoptLogInline = StringSetting("-Yopt-log-inline", "package/Class.method", "Print a summary of inliner activity; `_` to print all, prefix match to select.", "") - import scala.reflect.internal.util.Statistics val Ystatistics = PhasesSetting("-Ystatistics", "Print compiler statistics for specific phases", "parser,typer,patmat,erasure,cleanup,jvm") override def YstatisticsEnabled = Ystatistics.value.nonEmpty diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 847b1837bbe7..1acf781be3dd 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -18,7 +18,6 @@ import java.io.IOException import scala.reflect.internal.MissingRequirementError import scala.reflect.io.{AbstractFile, NoAbstractFile} import scala.tools.nsc.util.{ClassPath, ClassRepresentation} -import scala.reflect.internal.TypesStats import 
scala.reflect.internal.util.{ReusableInstance, StatisticsStatics} /** This class ... diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala index 17d70998f3d9..6278db055798 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/AbstractFileReader.scala @@ -17,7 +17,6 @@ package classfile import java.io.{ByteArrayInputStream, DataInputStream} import java.lang.Double.longBitsToDouble import java.lang.Float.intBitsToFloat -import java.util import scala.tools.nsc.io.AbstractFile diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index f637f28d4ecf..8dca47caf779 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -15,9 +15,8 @@ package tools.nsc package symtab package classfile -import java.io.{ByteArrayInputStream, DataInputStream, File, IOException} +import java.io.{File, IOException} import java.lang.Integer.toHexString -import java.nio.ByteBuffer import scala.collection.{immutable, mutable} import scala.collection.mutable.{ArrayBuffer, ListBuffer} @@ -25,8 +24,7 @@ import scala.annotation.switch import scala.reflect.internal.JavaAccFlags import scala.reflect.internal.pickling.ByteCodecs import scala.reflect.internal.util.ReusableInstance -import scala.reflect.io.{NoAbstractFile, VirtualFile} -import scala.reflect.internal.util.Collections._ +import scala.reflect.io.NoAbstractFile import scala.tools.nsc.util.ClassPath import scala.tools.nsc.io.AbstractFile import scala.util.control.NonFatal diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index b7fb20f590ca..b00441981f06 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -16,7 +16,6 @@ package classfile import java.lang.Float.floatToIntBits import java.lang.Double.doubleToLongBits -import java.nio.file.Paths import scala.io.Codec import scala.reflect.internal.pickling.{PickleBuffer, PickleFormat} @@ -24,7 +23,7 @@ import scala.reflect.internal.util.shortClassOfInstance import scala.collection.mutable import PickleFormat._ import Flags._ -import scala.reflect.io.{AbstractFile, NoAbstractFile, PlainFile, PlainNioFile} +import scala.reflect.io.PlainFile /** * Serialize a top-level module and/or class. 
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ReusableDataReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ReusableDataReader.scala index 8bbbc4a3cce6..07a10fc44a30 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ReusableDataReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ReusableDataReader.scala @@ -12,8 +12,7 @@ package scala.tools.nsc.symtab.classfile -import java.io.{ByteArrayInputStream, DataInputStream, InputStream} -import java.nio.channels.Channels +import java.io.{DataInputStream, InputStream} import java.nio.{BufferUnderflowException, ByteBuffer} final class ReusableDataReader() extends DataReader { diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 6cb15fdf2e28..dee93f362444 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -16,7 +16,6 @@ package transform import scala.tools.nsc.symtab.Flags import scala.collection.{immutable, mutable} -import scala.annotation.tailrec /** Specialize code on types. * diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 3a0dd470244d..c598cea92fda 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -14,7 +14,7 @@ package scala.tools.nsc package typechecker import scala.collection.mutable.ListBuffer -import scala.collection.{immutable, mutable} +import scala.collection.immutable import scala.util.control.ControlThrowable import symtab.Flags._ import scala.reflect.internal.Depth diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 898fce90cef3..a58e6073b654 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -15,8 +15,6 @@ package typechecker import scala.reflect.NameTransformer import symtab.Flags._ -import scala.reflect.internal.util.StringOps.ojoin -import scala.reflect.internal.util.ListOfNil /** Logic related to method synthesis which involves cooperation between * Namer and Typer. 
diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 0218a6717401..784c98ddcbcc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -134,7 +134,6 @@ trait NamesDefaults { self: Analyzer => import typer._ import typer.infer._ val context = typer.context - import context.unit /* * Transform a function into a block, and passing context.namedApplyBlockInfo to diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 4bdf7b2b118d..4f4610d99811 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -20,13 +20,12 @@ package scala package tools.nsc package typechecker -import scala.collection.{immutable, mutable} +import scala.collection.mutable import scala.reflect.internal.util.{FreshNameCreator, ListOfNil, Statistics, StatisticsStatics} import scala.reflect.internal.TypesStats -import mutable.{ArrayBuffer, ListBuffer} +import mutable.ListBuffer import symtab.Flags._ import Mode._ -import scala.reflect.macros.whitebox // Suggestion check whether we can do without priming scopes with symbols of outer scopes, // like the IDE does. diff --git a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala index 429657465044..fa573ca00a36 100644 --- a/src/compiler/scala/tools/reflect/ToolBoxFactory.scala +++ b/src/compiler/scala/tools/reflect/ToolBoxFactory.scala @@ -18,7 +18,7 @@ import scala.tools.cmd.CommandLineParser import scala.tools.nsc.reporters._ import scala.tools.nsc.CompilerCommand import scala.tools.nsc.io.{AbstractFile, VirtualDirectory} -import scala.reflect.internal.util.{AbstractFileClassLoader, FreshNameCreator, NoSourceFile} +import scala.reflect.internal.util.{AbstractFileClassLoader, NoSourceFile} import scala.reflect.internal.Flags._ import java.lang.{Class => jClass} import scala.compat.Platform.EOL diff --git a/src/library/scala/collection/mutable/WrappedArray.scala b/src/library/scala/collection/mutable/WrappedArray.scala index 26f955f0a855..8640e1212df1 100644 --- a/src/library/scala/collection/mutable/WrappedArray.scala +++ b/src/library/scala/collection/mutable/WrappedArray.scala @@ -15,7 +15,6 @@ package collection package mutable import scala.reflect.ClassTag -import scala.runtime.BoxedUnit import scala.collection.generic._ import scala.collection.parallel.mutable.ParArray import scala.util.hashing.MurmurHash3 diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala index 4473f122267b..5ec1eac4d9e6 100644 --- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala +++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala @@ -12,8 +12,8 @@ package scala.concurrent.impl -import java.util.concurrent.{ ForkJoinPool, ForkJoinWorkerThread, ForkJoinTask, Callable, Executor, ExecutorService, ThreadFactory, TimeUnit } -import java.util.concurrent.atomic.{AtomicInteger, AtomicReference} +import java.util.concurrent.{ ForkJoinPool, ForkJoinWorkerThread, Callable, Executor, ExecutorService, ThreadFactory, TimeUnit } +import java.util.concurrent.atomic.AtomicInteger import java.util.Collection import scala.concurrent.{ BlockContext, ExecutionContext, CanAwait, ExecutionContextExecutor, ExecutionContextExecutorService } 
import scala.annotation.tailrec diff --git a/src/manual/scala/tools/docutil/EmitHtml.scala b/src/manual/scala/tools/docutil/EmitHtml.scala index 731123c4b134..c703f5b7a9a2 100644 --- a/src/manual/scala/tools/docutil/EmitHtml.scala +++ b/src/manual/scala/tools/docutil/EmitHtml.scala @@ -7,7 +7,6 @@ package scala.tools.docutil object EmitHtml { - import scala.xml.{Node, NodeBuffer, NodeSeq, XML} import ManPage._ val out = Console diff --git a/src/reflect/scala/reflect/internal/SymbolTable.scala b/src/reflect/scala/reflect/internal/SymbolTable.scala index 1fcc0f575137..029c8e404058 100644 --- a/src/reflect/scala/reflect/internal/SymbolTable.scala +++ b/src/reflect/scala/reflect/internal/SymbolTable.scala @@ -21,7 +21,6 @@ import scala.collection.mutable import util._ import java.util.concurrent.TimeUnit -import scala.collection.mutable.ArrayBuffer import scala.reflect.internal.settings.MutableSettings import scala.reflect.internal.{TreeGen => InternalTreeGen} import scala.reflect.io.AbstractFile diff --git a/src/reflect/scala/reflect/internal/util/ChromeTrace.scala b/src/reflect/scala/reflect/internal/util/ChromeTrace.scala index 69da5d5982c9..ac0207c521a6 100644 --- a/src/reflect/scala/reflect/internal/util/ChromeTrace.scala +++ b/src/reflect/scala/reflect/internal/util/ChromeTrace.scala @@ -14,8 +14,7 @@ package scala.reflect.internal.util import java.io.Closeable import java.lang.management.ManagementFactory -import java.nio.file.{Files, Path} -import java.util +import java.nio.file.Path import java.util.concurrent.TimeUnit import scala.collection.mutable diff --git a/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala index 27891f58124e..0cd249fb1982 100644 --- a/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala +++ b/src/reflect/scala/reflect/internal/util/OwnerOnlyChmod.scala @@ -15,7 +15,6 @@ package scala.reflect.internal.util import java.nio.ByteBuffer import java.nio.file.StandardOpenOption.{CREATE, TRUNCATE_EXISTING, WRITE} import java.nio.file.attribute.PosixFilePermission.{OWNER_EXECUTE, OWNER_READ, OWNER_WRITE} -import java.nio.file.attribute.PosixFilePermissions.asFileAttribute import java.nio.file.attribute._ import java.nio.file.{Files, Path} import java.util.EnumSet diff --git a/src/reflect/scala/reflect/io/AbstractFile.scala b/src/reflect/scala/reflect/io/AbstractFile.scala index 996725a65a9a..714f4f4b5274 100644 --- a/src/reflect/scala/reflect/io/AbstractFile.scala +++ b/src/reflect/scala/reflect/io/AbstractFile.scala @@ -17,7 +17,6 @@ package io import java.io.{ IOException, InputStream, OutputStream, BufferedOutputStream, ByteArrayOutputStream } import java.io.{ File => JFile } import java.net.URL -import java.nio.ByteBuffer /** * An abstraction over files for use in the reflection/compiler libraries. diff --git a/src/reflect/scala/reflect/io/IOStats.scala b/src/reflect/scala/reflect/io/IOStats.scala index fd3b6f6f7917..e7ba5e04075d 100644 --- a/src/reflect/scala/reflect/io/IOStats.scala +++ b/src/reflect/scala/reflect/io/IOStats.scala @@ -13,7 +13,6 @@ package scala package reflect.io -import scala.reflect.internal.util.Statistics // Due to limitations in the Statistics machinery, these are only // reported if this patch is applied. 
diff --git a/src/reflect/scala/reflect/io/PlainFile.scala b/src/reflect/scala/reflect/io/PlainFile.scala index cb1f73b41644..ad7d1c8f5953 100644 --- a/src/reflect/scala/reflect/io/PlainFile.scala +++ b/src/reflect/scala/reflect/io/PlainFile.scala @@ -14,9 +14,6 @@ package scala package reflect package io -import java.nio.ByteBuffer -import java.nio.file.StandardOpenOption -import java.util /** ''Note: This library is considered experimental and should not be used unless you know what you are doing.'' */ class PlainDirectory(givenPath: Directory) extends PlainFile(givenPath) { diff --git a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala index 68c21c69e258..b8a9f2e5bf51 100644 --- a/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala +++ b/src/repl-jline/scala/tools/nsc/interpreter/jline/JLineReader.scala @@ -15,8 +15,7 @@ package scala.tools.nsc.interpreter.jline import java.util.{Collection => JCollection, List => JList} import _root_.jline.{console => jconsole} -import jline.console.ConsoleReader -import jline.console.completer.{CandidateListCompletionHandler, Completer, CompletionHandler} +import jline.console.completer.{CandidateListCompletionHandler, Completer} import jconsole.history.{History => JHistory} import scala.tools.nsc.interpreter diff --git a/src/repl/scala/tools/nsc/interpreter/ILoop.scala b/src/repl/scala/tools/nsc/interpreter/ILoop.scala index a32e2aa02ee7..ba4f67b735be 100644 --- a/src/repl/scala/tools/nsc/interpreter/ILoop.scala +++ b/src/repl/scala/tools/nsc/interpreter/ILoop.scala @@ -28,8 +28,7 @@ import scala.reflect.internal.util.{BatchSourceFile, ScalaClassLoader, NoPositio import scala.reflect.io.{Directory, File, Path} import scala.tools.util._ import io.AbstractFile -import scala.concurrent.{ExecutionContext, Await, Future} -import ExecutionContext.Implicits._ +import scala.concurrent.{Await, Future} import java.io.BufferedReader import scala.util.{Try, Success, Failure} diff --git a/src/repl/scala/tools/nsc/interpreter/IMain.scala b/src/repl/scala/tools/nsc/interpreter/IMain.scala index 73cbc828eea4..4883fbcdb74e 100644 --- a/src/repl/scala/tools/nsc/interpreter/IMain.scala +++ b/src/repl/scala/tools/nsc/interpreter/IMain.scala @@ -16,7 +16,6 @@ package interpreter import PartialFunction.cond import scala.language.implicitConversions -import scala.beans.BeanProperty import scala.collection.mutable import scala.concurrent.{ExecutionContext, Future} import scala.reflect.runtime.{universe => ru} diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala index e03f4cdc3c2d..7d3ceca191ff 100644 --- a/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilation.scala @@ -13,10 +13,9 @@ package scala.tools.nsc.interpreter import scala.reflect.internal.util.{Position, RangePosition} -import scala.reflect.io.AbstractFile import scala.tools.nsc.backend.JavaPlatform import scala.tools.nsc.util.ClassPath -import scala.tools.nsc.{CloseableRegistry, Settings, interactive} +import scala.tools.nsc.{Settings, interactive} import scala.tools.nsc.reporters.StoreReporter import scala.tools.nsc.classpath._ diff --git a/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala b/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala index 9e469041d54d..83a982ae29ca 100644 --- 
a/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala +++ b/src/repl/scala/tools/nsc/interpreter/PresentationCompilerCompleter.scala @@ -12,7 +12,7 @@ package scala.tools.nsc.interpreter -import scala.reflect.internal.util.{RangePosition, StringOps} +import scala.reflect.internal.util.StringOps import scala.tools.nsc.interpreter.Completion.Candidates import scala.util.control.NonFatal diff --git a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala index f3455a2b094a..c93bde3d2280 100644 --- a/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala +++ b/src/repl/scala/tools/nsc/interpreter/ReplGlobal.scala @@ -13,7 +13,6 @@ package scala.tools.nsc package interpreter -import scala.tools.nsc.backend.JavaPlatform import scala.tools.nsc.classpath.{AggregateClassPath, ClassPathFactory} import scala.tools.nsc.util.ClassPath import typechecker.Analyzer diff --git a/src/repl/scala/tools/nsc/interpreter/Scripted.scala b/src/repl/scala/tools/nsc/interpreter/Scripted.scala index c3ba908d5a37..10e3e3c2c831 100644 --- a/src/repl/scala/tools/nsc/interpreter/Scripted.scala +++ b/src/repl/scala/tools/nsc/interpreter/Scripted.scala @@ -14,14 +14,12 @@ package scala package tools.nsc package interpreter -import scala.language.dynamics import scala.beans.BeanProperty import scala.collection.JavaConverters._ -import scala.reflect.classTag import scala.reflect.internal.util.Position import scala.tools.nsc.util.stringFromReader -import javax.script._, ScriptContext.{ ENGINE_SCOPE, GLOBAL_SCOPE } +import javax.script._ import java.io.{ Closeable, Reader } /* A REPL adaptor for the javax.script API. */ diff --git a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala index 35dcbb7af93d..671339a5a0c9 100644 --- a/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala +++ b/src/scaladoc/scala/tools/nsc/doc/ScaladocAnalyzer.scala @@ -15,7 +15,6 @@ package doc import scala.tools.nsc.ast.parser.{ SyntaxAnalyzer, BracePatch } import typechecker.Analyzer -import scala.reflect.internal.Chars._ import scala.reflect.internal.util.{ BatchSourceFile, Position } import scala.tools.nsc.doc.base.{ CommentFactoryBase, MemberLookupBase, LinkTo } diff --git a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala index 71c4123b9f80..6fab81aaf733 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/HtmlFactory.scala @@ -25,7 +25,7 @@ import scala.reflect.internal.Reporter * @author David Bernard * @author Gilles Dubochet */ class HtmlFactory(val universe: doc.Universe, val reporter: Reporter) { - import page.{IndexScript, EntityPage} + import page.IndexScript /** The character encoding to be used for generated Scaladoc sites. * This value is currently always UTF-8. 
*/ diff --git a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala index 9c701e960508..240c5587b6e8 100644 --- a/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala +++ b/src/scaladoc/scala/tools/nsc/doc/html/page/Entity.scala @@ -30,7 +30,6 @@ import model.diagram._ import diagram._ trait EntityPage extends HtmlPage { - import ScalaDoc.SummaryReporter def universe: doc.Universe def generator: DiagramGenerator diff --git a/test/junit/scala/PartialFunctionSerializationTest.scala b/test/junit/scala/PartialFunctionSerializationTest.scala index 2019e3a4259c..6618012ad5de 100644 --- a/test/junit/scala/PartialFunctionSerializationTest.scala +++ b/test/junit/scala/PartialFunctionSerializationTest.scala @@ -1,7 +1,6 @@ package scala import org.junit.Test -import org.junit.Assert._ import org.junit.runner.RunWith import org.junit.runners.JUnit4 diff --git a/test/junit/scala/collection/IndexedSeqTest.scala b/test/junit/scala/collection/IndexedSeqTest.scala index a33849e60b0b..4c89f7274ec9 100644 --- a/test/junit/scala/collection/IndexedSeqTest.scala +++ b/test/junit/scala/collection/IndexedSeqTest.scala @@ -275,7 +275,7 @@ package IndexedTestImpl { import java.lang.{Double => jlDouble} import java.lang.{Character => jlChar} - import scala.collection.immutable.{StringLike, StringOps, WrappedString} + import scala.collection.immutable.{StringOps, WrappedString} import scala.collection.mutable import scala.runtime.BoxedUnit trait DataProvider[E] { diff --git a/test/junit/scala/collection/IterableViewLikeTest.scala b/test/junit/scala/collection/IterableViewLikeTest.scala index 435a43c215ee..a5a02bad7386 100644 --- a/test/junit/scala/collection/IterableViewLikeTest.scala +++ b/test/junit/scala/collection/IterableViewLikeTest.scala @@ -4,7 +4,6 @@ import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import language.postfixOps @RunWith(classOf[JUnit4]) class IterableViewLikeTest { diff --git a/test/junit/scala/collection/IteratorTest.scala b/test/junit/scala/collection/IteratorTest.scala index 9099b6b85d10..d674f1b18790 100644 --- a/test/junit/scala/collection/IteratorTest.scala +++ b/test/junit/scala/collection/IteratorTest.scala @@ -358,7 +358,6 @@ class IteratorTest { assertTrue(hi.hasNext) } @Test def `flatMap is memory efficient in previous element`(): Unit = { - import java.lang.ref._ // Array.iterator holds onto array reference; by contrast, iterating over List walks tail. // Avoid reaching seq1 through test class. 
var seq1 = Array("first", "second") // captured, need to set to null diff --git a/test/junit/scala/collection/NewBuilderTest.scala b/test/junit/scala/collection/NewBuilderTest.scala index fdc6af113df9..5033d010460c 100644 --- a/test/junit/scala/collection/NewBuilderTest.scala +++ b/test/junit/scala/collection/NewBuilderTest.scala @@ -1,7 +1,7 @@ package scala.collection import scala.{collection => sc} -import scala.collection.{mutable => scm, immutable => sci, parallel => scp, concurrent => scc} +import scala.collection.{mutable => scm, immutable => sci, parallel => scp} import scala.collection.parallel.{mutable => scpm, immutable => scpi} import org.junit.runner.RunWith diff --git a/test/junit/scala/collection/SeqViewTest.scala b/test/junit/scala/collection/SeqViewTest.scala index 24474fc4b9a2..f9e9e69706c3 100644 --- a/test/junit/scala/collection/SeqViewTest.scala +++ b/test/junit/scala/collection/SeqViewTest.scala @@ -2,7 +2,6 @@ package scala.collection import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import org.junit.Assert._ import org.junit.Test @RunWith(classOf[JUnit4]) diff --git a/test/junit/scala/collection/TraversableOnceTest.scala b/test/junit/scala/collection/TraversableOnceTest.scala index 1d835024d6a8..4ad8bd76cd89 100644 --- a/test/junit/scala/collection/TraversableOnceTest.scala +++ b/test/junit/scala/collection/TraversableOnceTest.scala @@ -1,10 +1,8 @@ package scala.collection -import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import scala.util.Random @RunWith(classOf[JUnit4]) /* Test for scala/bug#7614 */ diff --git a/test/junit/scala/collection/convert/WrapperSerializationTest.scala b/test/junit/scala/collection/convert/WrapperSerializationTest.scala index d398be806a6d..35d48ed328b9 100644 --- a/test/junit/scala/collection/convert/WrapperSerializationTest.scala +++ b/test/junit/scala/collection/convert/WrapperSerializationTest.scala @@ -1,6 +1,5 @@ package scala.collection.convert -import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 diff --git a/test/junit/scala/collection/mutable/AnyRefMapTest.scala b/test/junit/scala/collection/mutable/AnyRefMapTest.scala index 6c12296950c3..1f7aa74da458 100644 --- a/test/junit/scala/collection/mutable/AnyRefMapTest.scala +++ b/test/junit/scala/collection/mutable/AnyRefMapTest.scala @@ -5,7 +5,6 @@ import org.junit.runners.JUnit4 import org.junit.Test import org.junit.Assert.assertTrue -import scala.collection.mutable.AnyRefMap /* Test for scala/bug#10540 */ @RunWith(classOf[JUnit4]) diff --git a/test/junit/scala/collection/mutable/VectorTest.scala b/test/junit/scala/collection/mutable/VectorTest.scala index 34dc775042d1..a844d13b84b7 100644 --- a/test/junit/scala/collection/mutable/VectorTest.scala +++ b/test/junit/scala/collection/mutable/VectorTest.scala @@ -3,7 +3,6 @@ package scala.collection.mutable import org.junit.runner.RunWith import org.junit.runners.JUnit4 import org.junit.Test -import scala.collection.mutable @RunWith(classOf[JUnit4]) /* Test for scala/bug#8014 and ++ in general */ diff --git a/test/junit/scala/concurrent/FutureTest.scala b/test/junit/scala/concurrent/FutureTest.scala index cd687479e3ee..9e5adcd2f29a 100644 --- a/test/junit/scala/concurrent/FutureTest.scala +++ b/test/junit/scala/concurrent/FutureTest.scala @@ -1,7 +1,6 @@ package scala.concurrent -import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 @@ -9,7 
+8,6 @@ import org.junit.runners.JUnit4 import scala.tools.testing.AssertUtil._ import scala.util.Try -import java.util.concurrent.CountDownLatch @RunWith(classOf[JUnit4]) class FutureTest { diff --git a/test/junit/scala/concurrent/impl/DefaultPromiseTest.scala b/test/junit/scala/concurrent/impl/DefaultPromiseTest.scala index f3a75e24d00e..70e935685346 100644 --- a/test/junit/scala/concurrent/impl/DefaultPromiseTest.scala +++ b/test/junit/scala/concurrent/impl/DefaultPromiseTest.scala @@ -3,7 +3,7 @@ package scala.concurrent.impl import java.util.concurrent.ConcurrentLinkedQueue import java.util.concurrent.CountDownLatch import org.junit.Assert._ -import org.junit.{ After, Before, Test } +import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 import scala.annotation.tailrec diff --git a/test/junit/scala/io/SourceTest.scala b/test/junit/scala/io/SourceTest.scala index 3fe48940a0d9..a5914d8428ab 100644 --- a/test/junit/scala/io/SourceTest.scala +++ b/test/junit/scala/io/SourceTest.scala @@ -6,7 +6,6 @@ import org.junit.Assert._ import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import scala.tools.testing.AssertUtil._ import java.io.{ Console => _, _ } diff --git a/test/junit/scala/lang/primitives/BoxUnboxTest.scala b/test/junit/scala/lang/primitives/BoxUnboxTest.scala index 07795ef07a54..a35d4ec07fe5 100644 --- a/test/junit/scala/lang/primitives/BoxUnboxTest.scala +++ b/test/junit/scala/lang/primitives/BoxUnboxTest.scala @@ -13,7 +13,6 @@ object BoxUnboxTest { @RunWith(classOf[JUnit4]) class BoxUnboxTest extends RunTesting { - import runner._ @Test def boxUnboxInt(): Unit = { diff --git a/test/junit/scala/math/BigIntTest.scala b/test/junit/scala/math/BigIntTest.scala index 5a5694a77567..98860c671d24 100644 --- a/test/junit/scala/math/BigIntTest.scala +++ b/test/junit/scala/math/BigIntTest.scala @@ -1,6 +1,5 @@ package scala.math -import java.math.{BigInteger => BI, MathContext => MC} import org.junit.Test import org.junit.runner.RunWith diff --git a/test/junit/scala/math/NumericTest.scala b/test/junit/scala/math/NumericTest.scala index bae68bb04ce8..e525aa761fe2 100644 --- a/test/junit/scala/math/NumericTest.scala +++ b/test/junit/scala/math/NumericTest.scala @@ -5,7 +5,6 @@ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import scala.math.Numeric.FloatAsIfIntegral @RunWith(classOf[JUnit4]) diff --git a/test/junit/scala/reflect/ClassTagTest.scala b/test/junit/scala/reflect/ClassTagTest.scala index 49022dccda01..8305eae45ddb 100644 --- a/test/junit/scala/reflect/ClassTagTest.scala +++ b/test/junit/scala/reflect/ClassTagTest.scala @@ -5,7 +5,6 @@ import org.junit.Assert._ import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import scala.tools.testing.AssertUtil._ class Misc diff --git a/test/junit/scala/reflect/QTest.scala b/test/junit/scala/reflect/QTest.scala index 24c35dc4010f..d3e7a8ca24dc 100644 --- a/test/junit/scala/reflect/QTest.scala +++ b/test/junit/scala/reflect/QTest.scala @@ -6,7 +6,6 @@ import org.junit.Assert._ import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import scala.tools.testing.AssertUtil._ @RunWith(classOf[JUnit4]) class QTest { diff --git a/test/junit/scala/reflect/internal/ScopeTest.scala b/test/junit/scala/reflect/internal/ScopeTest.scala index 1ab24facac98..53d5434dba47 100644 --- a/test/junit/scala/reflect/internal/ScopeTest.scala +++ b/test/junit/scala/reflect/internal/ScopeTest.scala @@ -1,13 +1,11 @@ package scala.reflect.internal -import 
scala.tools.nsc.symtab import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import scala.tools.testing.AssertUtil.assertThrows import scala.tools.nsc.symtab.SymbolTableForUnitTesting @RunWith(classOf[JUnit4]) diff --git a/test/junit/scala/reflect/internal/util/StringOpsTest.scala b/test/junit/scala/reflect/internal/util/StringOpsTest.scala index 13d3a6435e8d..9ab4f2691523 100644 --- a/test/junit/scala/reflect/internal/util/StringOpsTest.scala +++ b/test/junit/scala/reflect/internal/util/StringOpsTest.scala @@ -1,6 +1,5 @@ package scala.reflect.internal.util -import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 diff --git a/test/junit/scala/reflect/internal/util/WeakHashSetTest.scala b/test/junit/scala/reflect/internal/util/WeakHashSetTest.scala index 7e3b35c7d66b..fab4edd00261 100644 --- a/test/junit/scala/reflect/internal/util/WeakHashSetTest.scala +++ b/test/junit/scala/reflect/internal/util/WeakHashSetTest.scala @@ -1,6 +1,5 @@ package scala.reflect.internal.util -import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 diff --git a/test/junit/scala/tools/nsc/DeterminismTest.scala b/test/junit/scala/tools/nsc/DeterminismTest.scala index deadd7fa218d..97f99402e2bf 100644 --- a/test/junit/scala/tools/nsc/DeterminismTest.scala +++ b/test/junit/scala/tools/nsc/DeterminismTest.scala @@ -9,7 +9,6 @@ import javax.tools.ToolProvider import org.junit.Test import scala.collection.JavaConverters.seqAsJavaListConverter -import scala.language.implicitConversions import scala.reflect.internal.util.{BatchSourceFile, SourceFile} import scala.tools.nsc.reporters.StoreReporter import FileUtils._ diff --git a/test/junit/scala/tools/nsc/FileUtils.scala b/test/junit/scala/tools/nsc/FileUtils.scala index a3443febc036..b3d426a310f4 100644 --- a/test/junit/scala/tools/nsc/FileUtils.scala +++ b/test/junit/scala/tools/nsc/FileUtils.scala @@ -5,7 +5,7 @@ import java.nio.file.{FileVisitResult, Files, Path, SimpleFileVisitor} import difflib.DiffUtils -import scala.collection.JavaConverters.{asJavaIteratorConverter, asScalaBufferConverter, asScalaIteratorConverter} +import scala.collection.JavaConverters.{asScalaBufferConverter, asScalaIteratorConverter} import scala.reflect.io.PlainNioFile import scala.tools.nsc.backend.jvm.AsmUtils diff --git a/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala index 841e850b491b..6a6c3262969e 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/DefaultMethodTest.scala @@ -3,13 +3,11 @@ package scala.tools.nsc.backend.jvm import org.junit.Assert._ import org.junit.Test -import scala.collection.JavaConverters import scala.collection.JavaConverters._ import scala.reflect.internal.Flags import scala.tools.asm.Opcodes import scala.tools.asm.tree.ClassNode import scala.tools.testing.BytecodeTesting -import scala.tools.testing.BytecodeTesting._ class DefaultMethodTest extends BytecodeTesting { import compiler._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/NestedClassesCollectorTest.scala b/test/junit/scala/tools/nsc/backend/jvm/NestedClassesCollectorTest.scala index 43afa480fdd8..50ce5e2bed71 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/NestedClassesCollectorTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/NestedClassesCollectorTest.scala @@ -5,7 +5,6 
@@ import org.junit.runner.RunWith import org.junit.runners.JUnit4 import org.junit.Assert._ -import scala.tools.asm.tree.ClassNode import scala.tools.nsc.backend.jvm.BTypes.InternalName import scala.tools.nsc.backend.jvm.analysis.BackendUtils.NestedClassesCollector diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala index 81d609551e22..ecf351a2377b 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/EmptyLabelsAndLineNumbersTest.scala @@ -8,7 +8,6 @@ import org.junit.runner.RunWith import org.junit.runners.JUnit4 import scala.tools.asm.Opcodes._ -import scala.tools.partest.ASMConverters import scala.tools.partest.ASMConverters._ import scala.tools.testing.AssertUtil._ import scala.tools.testing.BytecodeTesting._ diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala index 3eb7acb14b4e..19842ee238c9 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/SimplifyJumpsTest.scala @@ -8,7 +8,6 @@ import org.junit.runner.RunWith import org.junit.runners.JUnit4 import scala.tools.asm.Opcodes._ -import scala.tools.partest.ASMConverters import scala.tools.partest.ASMConverters._ import scala.tools.testing.BytecodeTesting._ diff --git a/test/junit/scala/tools/nsc/doc/html/HtmlDocletTest.scala b/test/junit/scala/tools/nsc/doc/html/HtmlDocletTest.scala index 6c88f61e603b..e91bf4898e61 100644 --- a/test/junit/scala/tools/nsc/doc/html/HtmlDocletTest.scala +++ b/test/junit/scala/tools/nsc/doc/html/HtmlDocletTest.scala @@ -5,7 +5,6 @@ import org.junit.Assert._ import org.junit.runner.RunWith import org.junit.runners.JUnit4 -import scala.tools.testing.AssertUtil._ @RunWith(classOf[JUnit4]) class HtmlDocletTest { diff --git a/test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala b/test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala index 70958c20fbf2..0256230530c5 100644 --- a/test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala +++ b/test/junit/scala/tools/nsc/reporters/ConsoleReporterTest.scala @@ -2,7 +2,7 @@ package scala package tools.nsc package reporters -import java.io.{ByteArrayOutputStream, StringReader, BufferedReader, PrintStream, PrintWriter} +import java.io.{ByteArrayOutputStream, StringReader, BufferedReader, PrintWriter} import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith diff --git a/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala b/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala index 7f42f8bde5bd..6efb856aa71f 100644 --- a/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala +++ b/test/junit/scala/tools/nsc/settings/ScalaVersionTest.scala @@ -20,7 +20,6 @@ class ScalaVersionTest { // scala/bug#9167 @Test def `version parses with rigor`() { import settings.{ SpecificScalaVersion => V } - import ScalaVersion._ // no-brainers assertEquals(V(2,11,7,Final), ScalaVersion("2.11.7")) diff --git a/test/junit/scala/tools/nsc/transform/SpecializationTest.scala b/test/junit/scala/tools/nsc/transform/SpecializationTest.scala index 02dff1983094..33cf83d2103a 100644 --- a/test/junit/scala/tools/nsc/transform/SpecializationTest.scala +++ b/test/junit/scala/tools/nsc/transform/SpecializationTest.scala @@ -1,7 +1,7 @@ package 
scala.tools.nsc.transform import org.junit.Assert.assertEquals -import org.junit.{Assert, Test} +import org.junit.Test import scala.tools.nsc.symtab.SymbolTableForUnitTesting diff --git a/test/junit/scala/tools/nsc/typechecker/Implicits.scala b/test/junit/scala/tools/nsc/typechecker/Implicits.scala index 75f4e70827aa..9cf01091d425 100644 --- a/test/junit/scala/tools/nsc/typechecker/Implicits.scala +++ b/test/junit/scala/tools/nsc/typechecker/Implicits.scala @@ -1,7 +1,6 @@ package scala.tools.nsc package typechecker -import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 diff --git a/test/junit/scala/tools/nsc/util/StackTraceTest.scala b/test/junit/scala/tools/nsc/util/StackTraceTest.scala index e7654244c52f..57d77d822abe 100644 --- a/test/junit/scala/tools/nsc/util/StackTraceTest.scala +++ b/test/junit/scala/tools/nsc/util/StackTraceTest.scala @@ -3,10 +3,8 @@ package scala.tools.nsc.util import scala.language.reflectiveCalls import scala.util._ -import PartialFunction.cond import Properties.isJavaAtLeast -import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 diff --git a/test/junit/scala/tools/testing/AssertUtilTest.scala b/test/junit/scala/tools/testing/AssertUtilTest.scala index 03d8815ab267..24e28600d126 100644 --- a/test/junit/scala/tools/testing/AssertUtilTest.scala +++ b/test/junit/scala/tools/testing/AssertUtilTest.scala @@ -1,7 +1,6 @@ package scala.tools package testing -import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 diff --git a/test/junit/scala/util/matching/CharRegexTest.scala b/test/junit/scala/util/matching/CharRegexTest.scala index 50fdcd9d4660..9312d3716df7 100644 --- a/test/junit/scala/util/matching/CharRegexTest.scala +++ b/test/junit/scala/util/matching/CharRegexTest.scala @@ -1,7 +1,6 @@ package scala.util.matching -import org.junit.Assert._ import org.junit.Test import org.junit.runner.RunWith import org.junit.runners.JUnit4 diff --git a/test/scalacheck/CheckEither.scala b/test/scalacheck/CheckEither.scala index 48b90c1d9b08..3cbfb6bda7f2 100644 --- a/test/scalacheck/CheckEither.scala +++ b/test/scalacheck/CheckEither.scala @@ -1,9 +1,7 @@ -import org.scalacheck.{ Arbitrary, Prop, Properties } +import org.scalacheck.{ Arbitrary, Properties } import org.scalacheck.Arbitrary.{arbitrary, arbThrowable} import org.scalacheck.Gen.oneOf import org.scalacheck.Prop._ -import org.scalacheck.Test.check -import Function.tupled object CheckEitherTest extends Properties("Either") { implicit def arbitraryEither[X, Y](implicit xa: Arbitrary[X], ya: Arbitrary[Y]): Arbitrary[Either[X, Y]] = diff --git a/test/scalacheck/array-new.scala b/test/scalacheck/array-new.scala index de2df68b3a85..fdaab252359b 100644 --- a/test/scalacheck/array-new.scala +++ b/test/scalacheck/array-new.scala @@ -5,7 +5,6 @@ import Gen._ import Arbitrary._ import util._ import Buildable._ -import scala.collection.mutable.ArraySeq object ArrayNewTest extends Properties("Array") { /** At this moment the authentic scalacheck Array Builder/Arb bits are commented out. 
diff --git a/test/scalacheck/array-old.scala b/test/scalacheck/array-old.scala index 953263666045..639b264cb979 100644 --- a/test/scalacheck/array-old.scala +++ b/test/scalacheck/array-old.scala @@ -4,7 +4,6 @@ import Gen._ import Arbitrary._ import util._ import Buildable._ -import scala.collection.mutable.ArraySeq object ArrayOldTest extends Properties("Array") { /** At this moment the authentic scalacheck Array Builder/Arb bits are commented out. diff --git a/test/scalacheck/scala/collection/parallel/IntValues.scala b/test/scalacheck/scala/collection/parallel/IntValues.scala index cab60ead7624..4054efe6e19c 100644 --- a/test/scalacheck/scala/collection/parallel/IntValues.scala +++ b/test/scalacheck/scala/collection/parallel/IntValues.scala @@ -4,11 +4,7 @@ package scala.collection.parallel.ops -import org.scalacheck._ -import org.scalacheck.Gen import org.scalacheck.Gen._ -import org.scalacheck.Prop._ -import org.scalacheck.Properties import org.scalacheck.Arbitrary._ diff --git a/test/scalacheck/scala/collection/parallel/PairValues.scala b/test/scalacheck/scala/collection/parallel/PairValues.scala index 864dad2425d9..e5f487af252e 100644 --- a/test/scalacheck/scala/collection/parallel/PairValues.scala +++ b/test/scalacheck/scala/collection/parallel/PairValues.scala @@ -6,10 +6,6 @@ package scala.collection.parallel.ops import org.scalacheck._ import org.scalacheck.Gen -import org.scalacheck.Gen._ -import org.scalacheck.Prop._ -import org.scalacheck.Properties -import org.scalacheck.Arbitrary._ diff --git a/test/scalacheck/scala/collection/parallel/ParallelHashTrieCheck.scala b/test/scalacheck/scala/collection/parallel/ParallelHashTrieCheck.scala index e1df95e051e3..c3ac5d6812c2 100644 --- a/test/scalacheck/scala/collection/parallel/ParallelHashTrieCheck.scala +++ b/test/scalacheck/scala/collection/parallel/ParallelHashTrieCheck.scala @@ -5,10 +5,6 @@ package immutable import org.scalacheck._ import org.scalacheck.Gen -import org.scalacheck.Gen._ -import org.scalacheck.Prop._ -import org.scalacheck.Properties -import org.scalacheck.Arbitrary._ import scala.collection._ import scala.collection.parallel.ops._ diff --git a/test/scalacheck/scala/collection/parallel/ParallelIterableCheck.scala b/test/scalacheck/scala/collection/parallel/ParallelIterableCheck.scala index 7e7ef2ce1bcd..96e21e07b709 100644 --- a/test/scalacheck/scala/collection/parallel/ParallelIterableCheck.scala +++ b/test/scalacheck/scala/collection/parallel/ParallelIterableCheck.scala @@ -9,7 +9,6 @@ import org.scalacheck.Prop._ import org.scalacheck.Properties import scala.collection._ -import scala.collection.parallel._ diff --git a/test/scalacheck/scala/collection/parallel/ParallelMapCheck1.scala b/test/scalacheck/scala/collection/parallel/ParallelMapCheck1.scala index 50aa4ad0c776..497a805c2437 100644 --- a/test/scalacheck/scala/collection/parallel/ParallelMapCheck1.scala +++ b/test/scalacheck/scala/collection/parallel/ParallelMapCheck1.scala @@ -2,14 +2,8 @@ package scala.collection.parallel -import org.scalacheck._ -import org.scalacheck.Gen -import org.scalacheck.Gen._ import org.scalacheck.Prop._ -import org.scalacheck.Properties -import scala.collection._ -import scala.collection.parallel._ diff --git a/test/scalacheck/scala/collection/parallel/ParallelRangeCheck.scala b/test/scalacheck/scala/collection/parallel/ParallelRangeCheck.scala index 5b783fadf2bf..6bf19165d103 100644 --- a/test/scalacheck/scala/collection/parallel/ParallelRangeCheck.scala +++ 
b/test/scalacheck/scala/collection/parallel/ParallelRangeCheck.scala @@ -7,12 +7,8 @@ package immutable import org.scalacheck._ import org.scalacheck.Gen import org.scalacheck.Gen._ -import org.scalacheck.Prop._ -import org.scalacheck.Properties -import org.scalacheck.Arbitrary._ import scala.collection._ -import scala.collection.parallel.ops._ diff --git a/test/scalacheck/scala/collection/parallel/ParallelSeqCheck.scala b/test/scalacheck/scala/collection/parallel/ParallelSeqCheck.scala index 48c3d3f74504..00f2a99333da 100644 --- a/test/scalacheck/scala/collection/parallel/ParallelSeqCheck.scala +++ b/test/scalacheck/scala/collection/parallel/ParallelSeqCheck.scala @@ -6,10 +6,8 @@ import org.scalacheck._ import org.scalacheck.Gen import org.scalacheck.Gen._ import org.scalacheck.Prop._ -import org.scalacheck.Properties import scala.collection._ -import scala.collection.parallel._ diff --git a/test/scalacheck/scala/collection/parallel/ParallelSetCheck.scala b/test/scalacheck/scala/collection/parallel/ParallelSetCheck.scala index c22dddf96d80..ef5abb78aca2 100644 --- a/test/scalacheck/scala/collection/parallel/ParallelSetCheck.scala +++ b/test/scalacheck/scala/collection/parallel/ParallelSetCheck.scala @@ -2,14 +2,8 @@ package scala.collection.parallel -import org.scalacheck._ -import org.scalacheck.Gen -import org.scalacheck.Gen._ import org.scalacheck.Prop._ -import org.scalacheck.Properties -import scala.collection._ -import scala.collection.parallel._ diff --git a/test/scalacheck/scala/collection/parallel/immutable/ParallelVectorCheck.scala b/test/scalacheck/scala/collection/parallel/immutable/ParallelVectorCheck.scala index 1afcf2ce4c04..6532cf6e6dd4 100644 --- a/test/scalacheck/scala/collection/parallel/immutable/ParallelVectorCheck.scala +++ b/test/scalacheck/scala/collection/parallel/immutable/ParallelVectorCheck.scala @@ -6,16 +6,11 @@ package parallel.immutable import org.scalacheck._ import org.scalacheck.Gen import org.scalacheck.Gen._ -import org.scalacheck.Prop._ -import org.scalacheck.Properties -import org.scalacheck.Arbitrary._ import scala.collection._ import scala.collection.parallel.ops._ -import immutable.Vector -import immutable.VectorBuilder import scala.collection.parallel.TaskSupport diff --git a/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayCheck.scala index 39370f8c384a..4f7640b51430 100644 --- a/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayCheck.scala +++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelArrayCheck.scala @@ -7,8 +7,6 @@ import org.scalacheck._ import org.scalacheck.Gen import org.scalacheck.Gen._ import org.scalacheck.Prop._ -import org.scalacheck.Properties -import org.scalacheck.Arbitrary._ import scala.collection._ import scala.collection.parallel.ops._ diff --git a/test/scalacheck/scala/collection/parallel/mutable/ParallelCtrieCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelCtrieCheck.scala index ebdcf78bea48..5608b9a00dd0 100644 --- a/test/scalacheck/scala/collection/parallel/mutable/ParallelCtrieCheck.scala +++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelCtrieCheck.scala @@ -5,10 +5,6 @@ package mutable import org.scalacheck._ import org.scalacheck.Gen -import org.scalacheck.Gen._ -import org.scalacheck.Prop._ -import org.scalacheck.Properties -import org.scalacheck.Arbitrary._ import scala.collection._ import scala.collection.parallel.ops._ diff --git 
a/test/scalacheck/scala/collection/parallel/mutable/ParallelHashMapCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashMapCheck.scala index 06fdb6608044..1d136a7c7a5f 100644 --- a/test/scalacheck/scala/collection/parallel/mutable/ParallelHashMapCheck.scala +++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashMapCheck.scala @@ -5,10 +5,6 @@ package mutable import org.scalacheck._ import org.scalacheck.Gen -import org.scalacheck.Gen._ -import org.scalacheck.Prop._ -import org.scalacheck.Properties -import org.scalacheck.Arbitrary._ import scala.collection._ import scala.collection.parallel.ops._ diff --git a/test/scalacheck/scala/collection/parallel/mutable/ParallelHashSetCheck.scala b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashSetCheck.scala index a968ed053f21..4203f08ad0cc 100644 --- a/test/scalacheck/scala/collection/parallel/mutable/ParallelHashSetCheck.scala +++ b/test/scalacheck/scala/collection/parallel/mutable/ParallelHashSetCheck.scala @@ -5,10 +5,6 @@ package mutable import org.scalacheck._ import org.scalacheck.Gen -import org.scalacheck.Gen._ -import org.scalacheck.Prop._ -import org.scalacheck.Properties -import org.scalacheck.Arbitrary._ import scala.collection._ import scala.collection.parallel.ops._ diff --git a/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala b/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala index 2f2be704031c..7f9e74b0e624 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/ArbitraryTreesAndNames.scala @@ -1,7 +1,7 @@ package scala.reflect.quasiquotes -import org.scalacheck._, Prop._, Gen._, Arbitrary._ -import scala.reflect.runtime.universe._, internal._, Flag._ +import org.scalacheck._, Gen._, Arbitrary._ +import scala.reflect.runtime.universe._, Flag._ trait ArbitraryTreesAndNames { def smallList[T](size: Int, g: Gen[T]) = { diff --git a/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala b/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala index 9662586aef65..477da9635c3e 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/DeprecationProps.scala @@ -1,6 +1,5 @@ package scala.reflect.quasiquotes -import org.scalacheck._, Prop._, Gen._, Arbitrary._ import scala.reflect.runtime.universe._ object DeprecationProps extends QuasiquoteProperties("deprecation") { diff --git a/test/scalacheck/scala/reflect/quasiquotes/ErrorProps.scala b/test/scalacheck/scala/reflect/quasiquotes/ErrorProps.scala index cbfc08a8588b..0c544810d4b0 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/ErrorProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/ErrorProps.scala @@ -1,6 +1,5 @@ package scala.reflect.quasiquotes -import org.scalacheck._, Prop._, Gen._, Arbitrary._ object ErrorProps extends QuasiquoteProperties("errors") { property("can't extract two .. 
rankinalities in a row") = fails( diff --git a/test/scalacheck/scala/reflect/quasiquotes/ForProps.scala b/test/scalacheck/scala/reflect/quasiquotes/ForProps.scala index d19ead87927f..57a1982135e9 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/ForProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/ForProps.scala @@ -1,7 +1,7 @@ package scala.reflect.quasiquotes import org.scalacheck._, Prop._, Gen._, Arbitrary._ -import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport._ +import scala.reflect.runtime.universe._, internal.reificationSupport._ object ForProps extends QuasiquoteProperties("for") { case class ForEnums(val value: List[Tree]) diff --git a/test/scalacheck/scala/reflect/quasiquotes/LiftableProps.scala b/test/scalacheck/scala/reflect/quasiquotes/LiftableProps.scala index 90e5adba58d1..962c0d9b481d 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/LiftableProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/LiftableProps.scala @@ -1,7 +1,6 @@ package scala.reflect.quasiquotes -import org.scalacheck._, Prop._, Gen._, Arbitrary._ -import scala.reflect.runtime.universe._, Flag._ +import scala.reflect.runtime.universe._ object LiftableProps extends QuasiquoteProperties("liftable") { property("unquote byte") = test { diff --git a/test/scalacheck/scala/reflect/quasiquotes/PatternConstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/PatternConstructionProps.scala index e62a004adc1d..425a89939d1c 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/PatternConstructionProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/PatternConstructionProps.scala @@ -1,7 +1,7 @@ package scala.reflect.quasiquotes import org.scalacheck._, Prop._, Gen._, Arbitrary._ -import scala.reflect.runtime.universe._, Flag._ +import scala.reflect.runtime.universe._ object PatternConstructionProps extends QuasiquoteProperties("pattern construction") { property("unquote bind") = forAll { (bind: Bind) => diff --git a/test/scalacheck/scala/reflect/quasiquotes/PatternDeconstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/PatternDeconstructionProps.scala index 182e905c04c0..4cb4d2b1241b 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/PatternDeconstructionProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/PatternDeconstructionProps.scala @@ -1,7 +1,7 @@ package scala.reflect.quasiquotes import org.scalacheck._, Prop._, Gen._, Arbitrary._ -import scala.reflect.runtime.universe._, Flag._ +import scala.reflect.runtime.universe._ object PatternDeconstructionProps extends QuasiquoteProperties("pattern deconstruction") { property("extract bind") = forAll { (bind: Bind) => diff --git a/test/scalacheck/scala/reflect/quasiquotes/QuasiquoteProperties.scala b/test/scalacheck/scala/reflect/quasiquotes/QuasiquoteProperties.scala index 13e231891d79..acfd579cb70d 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/QuasiquoteProperties.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/QuasiquoteProperties.scala @@ -1,9 +1,9 @@ package scala.reflect.quasiquotes -import org.scalacheck._, Prop._, Gen._, Arbitrary._ +import org.scalacheck._, Prop._ import scala.tools.reflect.{ToolBox, ToolBoxError} import scala.reflect.runtime.currentMirror -import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.setSymbol +import scala.reflect.runtime.universe._, internal.reificationSupport.setSymbol abstract class QuasiquoteProperties(name: String) extends Properties(name) with ArbitraryTreesAndNames with Helpers diff --git 
a/test/scalacheck/scala/reflect/quasiquotes/RuntimeErrorProps.scala b/test/scalacheck/scala/reflect/quasiquotes/RuntimeErrorProps.scala index 4e389f15601a..d971f58dd897 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/RuntimeErrorProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/RuntimeErrorProps.scala @@ -1,7 +1,6 @@ package scala.reflect.quasiquotes -import org.scalacheck._, Prop._, Gen._, Arbitrary._ -import scala.reflect.runtime.universe._, Flag._ +import scala.reflect.runtime.universe._ object RuntimeErrorProps extends QuasiquoteProperties("errors") { def testFails[T](block: =>T) = test { diff --git a/test/scalacheck/scala/reflect/quasiquotes/TermConstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/TermConstructionProps.scala index e9f2d137ffbf..6bdc72d34787 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/TermConstructionProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/TermConstructionProps.scala @@ -1,7 +1,7 @@ package scala.reflect.quasiquotes import org.scalacheck._, Prop._, Gen._, Arbitrary._ -import scala.reflect.runtime.universe._, Flag._ +import scala.reflect.runtime.universe._ object TermConstructionProps extends QuasiquoteProperties("term construction") { property("unquote single tree return tree itself") = forAll { (t: Tree) => diff --git a/test/scalacheck/scala/reflect/quasiquotes/TermDeconstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/TermDeconstructionProps.scala index 3c1667938d81..8d80726caafb 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/TermDeconstructionProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/TermDeconstructionProps.scala @@ -1,7 +1,7 @@ package scala.reflect.quasiquotes import org.scalacheck._, Prop._, Gen._, Arbitrary._ -import scala.reflect.runtime.universe._, Flag._ +import scala.reflect.runtime.universe._ object TermDeconstructionProps extends QuasiquoteProperties("term deconstruction") { property("f(..x) = f") = test { diff --git a/test/scalacheck/scala/reflect/quasiquotes/TypeConstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/TypeConstructionProps.scala index c96018b31723..70986d0224f4 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/TypeConstructionProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/TypeConstructionProps.scala @@ -1,7 +1,7 @@ package scala.reflect.quasiquotes import org.scalacheck._, Prop._, Gen._, Arbitrary._ -import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport.ScalaDot +import scala.reflect.runtime.universe._, internal.reificationSupport.ScalaDot object TypeConstructionProps extends QuasiquoteProperties("type construction") { property("bare idents contain type names") = test { diff --git a/test/scalacheck/scala/reflect/quasiquotes/TypeDeconstructionProps.scala b/test/scalacheck/scala/reflect/quasiquotes/TypeDeconstructionProps.scala index fc8554d61f07..54b7f3624e3e 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/TypeDeconstructionProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/TypeDeconstructionProps.scala @@ -1,7 +1,7 @@ package scala.reflect.quasiquotes import org.scalacheck._, Prop._, Gen._, Arbitrary._ -import scala.reflect.runtime.universe._, Flag._ +import scala.reflect.runtime.universe._ object TypeDeconstructionProps extends QuasiquoteProperties("type deconstruction") { property("ident(type name)") = forAll { (name: TypeName) => diff --git a/test/scalacheck/scala/reflect/quasiquotes/TypecheckedProps.scala 
b/test/scalacheck/scala/reflect/quasiquotes/TypecheckedProps.scala index 4646388c8696..169d657db3b4 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/TypecheckedProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/TypecheckedProps.scala @@ -1,7 +1,6 @@ package scala.reflect.quasiquotes -import org.scalacheck._, Prop._, Gen._, Arbitrary._ -import scala.reflect.runtime.universe._, Flag._, internal.reificationSupport._ +import scala.reflect.runtime.universe._ object TypecheckedProps extends QuasiquoteProperties("typechecked") with TypecheckedTypes { diff --git a/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala b/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala index 4c2f2280ca73..aa598206c860 100644 --- a/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala +++ b/test/scalacheck/scala/reflect/quasiquotes/UnliftableProps.scala @@ -1,6 +1,5 @@ package scala.reflect.quasiquotes -import org.scalacheck._, Prop._, Gen._, Arbitrary._ import scala.reflect.runtime.universe._, Flag._ object UnliftableProps extends QuasiquoteProperties("unliftable") { diff --git a/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala b/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala index 289e04987ad7..85beb9acd3c7 100644 --- a/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala +++ b/test/scalacheck/scala/tools/nsc/scaladoc/HtmlFactoryTest.scala @@ -2,8 +2,6 @@ package scala.tools.nsc.scaladoc import org.scalacheck._ import org.scalacheck.Prop._ -import java.net.{URLClassLoader, URLDecoder} -import java.nio.file.{Files, Paths} import scala.collection.mutable import scala.xml.NodeSeq diff --git a/test/scalacheck/t2460.scala b/test/scalacheck/t2460.scala index 42ff3ecfe6ab..b83551583e69 100644 --- a/test/scalacheck/t2460.scala +++ b/test/scalacheck/t2460.scala @@ -1,6 +1,5 @@ import org.scalacheck.Prop.forAll import org.scalacheck.Properties -import org.scalacheck.{Test => SCTest} import org.scalacheck.Gen object SI2460Test extends Properties("Regex : Ticket 2460") { diff --git a/test/scalacheck/treeset.scala b/test/scalacheck/treeset.scala index ec6de406936f..b61c90a9ed08 100644 --- a/test/scalacheck/treeset.scala +++ b/test/scalacheck/treeset.scala @@ -3,7 +3,6 @@ import org.scalacheck._ import Prop._ import Gen._ import Arbitrary._ -import util._ object TreeSetTest extends Properties("TreeSet") { def genTreeSet[A: Arbitrary: Ordering]: Gen[TreeSet[A]] = From b5e76cc6ba4f8290e821b8c480190bd22fef66e9 Mon Sep 17 00:00:00 2001 From: Eugene Yokota Date: Thu, 11 Jul 2019 10:28:41 -0400 Subject: [PATCH 1782/2793] unset JAVA_HOME --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index b73010ba6016..025649e55666 100644 --- a/.travis.yml +++ b/.travis.yml @@ -11,6 +11,7 @@ before_install: install: - sdk install java $(sdk list java | grep -o "$ADOPTOPENJDK\.[0-9\.]*hs-adpt" | head -1) + - unset JAVA_HOME - java -Xmx32m -version - javac -J-Xmx32m -version From 57b8204c0df639ae41777501be6ee033270a0ee3 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 15 Jul 2019 11:15:19 +1000 Subject: [PATCH 1783/2793] Optimize check for by-name expressions Rule out some cases by the type of tree of flags before looking at the symbol's info. 
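The intent, as a minimal Scala sketch (the names below are hypothetical stand-ins, not the compiler's actual API): put the cheap tree-shape and flag tests at the front of the `&&` chain so the check that may force the symbol's info only runs for the few remaining candidates.

```scala
// Illustrative only: `Node`, `isPackageLike` and `forcesSymbolInfo` are made-up names.
final case class Node(isTerm: Boolean, isPackageLike: Boolean, byName: Boolean)

// imagine this completes lazy symbol info, i.e. it is the expensive step
def forcesSymbolInfo(n: Node): Boolean = n.byName

def isByNameRefSketch(n: Node): Boolean =
  n.isTerm &&            // cheap shape test
  !n.isPackageLike &&    // cheap flag test rules out packages, `this`, `super`
  forcesSymbolInfo(n)    // the expensive part now runs only rarely
```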
--- src/compiler/scala/tools/nsc/transform/UnCurry.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index bd2baa102f67..b85842b26fee 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -130,6 +130,7 @@ abstract class UnCurry extends InfoTransform def isByNameRef(tree: Tree) = ( tree.isTerm && (tree.symbol ne null) + && !(tree.symbol.hasPackageFlag || tree.isInstanceOf[This] || tree.isInstanceOf[Super]) && isByName(tree.symbol) && !byNameArgs(tree) ) From f8215506c96fdfda4a161047f2cb4cb59c5b8ebc Mon Sep 17 00:00:00 2001 From: Georgi Krastev Date: Wed, 16 Jan 2019 08:21:36 +0100 Subject: [PATCH 1784/2793] [backport] Performance tweak - avoid mapList in isSubArgs (cherry picked from commit 401154823a3ebf6ecb86226955aa88c59e74d0a6) --- src/reflect/scala/reflect/internal/Types.scala | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Types.scala b/src/reflect/scala/reflect/internal/Types.scala index 6710f0abbe7f..381b9ff350e4 100644 --- a/src/reflect/scala/reflect/internal/Types.scala +++ b/src/reflect/scala/reflect/internal/Types.scala @@ -4232,12 +4232,12 @@ trait Types } def isSubArgs(tps1: List[Type], tps2: List[Type], tparams: List[Symbol], depth: Depth): Boolean = { - def isSubArg(t1: Type, t2: Type, variance: Variance) = ( - (variance.isCovariant || isSubType(t2, t1, depth)) // The order of these two checks can be material for performance (scala/bug#8478) - && (variance.isContravariant || isSubType(t1, t2, depth)) + def isSubArg(t1: Type, t2: Type, tparam: Symbol) = ( + (tparam.isCovariant || isSubType(t2, t1, depth)) // The order of these two checks can be material for performance (scala/bug#8478) + && (tparam.isContravariant || isSubType(t1, t2, depth)) ) - corresponds3(tps1, tps2, mapList(tparams)(_.variance))(isSubArg) + corresponds3(tps1, tps2, tparams)(isSubArg) } def specializesSym(tp: Type, sym: Symbol, depth: Depth): Boolean = { From 8d537d703dc9946330c4b391a1a48eac4ee7c286 Mon Sep 17 00:00:00 2001 From: "Diego E. Alonso Blas" Date: Fri, 1 Jun 2018 00:00:00 +0100 Subject: [PATCH 1785/2793] Fixes Issue 493, to avoid unnecessary allocation. This commit avoids unnecessary memory use noticed in Issue 493 of https://github.com/scala/scala-dev. The line: `refinementParents :::= currentBaseClass.parentSymbols` in each iteration pre-prends `currentBaseClass.parentSymbols` to the previous `refinementParens`. Thus, at the end of the loop, the length of `refinementParens` is the sum of the `parentSymbols` lists obtained from each symbol in `initBaseClasses` which is a refinement classes. That creates as many cons (`::`) objects. Moreover, since `parentSymbols` is not a `val` but a `def`, it creates a list of length `m`, copies it (to prepend it) and the throws it away. To prevent these allocations, we replace the flattened `refinedParents` list by a `refinedClasses` list, which stores the classes whose parents we have not yet looked into. We just use the `exists` methods of the List class to look for a refinedClass with at least one parent is the currentBaseClass. 
(cherry picked from commit 0db7dd4e251ec7be75f6f8178977faae9c8274e9) --- .../reflect/internal/tpe/FindMembers.scala | 27 +++++++++---------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala index 85be6f12f34a..8d288f1d7e63 100644 --- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala +++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala @@ -22,7 +22,7 @@ trait FindMembers { /** Implementation of `Type#{findMember, findMembers}` */ private[internal] abstract class FindMemberBase[T](tpe: Type, name: Name, excludedFlags: Long, requiredFlags: Long) { - protected val initBaseClasses: List[Symbol] = tpe.baseClasses + protected[this] final val initBaseClasses: List[Symbol] = tpe.baseClasses // The first base class, or the symbol of the ThisType // e.g in: @@ -81,9 +81,9 @@ trait FindMembers { // Have we seen a candidate deferred member? var deferredSeen = false - // All direct parents of refinement classes in the base class sequence + // All refinement classes in the base class sequence // from the current `walkBaseClasses` - var refinementParents: List[Symbol] = Nil + var refinementClasses: List[Symbol] = Nil // Has the current `walkBaseClasses` encountered a non-refinement class? var seenFirstNonRefinementClass = false @@ -101,7 +101,7 @@ trait FindMembers { if (meetsRequirements) { val excl: Long = flags & excluded val isExcluded: Boolean = excl != 0L - if (!isExcluded && isPotentialMember(sym, flags, currentBaseClass, seenFirstNonRefinementClass, refinementParents)) { + if (!isExcluded && isPotentialMember(sym, flags, currentBaseClass, seenFirstNonRefinementClass, refinementClasses)) { if (shortCircuit(sym)) return false else addMemberIfNew(sym) } else if (excl == DEFERRED) { @@ -118,7 +118,7 @@ trait FindMembers { // the component types T1, ..., Tn and the refinement {R } // // => private members should be included from T1, ... Tn. (scala/bug#7475) - refinementParents :::= currentBaseClass.parentSymbols + refinementClasses ::= currentBaseClass else if (currentBaseClass.isClass) seenFirstNonRefinementClass = true // only inherit privates of refinement parents after this point @@ -138,23 +138,22 @@ trait FindMembers { // Q. When does a potential member fail to be an actual member? // A. if it is subsumed by an member in a subclass. private def isPotentialMember(sym: Symbol, flags: Long, owner: Symbol, - seenFirstNonRefinementClass: Boolean, refinementParents: List[Symbol]): Boolean = { + seenFirstNonRefinementClass: Boolean, refinementClasses: List[Symbol]): Boolean = { // conservatively (performance wise) doing this with flags masks rather than `sym.isPrivate` // to avoid multiple calls to `Symbol#flags`. val isPrivate = (flags & PRIVATE) == PRIVATE val isPrivateLocal = (flags & PrivateLocal) == PrivateLocal // TODO Is the special handling of `private[this]` vs `private` backed up by the spec? - def admitPrivate(sym: Symbol): Boolean = - (selectorClass == owner) || ( - !isPrivateLocal // private[this] only a member from within the selector class. (Optimization only? Does the spec back this up?) - && ( - !seenFirstNonRefinementClass - || refinementParents.contains(owner) - ) + def admitPrivate: Boolean = + // private[this] only a member from within the selector class. + // (Optimization only? Does the spec back this up?) 
+ !isPrivateLocal && ( !seenFirstNonRefinementClass || + refinementClasses.exists(_.info.parents.exists(_.typeSymbol == owner)) ) - (!isPrivate || admitPrivate(sym)) && (sym.name != nme.CONSTRUCTOR || owner == initBaseClasses.head) + (sym.name != nme.CONSTRUCTOR || owner == initBaseClasses.head) && + (!isPrivate || owner == selectorClass || admitPrivate) } // True unless the already-found member of type `memberType` matches the candidate symbol `other`. From 6f4f12d38e119852a1e5cbfa2dae0fb89567fd8d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 15 Jul 2019 13:52:06 +1000 Subject: [PATCH 1786/2793] Tweak URI creation in RootPath for better windows support The `getPath` I used was incomplete, it doesn't include the server name on windows UNC paths (\\servername\a\b) I could have tried the `toString` or `toAsciiString`, but after reading about pitfalls in: https://stackoverflow.com/questions/9873845/java-7-zip-file-system-provider-doesnt-seem-to-accept-spaces-in-uri I opted instead to just find the relevent FileSystemProvider and use the Path itself. --- src/reflect/scala/reflect/io/RootPath.scala | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/reflect/scala/reflect/io/RootPath.scala b/src/reflect/scala/reflect/io/RootPath.scala index 6634d323481e..146b4fa32611 100644 --- a/src/reflect/scala/reflect/io/RootPath.scala +++ b/src/reflect/scala/reflect/io/RootPath.scala @@ -14,7 +14,10 @@ package scala.reflect.io import java.io.Closeable import java.nio -import java.nio.file.{FileSystems, Files} +import java.nio.file.Files +import java.nio.file.spi.FileSystemProvider + +import scala.collection.JavaConverters.collectionAsScalaIterableConverter abstract class RootPath extends Closeable { @@ -22,10 +25,9 @@ abstract class RootPath extends Closeable { } object RootPath { + private lazy val jarFsProvider = FileSystemProvider.installedProviders().asScala.find(_.getScheme == "jar").getOrElse(throw new RuntimeException("No jar filesystem provider")) def apply(path: nio.file.Path, writable: Boolean): RootPath = { if (path.getFileName.toString.endsWith(".jar")) { - import java.net.URI - val zipFile = URI.create("jar:file:" + path.toUri.getPath) val env = new java.util.HashMap[String, String]() if (!Files.exists(path.getParent)) Files.createDirectories(path.getParent) @@ -34,7 +36,8 @@ object RootPath { if (Files.exists(path)) Files.delete(path) } - val zipfs = FileSystems.newFileSystem(zipFile, env) + val zipfs = jarFsProvider.newFileSystem(path, env) + new RootPath { def root = zipfs.getRootDirectories.iterator().next() def close(): Unit = { From 1a842d15b93c82aecdff5fb1573a537ac3841c4d Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 15 Jul 2019 16:18:53 +1000 Subject: [PATCH 1787/2793] [backport] List.filter optimizations from 2.13.x Binary compatibilty constraints won't let us actually do this as an override in `List` (we tried that originally but reverted.) But we are free to type-case List in the inherited implementation. 
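To make the payoff concrete, a small illustration (assuming the suffix-sharing behaviour of the 2.13 algorithm shown in the diff below):

```scala
val xs = List(1, 2, 3, 4, 5)
val ys = xs.filter(_ > 2)     // List(3, 4, 5)
// With the optimised path, `ys` can simply be the existing `3 :: 4 :: 5 :: Nil`
// suffix of `xs`: elements that survive in an unbroken run at the end of the
// list are not copied into fresh cons cells.
val zs = xs.filter(_ < 10)
// Degenerate but common case: every element is kept, so the original list
// instance itself can be returned unchanged.
```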
--- .../scala/collection/TraversableLike.scala | 97 +++++++++++++++++-- 1 file changed, 91 insertions(+), 6 deletions(-) diff --git a/src/library/scala/collection/TraversableLike.scala b/src/library/scala/collection/TraversableLike.scala index 0ec682a3227a..b560ea9c8619 100644 --- a/src/library/scala/collection/TraversableLike.scala +++ b/src/library/scala/collection/TraversableLike.scala @@ -15,9 +15,10 @@ package collection import generic._ import mutable.Builder -import scala.annotation.migration -import scala.annotation.unchecked.{ uncheckedVariance => uV } +import scala.annotation.{migration, tailrec} +import scala.annotation.unchecked.{uncheckedVariance => uV} import parallel.ParIterable +import scala.collection.immutable.{::, List, Nil} import scala.language.higherKinds /** A template trait for traversable collections of type `Traversable[A]`. @@ -246,11 +247,95 @@ trait TraversableLike[+A, +Repr] extends Any } private[scala] def filterImpl(p: A => Boolean, isFlipped: Boolean): Repr = { - val b = newBuilder - for (x <- this) - if (p(x) != isFlipped) b += x + this match { + case as: List[A] => + filterImplList(as, p, isFlipped).asInstanceOf[Repr] + case _ => + val b = newBuilder + for (x <- this) + if (p(x) != isFlipped) b += x + + b.result + } + } - b.result + private[this] def filterImplList[A](self: List[A], p: A => Boolean, isFlipped: Boolean): List[A] = { + + // everything seen so far so far is not included + @tailrec def noneIn(l: List[A]): List[A] = { + if (l.isEmpty) + Nil + else { + val h = l.head + val t = l.tail + if (p(h) != isFlipped) + allIn(l, t) + else + noneIn(t) + } + } + + // everything from 'start' is included, if everything from this point is in we can return the origin + // start otherwise if we discover an element that is out we must create a new partial list. + @tailrec def allIn(start: List[A], remaining: List[A]): List[A] = { + if (remaining.isEmpty) + start + else { + val x = remaining.head + if (p(x) != isFlipped) + allIn(start, remaining.tail) + else + partialFill(start, remaining) + } + } + + // we have seen elements that should be included then one that should be excluded, start building + def partialFill(origStart: List[A], firstMiss: List[A]): List[A] = { + val newHead = new ::(origStart.head, Nil) + var toProcess = origStart.tail + var currentLast = newHead + + // we know that all elements are :: until at least firstMiss.tail + while (!(toProcess eq firstMiss)) { + val newElem = new ::(toProcess.head, Nil) + currentLast.tl = newElem + currentLast = newElem + toProcess = toProcess.tail + } + + // at this point newHead points to a list which is a duplicate of all the 'in' elements up to the first miss. + // currentLast is the last element in that list. + + // now we are going to try and share as much of the tail as we can, only moving elements across when we have to. + var next = firstMiss.tail + var nextToCopy = next // the next element we would need to copy to our list if we cant share. + while (!next.isEmpty) { + // generally recommended is next.isNonEmpty but this incurs an extra method call. + val head: A = next.head + if (p(head) != isFlipped) { + next = next.tail + } else { + // its not a match - do we have outstanding elements? 
+ while (!(nextToCopy eq next)) { + val newElem = new ::(nextToCopy.head, Nil) + currentLast.tl = newElem + currentLast = newElem + nextToCopy = nextToCopy.tail + } + nextToCopy = next.tail + next = next.tail + } + } + + // we have remaining elements - they are unchanged attach them to the end + if (!nextToCopy.isEmpty) + currentLast.tl = nextToCopy + + newHead + } + + val result = noneIn(self) + result } /** Selects all elements of this $coll which satisfy a predicate. From b6ce0ce2a68189af7be79a4e1f7d15174e9b1b97 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 15 Jul 2019 16:55:20 +1000 Subject: [PATCH 1788/2793] Reduce the overhead of active analyzer plugins --- .../nsc/typechecker/AnalyzerPlugins.scala | 33 ++++++++++--------- 1 file changed, 18 insertions(+), 15 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala index 4c089196f0da..1688d92b83c4 100644 --- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala +++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala @@ -326,23 +326,26 @@ trait AnalyzerPlugins { self: Analyzer => } /** @see AnalyzerPlugin.pluginsPt */ - def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Mode): Type = - // performance opt - if (analyzerPlugins.isEmpty) pt - else invoke(new CumulativeOp[Type] { - def default = pt - def accumulate = (pt, p) => p.pluginsPt(pt, typer, tree, mode) - }) + def pluginsPt(pt: Type, typer: Typer, tree: Tree, mode: Mode): Type = { + var result = pt + var plugins = analyzerPlugins + while (!plugins.isEmpty) { // OPT use loop rather than the invoke combinator to reduce allocations + result = plugins.head.pluginsPt(result, typer, tree, mode) + plugins = plugins.tail + } + result + } /** @see AnalyzerPlugin.pluginsTyped */ - def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = - // performance opt - if (analyzerPlugins.isEmpty) addAnnotations(tree, tpe) - else invoke(new CumulativeOp[Type] { - // support deprecated methods in annotation checkers - def default = addAnnotations(tree, tpe) - def accumulate = (tpe, p) => p.pluginsTyped(tpe, typer, tree, mode, pt) - }) + def pluginsTyped(tpe: Type, typer: Typer, tree: Tree, mode: Mode, pt: Type): Type = { + var result = addAnnotations(tree, tpe) + var plugins = analyzerPlugins + while (!plugins.isEmpty) { // OPT use loop rather than the invoke combinator to reduce allocations + result = plugins.head.pluginsTyped(result, typer, tree, mode, pt) + plugins = plugins.tail + } + result + } /** @see AnalyzerPlugin.pluginsTypeSig */ def pluginsTypeSig(tpe: Type, typer: Typer, defTree: Tree, pt: Type): Type = invoke(new CumulativeOp[Type] { From baaab6261646c650a9f60d3decffb1d3a2dc8934 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 16 Jul 2019 10:30:49 +1000 Subject: [PATCH 1789/2793] Optimize macro plugin infrastructure to reduce allocations --- .../nsc/typechecker/AnalyzerPlugins.scala | 25 +++++++++++++++---- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala index 1688d92b83c4..66aff8e440b2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala +++ b/src/compiler/scala/tools/nsc/typechecker/AnalyzerPlugins.scala @@ -410,12 +410,27 @@ trait AnalyzerPlugins { self: Analyzer => private def invoke[T](op: NonCumulativeOp[T]): T = { if (macroPlugins.isEmpty) op.default else 
{ - val results = macroPlugins.filter(_.isActive()).map(plugin => (plugin, op.custom(plugin))) - results.flatMap { case (p, Some(result)) => Some((p, result)); case _ => None } match { - case (p1, _) :: (p2, _) :: _ => typer.context.error(op.position, s"both $p1 and $p2 want to ${op.description}"); op.default - case (_, custom) :: Nil => custom - case Nil => op.default + var result: Option[T] = None + var resultPlugin: MacroPlugin = null + var plugins = macroPlugins + while (!plugins.isEmpty) { + val plugin = plugins.head + if (plugin.isActive()) { + op.custom(plugin) match { + case None => + case s @ Some(custom) => + if (result.isDefined) { + typer.context.error(op.position, s"both $resultPlugin and $plugin want to ${op.description}") + op.default + } else { + result = s + resultPlugin = plugin + } + } + } + plugins = plugins.tail } + result.getOrElse(op.default) } } From 5329b9ef4cd329a8e5c3a39f940f9faa4858143a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 16 Jul 2019 10:58:11 +1000 Subject: [PATCH 1790/2793] Reduce allocations for virtual compilation units --- .../scala/tools/nsc/CompilationUnits.scala | 43 +++++++++++-------- .../scala/tools/nsc/transform/Mixin.scala | 2 +- .../scala/tools/nsc/typechecker/Infer.scala | 2 +- 3 files changed, 26 insertions(+), 21 deletions(-) diff --git a/src/compiler/scala/tools/nsc/CompilationUnits.scala b/src/compiler/scala/tools/nsc/CompilationUnits.scala index 46386beb58e7..504aec559b2e 100644 --- a/src/compiler/scala/tools/nsc/CompilationUnits.scala +++ b/src/compiler/scala/tools/nsc/CompilationUnits.scala @@ -69,29 +69,34 @@ trait CompilationUnits { global: Global => * To get their sourcefiles, you need to dereference with .sourcefile */ private[this] val _depends = mutable.HashSet[Symbol]() - // sbt compatibility (scala/bug#6875) - // - // imagine we have a file named A.scala, which defines a trait named Foo and a module named Main - // Main contains a call to a macro, which calls compileLate to define a mock for Foo - // compileLate creates a virtual file Virt35af32.scala, which contains a class named FooMock extending Foo, - // and macro expansion instantiates FooMock. the stage is now set. let's see what happens next. - // - // without this workaround in scalac or without being patched itself, sbt will think that - // * Virt35af32 depends on A (because it extends Foo from A) - // * A depends on Virt35af32 (because it contains a macro expansion referring to FooMock from Virt35af32) - // - // after compiling A.scala, sbt will notice that it has a new source file named Virt35af32. - // it will also think that this file hasn't yet been compiled and since A depends on it - // it will think that A needs to be recompiled. - // - // recompilation will lead to another macro expansion. 
that another macro expansion might choose to create a fresh mock, - // producing another virtual file, say, Virtee509a, which will again trick sbt into thinking that A needs a recompile, - // which will lead to another macro expansion, which will produce another virtual file and so on - def depends = if (exists && !source.file.isVirtual) _depends else mutable.HashSet[Symbol]() + @deprecated("Not supported and no longer used by Zinc", "2.12.9") + def depends = _depends + def registerDependency(symbol: Symbol): Unit = { + // sbt compatibility (scala/bug#6875) + // + // imagine we have a file named A.scala, which defines a trait named Foo and a module named Main + // Main contains a call to a macro, which calls compileLate to define a mock for Foo + // compileLate creates a virtual file Virt35af32.scala, which contains a class named FooMock extending Foo, + // and macro expansion instantiates FooMock. the stage is now set. let's see what happens next. + // + // without this workaround in scalac or without being patched itself, sbt will think that + // * Virt35af32 depends on A (because it extends Foo from A) + // * A depends on Virt35af32 (because it contains a macro expansion referring to FooMock from Virt35af32) + // + // after compiling A.scala, sbt will notice that it has a new source file named Virt35af32. + // it will also think that this file hasn't yet been compiled and since A depends on it + // it will think that A needs to be recompiled. + // + // recompilation will lead to another macro expansion. that another macro expansion might choose to create a fresh mock, + // producing another virtual file, say, Virtee509a, which will again trick sbt into thinking that A needs a recompile, + // which will lead to another macro expansion, which will produce another virtual file and so on + if (exists && !source.file.isVirtual) _depends += symbol + } /** so we can relink */ private[this] val _defined = mutable.HashSet[Symbol]() + @deprecated("Not supported", "2.12.9") def defined = if (exists && !source.file.isVirtual) _defined else mutable.HashSet[Symbol]() /** Synthetic definitions generated by namer, eliminated by typer. diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index d6c5aa5e2888..24bdb65bbda5 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -355,7 +355,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL with AccessorSynthes for (mc <- clazz.mixinClasses ; if mc.isTrait) { // @SEAN: adding trait tracking so we don't have to recompile transitive closures - unit.depends += mc + unit.registerDependency(mc) publicizeTraitMethods(mc) mixinTraitMembers(mc) mixinTraitForwarders(mc) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index c598cea92fda..b4782d11f5f5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -269,7 +269,7 @@ trait Infer extends Checkable { } // XXX So... what's this for exactly? 
if (context.unit.exists) - context.unit.depends += sym.enclosingTopLevelClass + context.unit.registerDependency(sym.enclosingTopLevelClass) if (sym.isError) tree setSymbol sym setType ErrorType From 97a43ff29f881ad7d646c999b53a43314e551b47 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 16 Jul 2019 11:16:36 +1000 Subject: [PATCH 1791/2793] Hoist creation of SubstSymMap out of hot loop in deriveSymbols --- src/reflect/scala/reflect/internal/Symbols.scala | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 8d9d87c7c2ad..22d14d6bcbda 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -3622,9 +3622,13 @@ trait Symbols extends api.Symbols { self: SymbolTable => * @return the new list of info-adjusted symbols */ def deriveSymbols(syms: List[Symbol], symFn: Symbol => Symbol): List[Symbol] = { - val syms1 = mapList(syms)(symFn) - syms1.foreach(_.substInfo(syms, syms1)) - syms1 + if (syms.isEmpty) Nil + else { + val syms1 = mapList(syms)(symFn) + val map = new SubstSymMap(syms, syms1) + syms1.foreach(_.modifyInfo(map)) + syms1 + } } /** Derives a new list of symbols from the given list by mapping the given From acec8d3dd722ab6c912646908953ddc256b61982 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 15 Jul 2019 11:55:34 +1000 Subject: [PATCH 1792/2793] Optimise implicit search Reduce the number of substituting type maps that underlie `Type.dealias` by using `Type.normalize` which internally caches its result. --- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 29b17d1c85a9..d3ea8299554c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -627,7 +627,7 @@ trait Implicits { loop(restpe, pt) else pt match { case tr @ TypeRef(pre, sym, args) => - if (sym.isAliasType) loop(tp, pt.dealias) + if (sym.isAliasType) loop(tp, pt.normalize) // OPT .normalize caches internally and means the same as .dealias for non higher-kinded TypeRefs else if (sym.isAbstractType) loop(tp, pt.lowerBound) else { val ptFunctionArity = functionArity(pt) From e17cb66a83b68c9c6b1a082d4d7cb01a24218292 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Tue, 16 Jul 2019 15:13:11 +0200 Subject: [PATCH 1793/2793] [nomerge] use 'in Compile' for new scalacOptions Otherwise, the `enableOptimizer` settings that are enabled by `setupPublishCore` and friends doesn't have the desired effect. 
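For readers less familiar with sbt scope delegation, the sketch below is purely
illustrative (it is not taken from this build): when sbt resolves a `Compile`-scoped
key for a project, it searches all of the project's own scopes before falling back
to `ThisBuild`, so an unscoped project-level append can shadow `Compile`-scoped
options contributed from a broader scope.

```
// Hypothetical build fragment, for illustration only.
// Optimizer flags contributed in a broad scope (e.g. appended by a command):
scalacOptions in ThisBuild in Compile ++= Seq("-opt:l:inline")

// Variant A: an unscoped, project-level append. The project's Compile-scoped
// scalacOptions resolve to this definition and never reach ThisBuild / Compile,
// dropping the optimizer flags for that project.
//scalacOptions += "-Ywarn-unused:imports"

// Variant B: a Compile-scoped append. With no unscoped project-level definition
// in the way, its starting value delegates to ThisBuild / Compile, so both the
// warning flag and the optimizer flags apply.
scalacOptions in Compile += "-Ywarn-unused:imports"
```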
--- build.sbt | 4 ++-- project/plugins.sbt | 3 +-- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/build.sbt b/build.sbt index eb0d96e2c63a..236bc317f58b 100644 --- a/build.sbt +++ b/build.sbt @@ -185,7 +185,7 @@ lazy val commonSettings = instanceSettings ++ clearSourceAndResourceDirectories // END: Copy/pasted from SBT }, fork in run := true, - scalacOptions += "-Ywarn-unused:imports", + scalacOptions in Compile += "-Ywarn-unused:imports", scalacOptions in Compile in doc ++= Seq( "-doc-footer", "epfl", "-diagrams", @@ -815,7 +815,7 @@ lazy val test = project fork in IntegrationTest := true, // enable this in 2.13, when tests pass //scalacOptions in Compile += "-Yvalidate-pos:parser,typer", - scalacOptions -= "-Ywarn-unused:imports", + scalacOptions in Compile -= "-Ywarn-unused:imports", javaOptions in IntegrationTest ++= List("-Xmx2G", "-Dpartest.exec.in.process=true", "-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), testOptions in IntegrationTest += Tests.Argument("-Dfile.encoding=UTF-8", "-Duser.language=en", "-Duser.country=US"), testFrameworks += new TestFramework("scala.tools.partest.sbt.Framework"), diff --git a/project/plugins.sbt b/project/plugins.sbt index 2ee6b5408ebf..a3442552e679 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,5 +1,4 @@ -scalacOptions ++= Seq("-unchecked", "-feature", /*"-deprecation",*/ - "-Xlint" /*, "-Xfatal-warnings"*/) +scalacOptions ++= Seq("-unchecked", "-feature"/*, "-deprecation"*/, "-Xlint" /*, "-Xfatal-warnings"*/) libraryDependencies += "org.apache.commons" % "commons-lang3" % "3.3.2" From 9f7866cc3d7c5265da358142ecb9f1f95f68f03a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 30 May 2019 15:41:13 +1000 Subject: [PATCH 1794/2793] Optimise ClassTag lookup - Optimize ClassTag.apply to avoid testing for primitive classes one-by-one. - Share instance of ClassTag in a ClassValue based cache and rely on this in the compiler where we had previously hoisted hot instances. --- .../nsc/typechecker/StdAttachments.scala | 10 ++--- src/library/scala/reflect/ClassTag.scala | 41 +++++++++++-------- .../scala/reflect/ClassTagBenchmark.scala | 11 +++++ test/files/run/classtags-cached.scala | 10 +++++ 4 files changed, 50 insertions(+), 22 deletions(-) create mode 100644 test/files/run/classtags-cached.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala index 1441823ea16f..2aa75040b800 100644 --- a/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala +++ b/src/compiler/scala/tools/nsc/typechecker/StdAttachments.scala @@ -157,13 +157,12 @@ trait StdAttachments { * typechecks to be a macro application. Then we need to unmark it, expand it and try to treat * its expansion as a macro impl reference. */ - def unmarkMacroImplRef(tree: Tree): Tree = tree.removeAttachment[MacroImplRefAttachment.type](MacroImplRefAttachmentTag) + def unmarkMacroImplRef(tree: Tree): Tree = tree.removeAttachment[MacroImplRefAttachment.type] /** Determines whether a tree should or should not be adapted, * because someone has put MacroImplRefAttachment on it. 
*/ - def isMacroImplRef(tree: Tree): Boolean = tree.hasAttachment[MacroImplRefAttachment.type](MacroImplRefAttachmentTag) - private[this] val MacroImplRefAttachmentTag: reflect.ClassTag[MacroImplRefAttachment.type] = reflect.classTag[MacroImplRefAttachment.type] + def isMacroImplRef(tree: Tree): Boolean = tree.hasAttachment[MacroImplRefAttachment.type] /** Since mkInvoke, the applyDynamic/selectDynamic/etc desugarer, is disconnected * from typedNamedApply, the applyDynamicNamed argument rewriter, the latter @@ -176,9 +175,8 @@ trait StdAttachments { */ case object DynamicRewriteAttachment def markDynamicRewrite(tree: Tree): Tree = tree.updateAttachment(DynamicRewriteAttachment) - def unmarkDynamicRewrite(tree: Tree): Tree = tree.removeAttachment[DynamicRewriteAttachment.type](DynamicRewriteAttachmentTag) - def isDynamicRewrite(tree: Tree): Boolean = tree.attachments.get[DynamicRewriteAttachment.type](DynamicRewriteAttachmentTag).isDefined - private[this] val DynamicRewriteAttachmentTag: reflect.ClassTag[DynamicRewriteAttachment.type] = reflect.classTag[DynamicRewriteAttachment.type] + def unmarkDynamicRewrite(tree: Tree): Tree = tree.removeAttachment[DynamicRewriteAttachment.type] + def isDynamicRewrite(tree: Tree): Boolean = tree.attachments.get[DynamicRewriteAttachment.type].isDefined /** * Marks a tree that has been adapted by typer and sets the original tree that was in place before. diff --git a/src/library/scala/reflect/ClassTag.scala b/src/library/scala/reflect/ClassTag.scala index faa647d76867..3d0bc31faf61 100644 --- a/src/library/scala/reflect/ClassTag.scala +++ b/src/library/scala/reflect/ClassTag.scala @@ -121,6 +121,30 @@ object ClassTag { val Nothing : ClassTag[scala.Nothing] = Manifest.Nothing val Null : ClassTag[scala.Null] = Manifest.Null + private[this] val cache = new ClassValue[ClassTag[_]] { + override def computeValue(runtimeClass: jClass[_]): ClassTag[_] = { + runtimeClass match { + case x if x.isPrimitive => primitiveClassTag(runtimeClass) + case ObjectTYPE => ClassTag.Object + case NothingTYPE => ClassTag.Nothing + case NullTYPE => ClassTag.Null + case _ => new GenericClassTag[AnyRef](runtimeClass) + } + } + + private def primitiveClassTag[T](runtimeClass: Class[_]): ClassTag[_] = runtimeClass match { + case java.lang.Byte.TYPE => ClassTag.Byte + case java.lang.Short.TYPE => ClassTag.Short + case java.lang.Character.TYPE => ClassTag.Char + case java.lang.Integer.TYPE => ClassTag.Int + case java.lang.Long.TYPE => ClassTag.Long + case java.lang.Float.TYPE => ClassTag.Float + case java.lang.Double.TYPE => ClassTag.Double + case java.lang.Boolean.TYPE => ClassTag.Boolean + case java.lang.Void.TYPE => ClassTag.Unit + } + } + @SerialVersionUID(1L) private class GenericClassTag[T](val runtimeClass: jClass[_]) extends ClassTag[T] { override def newArray(len: Int): Array[T] = { @@ -128,22 +152,7 @@ object ClassTag { } } - def apply[T](runtimeClass1: jClass[_]): ClassTag[T] = - runtimeClass1 match { - case java.lang.Byte.TYPE => ClassTag.Byte.asInstanceOf[ClassTag[T]] - case java.lang.Short.TYPE => ClassTag.Short.asInstanceOf[ClassTag[T]] - case java.lang.Character.TYPE => ClassTag.Char.asInstanceOf[ClassTag[T]] - case java.lang.Integer.TYPE => ClassTag.Int.asInstanceOf[ClassTag[T]] - case java.lang.Long.TYPE => ClassTag.Long.asInstanceOf[ClassTag[T]] - case java.lang.Float.TYPE => ClassTag.Float.asInstanceOf[ClassTag[T]] - case java.lang.Double.TYPE => ClassTag.Double.asInstanceOf[ClassTag[T]] - case java.lang.Boolean.TYPE => ClassTag.Boolean.asInstanceOf[ClassTag[T]] - 
case java.lang.Void.TYPE => ClassTag.Unit.asInstanceOf[ClassTag[T]] - case ObjectTYPE => ClassTag.Object.asInstanceOf[ClassTag[T]] - case NothingTYPE => ClassTag.Nothing.asInstanceOf[ClassTag[T]] - case NullTYPE => ClassTag.Null.asInstanceOf[ClassTag[T]] - case _ => new GenericClassTag[T](runtimeClass1) - } + def apply[T](runtimeClass1: jClass[_]): ClassTag[T] = cache.get(runtimeClass1).asInstanceOf[ClassTag[T]] def unapply[T](ctag: ClassTag[T]): Option[Class[_]] = Some(ctag.runtimeClass) } diff --git a/test/benchmarks/src/main/scala/scala/reflect/ClassTagBenchmark.scala b/test/benchmarks/src/main/scala/scala/reflect/ClassTagBenchmark.scala index 0f01aa4a55e8..25bbff4a46ae 100644 --- a/test/benchmarks/src/main/scala/scala/reflect/ClassTagBenchmark.scala +++ b/test/benchmarks/src/main/scala/scala/reflect/ClassTagBenchmark.scala @@ -25,6 +25,7 @@ class ClassTagBenchmark { var refClassTag: ClassTag[_] = null var otherValue: Object = null var arraySize: Int = 100 + private[this] var refClasses: Array[Class[_]] = _ @Setup def setup(): Unit = { unitClassTag = classTag[Unit] @@ -38,6 +39,7 @@ class ClassTagBenchmark { doubleClassTag = classTag[Double] refClassTag = classTag[ClassTagBenchmark] otherValue = new Object + refClasses = Array(classOf[java.lang.Boolean], classOf[java.lang.Character], classOf[java.lang.Short], classOf[java.lang.Integer], classOf[java.lang.Long], classOf[java.lang.Float], classOf[java.lang.Double]) } @Benchmark def primitivesNegOnRefClassTag(bh: Blackhole): Any = { @@ -86,6 +88,15 @@ class ClassTagBenchmark { @Benchmark def refClassTagUnapplyNeg2Direct(bh: Blackhole): Any = unapplyDirect(refClassTag, otherValue) + @Benchmark def lookupClassTag(bh: Blackhole): Any = { + var clss = refClasses + var i = 0 + while (i < clss.length) { + bh.consume(ClassTag.apply(clss(i))) + i += 1 + } + } + def unapplyDirect(ct: ClassTag[_], x: AnyRef): Option[_] = { if (null != x && (ct.runtimeClass.isInstance(x))) Some(x) else None diff --git a/test/files/run/classtags-cached.scala b/test/files/run/classtags-cached.scala new file mode 100644 index 000000000000..fe9a6d743076 --- /dev/null +++ b/test/files/run/classtags-cached.scala @@ -0,0 +1,10 @@ +import reflect.ClassTag + +object Test { + def main(args: Array[String]): Unit = { + assert(implicitly[ClassTag[SomeClass]] eq implicitly[ClassTag[SomeClass]]) + assert(implicitly[ClassTag[Array[SomeClass]]] eq implicitly[ClassTag[Array[SomeClass]]]) + } +} + +class SomeClass From 146a1a4915fe7559d7ddc853f5de60d5f4cfecb5 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Tue, 16 Jul 2019 14:15:59 +1000 Subject: [PATCH 1795/2793] Fix regression in -d out.jar on Windows My refactoring to use nio.file.Path to represent relative paths was wrong-headed -- it is too easy to call .toString and get the system default file separator in places where we actually really want a '/'. 
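A minimal sketch of the pitfall (illustrative only, not code from this patch):
a relative `java.nio.file.Path` renders with the platform's separator when
converted to a string, which is wrong for classfile and JAR entry names that
must always use '/'.

```
import java.nio.file.Paths

// On Windows the default filesystem prints '\' separators:
Paths.get("a/b/C.class").toString    // "a\b\C.class" on Windows, "a/b/C.class" elsewhere

// Building the relative path from the internal name is separator-stable:
"a.b.C".replace('.', '/') + ".class" // always "a/b/C.class"
```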
Also fix regression in t5717 test on Windows --- .../nsc/backend/jvm/ClassfileWriters.scala | 21 ++++++++++--------- .../tools/nsc/symtab/classfile/Pickler.scala | 2 +- test/files/run/t5717.scala | 4 ++-- 3 files changed, 14 insertions(+), 13 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala index 5419937e020b..ebc3b5e7b59f 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/ClassfileWriters.scala @@ -52,8 +52,8 @@ abstract class ClassfileWriters { */ def close(): Unit - protected def classRelativePath(className: InternalName, suffix: String = ".class"): Path = - Paths.get(className.replace('.', '/') + suffix) + protected def classRelativePath(className: InternalName, suffix: String = ".class"): String = + className.replace('.', '/') + suffix } object ClassfileWriter { @@ -143,7 +143,7 @@ abstract class ClassfileWriters { } sealed trait FileWriter { - def writeFile(relativePath: Path, bytes: Array[Byte]): Unit + def writeFile(relativePath: String, bytes: Array[Byte]): Unit def close(): Unit } @@ -180,8 +180,8 @@ abstract class ClassfileWriters { lazy val crc = new CRC32 - override def writeFile(relativePath: Path, bytes: Array[Byte]): Unit = this.synchronized { - val entry = new ZipEntry(relativePath.toString) + override def writeFile(relativePath: String, bytes: Array[Byte]): Unit = this.synchronized { + val entry = new ZipEntry(relativePath) if (storeOnly) { // When using compression method `STORED`, the ZIP spec requires the CRC and compressed/ // uncompressed sizes to be written before the data. The JarOutputStream could compute the @@ -235,7 +235,7 @@ abstract class ClassfileWriters { private val fastOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE) private val fallbackOpenOptions = util.EnumSet.of(StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING) - override def writeFile(relativePath: Path, bytes: Array[Byte]): Unit = { + override def writeFile(relativePath: String, bytes: Array[Byte]): Unit = { val path = base.resolve(relativePath) try { ensureDirForPath(base, path) @@ -272,13 +272,14 @@ abstract class ClassfileWriters { } private final class VirtualFileWriter(base: AbstractFile) extends FileWriter { - private def getFile(base: AbstractFile, path: Path): AbstractFile = { + private def getFile(base: AbstractFile, path: String): AbstractFile = { def ensureDirectory(dir: AbstractFile): AbstractFile = if (dir.isDirectory) dir else throw new FileConflictException(s"${base.path}/${path}: ${dir.path} is not a directory") + val components = path.split('/') var dir = base - for (i <- 0 until path.getNameCount - 1) dir = ensureDirectory(dir) subdirectoryNamed path.getName(i).toString - ensureDirectory(dir) fileNamed path.getFileName.toString + for (i <- 0 until components.length - 1) dir = ensureDirectory(dir) subdirectoryNamed components(i).toString + ensureDirectory(dir) fileNamed components.last.toString } private def writeBytes(outFile: AbstractFile, bytes: Array[Byte]): Unit = { @@ -287,7 +288,7 @@ abstract class ClassfileWriters { finally out.close() } - override def writeFile(relativePath: Path, bytes: Array[Byte]): Unit = { + override def writeFile(relativePath: String, bytes: Array[Byte]): Unit = { val outFile = getFile(base, relativePath) writeBytes(outFile, bytes) } diff --git 
a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index b00441981f06..2bc9ab499d27 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -109,7 +109,7 @@ abstract class Pickler extends SubComponent { sigWriter.foreach { writer => val binaryName = sym.javaBinaryNameString val binaryClassName = if (sym.isModule) binaryName.stripSuffix(nme.MODULE_SUFFIX_STRING) else binaryName - val relativePath = java.nio.file.Paths.get(binaryClassName + ".sig") + val relativePath = binaryClassName + ".sig" val data = pickle.bytes.take(pickle.writeIndex) writer.writeFile(relativePath, data) } diff --git a/test/files/run/t5717.scala b/test/files/run/t5717.scala index c92ad650fdd8..5e3b94656476 100644 --- a/test/files/run/t5717.scala +++ b/test/files/run/t5717.scala @@ -19,8 +19,8 @@ object Test extends StoreReporterDirectTest { compileCode("package a { class B }") val List(i) = filteredInfos // for some reason, nio doesn't throw the same exception on windows and linux/mac - val path = if(util.Properties.isWin)"\\a" else "/a" - val expected = s"error writing ${testOutput.path}/a/B.class: Can't create directory ${testOutput.path}${path}" + + import File.separator + val expected = s"error writing ${testOutput.path}${separator}a${separator}B.class: Can't create directory ${testOutput.path}${separator}a" + "; there is an existing (non-directory) file in its path" assert(i.msg == expected, i.msg) } From 6c6706586a7014e6d6938532ef94ebfb840c95da Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Wed, 27 Mar 2019 17:41:49 +1000 Subject: [PATCH 1796/2793] [backport] Optimize some hot callers of Symbol.name to use rawname (cherry picked from commit 09398b458251070f7df3233e338e505f227d3908) --- src/reflect/scala/reflect/internal/Scopes.scala | 13 +++++++++---- src/reflect/scala/reflect/internal/Symbols.scala | 6 +++--- .../scala/reflect/internal/tpe/FindMembers.scala | 2 +- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/src/reflect/scala/reflect/internal/Scopes.scala b/src/reflect/scala/reflect/internal/Scopes.scala index 4500b0906929..c7a58d59de80 100644 --- a/src/reflect/scala/reflect/internal/Scopes.scala +++ b/src/reflect/scala/reflect/internal/Scopes.scala @@ -47,6 +47,8 @@ trait Scopes extends api.Scopes { self: SymbolTable => def depth = owner.nestingLevel override def hashCode(): Int = sym.name.start override def toString() = s"$sym (depth=$depth)" + // OPT: compare raw names when pre-flatten, saving needsFlatClasses within the loop + final def name(flat: Boolean): Name = if (flat) sym.name else sym.rawname } private def newScopeEntry(sym: Symbol, owner: Scope): ScopeEntry = { @@ -316,14 +318,15 @@ trait Scopes extends api.Scopes { self: SymbolTable => def lookupEntry(name: Name): ScopeEntry = { val startTime = if (StatisticsStatics.areSomeColdStatsEnabled) statistics.startTimer(statistics.scopeLookupTime) else null var e: ScopeEntry = null + val flat = phase.flatClasses if (hashtable ne null) { e = hashtable(name.start & HASHMASK) - while ((e ne null) && (e.sym.name ne name)) { + while ((e ne null) && (e.name(flat) ne name)) { e = e.tail } } else { e = elems - while ((e ne null) && (e.sym.name ne name)) { + while ((e ne null) && (e.name(flat) ne name)) { e = e.next } } @@ -338,10 +341,12 @@ trait Scopes extends api.Scopes { self: SymbolTable => */ def lookupNextEntry(entry: ScopeEntry): ScopeEntry = { var e = entry + val flat = 
phase.flatClasses + val entryName = entry.name(flat) if (hashtable ne null) - do { e = e.tail } while ((e ne null) && e.sym.name != entry.sym.name) + do { e = e.tail } while ((e ne null) && e.name(flat) != entryName) else - do { e = e.next } while ((e ne null) && e.sym.name != entry.sym.name) + do { e = e.next } while ((e ne null) && e.name(flat) != entryName) e } diff --git a/src/reflect/scala/reflect/internal/Symbols.scala b/src/reflect/scala/reflect/internal/Symbols.scala index 22d14d6bcbda..a5e2488740f0 100644 --- a/src/reflect/scala/reflect/internal/Symbols.scala +++ b/src/reflect/scala/reflect/internal/Symbols.scala @@ -2864,11 +2864,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isLocalDummy = nme.isLocalDummyName(name) - override def isClassConstructor = name == nme.CONSTRUCTOR - override def isMixinConstructor = name == nme.MIXIN_CONSTRUCTOR + override def isClassConstructor = rawname == nme.CONSTRUCTOR + override def isMixinConstructor = rawname == nme.MIXIN_CONSTRUCTOR override def isConstructor = isClassConstructor || isMixinConstructor - override def isPackageObject = isModule && (name == nme.PACKAGE) + override def isPackageObject = isModule && (rawname == nme.PACKAGE) // The name in comments is what it is being disambiguated from. // TODO - rescue CAPTURED from BYNAMEPARAM so we can see all the names. diff --git a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala index 8d288f1d7e63..2405d3821191 100644 --- a/src/reflect/scala/reflect/internal/tpe/FindMembers.scala +++ b/src/reflect/scala/reflect/internal/tpe/FindMembers.scala @@ -152,7 +152,7 @@ trait FindMembers { refinementClasses.exists(_.info.parents.exists(_.typeSymbol == owner)) ) - (sym.name != nme.CONSTRUCTOR || owner == initBaseClasses.head) && + (!sym.isClassConstructor || owner == initBaseClasses.head) && (!isPrivate || owner == selectorClass || admitPrivate) } From 9931f54898b5adf736b28108fd53f41cef3ea666 Mon Sep 17 00:00:00 2001 From: Stefan Zeiger Date: Fri, 17 May 2019 12:18:58 +0200 Subject: [PATCH 1797/2793] Ensure that DelayedInit fields are non-final Usually fields inside of a class or object body are initialized in an initializer block, which is the correct way to initialize final fields, but when they occur in a `DelayedInit` body, the initialization code is put into a method instead. This is illegal according to https://docs.oracle.com/javase/specs/jls/se11/html/jls-8.html#jls-8.3.1.2. The fix is to emit these fields as non-final. Fixes https://github.com/scala/bug/issues/11412 --- .../tools/nsc/transform/Constructors.scala | 10 ++++- .../tools/nsc/backend/jvm/BytecodeTest.scala | 38 +++++++++++++++++++ 2 files changed, 46 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index 14ae6bbba12b..309fe03c9ea5 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -760,8 +760,14 @@ abstract class Constructors extends Statics with Transform with TypingTransforme * particulars. 
*/ val (delayedHookDefs, remainingConstrStatsDelayedInit) = - if (isDelayedInitSubclass && remainingConstrStats.nonEmpty) delayedInitDefsAndConstrStats(defs, remainingConstrStats) - else (Nil, remainingConstrStats) + if (isDelayedInitSubclass && remainingConstrStats.nonEmpty) { + remainingConstrStats foreach { + case Assign(lhs, _ ) => lhs.symbol.setFlag(MUTABLE) // delayed init fields cannot be final, scala/bug#11412 + case _ => + } + delayedInitDefsAndConstrStats(defs, remainingConstrStats) + } else + (Nil, remainingConstrStats) // Assemble final constructor val primaryConstructor = deriveDefDef(primaryConstr)(_ => { diff --git a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala index 1b1eedeceb09..c57b4a21f836 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala @@ -10,6 +10,7 @@ import scala.tools.partest.ASMConverters._ import scala.tools.testing.BytecodeTesting import scala.tools.testing.BytecodeTesting._ import scala.collection.JavaConverters._ +import scala.tools.asm.Opcodes @RunWith(classOf[JUnit4]) class BytecodeTest extends BytecodeTesting { @@ -265,4 +266,41 @@ class BytecodeTest extends BytecodeTesting { check(s"$main\n$person") check(s"$person\n$main") } + + @Test + def t11412(): Unit = { + val code = "class A { val a = 0 }; class C extends A with App { val x = 1; val y = x }" + val cs = compileClasses(code) + val c = cs.find(_.name == "C").get + val fs = c.fields.asScala.toList.sortBy(_.name).map(f => (f.name, (f.access & Opcodes.ACC_FINAL) != 0)) + assertEquals(List( + ("executionStart", true), // final in 2.12.x, but that's problem with mixin. was fixed in 2.13 (https://github.com/scala/scala/pull/7028) + ("scala$App$$_args", false), + ("scala$App$$initCode", true), // also a mixin + ("x", false), + ("y", false) + ), fs) + val assignedInConstr = getMethod(c, "").instructions.filter(_.opcode == Opcodes.PUTFIELD) + assertEquals(Nil, assignedInConstr) + } + + @Test + def t11412b(): Unit = { + val code = "class C { def f = { var x = 0; val y = 1; class K extends App { def m = x + y } } }" + val cs = compileClasses(code) + val k = cs.find(_.name == "C$K$1").get + val fs = k.fields.asScala.toList.sortBy(_.name).map(f => (f.name, (f.access & Opcodes.ACC_FINAL) != 0)) + assertEquals(List( + ("$outer", true), // mixin + ("executionStart", true), + ("scala$App$$_args", false), // mixin + ("scala$App$$initCode", true), + ("x$1", true), // captured, assigned in constructor + ("y$1", true) // captured + ), fs) + val assignedInConstr = getMethod(k, "").instructions.filter(_.opcode == Opcodes.PUTFIELD) map { + case f: Field => f.name + } + assertEquals(List("$outer", "x$1", "y$1"), assignedInConstr.sorted) + } } From e355ccd3d2d27692cb7c167b41225af6206fb77a Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 18 Jul 2019 16:29:20 +1000 Subject: [PATCH 1798/2793] Avoid allocation of LinkedHashSet in Refchecks for every member --- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 1499e9010893..06ceeb4295dc 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1437,7 +1437,7 @@ abstract class RefChecks extends Transform { } private def 
applyRefchecksToAnnotations(tree: Tree): Unit = { - def applyChecks(annots: List[AnnotationInfo]): List[AnnotationInfo] = { + def applyChecks(annots: List[AnnotationInfo]): List[AnnotationInfo] = if (annots.isEmpty) Nil else { annots.foreach { ann => checkTypeRef(ann.tpe, tree, skipBounds = false) checkTypeRefBounds(ann.tpe, tree) From 32a3a87d832ca56bf5fe7f85f3e5e53705a50b30 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Thu, 18 Jul 2019 16:59:49 +1000 Subject: [PATCH 1799/2793] Avoid temporary strings in backend for non-nested class descriptors --- .../nsc/backend/jvm/analysis/BackendUtils.scala | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala index c60b829ca5e6..1a1813d24772 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/analysis/BackendUtils.scala @@ -340,11 +340,8 @@ abstract class BackendUtils extends PerRunInit { bTypesFromClassfile.classBTypeFromParsedClassfile(internalName).info.get.nestedClasses.force def getClassIfNested(internalName: InternalName): Option[ClassBType] = { - if (internalName.indexOf('$') < 0) None - else { - val c = bTypesFromClassfile.classBTypeFromParsedClassfile(internalName) - if (c.isNestedClass.get) Some(c) else None - } + val c = bTypesFromClassfile.classBTypeFromParsedClassfile(internalName) + if (c.isNestedClass.get) Some(c) else None } def raiseError(msg: String, sig: String, e: Option[Throwable]): Unit = { @@ -699,8 +696,10 @@ object BackendUtils { while (i < desc.length) { if (desc.charAt(i) == 'L') { val start = i + 1 // skip the L - while (desc.charAt(i) != ';') i += 1 - visitInternalName(desc.substring(start, i)) + var seenDollar = false + while ({val ch = desc.charAt(i); seenDollar ||= (ch == '$'); ch != ';'}) i += 1 + if (seenDollar) + visitInternalName(desc.substring(start, i)) } // skips over '[', ')', primitives i += 1 From d1dcaf182e71f4535daaf3b92f4eaf56bcada8c6 Mon Sep 17 00:00:00 2001 From: Harrison Houghton Date: Sun, 12 May 2019 20:27:30 -0400 Subject: [PATCH 1800/2793] Clarify what Null is a subtype of. To wit, any non-value-class class type, and non-class value types so declared, but certainly not value-class types. Okay, okay, that's not what I've made it say. Hopefully this is clearer. Fixes scala/bug#11479. --- spec/03-types.md | 2 +- src/library-aux/scala/Null.scala | 13 ++++++++----- 2 files changed, 9 insertions(+), 6 deletions(-) diff --git a/spec/03-types.md b/spec/03-types.md index 0c618cb4395c..6d8ee3534ec7 100644 --- a/spec/03-types.md +++ b/spec/03-types.md @@ -829,7 +829,7 @@ The conformance relation $(<:)$ is the smallest transitive relation that satisfi - Conformance includes equivalence. If $T \equiv U$ then $T <: U$. - For every value type $T$, `scala.Nothing <: $T$ <: scala.Any`. - For every type constructor $T$ (with any number of type parameters), `scala.Nothing <: $T$ <: scala.Any`. -- For every class type $T$ such that `$T$ <: scala.AnyRef` one has `scala.Null <: $T$`. +- For every value type $T$, `scala.Null <: $T$` unless `$T$ <: scala.AnyVal`. - A type variable or abstract type $t$ conforms to its upper bound and its lower bound conforms to $t$. - A class type or parameterized type conforms to any of its base-types. 
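As a side note, a short illustration of the clarified rule (not part of this
patch): `null` is assignable to reference types, but not to value types,
including user-defined value classes.

```
class Meters(val value: Int) extends AnyVal

object NullConformance {
  val s: String = null     // ok: Null conforms to reference types
  // val i: Int = null     // does not compile: Int is a value type
  // val m: Meters = null  // does not compile: value classes are excluded too
}
```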
diff --git a/src/library-aux/scala/Null.scala b/src/library-aux/scala/Null.scala index 66f544dc6374..8d40134fa554 100644 --- a/src/library-aux/scala/Null.scala +++ b/src/library-aux/scala/Null.scala @@ -13,9 +13,12 @@ package scala /** `Null` is - together with [[scala.Nothing]] - at the bottom of the Scala type hierarchy. - * - * `Null` is a subtype of all reference types; its only instance is the `null` reference. - * Since `Null` is not a subtype of value types, `null` is not a member of any such type. For instance, - * it is not possible to assign `null` to a variable of type [[scala.Int]]. - */ + * + * `Null` is the type of the `null` literal. It is a subtype of every type + * except those of value classes. Value classes are subclasses of [[AnyVal]], which includes + * primitive types such as [[Int]], [[Boolean]], and user-defined value classes. + * + * Since `Null` is not a subtype of value types, `null` is not a member of any such type. + * For instance, it is not possible to assign `null` to a variable of type [[scala.Int]]. + */ sealed trait Null From 33620b804ccee6b2512b3a5d1f08ce165f44bb07 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 19 Jul 2019 10:19:56 +1000 Subject: [PATCH 1801/2793] Optimize writing of InlineInfoAttribute ASM seems to call InlineInfoAttribute.write twice: firstly as part of computeAttributeSize and then again as part of writeAttributes. This commit seeks some efficiencies: - Avoid sorting the the method infos twice - Avoid copying them from an Array back to a list as happens in .toList.sorted - Avoid concat / split of name/descriptor by using a Tuple2 as the map key My profiles show that about 0.6% of compilation is spent in this code. --- .../scala/tools/nsc/backend/jvm/BTypes.scala | 11 ++- .../nsc/backend/jvm/BTypesFromClassfile.scala | 4 +- .../nsc/backend/jvm/BTypesFromSymbols.scala | 16 +-- .../tools/nsc/backend/jvm/opt/CallGraph.scala | 2 +- .../backend/jvm/opt/InlineInfoAttribute.scala | 10 +- .../nsc/backend/jvm/opt/InlineInfoTest.scala | 10 +- .../backend/jvm/opt/ScalaInlineInfoTest.scala | 98 +++++++++---------- 7 files changed, 77 insertions(+), 74 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala index 73565e18ee9b..521f5c471b49 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypes.scala @@ -1110,8 +1110,15 @@ object BTypes { */ final case class InlineInfo(isEffectivelyFinal: Boolean, sam: Option[String], - methodInfos: Map[String, MethodInlineInfo], - warning: Option[ClassInlineInfoWarning]) + methodInfos: Map[(String, String), MethodInlineInfo], + warning: Option[ClassInlineInfoWarning]) { + lazy val methodInfosSorted: IndexedSeq[((String, String), MethodInlineInfo)] = { + val result = new Array[((String, String), MethodInlineInfo)](methodInfos.size) + methodInfos.copyToArray(result) + scala.util.Sorting.quickSort(result)(Ordering.by(_._1)) + result + } + } val EmptyInlineInfo = InlineInfo(false, None, Map.empty, None) diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala index 147bc1ab3858..12721aa1e440 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromClassfile.scala @@ -155,12 +155,12 @@ abstract class BTypesFromClassfile { // require special handling. Excluding is OK because they are never inlined. 
// Here we are parsing from a classfile and we don't need to do anything special. Many of these // primitives don't even exist, for example Any.isInstanceOf. - val methodInfos:Map[String,MethodInlineInfo] = classNode.methods.asScala.map(methodNode => { + val methodInfos:Map[(String, String),MethodInlineInfo] = classNode.methods.asScala.map(methodNode => { val info = MethodInlineInfo( effectivelyFinal = BytecodeUtils.isFinalMethod(methodNode), annotatedInline = false, annotatedNoInline = false) - (methodNode.name + methodNode.desc, info) + ((methodNode.name, methodNode.desc), info) })(scala.collection.breakOut) InlineInfo( isEffectivelyFinal = BytecodeUtils.isFinalClass(classNode), diff --git a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala index 129b69649e7c..927d9a4ec0f2 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/BTypesFromSymbols.scala @@ -576,8 +576,8 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { warning = Some(ClassSymbolInfoFailureSI9111(classSym.fullName)) Nil } else { - val name = methodSym.javaSimpleName.toString // same as in genDefDef - val signature = name + methodBTypeFromSymbol(methodSym).descriptor + val name = methodSym.javaSimpleName.toString // same as in genDefDef + val signature = (name, methodBTypeFromSymbol(methodSym).descriptor) // In `trait T { object O }`, `oSym.isEffectivelyFinalOrNotOverridden` is true, but the // method is abstract in bytecode, `defDef.rhs.isEmpty`. Abstract methods are excluded @@ -588,20 +588,20 @@ abstract class BTypesFromSymbols[G <: Global](val global: G) extends BTypes { val effectivelyFinal = methodSym.isEffectivelyFinalOrNotOverridden && !(methodSym hasFlag DEFERRED | SYNTHESIZE_IMPL_IN_SUBCLASS) val info = MethodInlineInfo( - effectivelyFinal = effectivelyFinal, - annotatedInline = methodSym.hasAnnotation(ScalaInlineClass), + effectivelyFinal = effectivelyFinal, + annotatedInline = methodSym.hasAnnotation(ScalaInlineClass), annotatedNoInline = methodSym.hasAnnotation(ScalaNoInlineClass)) if (needsStaticImplMethod(methodSym)) { val staticName = traitSuperAccessorName(methodSym).toString val selfParam = methodSym.newSyntheticValueParam(methodSym.owner.typeConstructor, nme.SELF) val staticMethodType = methodSym.info match { - case mt @ MethodType(params, res) => copyMethodType(mt, selfParam :: params, res) + case mt@MethodType(params, res) => copyMethodType(mt, selfParam :: params, res) } - val staticMethodSignature = staticName + methodBTypeFromMethodType(staticMethodType, isConstructor = false) + val staticMethodSignature = (staticName, methodBTypeFromMethodType(staticMethodType, isConstructor = false).descriptor) val staticMethodInfo = MethodInlineInfo( - effectivelyFinal = true, - annotatedInline = info.annotatedInline, + effectivelyFinal = true, + annotatedInline = info.annotatedInline, annotatedNoInline = info.annotatedNoInline) if (methodSym.isMixinConstructor) (staticMethodSignature, staticMethodInfo) :: Nil diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala index f637343a554b..4d168fb789be 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/CallGraph.scala @@ -295,7 +295,7 @@ abstract class CallGraph { * Analyze a callsite and gather meta-data that can be used for inlining 
decisions. */ private def analyzeCallsite(calleeMethodNode: MethodNode, calleeDeclarationClassBType: ClassBType, call: MethodInsnNode, calleeSourceFilePath: Option[String]): CallsiteInfo = { - val methodSignature = calleeMethodNode.name + calleeMethodNode.desc + val methodSignature = (calleeMethodNode.name, calleeMethodNode.desc) try { // The inlineInfo.methodInfos of a ClassBType holds an InlineInfo for each method *declared* diff --git a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala index b4590aabb764..492f472af75d 100644 --- a/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala +++ b/src/compiler/scala/tools/nsc/backend/jvm/opt/InlineInfoAttribute.scala @@ -67,13 +67,10 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI } // The method count fits in a short (the methods_count in a classfile is also a short) - result.putShort(inlineInfo.methodInfos.size) + result.putShort(inlineInfo.methodInfosSorted.size) // Sort the methodInfos for stability of classfiles - for ((nameAndType, info) <- inlineInfo.methodInfos.toList.sortBy(_._1)) { - val (name, desc) = nameAndType.span(_ != '(') - // Name and desc are added separately because a NameAndType entry also stores them separately. - // This makes sure that we use the existing constant pool entries for the method. + for (((name, desc), info) <- inlineInfo.methodInfosSorted) { result.putShort(cw.newUTF8(name)) result.putShort(cw.newUTF8(desc)) @@ -84,7 +81,6 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI if (info.annotatedNoInline) inlineInfo |= 8 result.putByte(inlineInfo) } - result } @@ -127,7 +123,7 @@ case class InlineInfoAttribute(inlineInfo: InlineInfo) extends Attribute(InlineI // = (inlineInfo & 2) != 0 // no longer used val isInline = (inlineInfo & 4) != 0 val isNoInline = (inlineInfo & 8) != 0 - (name + desc, MethodInlineInfo(isFinal, isInline, isNoInline)) + ((name, desc), MethodInlineInfo(isFinal, isInline, isNoInline)) }).toMap val info = InlineInfo(isFinal, sam, infos, None) diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala index ab750855aeff..f956ada32eb4 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/InlineInfoTest.scala @@ -67,9 +67,9 @@ class InlineInfoTest extends BytecodeTesting { compileClasses("class C { new A }", javaCode = List((jCode, "A.java"))) val info = global.genBCode.bTypes.cachedClassBType("A").info.get.inlineInfo assertEquals(info.methodInfos, Map( - "bar()I" -> MethodInlineInfo(true,false,false), - "()V" -> MethodInlineInfo(false,false,false), - "baz()I" -> MethodInlineInfo(true,false,false))) + ("bar", "()I") -> MethodInlineInfo(true,false,false), + ("", "()V") -> MethodInlineInfo(false,false,false), + ("baz", "()I") -> MethodInlineInfo(true,false,false))) } @Test @@ -88,7 +88,7 @@ class InlineInfoTest extends BytecodeTesting { // the classpath (classfile WatchEvent$Kind.class) instead of the actual companion from the source, so the static method was missing. 
val info = global.genBCode.bTypes.cachedClassBType("java/nio/file/WatchEvent$Kind").info.get.inlineInfo assertEquals(info.methodInfos, Map( - "HAI()Ljava/lang/String;" -> MethodInlineInfo(true,false,false), - "()V" -> MethodInlineInfo(false,false,false))) + ("HAI", "()Ljava/lang/String;") -> MethodInlineInfo(true,false,false), + ("", "()V") -> MethodInlineInfo(false,false,false))) } } diff --git a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala index d27eb95521e8..ac7d64c1aa32 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/opt/ScalaInlineInfoTest.scala @@ -83,63 +83,63 @@ class ScalaInlineInfoTest extends BytecodeTesting { false, // final class None, // not a sam Map( - ("O()LT$O$;", MethodInlineInfo(false,false,false)), - ("T$$super$toString()Ljava/lang/String;", MethodInlineInfo(true ,false,false)), - ("T$_setter_$x1_$eq(I)V", MethodInlineInfo(false,false,false)), - ("f1()I", MethodInlineInfo(false,false,false)), - ("f1$(LT;)I", MethodInlineInfo(true ,false,false)), - ("f2()I", MethodInlineInfo(true ,false,false)), // no static impl method for private method f2 - ("f3()I", MethodInlineInfo(false,false,false)), - ("f3$(LT;)I", MethodInlineInfo(true ,false,false)), - ("f4()Ljava/lang/String;", MethodInlineInfo(false,true, false)), - ("f4$(LT;)Ljava/lang/String;", MethodInlineInfo(true ,true, false)), - ("f5()I", MethodInlineInfo(true ,false,false)), - ("f5$(LT;)I", MethodInlineInfo(true ,false,false)), - ("f6()I", MethodInlineInfo(false,false,true )), // no static impl method for abstract method f6 - ("x1()I", MethodInlineInfo(false,false,false)), - ("y2()I", MethodInlineInfo(false,false,false)), - ("y2_$eq(I)V", MethodInlineInfo(false,false,false)), - ("x3()I", MethodInlineInfo(false,false,false)), - ("x3_$eq(I)V", MethodInlineInfo(false,false,false)), - ("x4()I", MethodInlineInfo(false,false,false)), - ("x4$(LT;)I", MethodInlineInfo(true ,false,false)), - ("x5()I", MethodInlineInfo(true, false,false)), - ("x5$(LT;)I", MethodInlineInfo(true ,false,false)), - ("L$2(Lscala/runtime/LazyRef;)LT$L$1$;", MethodInlineInfo(true, false,false)), - ("nest$1()I", MethodInlineInfo(true, false,false)), - ("$init$(LT;)V", MethodInlineInfo(true,false,false)), - ("L$lzycompute$1(Lscala/runtime/LazyRef;)LT$L$1$;", MethodInlineInfo(true,false,false)) + (("O", "()LT$O$;"), MethodInlineInfo(false,false,false)), + (("T$$super$toString", "()Ljava/lang/String;"), MethodInlineInfo(true ,false,false)), + (("T$_setter_$x1_$eq", "(I)V"), MethodInlineInfo(false,false,false)), + (("f1", "()I"), MethodInlineInfo(false,false,false)), + (("f1$", "(LT;)I"), MethodInlineInfo(true ,false,false)), + (("f2", "()I"), MethodInlineInfo(true ,false,false)), // no static impl method for private method f2 + (("f3", "()I"), MethodInlineInfo(false,false,false)), + (("f3$", "(LT;)I"), MethodInlineInfo(true ,false,false)), + (("f4", "()Ljava/lang/String;"), MethodInlineInfo(false,true, false)), + (("f4$", "(LT;)Ljava/lang/String;"), MethodInlineInfo(true ,true, false)), + (("f5", "()I"), MethodInlineInfo(true ,false,false)), + (("f5$", "(LT;)I"), MethodInlineInfo(true ,false,false)), + (("f6", "()I"), MethodInlineInfo(false,false,true )), // no static impl method for abstract method f6 + (("x1", "()I"), MethodInlineInfo(false,false,false)), + (("y2", "()I"), MethodInlineInfo(false,false,false)), + (("y2_$eq", "(I)V"), MethodInlineInfo(false,false,false)), + (("x3", 
"()I"), MethodInlineInfo(false,false,false)), + (("x3_$eq", "(I)V"), MethodInlineInfo(false,false,false)), + (("x4", "()I"), MethodInlineInfo(false,false,false)), + (("x4$", "(LT;)I"), MethodInlineInfo(true ,false,false)), + (("x5", "()I"), MethodInlineInfo(true, false,false)), + (("x5$", "(LT;)I"), MethodInlineInfo(true ,false,false)), + (("L$2", "(Lscala/runtime/LazyRef;)LT$L$1$;"), MethodInlineInfo(true, false,false)), + (("nest$1", "()I"), MethodInlineInfo(true, false,false)), + (("$init$", "(LT;)V"), MethodInlineInfo(true,false,false)), + (("L$lzycompute$1", "(Lscala/runtime/LazyRef;)LT$L$1$;"), MethodInlineInfo(true,false,false)) ), None // warning ) assert(infoT == expectT, mapDiff(expectT.methodInfos, infoT.methodInfos) + infoT) - assertSameMethods(t, expectT.methodInfos.keySet) + assertSameMethods(t, expectT.methodInfos.keySet.map(x => x._1 + x._2)) val infoC = inlineInfo(c) val expectC = InlineInfo(false, None, Map( - "O()LT$O$;" -> MethodInlineInfo(true ,false,false), - "f1()I" -> MethodInlineInfo(false,false,false), - "f3()I" -> MethodInlineInfo(false,false,false), - "f4()Ljava/lang/String;" -> MethodInlineInfo(false,true,false), - "f5()I" -> MethodInlineInfo(true,false,false), - "f6()I" -> MethodInlineInfo(false,false,false), - "x1()I" -> MethodInlineInfo(false,false,false), - "T$_setter_$x1_$eq(I)V" -> MethodInlineInfo(false,false,false), - "y2()I" -> MethodInlineInfo(false,false,false), - "y2_$eq(I)V" -> MethodInlineInfo(false,false,false), - "x3()I" -> MethodInlineInfo(false,false,false), - "x3_$eq(I)V" -> MethodInlineInfo(false,false,false), - "x4$lzycompute()I" -> MethodInlineInfo(true ,false,false), - "x4()I" -> MethodInlineInfo(false,false,false), - "T$$super$toString()Ljava/lang/String;" -> MethodInlineInfo(true ,false,false), - "()V" -> MethodInlineInfo(false,false,false), - "O$lzycompute$1()V" -> MethodInlineInfo(true,false,false) + ("O", "()LT$O$;") -> MethodInlineInfo(true ,false,false), + ("f1", "()I") -> MethodInlineInfo(false,false,false), + ("f3", "()I") -> MethodInlineInfo(false,false,false), + ("f4", "()Ljava/lang/String;") -> MethodInlineInfo(false,true,false), + ("f5", "()I") -> MethodInlineInfo(true,false,false), + ("f6", "()I") -> MethodInlineInfo(false,false,false), + ("x1", "()I") -> MethodInlineInfo(false,false,false), + ("T$_setter_$x1_$eq", "(I)V") -> MethodInlineInfo(false,false,false), + ("y2", "()I") -> MethodInlineInfo(false,false,false), + ("y2_$eq", "(I)V") -> MethodInlineInfo(false,false,false), + ("x3", "()I") -> MethodInlineInfo(false,false,false), + ("x3_$eq", "(I)V") -> MethodInlineInfo(false,false,false), + ("x4$lzycompute", "()I") -> MethodInlineInfo(true ,false,false), + ("x4", "()I") -> MethodInlineInfo(false,false,false), + ("T$$super$toString", "()Ljava/lang/String;") -> MethodInlineInfo(true ,false,false), + ("", "()V") -> MethodInlineInfo(false,false,false), + ("O$lzycompute$1", "()V") -> MethodInlineInfo(true,false,false) ), None) assert(infoC == expectC, mapDiff(expectC.methodInfos, infoC.methodInfos) + infoC) - assertSameMethods(c, expectC.methodInfos.keySet) + assertSameMethods(c, expectC.methodInfos.keySet.map(x => x._1 + x._2)) } @Test @@ -189,10 +189,10 @@ class ScalaInlineInfoTest extends BytecodeTesting { val List(c, om) = compileClasses(code) val infoC = inlineInfo(c) val expected = Map( - "()V" -> MethodInlineInfo(false,false,false), - "O$lzycompute$1()V" -> MethodInlineInfo(true,false,false), - "O()LC$O$;" -> MethodInlineInfo(true,false,false)) + ("", "()V") -> MethodInlineInfo(false,false,false), + ("O$lzycompute$1", 
"()V") -> MethodInlineInfo(true,false,false), + ("O", "()LC$O$;") -> MethodInlineInfo(true,false,false)) assert(infoC.methodInfos == expected, mapDiff(infoC.methodInfos, expected)) - assertSameMethods(c, expected.keySet) + assertSameMethods(c, expected.keySet.map(x => x._1 + x._2)) } } From 1e7202719c6ee266412f92543ffbd752fbb18ab7 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 19 Jul 2019 11:12:02 +1000 Subject: [PATCH 1802/2793] Avoid use of default classpath in PipelineMainTest That slowed the test down by having the pickle extractor strip all classfiles under the root folder (!). --- test/junit/scala/tools/nsc/PipelineMainTest.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/junit/scala/tools/nsc/PipelineMainTest.scala b/test/junit/scala/tools/nsc/PipelineMainTest.scala index e779cfc774e7..36847c2f6ffa 100644 --- a/test/junit/scala/tools/nsc/PipelineMainTest.scala +++ b/test/junit/scala/tools/nsc/PipelineMainTest.scala @@ -240,7 +240,7 @@ class PipelineMainTest { this } def argsFile(extraOpts: List[String]): Path = { - val cp = if (classpath.isEmpty) Nil else List("-cp", classpath.mkString(File.pathSeparator)) + val cp = List("-cp", if (classpath.isEmpty) "__DUMMY__" else classpath.mkString(File.pathSeparator)) // Dummy to avoid default classpath of "." val printArgs = if (debug) List("-Xprint-args", "-") else Nil val entries = List( Build.this.scalacOptions.toList, From 63269b9f5b40e8a857d0aa9f2c97a9bdee0e8f15 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Fri, 19 Jul 2019 11:13:44 +1000 Subject: [PATCH 1803/2793] Better error messages when pickle extractor fails. --- .../scala/tools/nsc/PickleExtractor.scala | 19 +++++++++++++------ src/reflect/scala/reflect/io/RootPath.scala | 2 ++ 2 files changed, 15 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/tools/nsc/PickleExtractor.scala b/src/compiler/scala/tools/nsc/PickleExtractor.scala index 42c552c24334..b566b4ae98c5 100644 --- a/src/compiler/scala/tools/nsc/PickleExtractor.scala +++ b/src/compiler/scala/tools/nsc/PickleExtractor.scala @@ -46,12 +46,19 @@ object PickleExtractor { } override def visitFile(file: Path, attrs: BasicFileAttributes): FileVisitResult = { if (file.getFileName.toString.endsWith(".class")) { - stripClassFile(Files.readAllBytes(file)) match { - case Class(out) => - Files.write(outputPath.root.resolve(root.relativize(file).toString), out) - case Pickle(out) => - Files.write(outputPath.root.resolve(root.relativize(file).toString.replaceAll(".class$", ".sig")), out) - case Skip => + try { + stripClassFile(Files.readAllBytes(file)) match { + case Class(out) => + Files.write(outputPath.root.resolve(root.relativize(file).toString), out) + case Pickle(out) => + Files.write(outputPath.root.resolve(root.relativize(file).toString.replaceAll(".class$", ".sig")), out) + case Skip => + } + } catch { + case ex: RuntimeException => + throw new RuntimeException("While parsing: " + file + " in " + inputPath + , ex) + } } FileVisitResult.CONTINUE diff --git a/src/reflect/scala/reflect/io/RootPath.scala b/src/reflect/scala/reflect/io/RootPath.scala index 146b4fa32611..080bbee0eeec 100644 --- a/src/reflect/scala/reflect/io/RootPath.scala +++ b/src/reflect/scala/reflect/io/RootPath.scala @@ -43,11 +43,13 @@ object RootPath { def close(): Unit = { zipfs.close() } + override def toString: String = path.toString } } else { new RootPath { override def root: nio.file.Path = path override def close(): Unit = () + override def toString: String = path.toString } } } From 
fcd99c5228588df6f6f9e5ddf3f5cdbfb9281d10 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Mon, 29 Oct 2018 16:11:07 +1000 Subject: [PATCH 1804/2793] Generate shallower ASTs in pattern translation Given: ``` $ cat sandbox/test.scala class Test { import reflect.runtime.universe._ (null: Tree) match { case Literal(Constant(value)) => } } ``` Emit: ``` $ qscalac -Xprint:patmat sandbox/test.scala [[syntax trees at end of patmat]] // test.scala package { class Test extends scala.AnyRef { def (): Test = { Test.super.(); () }; { case val x1: reflect.runtime.universe.Tree = (null: reflect.runtime.universe.Tree); case11(){ if (x1.ne(null).unary_!) case12(); val o16: Option[reflect.runtime.universe.Literal] = scala.reflect.runtime.`package`.universe.LiteralTag.unapply(x1); if (o16.isEmpty) case12(); val p3: reflect.runtime.universe.Literal = o16.get; if (p3.ne(null).unary_!) case12(); val o15: Option[reflect.runtime.universe.Constant] = scala.reflect.runtime.`package`.universe.Literal.unapply(p3); if (o15.isEmpty) case12(); val p5: reflect.runtime.universe.Constant = o15.get; if (p5.ne(null).unary_!) case12(); val o14: Option[reflect.runtime.universe.Constant] = scala.reflect.runtime.`package`.universe.ConstantTag.unapply(p5); if (o14.isEmpty) case12(); val p7: reflect.runtime.universe.Constant = o14.get; if (p7.ne(null).unary_!) case12(); val o13: Option[Any] = scala.reflect.runtime.`package`.universe.Constant.unapply(p7); if (o13.isEmpty) case12(); matchEnd10(()) }; case12(){ matchEnd10(throw new MatchError(x1)) }; matchEnd10(x: Unit){ x } } } } ``` Rather than: ``` $ scalac-ref 2.13.x -Xprint:patmat sandbox/test.scala [[syntax trees at end of patmat]] // test.scala package { class Test extends scala.AnyRef { def (): Test = { Test.super.(); () }; { case val x1: reflect.runtime.universe.Tree = (null: reflect.runtime.universe.Tree); case11(){ if (x1.ne(null)) { val o16: Option[reflect.runtime.universe.Literal] = scala.reflect.runtime.`package`.universe.LiteralTag.unapply(x1); if (o16.isEmpty.unary_!) { val p3: reflect.runtime.universe.Literal = o16.get; if (p3.ne(null)) { val o15: Option[reflect.runtime.universe.Constant] = scala.reflect.runtime.`package`.universe.Literal.unapply(p3); if (o15.isEmpty.unary_!) { val p5: reflect.runtime.universe.Constant = o15.get; if (p5.ne(null)) { val o14: Option[reflect.runtime.universe.Constant] = scala.reflect.runtime.`package`.universe.ConstantTag.unapply(p5); if (o14.isEmpty.unary_!) { val p7: reflect.runtime.universe.Constant = o14.get; if (p7.ne(null)) { val o13: Option[Any] = scala.reflect.runtime.`package`.universe.Constant.unapply(p7); if (o13.isEmpty.unary_!) 
matchEnd10(()) else case12() } else case12() } else case12() } else case12() } else case12() } else case12() } else case12() } else case12() }; case12(){ matchEnd10(throw new MatchError(x1)) }; matchEnd10(x: Unit){ x } } } } ``` --- .../scala/tools/nsc/ast/TreeDSL.scala | 6 +- .../nsc/transform/patmat/MatchCodeGen.scala | 36 ++++-- .../transform/patmat/MatchTreeMaking.scala | 8 +- .../scala/tools/partest/ASMConverters.scala | 8 +- test/files/run/macroPlugins-namerHooks.check | 8 +- test/files/run/sd187.check | 107 ++++++++++-------- test/files/run/t6288.check | 25 ++-- .../tools/nsc/backend/jvm/BytecodeTest.scala | 19 ++-- 8 files changed, 133 insertions(+), 84 deletions(-) diff --git a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala index e539bba97e91..6f8c3f5ddce8 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeDSL.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeDSL.scala @@ -136,7 +136,11 @@ trait TreeDSL { def NEW(tpt: Tree, args: Tree*): Tree = New(tpt, List(args.toList)) - def NOT(tree: Tree) = Select(tree, Boolean_not) + def NOT(tree: Tree) = tree match { + case Select(qual, _) if tree.symbol eq Boolean_not => qual + case _ => Select(tree, Boolean_not) + } + def AND(guards: Tree*) = if (guards.isEmpty) EmptyTree else guards reduceLeft gen.mkAnd def IF(tree: Tree) = new IfStart(tree, EmptyTree) diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala index f11d07ad9851..440fdf34751b 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchCodeGen.scala @@ -204,7 +204,15 @@ trait MatchCodeGen extends Interface { // res: T // returns MatchMonad[T] def one(res: Tree): Tree = matchEnd APPLY (res) // a jump to a case label is special-cased in typedApply - protected def zero: Tree = nextCase APPLY () + protected final def zero: Tree = nextCase APPLY () + override def ifThenElseZero(c: Tree, thenp: Tree): Tree = { + thenp match { + case Block(stats, expr) => + Block(If(NOT(c), zero, EmptyTree) :: stats, expr) + case _ => + Block(If(NOT(c), zero, EmptyTree) :: Nil, thenp) + } + } // prev: MatchMonad[T] // b: T @@ -212,14 +220,21 @@ trait MatchCodeGen extends Interface { // returns MatchMonad[U] def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = { val prevSym = freshSym(prev.pos, prev.tpe, "o") - BLOCK( - ValDef(prevSym, prev), - // must be isEmpty and get as we don't control the target of the call (prev is an extractor call) + val nextTree = // must be isEmpty and get as we don't control the target of the call (prev is an extractor call) ifThenElseZero( NOT(prevSym DOT vpmName.isEmpty), Substitution(b, prevSym DOT vpmName.get)(next) ) - ) + nextTree match { + case Block(stats, expr) => + Block((ValDef(prevSym, prev) :: stats), expr) + case _ => + BLOCK( + ValDef(prevSym, prev), + // must be isEmpty and get as we don't control the target of the call (prev is an extractor call) + nextTree + ) + } } // cond: Boolean @@ -230,9 +245,14 @@ trait MatchCodeGen extends Interface { def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, next: Tree): Tree = { val rest = ( // only emit a local val for `nextBinder` if it's actually referenced in `next` - if (next.exists(_.symbol eq nextBinder)) - Block(ValDef(nextBinder, res) :: Nil, next) - else next + if (next.exists(_.symbol eq nextBinder)) { + next match { + case Block(stats, expr) => + Block(ValDef(nextBinder, res) :: stats, 
expr) + case _ => + Block(ValDef(nextBinder, res) :: Nil, next) + } + } else next ) ifThenElseZero(cond, rest) } diff --git a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala index 4a6731744dcd..1c219599142b 100644 --- a/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala +++ b/src/compiler/scala/tools/nsc/transform/patmat/MatchTreeMaking.scala @@ -194,7 +194,13 @@ trait MatchTreeMaking extends MatchCodeGen with Debugging { else { // only store binders actually used val (subPatBindersStored, subPatRefsStored) = stored.filter{case (b, _) => usedBinders(b)}.unzip - Block(map2(subPatBindersStored.toList, subPatRefsStored.toList)(ValDef(_, _)), in) + val bindings = map2(subPatBindersStored.toList, subPatRefsStored.toList)(ValDef(_, _)) + in match { + case Block(stats, expr) => + Block(bindings ::: stats, expr) + case _ => + Block(bindings, in) + } } } } diff --git a/src/partest-extras/scala/tools/partest/ASMConverters.scala b/src/partest-extras/scala/tools/partest/ASMConverters.scala index 831fe5fadc59..97f9d3e625f7 100644 --- a/src/partest-extras/scala/tools/partest/ASMConverters.scala +++ b/src/partest-extras/scala/tools/partest/ASMConverters.scala @@ -81,10 +81,14 @@ object ASMConverters { final override def toString() = { val printOpcode = opcode != -1 productPrefix + ( - if (printOpcode) Iterator(opcodeToString(opcode)) ++ productIterator.drop(1) - else productIterator + if (printOpcode) Iterator(opcodeToString(opcode)) ++ productIterator.drop(1).map(quoteString) + else productIterator.map(quoteString) ).mkString("(", ", ", ")") } + private def quoteString(a: Any): Any = a match { + case s: String => "\"" + s + "\"" + case x => x + } } case class Method(instructions: List[Instruction], handlers: List[ExceptionHandler], localVars: List[LocalVariable]) diff --git a/test/files/run/macroPlugins-namerHooks.check b/test/files/run/macroPlugins-namerHooks.check index 4409f196f0bf..d6446e72dfc2 100644 --- a/test/files/run/macroPlugins-namerHooks.check +++ b/test/files/run/macroPlugins-namerHooks.check @@ -38,8 +38,12 @@ enterStat(super.()) enterSym(case val x1: Int = x$1) enterStat(case val x1: Int = x$1) enterSym(case val x1: Any = x$1) -enterSym(case5(){ if (x1.isInstanceOf[C]) matchEnd4(true) else case6() }) +enterSym(case5(){ if (x1.isInstanceOf[C].unary_!) case6(); matchEnd4(true) }) enterSym(case6(){ matchEnd4(false) }) enterStat(case val x1: Any = x$1) -enterStat(case5(){ if (x1.isInstanceOf[C]) matchEnd4(true) else case6() }) +enterStat(case5(){ if (x1.isInstanceOf[C].unary_!) case6(); matchEnd4(true) }) enterStat(case6(){ matchEnd4(false) }) +enterSym(if (x1.isInstanceOf[C].unary_!) case6()) +enterStat(if (x1.isInstanceOf[C].unary_!) case6()) +enterSym(case6()) +enterStat(case6()) diff --git a/test/files/run/sd187.check b/test/files/run/sd187.check index f88fbc292333..24d79a7a5c58 100644 --- a/test/files/run/sd187.check +++ b/test/files/run/sd187.check @@ -10,30 +10,43 @@ [205] var x3: [205]String = [205][205][205]null.asInstanceOf[[205]String]; [205]{ [205]case val x1: [205]Any = [205]x; - [205]case8(){ - [313]if ([313][313]x1.isInstanceOf[[313]Option[_]]) - [325][325]matchEnd7([325]()) - else - [313][313]case9() + [205]case8()[313]{ + [313]if ([313][313][313]x1.isInstanceOf[[313]Option[_]].unary_!) 
+ [313]{ + [313][313]case9(); + [313]() + }; + [325][325]matchEnd7([325]()) }; - [205]case9(){ - [412]if ([412][412]x1.isInstanceOf[[412]String]) + [205]case9()[412]{ + [412]if ([412][412][412]x1.isInstanceOf[[412]String].unary_!) [412]{ - [412][412]rc6 = [412]true; - [412][412]x3 = [412]([412][412]x1.asInstanceOf[[412]String]: [412]String); - [412]if ([427][427]x3.==([430]"4")) - [512][512]matchEnd7([512][512]x3.hashCode()) - else - [412][412]case10() - } - else - [412][412]case10() + [412][412]case10(); + [412]() + }; + [412][412]rc6 = [412]true; + [412][412]x3 = [412]([412][412]x1.asInstanceOf[[412]String]: [412]String); + [412]{ + [412]if ([412][427][427]x3.==([430]"4").unary_!) + [412]{ + [412][412]case10(); + [412]() + }; + [512][512]matchEnd7([512][512]x3.hashCode()) + } }; - [205]case10(){ - [612]if ([612][612]rc6.&&([627][627]x3.==([630]"6"))) - [712][712]matchEnd7([712][712]x3.hashCode()) - else - [612][612]case11() + [205]case10()[612]{ + [612]if ([612]rc6.unary_!) + [612]{ + [612][612]case11(); + [612]() + }; + [612]if ([612][627][627]x3.==([630]"6").unary_!) + [612]{ + [612][612]case11(); + [612]() + }; + [712][712]matchEnd7([712][712]x3.hashCode()) }; [205]case11(){ [205][205]matchEnd7([205]throw [205][205][205]new [205]MatchError([205]x1)) @@ -45,23 +58,21 @@ }; [1007]def extractor([1017]x: [1020]): [1007]Any = [1027]{ [1027]case val x1: [1027]Any = [1027]x; - [1027]case6(){ - [1120]if ([1120][1120]x1.isInstanceOf[[1120]Product2[T1,T2]]) + [1027]case6()[1120]{ + [1120]if ([1120][1120][1120]x1.isInstanceOf[[1120]Product2[T1,T2]].unary_!) [1120]{ - [1120] val x2: [1120]Product2[T1,T2] = [1120]([1120][1120]x1.asInstanceOf[[1120]Product2[T1,T2]]: [1120]Product2[T1,T2]); - [1112]{ - [1112] val o8: [1112]Option[Product2[T1,T2]] = [1112][1112][1112]scala.Product2.unapply[[1112]T1, [1112]T2]([1112]x2); - [1112]if ([1112]o8.isEmpty.unary_!) - [1112]{ - [1121]val a: [1121]Any = [1121]o8.get._1; - [1210][1210]matchEnd5([1210]a) - } - else - [1112][1112]case7() - } - } - else - [1120][1120]case7() + [1120][1120]case7(); + [1120]() + }; + [1120] val x2: [1120]Product2[T1,T2] = [1120]([1120][1120]x1.asInstanceOf[[1120]Product2[T1,T2]]: [1120]Product2[T1,T2]); + [1112] val o8: [1112]Option[Product2[T1,T2]] = [1112][1112][1112]scala.Product2.unapply[[1112]T1, [1112]T2]([1112]x2); + [1112]if ([1112]o8.isEmpty) + [1112]{ + [1112][1112]case7(); + [1112]() + }; + [1121]val a: [1121]Any = [1121]o8.get._1; + [1210][1210]matchEnd5([1210]a) }; [1027]case7(){ [1027][1027]matchEnd5([1027]throw [1027][1027][1027]new [1027]MatchError([1027]x1)) @@ -75,17 +86,19 @@ } catch { [1505]case [1505](ex6 @ [1505]_) => [1505]{ [1812] val x4: [1812]Throwable = [1812]ex6; - [1505]case9(){ - [1812]if ([1812][1812]x4.ne([1812]null)) + [1505]case9()[1812]{ + [1812]if ([1812][1812][1812]x4.ne([1812]null).unary_!) + [1812]{ + [1812][1812]case10(); + [1812]() + }; + [1812] val x5: [1812]Throwable = [1812]x4; + [1812]if ([1812][1915][1915][1912]"".isEmpty().unary_!) 
[1812]{ - [1812] val x5: [1812]Throwable = [1812]x4; - [1812]if ([1915][1915][1912]"".isEmpty()) - [2014][2014]matchEnd8([2014][2014]x5.toString()) - else - [1812][1812]case10() - } - else - [1812][1812]case10() + [1812][1812]case10(); + [1812]() + }; + [2014][2014]matchEnd8([2014][2014]x5.toString()) }; [1505]case10(){ [1505][1505]matchEnd8([1505]throw [1505]ex6) diff --git a/test/files/run/t6288.check b/test/files/run/t6288.check index 86b6b95628e9..68b0d0d97f0e 100644 --- a/test/files/run/t6288.check +++ b/test/files/run/t6288.check @@ -10,10 +10,9 @@ [64]case val x1: [64]String = [64]""; [64]case5()[84]{ [84] val o7: [84]Option[Int] = [84][84]Case3.unapply([84]x1); - [84]if ([84]o7.isEmpty.unary_!) - [97][97]matchEnd4([97]()) - else - [84][84]case6() + [84]if ([84]o7.isEmpty) + [84][84]case6(); + [97][97]matchEnd4([97]()) }; [64]case6(){ [64][64]matchEnd4([64]throw [64][64][64]new [64]MatchError([64]x1)) @@ -33,10 +32,11 @@ [175]case val x1: [175]String = [175]""; [175]case5()[195]{ [195] val o7: [195]Option[List[Int]] = [195][195]Case4.unapplySeq([195]x1); - [195]if ([195][195]o7.isEmpty.unary_!.&&([195][195][195][195]o7.get.!=([195]null).&&([195][195][195][195]o7.get.lengthCompare([195]1).==([195]0)))) - [208][208]matchEnd4([208]()) - else - [195][195]case6() + [195]if ([195]o7.isEmpty) + [195][195]case6(); + [195]if ([195][195][195][195][195]o7.get.!=([195]null).&&([195][195][195][195]o7.get.lengthCompare([195]1).==([195]0)).unary_!) + [195][195]case6(); + [208][208]matchEnd4([208]()) }; [175]case6(){ [175][175]matchEnd4([175]throw [175][175][175]new [175]MatchError([175]x1)) @@ -56,10 +56,11 @@ [273]case val x1: [273]String = [273]""; [273]case5()[293]{ [293] val o7: [293]Option[List[Int]] = [293][293]Case4.unapplySeq([293]x1); - [293]if ([293][293]o7.isEmpty.unary_!.&&([293][293][293][293]o7.get.!=([293]null).&&([293][293][293][293]o7.get.lengthCompare([293]0).==([293]0)))) - [304][304]matchEnd4([304]()) - else - [293][293]case6() + [293]if ([293]o7.isEmpty) + [293][293]case6(); + [293]if ([293][293][293][293][293]o7.get.!=([293]null).&&([293][293][293][293]o7.get.lengthCompare([293]0).==([293]0)).unary_!) 
+ [293][293]case6(); + [304][304]matchEnd4([304]()) }; [273]case6(){ [273][273]matchEnd4([273]throw [273][273][273]new [273]MatchError([273]x1)) diff --git a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala index 1b1eedeceb09..22c71414bf00 100644 --- a/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala +++ b/test/junit/scala/tools/nsc/backend/jvm/BytecodeTest.scala @@ -79,24 +79,21 @@ class BytecodeTest extends BytecodeTesting { val unapplyLineNumbers = getInstructions(module, "unapply").filter(_.isInstanceOf[LineNumber]) assert(unapplyLineNumbers == List(LineNumber(2, Label(0))), unapplyLineNumbers) - val expected = List( LineNumber(4, Label(0)), - LineNumber(5, Label(5)), - Jump(IFEQ, Label(20)), + LineNumber(5, Label(4)), + Jump(IFNE, Label(10)), + Jump(GOTO, Label(19)), - LineNumber(6, Label(11)), + LineNumber(6, Label(10)), Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false), - Jump(GOTO, Label(33)), - - LineNumber(5, Label(20)), - Jump(GOTO, Label(24)), + Jump(GOTO, Label(28)), - LineNumber(8, Label(24)), + LineNumber(8, Label(19)), Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false), - Jump(GOTO, Label(33)), + Jump(GOTO, Label(28)), - LineNumber(10, Label(33)), + LineNumber(10, Label(28)), Invoke(INVOKEVIRTUAL, "scala/Predef$", "println", "(Ljava/lang/Object;)V", false) )
From 0fe8209e8d2def1fd151db1df4141378ea1a221e Mon Sep 17 00:00:00 2001
From: Eugene Yokota
Date: Fri, 19 Jul 2019 13:21:33 -0400
Subject: [PATCH 1805/2793] Update jQuery used in specs page to 3.4.1

Fixes https://github.com/scala/bug/issues/11594

I used jQuery Migrate (https://github.com/jquery/jquery-migrate/) to let it
print out deprecation warnings, and updated `.bind` to `.on`.

Here's how I locally tested the page:

```
bundle exec jekyll build -s spec/ -d build/spec
ruby -run -e httpd build/spec -p 9090
```
---
 spec/_layouts/default.yml | 6 +++---
 spec/_layouts/toc.yml | 2 +-
 spec/public/scripts/toc.js | 8 ++++----
 3 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/spec/_layouts/default.yml b/spec/_layouts/default.yml
index 20ebf22725ee..1aad1c8006bf 100644
--- a/spec/_layouts/default.yml
+++ b/spec/_layouts/default.yml
@@ -6,6 +6,7 @@ + - - - + +